Merge #161 support arbitrary build types for content generator imports

This commit is contained in:
Mike McLean 2016-09-29 18:13:44 +00:00
commit 202fce2b0d
16 changed files with 986 additions and 229 deletions

View file

@ -3299,15 +3299,18 @@ def anon_handle_buildinfo(options, session, args):
print "Tags: %s" % ' '.join(taglist) print "Tags: %s" % ' '.join(taglist)
if info.get('extra'): if info.get('extra'):
print "Extra: %(extra)r" % info print "Extra: %(extra)r" % info
archives_seen = {}
maven_archives = session.listArchives(buildID=info['id'], type='maven') maven_archives = session.listArchives(buildID=info['id'], type='maven')
if maven_archives: if maven_archives:
print "Maven archives:" print "Maven archives:"
for archive in maven_archives: for archive in maven_archives:
archives_seen.setdefault(archive['id'], 1)
print os.path.join(koji.pathinfo.mavenbuild(info), koji.pathinfo.mavenfile(archive)) print os.path.join(koji.pathinfo.mavenbuild(info), koji.pathinfo.mavenfile(archive))
win_archives = session.listArchives(buildID=info['id'], type='win') win_archives = session.listArchives(buildID=info['id'], type='win')
if win_archives: if win_archives:
print "Windows archives:" print "Windows archives:"
for archive in win_archives: for archive in win_archives:
archives_seen.setdefault(archive['id'], 1)
print os.path.join(koji.pathinfo.winbuild(info), koji.pathinfo.winfile(archive)) print os.path.join(koji.pathinfo.winbuild(info), koji.pathinfo.winfile(archive))
rpms = session.listRPMs(buildID=info['id']) rpms = session.listRPMs(buildID=info['id'])
image_info = session.getImageBuild(info['id']) image_info = session.getImageBuild(info['id'])
@ -3315,7 +3318,18 @@ def anon_handle_buildinfo(options, session, args):
if img_archives: if img_archives:
print 'Image archives:' print 'Image archives:'
for archive in img_archives: for archive in img_archives:
archives_seen.setdefault(archive['id'], 1)
print os.path.join(koji.pathinfo.imagebuild(info), archive['filename']) print os.path.join(koji.pathinfo.imagebuild(info), archive['filename'])
archive_idx = {}
for archive in session.listArchives(buildID=info['id']):
if archive['id'] in archives_seen:
continue
archive_idx.setdefault(archive['btype'], []).append(archive)
for btype in archive_idx:
archives = archive_idx[btype]
print '%s Archives:' % btype.capitalize()
for archive in archives:
print os.path.join(koji.pathinfo.typedir(info, btype), archive['filename'])
if rpms: if rpms:
print "RPMs:" print "RPMs:"
for rpm in rpms: for rpm in rpms:

View file

@ -9,7 +9,7 @@ BUILDDIR = build
# User-friendly check for sphinx-build # User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) $(warning The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
#' <- avoids bad syntax highlighting #' <- avoids bad syntax highlighting
endif endif

View file

@ -0,0 +1,85 @@
BEGIN;
-- New tables
SELECT statement_timestamp(), 'Creating new tables' as msg;
CREATE TABLE btype (
id SERIAL NOT NULL PRIMARY KEY,
name TEXT UNIQUE NOT NULL
) WITHOUT OIDS;
CREATE TABLE build_types (
build_id INTEGER NOT NULL REFERENCES build(id),
btype_id INTEGER NOT NULL REFERENCES btype(id),
PRIMARY KEY (build_id, btype_id)
) WITHOUT OIDS;
-- predefined build types
SELECT statement_timestamp(), 'Adding predefined build types' as msg;
INSERT INTO btype(name) VALUES ('rpm');
INSERT INTO btype(name) VALUES ('maven');
INSERT INTO btype(name) VALUES ('win');
INSERT INTO btype(name) VALUES ('image');
-- new column for archiveinfo
SELECT statement_timestamp(), 'Altering archiveinfo table' as msg;
ALTER TABLE archiveinfo ADD COLUMN btype_id INTEGER REFERENCES btype(id);
-- fill in legacy types
SELECT statement_timestamp(), 'Adding legacy btypes to builds' as msg;
INSERT INTO build_types(btype_id, build_id)
SELECT btype.id, maven_builds.build_id FROM btype JOIN maven_builds ON btype.name='maven';
INSERT INTO build_types(btype_id, build_id)
SELECT btype.id, win_builds.build_id FROM btype JOIN win_builds ON btype.name='win';
INSERT INTO build_types(btype_id, build_id)
SELECT btype.id, image_builds.build_id FROM btype JOIN image_builds ON btype.name='image';
-- not sure if this is the best way to select rpm builds...
INSERT INTO build_types(btype_id, build_id)
SELECT DISTINCT btype.id, build_id FROM btype JOIN rpminfo ON btype.name='rpm'
WHERE build_id IS NOT NULL;
SELECT statement_timestamp(), 'Adding legacy btypes to archiveinfo' as msg;
UPDATE archiveinfo SET btype_id=(SELECT id FROM btype WHERE name='maven' LIMIT 1)
WHERE (SELECT archive_id FROM maven_archives WHERE archive_id=archiveinfo.id) IS NOT NULL;
UPDATE archiveinfo SET btype_id=(SELECT id FROM btype WHERE name='win' LIMIT 1)
WHERE (SELECT archive_id FROM win_archives WHERE archive_id=archiveinfo.id) IS NOT NULL;
UPDATE archiveinfo SET btype_id=(SELECT id FROM btype WHERE name='image' LIMIT 1)
WHERE (SELECT archive_id FROM image_archives WHERE archive_id=archiveinfo.id) IS NOT NULL;
-- new component tables
SELECT statement_timestamp(), 'Creating new component tables' as msg;
CREATE TABLE archive_rpm_components AS SELECT image_id, rpm_id from image_listing;
CREATE TABLE archive_components AS SELECT image_id, archive_id from image_archive_listing;
-- doing it this way and fixing up after is *much* faster than creating the empty table
-- and using insert..select to populate
SELECT statement_timestamp(), 'Fixing up component tables, rename columns' as msg;
ALTER TABLE archive_rpm_components RENAME image_id TO archive_id;
ALTER TABLE archive_components RENAME archive_id TO component_id;
ALTER TABLE archive_components RENAME image_id TO archive_id;
ALTER TABLE archive_rpm_components ALTER COLUMN rpm_id SET NOT NULL;
ALTER TABLE archive_rpm_components ALTER COLUMN archive_id SET NOT NULL;
ALTER TABLE archive_components ALTER COLUMN component_id SET NOT NULL;
ALTER TABLE archive_components ALTER COLUMN archive_id SET NOT NULL;
SELECT statement_timestamp(), 'Fixing up component tables, adding constraints' as msg;
ALTER TABLE archive_rpm_components ADD CONSTRAINT archive_rpm_components_archive_id_fkey FOREIGN KEY (archive_id) REFERENCES archiveinfo(id);
ALTER TABLE archive_rpm_components ADD CONSTRAINT archive_rpm_components_rpm_id_fkey FOREIGN KEY (rpm_id) REFERENCES rpminfo(id);
ALTER TABLE archive_rpm_components ADD CONSTRAINT archive_rpm_components_archive_id_rpm_id_key UNIQUE (archive_id, rpm_id);
ALTER TABLE archive_components ADD CONSTRAINT archive_components_archive_id_fkey FOREIGN KEY (archive_id) REFERENCES archiveinfo(id);
ALTER TABLE archive_components ADD CONSTRAINT archive_components_component_id_fkey FOREIGN KEY (component_id) REFERENCES archiveinfo(id);
ALTER TABLE archive_components ADD CONSTRAINT archive_components_archive_id_component_id_key UNIQUE (archive_id, component_id);
SELECT statement_timestamp(), 'Adding component table indexes' as msg;
CREATE INDEX rpm_components_idx on archive_rpm_components(rpm_id);
CREATE INDEX archive_components_idx on archive_components(component_id);
-- image_listing and image_archive_listing are no longer used
COMMIT;

View file

@ -1,5 +1,5 @@
-- vim:noet:sw=8 -- vim:et:sw=8
-- drop statements for old data have moved to schema-clear.sql -- drop statements for old data have moved to schema-clear.sql
@ -251,6 +251,27 @@ CREATE TABLE build (
CREATE INDEX build_by_pkg_id ON build (pkg_id); CREATE INDEX build_by_pkg_id ON build (pkg_id);
CREATE INDEX build_completion ON build(completion_time); CREATE INDEX build_completion ON build(completion_time);
CREATE TABLE btype (
id SERIAL NOT NULL PRIMARY KEY,
name TEXT UNIQUE NOT NULL
) WITHOUT OIDS;
-- legacy build types
INSERT INTO btype(name) VALUES ('rpm');
INSERT INTO btype(name) VALUES ('maven');
INSERT INTO btype(name) VALUES ('win');
INSERT INTO btype(name) VALUES ('image');
CREATE TABLE build_types (
build_id INTEGER NOT NULL REFERENCES build(id),
btype_id INTEGER NOT NULL REFERENCES btype(id),
PRIMARY KEY (build_id, btype_id)
) WITHOUT OIDS;
-- Note: some of these CREATEs may seem a little out of order. This is done to keep -- Note: some of these CREATEs may seem a little out of order. This is done to keep
-- the references sane. -- the references sane.
@ -780,6 +801,8 @@ insert into archivetypes (name, description, extensions) values ('jnilib', 'Java
CREATE TABLE archiveinfo ( CREATE TABLE archiveinfo (
id SERIAL NOT NULL PRIMARY KEY, id SERIAL NOT NULL PRIMARY KEY,
type_id INTEGER NOT NULL REFERENCES archivetypes (id), type_id INTEGER NOT NULL REFERENCES archivetypes (id),
btype_id INTEGER REFERENCES btype(id),
-- ^ TODO add NOT NULL
build_id INTEGER NOT NULL REFERENCES build (id), build_id INTEGER NOT NULL REFERENCES build (id),
buildroot_id INTEGER REFERENCES buildroot (id), buildroot_id INTEGER REFERENCES buildroot (id),
filename TEXT NOT NULL, filename TEXT NOT NULL,
@ -806,21 +829,21 @@ CREATE TABLE image_archives (
arch VARCHAR(16) NOT NULL arch VARCHAR(16) NOT NULL
) WITHOUT OIDS; ) WITHOUT OIDS;
-- tracks the contents of an image -- tracks the rpm contents of an image or other archive
CREATE TABLE image_listing ( CREATE TABLE archive_rpm_components (
image_id INTEGER NOT NULL REFERENCES image_archives(archive_id),
rpm_id INTEGER NOT NULL REFERENCES rpminfo(id),
UNIQUE (image_id, rpm_id)
) WITHOUT OIDS;
CREATE INDEX image_listing_rpms on image_listing(rpm_id);
-- track the archive contents of an image
CREATE TABLE image_archive_listing (
image_id INTEGER NOT NULL REFERENCES image_archives(archive_id),
archive_id INTEGER NOT NULL REFERENCES archiveinfo(id), archive_id INTEGER NOT NULL REFERENCES archiveinfo(id),
UNIQUE (image_id, archive_id) rpm_id INTEGER NOT NULL REFERENCES rpminfo(id),
UNIQUE (archive_id, rpm_id)
) WITHOUT OIDS; ) WITHOUT OIDS;
CREATE INDEX image_listing_archives on image_archive_listing(archive_id); CREATE INDEX rpm_components_idx on archive_rpm_components(rpm_id);
-- track the archive contents of an image or other archive
CREATE TABLE archive_components (
archive_id INTEGER NOT NULL REFERENCES archiveinfo(id),
component_id INTEGER NOT NULL REFERENCES archiveinfo(id),
UNIQUE (archive_id, component_id)
) WITHOUT OIDS;
CREATE INDEX archive_components_idx on archive_components(component_id);
CREATE TABLE buildroot_archives ( CREATE TABLE buildroot_archives (

View file

@ -1173,7 +1173,12 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None,
type_join = 'JOIN image_builds ON image_builds.build_id = tag_listing.build_id' type_join = 'JOIN image_builds ON image_builds.build_id = tag_listing.build_id'
fields.append(('image_builds.build_id', 'build_id')) fields.append(('image_builds.build_id', 'build_id'))
else: else:
raise koji.GenericError, 'unsupported build type: %s' % type btype = lookup_name('btype', type, strict=False)
if not btype:
raise koji.GenericError, 'unsupported build type: %s' % type
btype_id = btype['id']
type_join = ('JOIN build_types ON build.id = build_types.build_id '
'AND btype_id = %(btype_id)s')
q = """SELECT %s q = """SELECT %s
FROM tag_listing FROM tag_listing
@ -1337,6 +1342,8 @@ def readTaggedArchives(tag, package=None, event=None, inherit=False, latest=True
#the following query is run for each tag in the inheritance #the following query is run for each tag in the inheritance
fields = [('archiveinfo.id', 'id'), fields = [('archiveinfo.id', 'id'),
('archiveinfo.type_id', 'type_id'), ('archiveinfo.type_id', 'type_id'),
('archiveinfo.btype_id', 'btype_id'),
('btype.name', 'btype'),
('archiveinfo.build_id', 'build_id'), ('archiveinfo.build_id', 'build_id'),
('archiveinfo.buildroot_id', 'buildroot_id'), ('archiveinfo.buildroot_id', 'buildroot_id'),
('archiveinfo.filename', 'filename'), ('archiveinfo.filename', 'filename'),
@ -1347,7 +1354,8 @@ def readTaggedArchives(tag, package=None, event=None, inherit=False, latest=True
('archiveinfo.extra', 'extra'), ('archiveinfo.extra', 'extra'),
] ]
tables = ['archiveinfo'] tables = ['archiveinfo']
joins = ['tag_listing ON archiveinfo.build_id = tag_listing.build_id'] joins = ['tag_listing ON archiveinfo.build_id = tag_listing.build_id',
'btype ON archiveinfo.btype_id = btype.id']
clauses = [eventCondition(event), 'tag_listing.tag_id = %(tagid)i'] clauses = [eventCondition(event), 'tag_listing.tag_id = %(tagid)i']
if package: if package:
joins.append('build ON archiveinfo.build_id = build.id') joins.append('build ON archiveinfo.build_id = build.id')
@ -3309,6 +3317,7 @@ def get_build(buildInfo, strict=False):
return None return None
fields = (('build.id', 'id'), ('build.version', 'version'), ('build.release', 'release'), fields = (('build.id', 'id'), ('build.version', 'version'), ('build.release', 'release'),
('build.id', 'build_id'),
('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'), ('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'),
('build.start_time', 'start_time'), ('build.start_time', 'start_time'),
('build.task_id', 'task_id'), ('events.id', 'creation_event_id'), ('events.time', 'creation_time'), ('build.task_id', 'task_id'), ('events.id', 'creation_event_id'), ('events.time', 'creation_time'),
@ -3537,8 +3546,8 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID
# image specific constraints # image specific constraints
if imageID != None: if imageID != None:
clauses.append('image_listing.image_id = %(imageID)i') clauses.append('archive_rpm_components.archive_id = %(imageID)i')
joins.append('image_listing ON rpminfo.id = image_listing.rpm_id') joins.append('archive_rpm_components ON rpminfo.id = archive_rpm_components.rpm_id')
if hostID != None: if hostID != None:
joins.append('standard_buildroot ON rpminfo.buildroot_id = standard_buildroot.buildroot_id') joins.append('standard_buildroot ON rpminfo.buildroot_id = standard_buildroot.buildroot_id')
@ -3625,8 +3634,89 @@ def get_image_build(buildInfo, strict=False):
raise koji.GenericError, 'no such image build: %s' % buildInfo raise koji.GenericError, 'no such image build: %s' % buildInfo
return result return result
def get_build_type(buildInfo, strict=False):
"""Return type info about the build
buildInfo should be a valid build specification
Returns a dictionary whose keys are type names and whose values are
the type info corresponding to that type
"""
binfo = get_build(buildInfo, strict=strict)
if not binfo:
return None
query = QueryProcessor(
tables=['btype'],
columns=['name'],
joins=['build_types ON btype_id=btype.id'],
clauses=['build_id = %(id)i'],
values=binfo,
opts={'asList':True},
)
ret = {}
extra = binfo['extra'] or {}
for (btype,) in query.execute():
ret[btype] = extra.get('typeinfo', {}).get(btype)
#deal with legacy types
l_funcs = [['maven', get_maven_build], ['win', get_win_build],
['image', get_image_build]]
for ltype, func in l_funcs:
# For now, we let the legacy data take precedence, but at some point
# we will want to change that
ltinfo = func(binfo['id'], strict=False)
if ltinfo:
ret[ltype] = ltinfo
return ret
def list_btypes(query=None, queryOpts=None):
"""List btypes matching query
Options:
query - dictionary specifying selection parameters
queryOpts - dictionary specifying other query options
Supported query parameters:
name - select btypes by name
id - select btypes by id
If query is None, then all btypes are returned
"""
if query is None:
query = {}
qparams = {'tables': ['btype'],
'columns': ['id', 'name'],
'opts': queryOpts}
clauses = []
values = query.copy()
if 'name' in query:
clauses.append('btype.name = %(name)s')
if 'id' in query:
clauses.append('btype.id = %(id)s')
qparams['clauses'] = clauses
qparams['values'] = values
return QueryProcessor(**qparams).execute()
def add_btype(name):
"""Add a new btype with the given name"""
context.session.assertPerm('admin')
data = {'name': name}
if list_btypes(data):
raise koji.GenericError("btype already exists")
insert = InsertProcessor('btype', data=data)
insert.execute()
def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hostID=None, type=None, def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hostID=None, type=None,
filename=None, size=None, checksum=None, typeInfo=None, queryOpts=None, imageID=None): filename=None, size=None, checksum=None, typeInfo=None, queryOpts=None, imageID=None,
archiveID=None):
""" """
Retrieve information about archives. Retrieve information about archives.
If buildID is not null it will restrict the list to archives built by the build with that ID. If buildID is not null it will restrict the list to archives built by the build with that ID.
@ -3690,9 +3780,12 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
values = {} values = {}
tables = ['archiveinfo'] tables = ['archiveinfo']
joins = ['archivetypes on archiveinfo.type_id = archivetypes.id'] joins = ['archivetypes on archiveinfo.type_id = archivetypes.id',
'btype ON archiveinfo.btype_id = btype.id']
fields = [('archiveinfo.id', 'id'), fields = [('archiveinfo.id', 'id'),
('archiveinfo.type_id', 'type_id'), ('archiveinfo.type_id', 'type_id'),
('archiveinfo.btype_id', 'btype_id'),
('btype.name', 'btype'),
('archiveinfo.build_id', 'build_id'), ('archiveinfo.build_id', 'build_id'),
('archiveinfo.buildroot_id', 'buildroot_id'), ('archiveinfo.buildroot_id', 'buildroot_id'),
('archiveinfo.filename', 'filename'), ('archiveinfo.filename', 'filename'),
@ -3720,8 +3813,10 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
fields.append(['buildroot_archives.buildroot_id', 'component_buildroot_id']) fields.append(['buildroot_archives.buildroot_id', 'component_buildroot_id'])
fields.append(['buildroot_archives.project_dep', 'project']) fields.append(['buildroot_archives.project_dep', 'project'])
if imageID != None: if imageID != None:
clauses.append('image_archive_listing.image_id = %(imageID)i') # TODO: arg name is now a misnomer, could be any archive
joins.append('image_archive_listing ON archiveinfo.id = image_archive_listing.archive_id') clauses.append('archive_components.archive_id = %(imageID)i')
values['imageID'] = imageID
joins.append('archive_components ON archiveinfo.id = archive_components.component_id')
if hostID is not None: if hostID is not None:
joins.append('standard_buildroot on archiveinfo.buildroot_id = standard_buildroot.buildroot_id') joins.append('standard_buildroot on archiveinfo.buildroot_id = standard_buildroot.buildroot_id')
clauses.append('standard_buildroot.host_id = %(host_id)i') clauses.append('standard_buildroot.host_id = %(host_id)i')
@ -3736,6 +3831,9 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
if checksum is not None: if checksum is not None:
clauses.append('checksum = %(checksum)s') clauses.append('checksum = %(checksum)s')
values['checksum'] = checksum values['checksum'] = checksum
if archiveID is not None:
clauses.append('archiveinfo.id = %(archive_id)s')
values['archive_id'] = archiveID
if type is None: if type is None:
pass pass
@ -3778,7 +3876,14 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
clauses.append('image_archives.%s = %%(%s)s' % (key, key)) clauses.append('image_archives.%s = %%(%s)s' % (key, key))
values[key] = typeInfo[key] values[key] = typeInfo[key]
else: else:
raise koji.GenericError, 'unsupported archive type: %s' % type btype = lookup_name('btype', type, strict=False)
if not btype:
raise koji.GenericError('unsupported archive type: %s' % type)
if typeInfo:
raise koji.GenericError('typeInfo queries not supported for type '
'%(name)s' % btype)
clauses.append('archiveinfo.btype_id = %(btype_id)s')
values['btype_id'] = btype['id']
columns, aliases = zip(*fields) columns, aliases = zip(*fields)
ret = QueryProcessor(tables=tables, columns=columns, aliases=aliases, joins=joins, ret = QueryProcessor(tables=tables, columns=columns, aliases=aliases, joins=joins,
@ -3815,13 +3920,14 @@ def get_archive(archive_id, strict=False):
rootid rootid
arch arch
""" """
fields = ('id', 'type_id', 'build_id', 'buildroot_id', 'filename', 'size', data = list_archives(archiveID=archive_id)
'checksum', 'checksum_type', 'metadata_only', 'extra') if not data:
archive = QueryProcessor(tables=['archiveinfo'], columns=fields, transform=_fix_archive_row, if strict:
clauses=['id=%(archive_id)s'], values=locals()).executeOne() raise koji.GenericError('No such archive: %s' % archive_id)
if not archive: else:
# strict is taken care of by _singleRow() return None
return None
archive = data[0]
maven_info = get_maven_archive(archive_id) maven_info = get_maven_archive(archive_id)
if maven_info: if maven_info:
del maven_info['archive_id'] del maven_info['archive_id']
@ -3882,9 +3988,9 @@ def get_image_archive(archive_id, strict=False):
if not results: if not results:
return None return None
results['rootid'] = False results['rootid'] = False
fields = ('image_id', 'rpm_id') fields = ['rpm_id']
select = """SELECT %s FROM image_listing select = """SELECT %s FROM archive_rpm_components
WHERE image_id = %%(archive_id)i""" % ', '.join(fields) WHERE archive_id = %%(archive_id)i""" % ', '.join(fields)
rpms = _singleRow(select, locals(), fields) rpms = _singleRow(select, locals(), fields)
if rpms: if rpms:
results['rootid'] = True results['rootid'] = True
@ -4453,39 +4559,13 @@ def new_build(data):
data.setdefault('volume_id', 0) data.setdefault('volume_id', 0)
#check for existing build #check for existing build
query = QueryProcessor( old_binfo = get_build(data)
tables=['build'], columns=['id', 'state', 'task_id'], if old_binfo:
clauses=['pkg_id=%(pkg_id)s', 'version=%(version)s', recycle_build(old_binfo, data)
'release=%(release)s'], # Raises exception if there is a problem
values=data, opts={'rowlock':True, 'asList':True}) return old_binfo['id']
row = query.executeOne() #else
if row: koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=None, new=data['state'], info=data)
build_id, state, task_id = row
data['id'] = build_id
koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=state, new=data['state'], info=data)
st_desc = koji.BUILD_STATES[state]
if st_desc == 'BUILDING':
# check to see if this is the controlling task
if data['state'] == state and data.get('task_id', '') == task_id:
#the controlling task must have restarted (and called initBuild again)
return build_id
raise koji.GenericError, "Build already in progress (task %d)" % task_id
# TODO? - reclaim 'stale' builds (state=BUILDING and task_id inactive)
if st_desc in ('FAILED', 'CANCELED'):
#should be ok to replace
update = UpdateProcessor('build', clauses=['id=%(id)s'], values=data)
update.set(**dslice(data, ['state', 'task_id', 'owner', 'start_time', 'completion_time', 'epoch']))
update.rawset(create_event='get_event()')
update.execute()
builddir = koji.pathinfo.build(data)
if os.path.exists(builddir):
shutil.rmtree(builddir)
koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=state, new=data['state'], info=data)
return build_id
raise koji.GenericError, "Build already exists (id=%d, state=%s): %r" \
% (build_id, st_desc, data)
else:
koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=None, new=data['state'], info=data)
#insert the new data #insert the new data
insert_data = dslice(data, ['pkg_id', 'version', 'release', 'epoch', 'state', 'volume_id', insert_data = dslice(data, ['pkg_id', 'version', 'release', 'epoch', 'state', 'volume_id',
@ -4498,6 +4578,69 @@ def new_build(data):
return data['id'] return data['id']
def recycle_build(old, data):
"""Check to see if a build can by recycled and if so, update it"""
st_desc = koji.BUILD_STATES[old['state']]
if st_desc == 'BUILDING':
# check to see if this is the controlling task
if data['state'] == old['state'] and data.get('task_id', '') == old['task_id']:
#the controlling task must have restarted (and called initBuild again)
return
raise koji.GenericError("Build already in progress (task %(task_id)d)"
% old)
# TODO? - reclaim 'stale' builds (state=BUILDING and task_id inactive)
if st_desc not in ('FAILED', 'CANCELED'):
raise koji.GenericError("Build already exists (id=%d, state=%s): %r"
% (old['id'], st_desc, data))
# check for evidence of tag activity
query = QueryProcessor(columns=['tag_id'], tables=['tag_listing'],
clauses = ['build_id = %(id)s'], values=old)
if query.execute():
raise koji.GenericError("Build already exists. Unable to recycle, "
"has tag history")
# check for rpms or archives
query = QueryProcessor(columns=['id'], tables=['rpminfo'],
clauses = ['build_id = %(id)s'], values=old)
if query.execute():
raise koji.GenericError("Build already exists. Unable to recycle, "
"has rpm data")
query = QueryProcessor(columns=['id'], tables=['archiveinfo'],
clauses = ['build_id = %(id)s'], values=old)
if query.execute():
raise koji.GenericError("Build already exists. Unable to recycle, "
"has archive data")
# If we reach here, should be ok to replace
koji.plugin.run_callbacks('preBuildStateChange', attribute='state',
old=old['state'], new=data['state'], info=data)
# If there is any old build type info, clear it
delete = """DELETE FROM maven_builds WHERE build_id = %(id)i"""
_dml(delete, old)
delete = """DELETE FROM win_builds WHERE build_id = %(id)i"""
_dml(delete, old)
delete = """DELETE FROM image_builds WHERE build_id = %(id)i"""
_dml(delete, old)
delete = """DELETE FROM build_types WHERE build_id = %(id)i"""
_dml(delete, old)
data['id'] = old['id']
update = UpdateProcessor('build', clauses=['id=%(id)s'], values=data)
update.set(**dslice(data, ['state', 'task_id', 'owner', 'start_time', 'completion_time', 'epoch']))
update.rawset(create_event='get_event()')
update.execute()
builddir = koji.pathinfo.build(data)
if os.path.exists(builddir):
shutil.rmtree(builddir)
koji.plugin.run_callbacks('postBuildStateChange', attribute='state',
old=old['state'], new=data['state'], info=data)
def check_noarch_rpms(basepath, rpms): def check_noarch_rpms(basepath, rpms):
""" """
If rpms contains any noarch rpms with identical names, If rpms contains any noarch rpms with identical names,
@ -4568,6 +4711,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)
if build_id is None: if build_id is None:
build_id = new_build(build) build_id = new_build(build)
binfo = get_build(build_id, strict=True) binfo = get_build(build_id, strict=True)
new_typed_build(binfo, 'rpm')
else: else:
#build_id was passed in - sanity check #build_id was passed in - sanity check
binfo = get_build(build_id, strict=True) binfo = get_build(build_id, strict=True)
@ -4630,6 +4774,7 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None):
if not buildinfo: if not buildinfo:
# create a new build # create a new build
build_id = new_build(rpminfo) build_id = new_build(rpminfo)
# we add the rpm build type below
buildinfo = get_build(build_id, strict=True) buildinfo = get_build(build_id, strict=True)
else: else:
#figure it out from sourcerpm string #figure it out from sourcerpm string
@ -4655,6 +4800,10 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None):
raise koji.GenericError, "srpm mismatch for %s: %s (expected %s)" \ raise koji.GenericError, "srpm mismatch for %s: %s (expected %s)" \
% (fn, basename, srpmname) % (fn, basename, srpmname)
# if we're adding an rpm to it, then this build is of rpm type
# harmless if build already has this type
new_typed_build(buildinfo, 'rpm')
#add rpminfo entry #add rpminfo entry
rpminfo['id'] = _singleValue("""SELECT nextval('rpminfo_id_seq')""") rpminfo['id'] = _singleValue("""SELECT nextval('rpminfo_id_seq')""")
rpminfo['build_id'] = buildinfo['id'] rpminfo['build_id'] = buildinfo['id']
@ -4812,6 +4961,25 @@ class CG_Importer(object):
buildinfo['completion_time'] = \ buildinfo['completion_time'] = \
datetime.datetime.fromtimestamp(float(metadata['build']['end_time'])).isoformat(' ') datetime.datetime.fromtimestamp(float(metadata['build']['end_time'])).isoformat(' ')
self.buildinfo = buildinfo self.buildinfo = buildinfo
# get typeinfo
b_extra = self.metadata['build'].get('extra', {})
typeinfo = b_extra.get('typeinfo', {})
# legacy types can be at top level of extra
for btype in ['maven', 'win', 'image']:
if btype not in b_extra:
continue
if btype in typeinfo:
# he says they've already got one
raise koji.GenericError('Duplicate typeinfo for %r' % btype)
typeinfo[btype] = b_extra[btype]
# sanity check
for btype in typeinfo:
lookup_name('btype', btype, strict=True)
self.typeinfo = typeinfo
return buildinfo return buildinfo
@ -4820,14 +4988,23 @@ class CG_Importer(object):
buildinfo = get_build(build_id, strict=True) buildinfo = get_build(build_id, strict=True)
# handle special build types # handle special build types
b_extra = self.metadata['build'].get('extra', {}) for btype in self.typeinfo:
if 'maven' in b_extra: tinfo = self.typeinfo[btype]
new_maven_build(buildinfo, b_extra['maven']) if btype == 'maven':
if 'win' in b_extra: new_maven_build(buildinfo, tinfo)
new_win_build(buildinfo, b_extra['win']) elif btype == 'win':
if 'image' in b_extra: new_win_build(buildinfo, tinfo)
# no extra info tracked at build level elif btype == 'image':
new_image_build(buildinfo) # no extra info tracked at build level
new_image_build(buildinfo)
else:
new_typed_build(buildinfo, btype)
# rpm builds not required to have typeinfo
if 'rpm' not in self.typeinfo:
# if the build contains rpms then it has the rpm type
if [o for o in self.prepped_outputs if o['type'] == 'rpm']:
new_typed_build(buildinfo, 'rpm')
self.buildinfo = buildinfo self.buildinfo = buildinfo
return buildinfo return buildinfo
@ -5008,23 +5185,44 @@ class CG_Importer(object):
def prep_archive(self, fileinfo): def prep_archive(self, fileinfo):
# determine archive import type (maven/win/image/other) # determine archive import type
extra = fileinfo.get('extra', {}) extra = fileinfo.get('extra', {})
legacy_types = ['maven', 'win', 'image'] legacy_types = ['maven', 'win', 'image']
l_type = None btype = None
type_info = None type_info = None
for key in legacy_types: for key in extra:
if key in extra: if key not in legacy_types:
if l_type is not None: continue
raise koji.GenericError("Output file has multiple archive" if btype is not None:
"types: %(filename)s" % fileinfo) raise koji.GenericError("Output file has multiple types: "
l_type = key "%(filename)s" % fileinfo)
type_info = extra[key] btype = key
fileinfo['hub.l_type'] = l_type type_info = extra[key]
for key in extra.get('typeinfo', {}):
if btype == key:
raise koji.GenericError("Duplicate typeinfo for: %r" % btype)
elif btype is not None:
raise koji.GenericError("Output file has multiple types: "
"%(filename)s" % fileinfo)
btype = key
type_info = extra['typeinfo'][key]
if btype is None:
raise koji.GenericError("No typeinfo for: %(filename)s" % fileinfo)
if btype not in self.typeinfo:
raise koji.GenericError('Output type %s not listed in build '
'types' % btype)
fileinfo['hub.btype'] = btype
fileinfo['hub.type_info'] = type_info fileinfo['hub.type_info'] = type_info
if l_type == 'image': if 'components' in fileinfo:
components = fileinfo.get('components', []) if btype in ('maven', 'win'):
raise koji.GenericError("Component list not allowed for "
"archives of type %s" % btype)
# for new types, we trust the metadata
components = fileinfo['components']
rpmlist, archives = self.match_components(components) rpmlist, archives = self.match_components(components)
# TODO - note presence of external components # TODO - note presence of external components
fileinfo['hub.rpmlist'] = rpmlist fileinfo['hub.rpmlist'] = rpmlist
@ -5052,29 +5250,29 @@ class CG_Importer(object):
def import_archive(self, buildinfo, brinfo, fileinfo): def import_archive(self, buildinfo, brinfo, fileinfo):
fn = fileinfo['hub.path'] fn = fileinfo['hub.path']
l_type = fileinfo['hub.l_type'] btype = fileinfo['hub.btype']
type_info = fileinfo['hub.type_info'] type_info = fileinfo['hub.type_info']
archiveinfo = import_archive_internal(fn, buildinfo, l_type, type_info, brinfo.id, fileinfo) archiveinfo = import_archive_internal(fn, buildinfo, btype, type_info, brinfo.id, fileinfo)
if l_type == 'image': if 'components' in fileinfo:
self.import_components(archiveinfo['id'], fileinfo) self.import_components(archiveinfo['id'], fileinfo)
def import_components(self, image_id, fileinfo): def import_components(self, archive_id, fileinfo):
rpmlist = fileinfo['hub.rpmlist'] rpmlist = fileinfo['hub.rpmlist']
archives = fileinfo['hub.archives'] archives = fileinfo['hub.archives']
insert = InsertProcessor('image_listing') insert = InsertProcessor('archive_rpm_components')
insert.set(image_id=image_id) insert.set(archive_id=archive_id)
for rpminfo in rpmlist: for rpminfo in rpmlist:
insert.set(rpm_id=rpminfo['id']) insert.set(rpm_id=rpminfo['id'])
insert.execute() insert.execute()
insert = InsertProcessor('image_archive_listing') insert = InsertProcessor('archive_components')
insert.set(image_id=image_id) insert.set(archive_id=archive_id)
for archiveinfo in archives: for archiveinfo in archives:
insert.set(archive_id=archiveinfo['id']) insert.set(component_id=archiveinfo['id'])
insert.execute() insert.execute()
@ -5434,9 +5632,11 @@ def new_maven_build(build, maven_info):
(field, current_maven_info[field], maven_info[field]) (field, current_maven_info[field], maven_info[field])
else: else:
maven_info['build_id'] = build['id'] maven_info['build_id'] = build['id']
insert = """INSERT INTO maven_builds (build_id, group_id, artifact_id, version) data = dslice(maven_info, ['build_id', 'group_id', 'artifact_id', 'version'])
VALUES (%(build_id)i, %(group_id)s, %(artifact_id)s, %(version)s)""" insert = InsertProcessor('maven_builds', data=data)
_dml(insert, maven_info) insert.execute()
# also add build_types entry
new_typed_build(build, 'maven')
def new_win_build(build_info, win_info): def new_win_build(build_info, win_info):
""" """
@ -5456,6 +5656,8 @@ def new_win_build(build_info, win_info):
insert.set(build_id=build_id) insert.set(build_id=build_id)
insert.set(platform=win_info['platform']) insert.set(platform=win_info['platform'])
insert.execute() insert.execute()
# also add build_types entry
new_typed_build(build_info, 'win')
def new_image_build(build_info): def new_image_build(build_info):
""" """
@ -5473,6 +5675,26 @@ def new_image_build(build_info):
insert = InsertProcessor('image_builds') insert = InsertProcessor('image_builds')
insert.set(build_id=build_info['id']) insert.set(build_id=build_info['id'])
insert.execute() insert.execute()
# also add build_types entry
new_typed_build(build_info, 'image')
def new_typed_build(build_info, btype):
    """Mark build as a given btype

    Adds a build_types row linking the build to the named btype.
    The operation is idempotent: if the row already exists, nothing
    is inserted.
    """
    # will raise if the btype name is unknown
    btype_id = lookup_name('btype', btype, strict=True)['id']
    values = {'build_id': build_info['id'], 'btype_id': btype_id}
    query = QueryProcessor(tables=('build_types',), columns=('build_id',),
                           clauses=('build_id = %(build_id)i',
                                    'btype_id = %(btype_id)i',),
                           values=values)
    if query.executeOne():
        # already marked with this btype; nothing to do
        return
    insert = InsertProcessor('build_types')
    insert.set(build_id=build_info['id'])
    insert.set(btype_id=btype_id)
    insert.execute()
def old_image_data(old_image_id): def old_image_data(old_image_id):
"""Return old image data for given id""" """Return old image data for given id"""
@ -5588,15 +5810,15 @@ def import_old_image(old, name, version):
archive_id = archives[0]['id'] archive_id = archives[0]['id']
logger.debug('root archive id is %s' % archive_id) logger.debug('root archive id is %s' % archive_id)
query = QueryProcessor(columns=['rpm_id'], tables=['imageinfo_listing'], query = QueryProcessor(columns=['rpm_id'], tables=['imageinfo_listing'],
clauses=['image_id=%(id)i'], values=old, clauses=['archive_id=%(id)i'], values=old,
opts={'asList': True}) opts={'asList': True})
rpm_ids = [r[0] for r in query.execute()] rpm_ids = [r[0] for r in query.execute()]
insert = InsertProcessor('image_listing') insert = InsertProcessor('archive_rpm_components')
insert.set(image_id=archive_id) insert.set(archive_id=archive_id)
for rpm_id in rpm_ids: for rpm_id in rpm_ids:
insert.set(rpm_id=rpm_id) insert.set(rpm_id=rpm_id)
insert.execute() insert.execute()
logger.info('updated image_listing') logger.info('updated archive_rpm_components')
# grab old logs # grab old logs
old_log_dir = os.path.join(old['dir'], 'data', 'logs', old['arch']) old_log_dir = os.path.join(old['dir'], 'data', 'logs', old['arch'])
@ -5689,6 +5911,10 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
(filename, archiveinfo['checksum'], fileinfo['checksum'])) (filename, archiveinfo['checksum'], fileinfo['checksum']))
archivetype = get_archive_type(filename, strict=True) archivetype = get_archive_type(filename, strict=True)
archiveinfo['type_id'] = archivetype['id'] archiveinfo['type_id'] = archivetype['id']
btype = lookup_name('btype', type, strict=False)
if btype is None:
raise koji.BuildError, 'unsupported archive type: %s' % type
archiveinfo['btype_id'] = btype['id']
# cg extra data # cg extra data
extra = fileinfo.get('extra', None) extra = fileinfo.get('extra', None)
@ -5758,7 +5984,10 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
_import_archive_file(filepath, imgdir) _import_archive_file(filepath, imgdir)
# import log files? # import log files?
else: else:
raise koji.BuildError, 'unsupported archive type: %s' % type # new style type, no supplementary table
if not metadata_only:
destdir = koji.pathinfo.typedir(buildinfo, btype['name'])
_import_archive_file(filepath, destdir)
archiveinfo = get_archive(archive_id, strict=True) archiveinfo = get_archive(archive_id, strict=True)
koji.plugin.run_callbacks('postImport', type='archive', archive=archiveinfo, build=buildinfo, koji.plugin.run_callbacks('postImport', type='archive', archive=archiveinfo, build=buildinfo,
@ -6353,11 +6582,7 @@ def build_references(build_id, limit=None):
The optional limit arg is used to limit the size of the buildroot The optional limit arg is used to limit the size of the buildroot
references. references.
""" """
#references (that matter):
# tag_listing
# buildroot_listing (via rpminfo)
# buildroot_archives (via archiveinfo)
# ?? rpmsigs (via rpminfo)
ret = {} ret = {}
# find tags # find tags
@ -6365,9 +6590,11 @@ def build_references(build_id, limit=None):
WHERE build_id = %(build_id)i AND active = TRUE""" WHERE build_id = %(build_id)i AND active = TRUE"""
ret['tags'] = _multiRow(q, locals(), ('id', 'name')) ret['tags'] = _multiRow(q, locals(), ('id', 'name'))
#we'll need the component rpm ids for the rest #we'll need the component rpm and archive ids for the rest
q = """SELECT id FROM rpminfo WHERE build_id=%(build_id)i""" q = """SELECT id FROM rpminfo WHERE build_id=%(build_id)i"""
rpm_ids = _fetchMulti(q, locals()) build_rpm_ids = _fetchMulti(q, locals())
q = """SELECT id FROM archiveinfo WHERE build_id=%(build_id)i"""
build_archive_ids = _fetchMulti(q, locals())
# find rpms whose buildroots we were in # find rpms whose buildroots we were in
st_complete = koji.BUILD_STATES['COMPLETE'] st_complete = koji.BUILD_STATES['COMPLETE']
@ -6381,28 +6608,26 @@ def build_references(build_id, limit=None):
AND build.state = %(st_complete)i""" AND build.state = %(st_complete)i"""
if limit is not None: if limit is not None:
q += "\nLIMIT %(limit)i" q += "\nLIMIT %(limit)i"
for (rpm_id,) in rpm_ids: for (rpm_id,) in build_rpm_ids:
for row in _multiRow(q, locals(), fields): for row in _multiRow(q, locals(), fields):
idx.setdefault(row['id'], row) idx.setdefault(row['id'], row)
if limit is not None and len(idx) > limit: if limit is not None and len(idx) > limit:
break break
ret['rpms'] = idx.values() ret['rpms'] = idx.values()
ret['images'] = [] ret['component_of'] = []
# find images that contain the build rpms # find images/archives that contain the build rpms
fields = ['image_id'] fields = ['archive_id']
clauses = ['image_listing.rpm_id = %(rpm_id)s'] clauses = ['archive_rpm_components.rpm_id = %(rpm_id)s']
# TODO: join in other tables to provide something more than image id # TODO: join in other tables to provide something more than archive id
query = QueryProcessor(columns=fields, tables=['image_listing'], clauses=clauses, query = QueryProcessor(columns=fields, tables=['archive_rpm_components'], clauses=clauses,
opts={'asList': True}) opts={'asList': True})
for (rpm_id,) in rpm_ids: for (rpm_id,) in build_rpm_ids:
query.values = {'rpm_id': rpm_id} query.values = {'rpm_id': rpm_id}
image_ids = [i[0] for i in query.execute()] archive_ids = [i[0] for i in query.execute()]
ret['images'].extend(image_ids) ret['component_of'].extend(archive_ids)
# find archives whose buildroots we were in # find archives whose buildroots we were in
q = """SELECT id FROM archiveinfo WHERE build_id = %(build_id)i"""
archive_ids = _fetchMulti(q, locals())
fields = ('id', 'type_id', 'type_name', 'build_id', 'filename') fields = ('id', 'type_id', 'type_name', 'build_id', 'filename')
idx = {} idx = {}
q = """SELECT archiveinfo.id, archiveinfo.type_id, archivetypes.name, archiveinfo.build_id, archiveinfo.filename q = """SELECT archiveinfo.id, archiveinfo.type_id, archivetypes.name, archiveinfo.build_id, archiveinfo.filename
@ -6414,23 +6639,23 @@ def build_references(build_id, limit=None):
AND build.state = %(st_complete)i""" AND build.state = %(st_complete)i"""
if limit is not None: if limit is not None:
q += "\nLIMIT %(limit)i" q += "\nLIMIT %(limit)i"
for (archive_id,) in archive_ids: for (archive_id,) in build_archive_ids:
for row in _multiRow(q, locals(), fields): for row in _multiRow(q, locals(), fields):
idx.setdefault(row['id'], row) idx.setdefault(row['id'], row)
if limit is not None and len(idx) > limit: if limit is not None and len(idx) > limit:
break break
ret['archives'] = idx.values() ret['archives'] = idx.values()
# find images that contain the build archives # find images/archives that contain the build archives
fields = ['image_id'] fields = ['archive_id']
clauses = ['image_archive_listing.archive_id = %(archive_id)s'] clauses = ['archive_components.component_id = %(archive_id)s']
# TODO: join in other tables to provide something more than image id # TODO: join in other tables to provide something more than archive id
query = QueryProcessor(columns=fields, tables=['image_archive_listing'], clauses=clauses, query = QueryProcessor(columns=fields, tables=['archive_components'], clauses=clauses,
opts={'asList': True}) opts={'asList': True})
for (archive_id,) in archive_ids: for (archive_id,) in build_archive_ids:
query.values = {'archive_id': archive_id} query.values = {'archive_id': archive_id}
image_ids = [i[0] for i in query.execute()] archive_ids = [i[0] for i in query.execute()]
ret['images'].extend(image_ids) ret['component_of'].extend(archive_ids)
# find timestamp of most recent use in a buildroot # find timestamp of most recent use in a buildroot
query = QueryProcessor( query = QueryProcessor(
@ -6440,7 +6665,7 @@ def build_references(build_id, limit=None):
clauses=['buildroot_listing.rpm_id = %(rpm_id)s'], clauses=['buildroot_listing.rpm_id = %(rpm_id)s'],
opts={'order': '-standard_buildroot.create_event', 'limit': 1}) opts={'order': '-standard_buildroot.create_event', 'limit': 1})
event_id = -1 event_id = -1
for (rpm_id,) in rpm_ids: for (rpm_id,) in build_rpm_ids:
query.values = {'rpm_id': rpm_id} query.values = {'rpm_id': rpm_id}
tmp_id = query.singleValue(strict=False) tmp_id = query.singleValue(strict=False)
if tmp_id is not None and tmp_id > event_id: if tmp_id is not None and tmp_id > event_id:
@ -6458,7 +6683,7 @@ def build_references(build_id, limit=None):
ORDER BY standard_buildroot.create_event DESC ORDER BY standard_buildroot.create_event DESC
LIMIT 1""" LIMIT 1"""
event_id = -1 event_id = -1
for (archive_id,) in archive_ids: for (archive_id,) in build_archive_ids:
tmp_id = _singleValue(q, locals(), strict=False) tmp_id = _singleValue(q, locals(), strict=False)
if tmp_id is not None and tmp_id > event_id: if tmp_id is not None and tmp_id > event_id:
event_id = tmp_id event_id = tmp_id
@ -6470,6 +6695,9 @@ def build_references(build_id, limit=None):
if ret['last_used'] is None or last_archive_use > ret['last_used']: if ret['last_used'] is None or last_archive_use > ret['last_used']:
ret['last_used'] = last_archive_use ret['last_used'] = last_archive_use
# set 'images' field for backwards compat
ret['images'] = ret['component_of']
return ret return ret
def delete_build(build, strict=True, min_ref_age=604800): def delete_build(build, strict=True, min_ref_age=604800):
@ -6526,6 +6754,9 @@ def _delete_build(binfo):
# build-related data: # build-related data:
# build KEEP (marked deleted) # build KEEP (marked deleted)
# maven_builds KEEP # maven_builds KEEP
# win_builds KEEP
# image_builds KEEP
# build_types KEEP
# task ?? # task ??
# tag_listing REVOKE (versioned) (but should ideally be empty anyway) # tag_listing REVOKE (versioned) (but should ideally be empty anyway)
# rpminfo KEEP # rpminfo KEEP
@ -6565,17 +6796,13 @@ def _delete_build(binfo):
def reset_build(build): def reset_build(build):
"""Reset a build so that it can be reimported """Reset a build so that it can be reimported
WARNING: this function is potentially destructive. use with care. WARNING: this function is highly destructive. use with care.
nulls task_id nulls task_id
sets state to CANCELED sets state to CANCELED
clears data in rpminfo clears all referenced data in other tables, including buildroot and
removes rpminfo entries from any buildroot_listings [!] archive component tables
clears data in archiveinfo, maven_info
removes archiveinfo entries from buildroot_archives
remove files related to the build
note, we don't actually delete the build data, so tags after reset, only the build table entry is left
remain intact
""" """
# Only an admin may do this # Only an admin may do this
context.session.assertPerm('admin') context.session.assertPerm('admin')
@ -6591,6 +6818,8 @@ def reset_build(build):
_dml(delete, locals()) _dml(delete, locals())
delete = """DELETE FROM buildroot_listing WHERE rpm_id=%(rpm_id)i""" delete = """DELETE FROM buildroot_listing WHERE rpm_id=%(rpm_id)i"""
_dml(delete, locals()) _dml(delete, locals())
delete = """DELETE FROM archive_rpm_components WHERE rpm_id=%(rpm_id)i"""
_dml(delete, locals())
delete = """DELETE FROM rpminfo WHERE build_id=%(id)i""" delete = """DELETE FROM rpminfo WHERE build_id=%(id)i"""
_dml(delete, binfo) _dml(delete, binfo)
q = """SELECT id FROM archiveinfo WHERE build_id=%(id)i""" q = """SELECT id FROM archiveinfo WHERE build_id=%(id)i"""
@ -6600,14 +6829,28 @@ def reset_build(build):
_dml(delete, locals()) _dml(delete, locals())
delete = """DELETE FROM win_archives WHERE archive_id=%(archive_id)i""" delete = """DELETE FROM win_archives WHERE archive_id=%(archive_id)i"""
_dml(delete, locals()) _dml(delete, locals())
delete = """DELETE FROM image_archives WHERE archive_id=%(archive_id)i"""
_dml(delete, locals())
delete = """DELETE FROM buildroot_archives WHERE archive_id=%(archive_id)i""" delete = """DELETE FROM buildroot_archives WHERE archive_id=%(archive_id)i"""
_dml(delete, locals()) _dml(delete, locals())
delete = """DELETE FROM archive_rpm_components WHERE archive_id=%(archive_id)i"""
_dml(delete, locals())
delete = """DELETE FROM archive_components WHERE archive_id=%(archive_id)i"""
_dml(delete, locals())
delete = """DELETE FROM archive_components WHERE component_id=%(archive_id)i"""
_dml(delete, locals())
delete = """DELETE FROM archiveinfo WHERE build_id=%(id)i""" delete = """DELETE FROM archiveinfo WHERE build_id=%(id)i"""
_dml(delete, binfo) _dml(delete, binfo)
delete = """DELETE FROM maven_builds WHERE build_id = %(id)i""" delete = """DELETE FROM maven_builds WHERE build_id = %(id)i"""
_dml(delete, binfo) _dml(delete, binfo)
delete = """DELETE FROM win_builds WHERE build_id = %(id)i""" delete = """DELETE FROM win_builds WHERE build_id = %(id)i"""
_dml(delete, binfo) _dml(delete, binfo)
delete = """DELETE FROM image_builds WHERE build_id = %(id)i"""
_dml(delete, binfo)
delete = """DELETE FROM build_types WHERE build_id = %(id)i"""
_dml(delete, binfo)
delete = """DELETE FROM tag_listing WHERE build_id = %(id)i"""
_dml(delete, binfo)
binfo['state'] = koji.BUILD_STATES['CANCELED'] binfo['state'] = koji.BUILD_STATES['CANCELED']
update = """UPDATE build SET state=%(state)i, task_id=NULL WHERE id=%(id)i""" update = """UPDATE build SET state=%(state)i, task_id=NULL WHERE id=%(id)i"""
_dml(update, binfo) _dml(update, binfo)
@ -7912,15 +8155,15 @@ def importImageInternal(task_id, build_id, imgdata):
rpm_ids.append(data['id']) rpm_ids.append(data['id'])
# associate those RPMs with the image # associate those RPMs with the image
q = """INSERT INTO image_listing (image_id,rpm_id) q = """INSERT INTO archive_rpm_components (archive_id,rpm_id)
VALUES (%(image_id)i,%(rpm_id)i)""" VALUES (%(archive_id)i,%(rpm_id)i)"""
for archive in archives: for archive in archives:
sys.stderr.write('working on archive %s' % archive) sys.stderr.write('working on archive %s' % archive)
if archive['filename'].endswith('xml'): if archive['filename'].endswith('xml'):
continue continue
sys.stderr.write('associating installed rpms with %s' % archive['id']) sys.stderr.write('associating installed rpms with %s' % archive['id'])
for rpm_id in rpm_ids: for rpm_id in rpm_ids:
_dml(q, {'image_id': archive['id'], 'rpm_id': rpm_id}) _dml(q, {'archive_id': archive['id'], 'rpm_id': rpm_id})
koji.plugin.run_callbacks('postImport', type='image', image=imgdata, koji.plugin.run_callbacks('postImport', type='image', image=imgdata,
fullpath=fullpath) fullpath=fullpath)
@ -8794,6 +9037,7 @@ class RootExports(object):
getMavenBuild = staticmethod(get_maven_build) getMavenBuild = staticmethod(get_maven_build)
getWinBuild = staticmethod(get_win_build) getWinBuild = staticmethod(get_win_build)
getImageBuild = staticmethod(get_image_build) getImageBuild = staticmethod(get_image_build)
getBuildType = staticmethod(get_build_type)
getArchiveTypes = staticmethod(get_archive_types) getArchiveTypes = staticmethod(get_archive_types)
getArchiveType = staticmethod(get_archive_type) getArchiveType = staticmethod(get_archive_type)
listArchives = staticmethod(list_archives) listArchives = staticmethod(list_archives)
@ -8804,6 +9048,9 @@ class RootExports(object):
listArchiveFiles = staticmethod(list_archive_files) listArchiveFiles = staticmethod(list_archive_files)
getArchiveFile = staticmethod(get_archive_file) getArchiveFile = staticmethod(get_archive_file)
listBTypes = staticmethod(list_btypes)
addBType = staticmethod(add_btype)
def getChangelogEntries(self, buildID=None, taskID=None, filepath=None, author=None, before=None, after=None, queryOpts=None): def getChangelogEntries(self, buildID=None, taskID=None, filepath=None, author=None, before=None, after=None, queryOpts=None):
"""Get changelog entries for the build with the given ID, """Get changelog entries for the build with the given ID,
or for the rpm generated by the given task at the given path or for the rpm generated by the given task at the given path
@ -9038,6 +9285,8 @@ class RootExports(object):
fields = [('build.id', 'build_id'), ('build.version', 'version'), ('build.release', 'release'), fields = [('build.id', 'build_id'), ('build.version', 'version'), ('build.release', 'release'),
('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'), ('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'),
('build.start_time', 'start_time'), ('build.start_time', 'start_time'),
('build.source', 'source'),
('build.extra', 'extra'),
('events.id', 'creation_event_id'), ('events.time', 'creation_time'), ('build.task_id', 'task_id'), ('events.id', 'creation_event_id'), ('events.time', 'creation_time'), ('build.task_id', 'task_id'),
('EXTRACT(EPOCH FROM events.time)', 'creation_ts'), ('EXTRACT(EPOCH FROM events.time)', 'creation_ts'),
('EXTRACT(EPOCH FROM build.start_time)', 'start_ts'), ('EXTRACT(EPOCH FROM build.start_time)', 'start_ts'),
@ -9111,11 +9360,17 @@ class RootExports(object):
joins.append('image_builds ON build.id = image_builds.build_id') joins.append('image_builds ON build.id = image_builds.build_id')
fields.append(('image_builds.build_id', 'build_id')) fields.append(('image_builds.build_id', 'build_id'))
else: else:
raise koji.GenericError, 'unsupported build type: %s' % type btype = lookup_name('btype', type, strict=False)
if not btype:
raise koji.GenericError, 'unsupported build type: %s' % type
btype_id = btype['id']
joins.append('build_types ON build.id = build_types.build_id '
'AND btype_id = %(btype_id)s')
query = QueryProcessor(columns=[pair[0] for pair in fields], query = QueryProcessor(columns=[pair[0] for pair in fields],
aliases=[pair[1] for pair in fields], aliases=[pair[1] for pair in fields],
tables=tables, joins=joins, clauses=clauses, tables=tables, joins=joins, clauses=clauses,
transform=_fix_extra_field,
values=locals(), opts=queryOpts) values=locals(), opts=queryOpts)
return query.iterate() return query.iterate()
@ -11088,10 +11343,13 @@ class HostExports(object):
os.symlink(dest, src) os.symlink(dest, src)
def initBuild(self, data): def initBuild(self, data):
"""Create a stub build entry. """Create a stub (rpm) build entry.
This is done at the very beginning of the build to inform the This is done at the very beginning of the build to inform the
system the build is underway. system the build is underway.
This function is only called for rpm builds, other build types
have their own init function
""" """
host = Host() host = Host()
host.verify() host.verify()
@ -11102,7 +11360,10 @@ class HostExports(object):
data['owner'] = task.getOwner() data['owner'] = task.getOwner()
data['state'] = koji.BUILD_STATES['BUILDING'] data['state'] = koji.BUILD_STATES['BUILDING']
data['completion_time'] = None data['completion_time'] = None
return new_build(data) build_id = new_build(data)
binfo = get_build(build_id, strict=True)
new_typed_build(binfo, 'rpm')
return build_id
def completeBuild(self, task_id, build_id, srpm, rpms, brmap=None, logs=None): def completeBuild(self, task_id, build_id, srpm, rpms, brmap=None, logs=None):
"""Import final build contents into the database""" """Import final build contents into the database"""

View file

@ -1674,6 +1674,17 @@ class PathInfo(object):
"""Return the directory where the image for the build are stored""" """Return the directory where the image for the build are stored"""
return self.build(build) + '/images' return self.build(build) + '/images'
def typedir(self, build, btype):
    """Return the directory where typed files for a build are stored

    The three legacy btypes (maven, win, image) keep their historical
    locations; any other btype gets a per-type subdirectory under the
    build directory.
    """
    legacy_dirs = {
        'maven': self.mavenbuild,
        'win': self.winbuild,
        'image': self.imagebuild,
    }
    handler = legacy_dirs.get(btype)
    if handler is not None:
        return handler(build)
    return "%s/files/%s" % (self.build(build), btype)
def rpm(self, rpminfo): def rpm(self, rpminfo):
"""Return the path (relative to build_dir) where an rpm belongs""" """Return the path (relative to build_dir) where an rpm belongs"""
return "%(arch)s/%(name)s-%(version)s-%(release)s.%(arch)s.rpm" % rpminfo return "%(arch)s/%(name)s-%(version)s-%(release)s.%(arch)s.rpm" % rpminfo

View file

@ -0,0 +1,45 @@
import unittest
import mock
import koji
import kojihub
IP = kojihub.InsertProcessor
class TestAddBType(unittest.TestCase):
    """Tests for kojihub.add_btype"""

    @mock.patch('kojihub.list_btypes')
    @mock.patch('kojihub.InsertProcessor')
    def test_add_btype(self, InsertProcessor, list_btypes):
        # Not sure why mock can't patch kojihub.context, so we do this
        session = kojihub.context.session = mock.MagicMock()
        mocks = [InsertProcessor, list_btypes, session]
        # It seems MagicMock will not automatically handle attributes that
        # start with "assert"
        session.assertPerm = mock.MagicMock()

        # expected case: btype does not exist yet, so it gets inserted
        list_btypes.return_value = None
        insert = InsertProcessor.return_value
        kojihub.add_btype('new_btype')
        InsertProcessor.assert_called_once()
        insert.execute.assert_called_once()

        # check the insert against a real InsertProcessor instance
        args, kwargs = InsertProcessor.call_args
        ip = IP(*args, **kwargs)
        # use assertEqual -- assertEquals is a deprecated alias
        self.assertEqual(ip.table, 'btype')
        self.assertEqual(ip.data, {'name': 'new_btype'})
        self.assertEqual(ip.rawdata, {})
        session.assertPerm.assert_called_with('admin')

        for m in mocks:
            m.reset_mock()
        session.assertPerm = mock.MagicMock()

        # already exists: should raise and not insert
        list_btypes.return_value = True
        with self.assertRaises(koji.GenericError):
            kojihub.add_btype('new_btype')
        InsertProcessor.assert_not_called()
        session.assertPerm.assert_called_with('admin')

View file

@ -0,0 +1,49 @@
import unittest
import mock
import koji
import kojihub
class TestGetBuildType(unittest.TestCase):
    """Tests for kojihub.get_build_type"""

    @mock.patch('kojihub.get_build')
    @mock.patch('kojihub.QueryProcessor')
    def test_no_build(self, QueryProcessor, get_build):
        """With no matching build, no type queries should be issued"""
        get_build.return_value = None
        # strict on
        kojihub.get_build_type('mytestbuild-1-1', strict=True)
        QueryProcessor.assert_not_called()
        get_build.assert_called_with('mytestbuild-1-1', strict=True)

    @mock.patch('kojihub.get_maven_build')
    @mock.patch('kojihub.get_win_build')
    @mock.patch('kojihub.get_image_build')
    @mock.patch('kojihub.get_build')
    @mock.patch('kojihub.QueryProcessor')
    def test_has_build(self, QueryProcessor, get_build, get_image_build,
                       get_win_build, get_maven_build):
        """Legacy typeinfo and extra['typeinfo'] should be merged"""
        typeinfo = {'maven': {'maven': 'foo'},
                    'win': {'win': 'foo'},
                    'image': {'image': 'foo'},
                    'new_type': {'bar': 42}}
        binfo = {'id': 1,
                 'extra': {'typeinfo': {'new_type': typeinfo['new_type']}}}
        get_build.return_value = binfo
        get_maven_build.return_value = typeinfo['maven']
        get_win_build.return_value = typeinfo['win']
        get_image_build.return_value = typeinfo['image']
        # the build_types query reports the new-style btype
        query = QueryProcessor.return_value
        query.execute.return_value = [['new_type']]

        ret = kojihub.get_build_type('mytestbuild-1-1', strict=True)
        self.assertEqual(ret, typeinfo)
        get_build.assert_called_with('mytestbuild-1-1', strict=True)
        get_maven_build.assert_called_with(binfo['id'], strict=False)
        get_win_build.assert_called_with(binfo['id'], strict=False)
        get_image_build.assert_called_with(binfo['id'], strict=False)

View file

@ -53,12 +53,14 @@ class TestImportRPM(unittest.TestCase):
with self.assertRaises(koji.GenericError): with self.assertRaises(koji.GenericError):
kojihub.import_rpm(self.filename) kojihub.import_rpm(self.filename)
@mock.patch('kojihub.new_typed_build')
@mock.patch('kojihub._dml') @mock.patch('kojihub._dml')
@mock.patch('kojihub._singleValue') @mock.patch('kojihub._singleValue')
@mock.patch('kojihub.get_build') @mock.patch('kojihub.get_build')
@mock.patch('koji.get_rpm_header') @mock.patch('koji.get_rpm_header')
def test_import_rpm_completed_build(self, get_rpm_header, get_build, def test_import_rpm_completed_build(self, get_rpm_header, get_build,
_singleValue, _dml): _singleValue, _dml,
new_typed_build):
get_rpm_header.return_value = self.rpm_header_retval get_rpm_header.return_value = self.rpm_header_retval
get_build.return_value = { get_build.return_value = {
'state': koji.BUILD_STATES['COMPLETE'], 'state': koji.BUILD_STATES['COMPLETE'],
@ -103,12 +105,14 @@ class TestImportRPM(unittest.TestCase):
} }
_dml.assert_called_once_with(statement, values) _dml.assert_called_once_with(statement, values)
@mock.patch('kojihub.new_typed_build')
@mock.patch('kojihub._dml') @mock.patch('kojihub._dml')
@mock.patch('kojihub._singleValue') @mock.patch('kojihub._singleValue')
@mock.patch('kojihub.get_build') @mock.patch('kojihub.get_build')
@mock.patch('koji.get_rpm_header') @mock.patch('koji.get_rpm_header')
def test_import_rpm_completed_source_build(self, get_rpm_header, get_build, def test_import_rpm_completed_source_build(self, get_rpm_header, get_build,
_singleValue, _dml): _singleValue, _dml,
new_typed_build):
retval = copy.copy(self.rpm_header_retval) retval = copy.copy(self.rpm_header_retval)
retval.update({ retval.update({
'filename': 'name-version-release.arch.rpm', 'filename': 'name-version-release.arch.rpm',
@ -189,6 +193,7 @@ class TestImportBuild(unittest.TestCase):
def tearDown(self): def tearDown(self):
shutil.rmtree(self.tempdir) shutil.rmtree(self.tempdir)
@mock.patch('kojihub.new_typed_build')
@mock.patch('kojihub._dml') @mock.patch('kojihub._dml')
@mock.patch('kojihub._singleValue') @mock.patch('kojihub._singleValue')
@mock.patch('kojihub.get_build') @mock.patch('kojihub.get_build')
@ -205,7 +210,8 @@ class TestImportBuild(unittest.TestCase):
new_package, context, query, new_package, context, query,
import_rpm, import_rpm_file, import_rpm, import_rpm_file,
rip_rpm_sighdr, add_rpm_sig, rip_rpm_sighdr, add_rpm_sig,
get_build, _singleValue, _dml): get_build, _singleValue, _dml,
new_typed_build):
rip_rpm_sighdr.return_value = (0, 0) rip_rpm_sighdr.return_value = (0, 0)
@ -225,6 +231,16 @@ class TestImportBuild(unittest.TestCase):
1106: 1, 1106: 1,
}) })
get_rpm_header.return_value = retval get_rpm_header.return_value = retval
binfo = {
'state': koji.BUILD_STATES['COMPLETE'],
'name': 'name',
'version': 'version',
'release': 'release',
'id': 12345,
}
# get_build called once to check for existing,
# then later to get the build info
get_build.side_effect = [None, binfo]
kojihub.import_build(self.src_filename, [self.filename]) kojihub.import_build(self.src_filename, [self.filename])

View file

@ -105,7 +105,7 @@ class TestImportImageInternal(unittest.TestCase):
self.assertEquals(len(cursor.execute.mock_calls), 1) self.assertEquals(len(cursor.execute.mock_calls), 1)
expression, kwargs = cursor.execute.mock_calls[0][1] expression, kwargs = cursor.execute.mock_calls[0][1]
expression = " ".join(expression.split()) expression = " ".join(expression.split())
expected = 'INSERT INTO image_listing (image_id,rpm_id) ' + \ expected = 'INSERT INTO archive_rpm_components (archive_id,rpm_id) ' + \
'VALUES (%(image_id)i,%(rpm_id)i)' 'VALUES (%(archive_id)i,%(rpm_id)i)'
self.assertEquals(expression, expected) self.assertEquals(expression, expected)
self.assertEquals(kwargs, {'image_id': 9, 'rpm_id': 6}) self.assertEquals(kwargs, {'archive_id': 9, 'rpm_id': 6})

View file

@ -0,0 +1,72 @@
import unittest
import mock
import koji
import kojihub
QP = kojihub.QueryProcessor
class TestListBTypes(unittest.TestCase):
    """Tests for kojihub.list_btypes"""

    @mock.patch('kojihub.QueryProcessor')
    def test_list_btypes(self, QueryProcessor):
        # default query: no clauses, no joins
        query = QueryProcessor.return_value
        query.execute.return_value = "return value"
        ret = kojihub.list_btypes()
        QueryProcessor.assert_called_once()
        query.execute.assert_called_once()
        # use assertEqual -- assertEquals is a deprecated alias
        self.assertEqual(ret, "return value")
        args, kwargs = QueryProcessor.call_args
        self.assertEqual(args, ())
        # replay the call on a real QueryProcessor to inspect the query
        qp = QP(**kwargs)
        self.assertEqual(qp.tables, ['btype'])
        self.assertEqual(qp.columns, ['id', 'name'])
        self.assertEqual(qp.clauses, [])
        self.assertEqual(qp.joins, None)
        QueryProcessor.reset_mock()

        # query by name
        query = QueryProcessor.return_value
        query.execute.return_value = "return value"
        ret = kojihub.list_btypes({'name': 'rpm'})
        QueryProcessor.assert_called_once()
        query.execute.assert_called_once()
        self.assertEqual(ret, "return value")
        args, kwargs = QueryProcessor.call_args
        self.assertEqual(args, ())
        qp = QP(**kwargs)
        self.assertEqual(qp.tables, ['btype'])
        self.assertEqual(qp.columns, ['id', 'name'])
        self.assertEqual(qp.clauses, ['btype.name = %(name)s'])
        self.assertEqual(qp.values, {'name': 'rpm'})
        self.assertEqual(qp.joins, None)
        QueryProcessor.reset_mock()

        # query by id, with opts
        query = QueryProcessor.return_value
        query.execute.return_value = "return value"
        ret = kojihub.list_btypes({'id': 1}, {'order': 'id'})
        QueryProcessor.assert_called_once()
        query.execute.assert_called_once()
        self.assertEqual(ret, "return value")
        args, kwargs = QueryProcessor.call_args
        self.assertEqual(args, ())
        qp = QP(**kwargs)
        self.assertEqual(qp.tables, ['btype'])
        self.assertEqual(qp.columns, ['id', 'name'])
        self.assertEqual(qp.clauses, ['btype.id = %(id)s'])
        self.assertEqual(qp.values, {'id': 1})
        self.assertEqual(qp.opts, {'order': 'id'})
        self.assertEqual(qp.joins, None)
        QueryProcessor.reset_mock()

View file

@ -0,0 +1,39 @@
import unittest
import mock
import koji
import kojihub
class TestNewTypedBuild(unittest.TestCase):
    """Tests for kojihub.new_typed_build (per-btype build entry creation)."""

    # mock.patch decorators apply bottom-up, so the first test argument is
    # the lowest decorator (InsertProcessor), then QueryProcessor, lookup_name.
    @mock.patch('kojihub.lookup_name')
    @mock.patch('kojihub.QueryProcessor')
    @mock.patch('kojihub.InsertProcessor')
    def test_new_typed_build(self, InsertProcessor, QueryProcessor, lookup_name):
        """new_typed_build should insert a row only when none exists yet."""
        binfo = {'id': 1, 'foo': '137'}  # minimal build info stand-in
        btype = 'sometype'
        btype_id = 99  # NOTE(review): unused local; the id comes from the mock below
        lookup_name.return_value = {'id':99, 'name':btype}

        # Case 1: no existing entry -- the lookup query returns None, so an
        # insert must be constructed and executed exactly once.
        query = QueryProcessor.return_value
        query.executeOne.return_value = None
        insert = InsertProcessor.return_value
        kojihub.new_typed_build(binfo, btype)
        QueryProcessor.assert_called_once()
        query.executeOne.assert_called_once()
        InsertProcessor.assert_called_once()
        insert.execute.assert_called_once()
        InsertProcessor.reset_mock()
        QueryProcessor.reset_mock()

        # Case 2: an entry already exists for this build -- no insert.
        query = QueryProcessor.return_value
        query.executeOne.return_value = {'build_id':binfo['id']}
        kojihub.new_typed_build(binfo, btype)
        QueryProcessor.assert_called_once()
        query.executeOne.assert_called_once()
        InsertProcessor.assert_not_called()

View file

@ -0,0 +1,147 @@
import unittest
import mock
import koji
import kojihub
QP = kojihub.QueryProcessor
UP = kojihub.UpdateProcessor
class TestRecycleBuild():
    # NOT a subclass of unittest.TestCase so that we can use generator
    # methods (yield-based parameterized tests, as supported by nose).

    def setUp(self):
        """Patch out the DB and filesystem layers touched by recycle_build."""
        self.QueryProcessor = mock.patch('kojihub.QueryProcessor').start()
        # Wrap UpdateProcessor so every update object constructed by the code
        # under test is recorded in self.updates (with execute mocked out).
        self.UpdateProcessor = mock.patch('kojihub.UpdateProcessor',
                side_effect=self.getUpdate).start()
        self._dml = mock.patch('kojihub._dml').start()
        self.run_callbacks = mock.patch('koji.plugin.run_callbacks').start()
        self.rmtree = mock.patch('shutil.rmtree').start()
        self.exists = mock.patch('os.path.exists').start()
        self.updates = []

    def tearDown(self):
        # undo every mock.patch(...).start() from setUp
        mock.patch.stopall()

    def getUpdate(self, *args, **kwargs):
        """Build a real UpdateProcessor with a mocked execute and record it."""
        update = UP(*args, **kwargs)
        update.execute = mock.MagicMock()
        self.updates.append(update)
        return update

    # Basic old and new build infos shared (via .copy()) by all tests below.
    old = {'id': 2,
        'state': 0,
        'task_id': None,
        'epoch': None,
        'name': 'GConf2',
        'nvr': 'GConf2-3.2.6-15.fc23',
        'package_id': 2,
        'package_name': 'GConf2',
        'release': '15.fc23',
        'version': '3.2.6',
        'volume_id': 0,
        'volume_name': 'DEFAULT'}
    new = {'state': 0,
        'name': 'GConf2',
        'version': '3.2.6',
        'release': '15.fc23',
        'epoch': None,
        'nvr': 'GConf2-3.2.6-15.fc23',
        'completion_time': '2016-09-16',
        'start_time': '2016-09-16',
        'owner': 2}

    def test_recycle_building(self):
        """Same task re-importing a BUILDING build: recycle_build is a no-op."""
        new = self.new.copy()
        old = self.old.copy()
        old['state'] = new['state'] = koji.BUILD_STATES['BUILDING']
        old['task_id'] = new['task_id'] = 137
        kojihub.recycle_build(old, new)
        # nothing should be queried, updated, or deleted
        self.UpdateProcessor.assert_not_called()
        self.QueryProcessor.assert_not_called()
        self._dml.assert_not_called()
        self.run_callbacks.assert_not_called()

    def test_recycle_building_bad(self):
        """A different task must not take over an in-progress build."""
        new = self.new.copy()
        old = self.old.copy()
        old['state'] = new['state'] = koji.BUILD_STATES['BUILDING']
        old['task_id'] = 137
        new['task_id'] = 200
        self.run_fail(old, new)
        self.QueryProcessor.assert_not_called()

    def test_recycle_states_good(self):
        # generator test: FAILED and CANCELED builds are recyclable
        for state in 'FAILED', 'CANCELED':
            yield self.check_recycle_states_good, koji.BUILD_STATES[state]

    def check_recycle_states_good(self, state):
        """A failed/canceled build with no leftover content can be recycled."""
        new = self.new.copy()
        old = self.old.copy()
        old['state'] = state
        new['state'] = koji.BUILD_STATES['BUILDING']
        old['task_id'] = 99
        new['task_id'] = 137
        query = self.QueryProcessor.return_value
        # empty result: no leftover build content blocks the recycle
        query.execute.return_value = []
        self.run_pass(old, new)

    def run_pass(self, old, new):
        """Run recycle_build and assert the expected build-row update."""
        kojihub.recycle_build(old, new)
        self.UpdateProcessor.assert_called_once()
        update = self.updates[0]
        assert update.table == 'build'
        # the new build info should overwrite these fields on the old row
        for key in ['state', 'task_id', 'owner', 'start_time',
                'completion_time', 'epoch']:
            assert update.data[key] == new[key]
        assert update.rawdata == {'create_event': 'get_event()'}
        # the update must target exactly the old build's row
        assert update.clauses == ['id=%(id)s']
        assert update.values['id'] == old['id']

    def run_fail(self, old, new):
        """Run recycle_build, expecting GenericError and no side effects."""
        try:
            kojihub.recycle_build(old, new)
        except koji.GenericError:
            pass
        else:
            raise Exception("expected koji.GenericError")
        self.UpdateProcessor.assert_not_called()
        self._dml.assert_not_called()
        self.run_callbacks.assert_not_called()

    def test_recycle_states_bad(self):
        # generator test: these states must never be recycled
        for state in 'BUILDING', 'COMPLETE', 'DELETED':
            yield self.check_recycle_states_bad, koji.BUILD_STATES[state]

    def check_recycle_states_bad(self, state):
        new = self.new.copy()
        old = self.old.copy()
        old['state'] = state
        new['state'] = koji.BUILD_STATES['BUILDING']
        old['task_id'] = 99
        new['task_id'] = 137
        self.run_fail(old, new)
        self.QueryProcessor.assert_not_called()

    def test_recycle_query_bad(self):
        # generator test: each row is one set of query side_effect results
        # with exactly one truthy entry -- presumably leftover rpms/archives/
        # files for the old build (TODO confirm against recycle_build's
        # query order); any leftover content must abort the recycle.
        vlists = [
            [[], [], True],
            [True, [], []],
            [[], True, []],
        ]
        for values in vlists:
            yield self.check_recycle_query_bad, values

    def check_recycle_query_bad(self, values):
        """Recycle must fail if any content query reports leftovers."""
        new = self.new.copy()
        old = self.old.copy()
        old['state'] = koji.BUILD_STATES['FAILED']
        new['state'] = koji.BUILD_STATES['BUILDING']
        old['task_id'] = 99
        new['task_id'] = 137
        query = self.QueryProcessor.return_value
        # successive query.execute() calls return successive entries of values
        query.execute.side_effect = values
        self.run_fail(old, new)

View file

@ -29,15 +29,15 @@
<th>Source</th><td>$build['source']</td> <th>Source</th><td>$build['source']</td>
</tr> </tr>
#end if #end if
#if $mavenbuild #if 'maven' in $typeinfo
<tr> <tr>
<th>Maven&nbsp;groupId</th><td>$mavenbuild.group_id</td> <th>Maven&nbsp;groupId</th><td>$typeinfo.maven.group_id</td>
</tr> </tr>
<tr> <tr>
<th>Maven&nbsp;artifactId</th><td>$mavenbuild.artifact_id</td> <th>Maven&nbsp;artifactId</th><td>$typeinfo.maven.artifact_id</td>
</tr> </tr>
<tr> <tr>
<th>Maven&nbsp;version</th><td>$mavenbuild.version</td> <th>Maven&nbsp;version</th><td>$typeinfo.maven.version</td>
</tr> </tr>
#end if #end if
#if $summary #if $summary
@ -159,44 +159,50 @@
#end if #end if
</td> </td>
</tr> </tr>
#if $archives #for btype in $archiveIndex
#set $archivesByExt = $archiveIndex[btype]
#if not $archivesByExt
#continue
#end if
<tr> <tr>
<th>Archives</th> <th>$btype.capitalize() Archives</th>
<td class="container"> <td class="container">
<table class="nested"> <table class="nested">
#set $exts = $archivesByExt.keys() #set $exts = $archivesByExt.keys()
#for ext in $exts #for ext in $exts
<tr> <tr>
<th>$ext</th> <th>$ext</th>
<td> <td>
#if $task and $ext == $exts[0] #if $task and $ext == $exts[0]
#if $mavenbuild #if $btype == 'maven'
(<a href="$nvrpath/data/logs/maven/">build logs</a>) (<a href="$nvrpath/data/logs/maven/">build logs</a>)
#elif $winbuild #elif $btype == 'win'
(<a href="$nvrpath/data/logs/win/">build logs</a>) (<a href="$nvrpath/data/logs/win/">build logs</a>)
#elif $imagebuild #elif $btype == 'image'
(<a href="$nvrpath/data/logs/image">build logs</a>) (<a href="$nvrpath/data/logs/image">build logs</a>)
#end if #else
(<a href="$nvrpath/data/logs">build logs</a>)
#end if
#end if #end if
</td> </td>
</tr> </tr>
#for $archive in $archivesByExt[$ext] #for $archive in $archivesByExt[$ext]
<tr> <tr>
<td/> <td/>
<td> <td>
#if $archive.metadata_only #if $archive.metadata_only
$archive.display (<a href="archiveinfo?archiveID=$archive.id">info</a>) $archive.display (<a href="archiveinfo?archiveID=$archive.id">info</a>)
#else #else
$archive.display (<a href="archiveinfo?archiveID=$archive.id">info</a>) (<a href="$archive.dl_url">download</a>) $archive.display (<a href="archiveinfo?archiveID=$archive.id">info</a>) (<a href="$archive.dl_url">download</a>)
#end if #end if
</td> </td>
</tr> </tr>
#end for #end for
#end for #end for
</table> </table>
</td> </td>
</tr> </tr>
#end if #end for
#if $changelog #if $changelog
<tr> <tr>
<th>Changelog</th> <th>Changelog</th>

View file

@ -5,7 +5,7 @@
#include "includes/header.chtml" #include "includes/header.chtml"
<h4>#if $latest then 'Latest ' else ''##if $state != None then $util.stateName($state).capitalize() + ' ' else ''##if $type == 'maven' then 'Maven ' else ''#Builds#if $package then ' of <a href="packageinfo?packageID=%i">%s</a>' % ($package.id, $package.name) else ''##if $prefix then ' starting with "%s"' % $prefix else ''##if $user then ' by <a href="userinfo?userID=%i">%s</a>' % ($user.id, $user.name) else ''##if $tag then ' in tag <a href="taginfo?tagID=%i">%s</a>' % ($tag.id, $tag.name) else ''#</h4> <h4>#if $latest then 'Latest ' else ''##if $state != None then $util.stateName($state).capitalize() + ' ' else ''##if $type then $type.capitalize() + ' ' else ''#Builds#if $package then ' of <a href="packageinfo?packageID=%i">%s</a>' % ($package.id, $package.name) else ''##if $prefix then ' starting with "%s"' % $prefix else ''##if $user then ' by <a href="userinfo?userID=%i">%s</a>' % ($user.id, $user.name) else ''##if $tag then ' in tag <a href="taginfo?tagID=%i">%s</a>' % ($tag.id, $tag.name) else ''#</h4>
<table class="data-list"> <table class="data-list">
<tr> <tr>
@ -42,25 +42,18 @@
#end for #end for
</select> </select>
</td></tr> </td></tr>
#if $tag or $mavenEnabled or $winEnabled
<tr> <tr>
#if $mavenEnabled or $winEnabled <td>
<td>
<strong>Type</strong>: <strong>Type</strong>:
</td> </td>
<td> <td>
<select name="type" class="filterlist" onchange="javascript: window.location='builds?type=' + this.value + '$util.passthrough_except($self, 'type')';"> <select name="type" class="filterlist" onchange="javascript: window.location='builds?type=' + this.value + '$util.passthrough_except($self, 'type')';">
<option value="all" #if not $type then 'selected="selected"' else ''#>all</option> <option value="all" #if not $type then 'selected="selected"' else ''#>all</option>
#if $mavenEnabled #for $btype in $btypes
<option value="maven" #if $type == 'maven' then 'selected="selected"' else ''#>Maven</option> <option value="$btype" #if $type == $btype then 'selected="selected"' else ''#>$btype</option>
#end if #end for
#if $winEnabled
<option value="win" #if $type == 'win' then 'selected="selected"' else ''#>Windows</option>
#end if
<option value="image" #if $type == 'image' then 'selected="selected"' else ''#>Image</option>
</select> </select>
</td> </td>
#end if
#if $tag #if $tag
<td> <td>
<strong>Inherited</strong>: <strong>Inherited</strong>:
@ -72,7 +65,6 @@
</td> </td>
#end if #end if
</tr> </tr>
#end if
</table> </table>
</td> </td>
</tr> </tr>

View file

@ -1088,6 +1088,8 @@ def externalrepoinfo(environ, extrepoID):
def buildinfo(environ, buildID): def buildinfo(environ, buildID):
values = _initValues(environ, 'Build Info', 'builds') values = _initValues(environ, 'Build Info', 'builds')
server = _getServer(environ) server = _getServer(environ)
topurl = environ['koji.options']['KojiFilesURL']
pathinfo = koji.PathInfo(topdir=topurl)
buildID = int(buildID) buildID = int(buildID)
@ -1099,32 +1101,26 @@ def buildinfo(environ, buildID):
tags.sort(_sortbyname) tags.sort(_sortbyname)
rpms = server.listBuildRPMs(build['id']) rpms = server.listBuildRPMs(build['id'])
rpms.sort(_sortbyname) rpms.sort(_sortbyname)
mavenbuild = server.getMavenBuild(buildID) typeinfo = server.getBuildType(buildID)
winbuild = server.getWinBuild(buildID) archiveIndex = {}
imagebuild = server.getImageBuild(buildID) for btype in typeinfo:
if mavenbuild: archives = server.listArchives(build['id'], type=btype, queryOpts={'order': 'filename'})
archivetype = 'maven' idx = archiveIndex.setdefault(btype, {})
elif winbuild: for archive in archives:
archivetype = 'win' if btype == 'maven':
elif imagebuild: archive['display'] = archive['filename']
archivetype = 'image' archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build), pathinfo.mavenfile(archive)])
else: elif btype == 'win':
archivetype = None archive['display'] = pathinfo.winfile(archive)
archives = server.listArchives(build['id'], type=archivetype, queryOpts={'order': 'filename'}) archive['dl_url'] = '/'.join([pathinfo.winbuild(build), pathinfo.winfile(archive)])
archivesByExt = {} elif btype == 'image':
topurl = environ['koji.options']['KojiFilesURL'] archive['display'] = archive['filename']
pathinfo = koji.PathInfo(topdir=topurl) archive['dl_url'] = '/'.join([pathinfo.imagebuild(build), archive['filename']])
for archive in archives: else:
if mavenbuild: archive['display'] = archive['filename']
archive['display'] = archive['filename'] archive['dl_url'] = '/'.join([pathinfo.typedir(build, btype), archive['filename']])
archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build), pathinfo.mavenfile(archive)]) ext = os.path.splitext(archive['filename'])[1][1:]
elif winbuild: idx.setdefault(ext, []).append(archive)
archive['display'] = pathinfo.winfile(archive)
archive['dl_url'] = '/'.join([pathinfo.winbuild(build), pathinfo.winfile(archive)])
elif imagebuild:
archive['display'] = archive['filename']
archive['dl_url'] = '/'.join([pathinfo.imagebuild(build), archive['filename']])
archivesByExt.setdefault(os.path.splitext(archive['filename'])[1][1:], []).append(archive)
rpmsByArch = {} rpmsByArch = {}
debuginfos = [] debuginfos = []
@ -1192,11 +1188,8 @@ def buildinfo(environ, buildID):
values['tags'] = tags values['tags'] = tags
values['rpmsByArch'] = rpmsByArch values['rpmsByArch'] = rpmsByArch
values['task'] = task values['task'] = task
values['mavenbuild'] = mavenbuild values['typeinfo'] = typeinfo
values['winbuild'] = winbuild values['archiveIndex'] = archiveIndex
values['imagebuild'] = imagebuild
values['archives'] = archives
values['archivesByExt'] = archivesByExt
values['noarch_log_dest'] = noarch_log_dest values['noarch_log_dest'] = noarch_log_dest
if environ['koji.currentUser']: if environ['koji.currentUser']:
@ -1210,7 +1203,7 @@ def buildinfo(environ, buildID):
values['start_time'] = build.get('start_time') or build['creation_time'] values['start_time'] = build.get('start_time') or build['creation_time']
# the build start time is not accurate for maven and win builds, get it from the # the build start time is not accurate for maven and win builds, get it from the
# task start time instead # task start time instead
if mavenbuild or winbuild: if 'maven' in typeinfo or 'win' in typeinfo:
if task: if task:
values['start_time'] = task['start_time'] values['start_time'] = task['start_time']
if build['state'] == koji.BUILD_STATES['BUILDING']: if build['state'] == koji.BUILD_STATES['BUILDING']:
@ -1271,13 +1264,17 @@ def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='
values['prefix'] = prefix values['prefix'] = prefix
values['order'] = order values['order'] = order
if type in ('maven', 'win', 'image'):
btypes = [b['name'] for b in server.listBTypes()]
btypes.sort()
if type in btypes:
pass pass
elif type == 'all': elif type == 'all':
type = None type = None
else: else:
type = None type = None
values['type'] = type values['type'] = type
values['btypes'] = btypes
if tag: if tag:
inherited = int(inherited) inherited = int(inherited)