diff --git a/cli/koji b/cli/koji
index 0e9659d9..6386b101 100755
--- a/cli/koji
+++ b/cli/koji
@@ -3299,15 +3299,18 @@ def anon_handle_buildinfo(options, session, args):
             print "Tags: %s" % ' '.join(taglist)
         if info.get('extra'):
             print "Extra: %(extra)r" % info
+        archives_seen = {}
         maven_archives = session.listArchives(buildID=info['id'], type='maven')
         if maven_archives:
             print "Maven archives:"
             for archive in maven_archives:
+                archives_seen.setdefault(archive['id'], 1)
                 print os.path.join(koji.pathinfo.mavenbuild(info), koji.pathinfo.mavenfile(archive))
         win_archives = session.listArchives(buildID=info['id'], type='win')
         if win_archives:
             print "Windows archives:"
             for archive in win_archives:
+                archives_seen.setdefault(archive['id'], 1)
                 print os.path.join(koji.pathinfo.winbuild(info), koji.pathinfo.winfile(archive))
         rpms = session.listRPMs(buildID=info['id'])
         image_info = session.getImageBuild(info['id'])
@@ -3315,7 +3318,18 @@ def anon_handle_buildinfo(options, session, args):
         if img_archives:
             print 'Image archives:'
             for archive in img_archives:
+                archives_seen.setdefault(archive['id'], 1)
                 print os.path.join(koji.pathinfo.imagebuild(info), archive['filename'])
+        archive_idx = {}
+        for archive in session.listArchives(buildID=info['id']):
+            if archive['id'] in archives_seen:
+                continue
+            archive_idx.setdefault(archive['btype'], []).append(archive)
+        for btype in archive_idx:
+            archives = archive_idx[btype]
+            print '%s Archives:' % btype.capitalize()
+            for archive in archives:
+                print os.path.join(koji.pathinfo.typedir(info, btype), archive['filename'])
         if rpms:
             print "RPMs:"
             for rpm in rpms:
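Note on the CLI hunks above: the archives_seen dict keeps the legacy maven/win/image
listings from being repeated in the new generic per-btype listing. A rough interactive
equivalent of that logic — a sketch only, assuming a connected ClientSession named
session and a build info dict info; this is not part of the patch:

    # group non-legacy archives by btype, mirroring the CLI logic above
    seen = {}
    for legacy in ('maven', 'win', 'image'):
        for archive in session.listArchives(buildID=info['id'], type=legacy) or []:
            seen[archive['id']] = 1
    by_btype = {}
    for archive in session.listArchives(buildID=info['id']):
        if archive['id'] not in seen:
            by_btype.setdefault(archive['btype'], []).append(archive)
    for btype, archives in by_btype.items():
        print '%s Archives: %d' % (btype.capitalize(), len(archives))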
diff --git a/docs/Makefile b/docs/Makefile
index 37fc4137..b8bba227 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -9,7 +9,7 @@ BUILDDIR      = build
 
 # User-friendly check for sphinx-build
 ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+$(warning The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH.
+If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) #' <- avoids bad syntax highlighting
 endif
diff --git a/docs/schema-update-cgen2.sql b/docs/schema-update-cgen2.sql
new file mode 100644
index 00000000..bcb58858
--- /dev/null
+++ b/docs/schema-update-cgen2.sql
@@ -0,0 +1,85 @@
+BEGIN;
+
+-- New tables
+
+SELECT statement_timestamp(), 'Creating new tables' as msg;
+
+CREATE TABLE btype (
+        id SERIAL NOT NULL PRIMARY KEY,
+        name TEXT UNIQUE NOT NULL
+) WITHOUT OIDS;
+
+CREATE TABLE build_types (
+        build_id INTEGER NOT NULL REFERENCES build(id),
+        btype_id INTEGER NOT NULL REFERENCES btype(id),
+        PRIMARY KEY (build_id, btype_id)
+) WITHOUT OIDS;
+
+-- predefined build types
+
+SELECT statement_timestamp(), 'Adding predefined build types' as msg;
+INSERT INTO btype(name) VALUES ('rpm');
+INSERT INTO btype(name) VALUES ('maven');
+INSERT INTO btype(name) VALUES ('win');
+INSERT INTO btype(name) VALUES ('image');
+
+-- new column for archiveinfo
+
+SELECT statement_timestamp(), 'Altering archiveinfo table' as msg;
+ALTER TABLE archiveinfo ADD COLUMN btype_id INTEGER REFERENCES btype(id);
+
+-- fill in legacy types
+SELECT statement_timestamp(), 'Adding legacy btypes to builds' as msg;
+INSERT INTO build_types(btype_id, build_id)
+    SELECT btype.id, maven_builds.build_id FROM btype JOIN maven_builds ON btype.name='maven';
+INSERT INTO build_types(btype_id, build_id)
+    SELECT btype.id, win_builds.build_id FROM btype JOIN win_builds ON btype.name='win';
+INSERT INTO build_types(btype_id, build_id)
+    SELECT btype.id, image_builds.build_id FROM btype JOIN image_builds ON btype.name='image';
+-- not sure if this is the best way to select rpm builds...
+INSERT INTO build_types(btype_id, build_id)
+    SELECT DISTINCT btype.id, build_id FROM btype JOIN rpminfo ON btype.name='rpm'
+        WHERE build_id IS NOT NULL;
+
+SELECT statement_timestamp(), 'Adding legacy btypes to archiveinfo' as msg;
+UPDATE archiveinfo SET btype_id=(SELECT id FROM btype WHERE name='maven' LIMIT 1)
+    WHERE (SELECT archive_id FROM maven_archives WHERE archive_id=archiveinfo.id) IS NOT NULL;
+UPDATE archiveinfo SET btype_id=(SELECT id FROM btype WHERE name='win' LIMIT 1)
+    WHERE (SELECT archive_id FROM win_archives WHERE archive_id=archiveinfo.id) IS NOT NULL;
+UPDATE archiveinfo SET btype_id=(SELECT id FROM btype WHERE name='image' LIMIT 1)
+    WHERE (SELECT archive_id FROM image_archives WHERE archive_id=archiveinfo.id) IS NOT NULL;
+
+-- new component tables
+SELECT statement_timestamp(), 'Creating new component tables' as msg;
+CREATE TABLE archive_rpm_components AS SELECT image_id, rpm_id from image_listing;
+CREATE TABLE archive_components AS SELECT image_id, archive_id from image_archive_listing;
+-- doing it this way and fixing up after is *much* faster than creating the empty table
+-- and using insert..select to populate
+
+SELECT statement_timestamp(), 'Fixing up component tables, rename columns' as msg;
+ALTER TABLE archive_rpm_components RENAME image_id TO archive_id;
+ALTER TABLE archive_components RENAME archive_id TO component_id;
+ALTER TABLE archive_components RENAME image_id TO archive_id;
+ALTER TABLE archive_rpm_components ALTER COLUMN rpm_id SET NOT NULL;
+ALTER TABLE archive_rpm_components ALTER COLUMN archive_id SET NOT NULL;
+ALTER TABLE archive_components ALTER COLUMN component_id SET NOT NULL;
+ALTER TABLE archive_components ALTER COLUMN archive_id SET NOT NULL;
+
+SELECT statement_timestamp(), 'Fixing up component tables, adding constraints' as msg;
+ALTER TABLE archive_rpm_components ADD CONSTRAINT archive_rpm_components_archive_id_fkey FOREIGN KEY (archive_id) REFERENCES archiveinfo(id);
+ALTER TABLE archive_rpm_components ADD CONSTRAINT archive_rpm_components_rpm_id_fkey FOREIGN KEY (rpm_id) REFERENCES rpminfo(id);
+ALTER TABLE archive_rpm_components ADD CONSTRAINT archive_rpm_components_archive_id_rpm_id_key UNIQUE (archive_id, rpm_id);
+ALTER TABLE archive_components ADD CONSTRAINT archive_components_archive_id_fkey FOREIGN KEY (archive_id) REFERENCES archiveinfo(id);
+ALTER TABLE archive_components ADD CONSTRAINT archive_components_component_id_fkey FOREIGN KEY (component_id) REFERENCES archiveinfo(id);
+ALTER TABLE archive_components ADD CONSTRAINT archive_components_archive_id_component_id_key UNIQUE (archive_id, component_id);
+
+SELECT statement_timestamp(), 'Adding component table indexes' as msg;
+CREATE INDEX rpm_components_idx on archive_rpm_components(rpm_id);
+CREATE INDEX archive_components_idx on archive_components(component_id);
+
+
+-- image_listing and image_archive_listing are no longer used
+
+
+COMMIT;
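A post-migration sanity check can catch a bad backfill before the hub goes back
online. This is a hypothetical check, not part of the update script; connection
parameters are site-specific:

    # compare build_types counts against the legacy per-type tables
    import psycopg2

    conn = psycopg2.connect(dbname='koji')
    cur = conn.cursor()
    cur.execute("""SELECT btype.name, count(*) FROM build_types
                   JOIN btype ON build_types.btype_id = btype.id
                   GROUP BY btype.name""")
    counts = dict(cur.fetchall())
    for table, name in [('maven_builds', 'maven'), ('win_builds', 'win'),
                        ('image_builds', 'image')]:
        cur.execute("SELECT count(*) FROM %s" % table)
        # the two numbers on each line should match
        print name, counts.get(name, 0), cur.fetchone()[0]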
diff --git a/docs/schema.sql b/docs/schema.sql
index 8a5d6170..021cc4ec 100644
--- a/docs/schema.sql
+++ b/docs/schema.sql
@@ -1,5 +1,5 @@
 
--- vim:noet:sw=8
+-- vim:et:sw=8
 
 -- drop statements for old data have moved to schema-clear.sql
 
@@ -251,6 +251,27 @@ CREATE TABLE build (
 CREATE INDEX build_by_pkg_id ON build (pkg_id);
 CREATE INDEX build_completion ON build(completion_time);
 
+
+CREATE TABLE btype (
+        id SERIAL NOT NULL PRIMARY KEY,
+        name TEXT UNIQUE NOT NULL
+) WITHOUT OIDS;
+
+
+-- legacy build types
+INSERT INTO btype(name) VALUES ('rpm');
+INSERT INTO btype(name) VALUES ('maven');
+INSERT INTO btype(name) VALUES ('win');
+INSERT INTO btype(name) VALUES ('image');
+
+
+CREATE TABLE build_types (
+        build_id INTEGER NOT NULL REFERENCES build(id),
+        btype_id INTEGER NOT NULL REFERENCES btype(id),
+        PRIMARY KEY (build_id, btype_id)
+) WITHOUT OIDS;
+
+
 -- Note: some of these CREATEs may seem a little out of order. This is done to keep
 -- the references sane.
@@ -780,6 +801,8 @@ insert into archivetypes (name, description, extensions) values ('jnilib', 'Java
 CREATE TABLE archiveinfo (
         id SERIAL NOT NULL PRIMARY KEY,
         type_id INTEGER NOT NULL REFERENCES archivetypes (id),
+        btype_id INTEGER REFERENCES btype(id),
+        -- ^ TODO add NOT NULL
         build_id INTEGER NOT NULL REFERENCES build (id),
         buildroot_id INTEGER REFERENCES buildroot (id),
         filename TEXT NOT NULL,
@@ -806,21 +829,21 @@ CREATE TABLE image_archives (
         arch VARCHAR(16) NOT NULL
 ) WITHOUT OIDS;
 
--- tracks the contents of an image
-CREATE TABLE image_listing (
-        image_id INTEGER NOT NULL REFERENCES image_archives(archive_id),
-        rpm_id INTEGER NOT NULL REFERENCES rpminfo(id),
-        UNIQUE (image_id, rpm_id)
-) WITHOUT OIDS;
-CREATE INDEX image_listing_rpms on image_listing(rpm_id);
-
--- track the archive contents of an image
-CREATE TABLE image_archive_listing (
-        image_id INTEGER NOT NULL REFERENCES image_archives(archive_id),
+-- tracks the rpm contents of an image or other archive
+CREATE TABLE archive_rpm_components (
         archive_id INTEGER NOT NULL REFERENCES archiveinfo(id),
-        UNIQUE (image_id, archive_id)
+        rpm_id INTEGER NOT NULL REFERENCES rpminfo(id),
+        UNIQUE (archive_id, rpm_id)
 ) WITHOUT OIDS;
-CREATE INDEX image_listing_archives on image_archive_listing(archive_id);
+CREATE INDEX rpm_components_idx on archive_rpm_components(rpm_id);
+
+-- track the archive contents of an image or other archive
+CREATE TABLE archive_components (
+        archive_id INTEGER NOT NULL REFERENCES archiveinfo(id),
+        component_id INTEGER NOT NULL REFERENCES archiveinfo(id),
+        UNIQUE (archive_id, component_id)
+) WITHOUT OIDS;
+CREATE INDEX archive_components_idx on archive_components(component_id);
 
 
 CREATE TABLE buildroot_archives (
diff --git a/hub/kojihub.py b/hub/kojihub.py
index 23070cba..be21e1cf 100644
--- a/hub/kojihub.py
+++ b/hub/kojihub.py
@@ -1173,7 +1173,12 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None,
         type_join = 'JOIN image_builds ON image_builds.build_id = tag_listing.build_id'
         fields.append(('image_builds.build_id', 'build_id'))
     else:
-        raise koji.GenericError, 'unsupported build type: %s' % type
+        btype = lookup_name('btype', type, strict=False)
+        if not btype:
+            raise koji.GenericError, 'unsupported build type: %s' % type
+        btype_id = btype['id']
+        type_join = ('JOIN build_types ON build.id = build_types.build_id '
+                     'AND btype_id = %(btype_id)s')
 
     q = """SELECT %s
     FROM tag_listing
@@ -1337,6 +1342,8 @@ def readTaggedArchives(tag, package=None, event=None, inherit=False, latest=True
     #the following query is run for each tag in the inheritance
     fields = [('archiveinfo.id', 'id'),
               ('archiveinfo.type_id', 'type_id'),
+              ('archiveinfo.btype_id', 'btype_id'),
+              ('btype.name', 'btype'),
               ('archiveinfo.build_id', 'build_id'),
               ('archiveinfo.buildroot_id', 'buildroot_id'),
               ('archiveinfo.filename', 'filename'),
@@ -1347,7 +1354,8 @@ def readTaggedArchives(tag, package=None, event=None, inherit=False, latest=True
               ('archiveinfo.extra', 'extra'),
              ]
     tables = ['archiveinfo']
-    joins = ['tag_listing ON archiveinfo.build_id = tag_listing.build_id']
+    joins = ['tag_listing ON archiveinfo.build_id = tag_listing.build_id',
+             'btype ON archiveinfo.btype_id = btype.id']
     clauses = [eventCondition(event), 'tag_listing.tag_id = %(tagid)i']
     if package:
         joins.append('build ON archiveinfo.build_id = build.id')
@@ -3309,6 +3317,7 @@ def get_build(buildInfo, strict=False):
             return None
     fields = (('build.id', 'id'), ('build.version', 'version'), ('build.release', 'release'),
+              ('build.id', 'build_id'),
              ('build.epoch', 'epoch'), ('build.state', 'state'),
              ('build.completion_time', 'completion_time'), ('build.start_time', 'start_time'),
              ('build.task_id', 'task_id'),
              ('events.id', 'creation_event_id'), ('events.time', 'creation_time'),
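With the fallback above, readTaggedBuilds (and the matching listBuilds change later
in this diff) no longer hard-codes the list of types — any name in the btype table
produces a build_types join. Client-side, that looks like this sketch; 'container'
is a hypothetical btype, not one the patch registers:

    import koji

    session = koji.ClientSession('https://koji.example.com/kojihub')  # assumed hub URL
    session.listTagged('f25-build', type='maven')      # legacy path, dedicated table join
    session.listTagged('f25-build', type='container')  # generic build_types join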
@@ -3537,8 +3546,8 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID
 
     # image specific constraints
     if imageID != None:
-        clauses.append('image_listing.image_id = %(imageID)i')
-        joins.append('image_listing ON rpminfo.id = image_listing.rpm_id')
+        clauses.append('archive_rpm_components.archive_id = %(imageID)i')
+        joins.append('archive_rpm_components ON rpminfo.id = archive_rpm_components.rpm_id')
 
     if hostID != None:
         joins.append('standard_buildroot ON rpminfo.buildroot_id = standard_buildroot.buildroot_id')
@@ -3625,8 +3634,89 @@ def get_image_build(buildInfo, strict=False):
             raise koji.GenericError, 'no such image build: %s' % buildInfo
     return result
 
+
+def get_build_type(buildInfo, strict=False):
+    """Return type info about the build
+
+    buildInfo should be a valid build specification
+
+    Returns a dictionary whose keys are type names and whose values are
+    the type info corresponding to that type
+    """
+
+    binfo = get_build(buildInfo, strict=strict)
+    if not binfo:
+        return None
+
+    query = QueryProcessor(
+        tables=['btype'],
+        columns=['name'],
+        joins=['build_types ON btype_id=btype.id'],
+        clauses=['build_id = %(id)i'],
+        values=binfo,
+        opts={'asList':True},
+    )
+
+    ret = {}
+    extra = binfo['extra'] or {}
+    for (btype,) in query.execute():
+        ret[btype] = extra.get('typeinfo', {}).get(btype)
+
+    #deal with legacy types
+    l_funcs = [['maven', get_maven_build], ['win', get_win_build],
+               ['image', get_image_build]]
+    for ltype, func in l_funcs:
+        # For now, we let the legacy data take precedence, but at some point
+        # we will want to change that
+        ltinfo = func(binfo['id'], strict=False)
+        if ltinfo:
+            ret[ltype] = ltinfo
+
+    return ret
+
+
+def list_btypes(query=None, queryOpts=None):
+    """List btypes matching query
+
+    Options:
+        query - dictionary specifying selection parameters
+        queryOpts - dictionary specifying other query options
+
+    Supported query parameters:
+        name - select btypes by name
+        id - select btypes by id
+
+    If query is None, then all btypes are returned
+    """
+    if query is None:
+        query = {}
+    qparams = {'tables': ['btype'],
+               'columns': ['id', 'name'],
+               'opts': queryOpts}
+    clauses = []
+    values = query.copy()
+    if 'name' in query:
+        clauses.append('btype.name = %(name)s')
+    if 'id' in query:
+        clauses.append('btype.id = %(id)s')
+    qparams['clauses'] = clauses
+    qparams['values'] = values
+    return QueryProcessor(**qparams).execute()
+
+
+def add_btype(name):
+    """Add a new btype with the given name"""
+    context.session.assertPerm('admin')
+    data = {'name': name}
+    if list_btypes(data):
+        raise koji.GenericError("btype already exists")
+    insert = InsertProcessor('btype', data=data)
+    insert.execute()
+
+
 def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hostID=None, type=None,
-                  filename=None, size=None, checksum=None, typeInfo=None, queryOpts=None, imageID=None):
+                  filename=None, size=None, checksum=None, typeInfo=None, queryOpts=None, imageID=None,
+                  archiveID=None):
     """
     Retrieve information about archives.
     If buildID is not null it will restrict the list to archives built by the build with that ID.
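The three helpers above are exported near the end of this diff as listBTypes,
addBType and getBuildType. A usage sketch from a client session, with hypothetical
data:

    import koji

    session = koji.ClientSession('https://koji.example.com/kojihub')  # assumed hub URL
    session.listBTypes()                    # [{'id': 1, 'name': 'rpm'}, ...]
    session.listBTypes({'name': 'maven'})   # filter by name
    session.getBuildType('foo-1.0-1')       # e.g. {'rpm': None} for a plain rpm build;
                                            # legacy types return their maven/win/image
                                            # dicts, new types return extra['typeinfo'][btype]
    # addBType requires admin permission:
    session.addBType('container')           # 'container' is a hypothetical new type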
@@ -3690,9 +3780,12 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
 
     values = {}
     tables = ['archiveinfo']
-    joins = ['archivetypes on archiveinfo.type_id = archivetypes.id']
+    joins = ['archivetypes on archiveinfo.type_id = archivetypes.id',
+             'btype ON archiveinfo.btype_id = btype.id']
     fields = [('archiveinfo.id', 'id'),
               ('archiveinfo.type_id', 'type_id'),
+              ('archiveinfo.btype_id', 'btype_id'),
+              ('btype.name', 'btype'),
               ('archiveinfo.build_id', 'build_id'),
               ('archiveinfo.buildroot_id', 'buildroot_id'),
               ('archiveinfo.filename', 'filename'),
@@ -3720,8 +3813,10 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
         fields.append(['buildroot_archives.buildroot_id', 'component_buildroot_id'])
         fields.append(['buildroot_archives.project_dep', 'project'])
     if imageID != None:
-        clauses.append('image_archive_listing.image_id = %(imageID)i')
-        joins.append('image_archive_listing ON archiveinfo.id = image_archive_listing.archive_id')
+        # TODO: arg name is now a misnomer, could be any archive
+        clauses.append('archive_components.archive_id = %(imageID)i')
+        values['imageID'] = imageID
+        joins.append('archive_components ON archiveinfo.id = archive_components.component_id')
     if hostID is not None:
         joins.append('standard_buildroot on archiveinfo.buildroot_id = standard_buildroot.buildroot_id')
         clauses.append('standard_buildroot.host_id = %(host_id)i')
@@ -3736,6 +3831,9 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
     if checksum is not None:
         clauses.append('checksum = %(checksum)s')
         values['checksum'] = checksum
+    if archiveID is not None:
+        clauses.append('archiveinfo.id = %(archive_id)s')
+        values['archive_id'] = archiveID
 
     if type is None:
         pass
@@ -3778,7 +3876,14 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
             clauses.append('image_archives.%s = %%(%s)s' % (key, key))
             values[key] = typeInfo[key]
     else:
-        raise koji.GenericError, 'unsupported archive type: %s' % type
+        btype = lookup_name('btype', type, strict=False)
+        if not btype:
+            raise koji.GenericError('unsupported archive type: %s' % type)
+        if typeInfo:
+            raise koji.GenericError('typeInfo queries not supported for type '
+                                    '%(name)s' % btype)
+        clauses.append('archiveinfo.btype_id = %(btype_id)s')
+        values['btype_id'] = btype['id']
 
     columns, aliases = zip(*fields)
     ret = QueryProcessor(tables=tables, columns=columns, aliases=aliases, joins=joins,
@@ -3815,13 +3920,14 @@ def get_archive(archive_id, strict=False):
         rootid
         arch
     """
-    fields = ('id', 'type_id', 'build_id', 'buildroot_id', 'filename', 'size',
-              'checksum', 'checksum_type', 'metadata_only', 'extra')
-    archive = QueryProcessor(tables=['archiveinfo'], columns=fields, transform=_fix_archive_row,
-                             clauses=['id=%(archive_id)s'], values=locals()).executeOne()
-    if not archive:
-        # strict is taken care of by _singleRow()
-        return None
+    data = list_archives(archiveID=archive_id)
+    if not data:
+        if strict:
+            raise koji.GenericError('No such archive: %s' % archive_id)
+        else:
+            return None
+
+    archive = data[0]
     maven_info = get_maven_archive(archive_id)
     if maven_info:
         del maven_info['archive_id']
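Two effects of the list_archives rework worth noting: type may now be any registered
btype (with typeInfo refused for non-legacy types), and get_archive is reimplemented
on top of list_archives via the new archiveID parameter, so both return the same
field set, including btype_id and btype. Illustrative calls, reusing the session
from the earlier sketches:

    session.listArchives(buildID=1234, type='maven')      # legacy, unchanged
    session.listArchives(buildID=1234, type='container')  # hypothetical new btype
    session.listArchives(imageID=567)   # components of archive 567; as the TODO
                                        # notes, the arg name is now a misnomer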
@@ -3882,9 +3988,9 @@ def get_image_archive(archive_id, strict=False):
     if not results:
         return None
     results['rootid'] = False
-    fields = ('image_id', 'rpm_id')
-    select = """SELECT %s FROM image_listing
-    WHERE image_id = %%(archive_id)i""" % ', '.join(fields)
+    fields = ['rpm_id']
+    select = """SELECT %s FROM archive_rpm_components
+    WHERE archive_id = %%(archive_id)i""" % ', '.join(fields)
     rpms = _singleRow(select, locals(), fields)
     if rpms:
         results['rootid'] = True
@@ -4453,39 +4559,13 @@ def new_build(data):
     data.setdefault('volume_id', 0)
 
     #check for existing build
-    query = QueryProcessor(
-        tables=['build'], columns=['id', 'state', 'task_id'],
-        clauses=['pkg_id=%(pkg_id)s', 'version=%(version)s',
-                 'release=%(release)s'],
-        values=data, opts={'rowlock':True, 'asList':True})
-    row = query.executeOne()
-    if row:
-        build_id, state, task_id = row
-        data['id'] = build_id
-        koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=state, new=data['state'], info=data)
-        st_desc = koji.BUILD_STATES[state]
-        if st_desc == 'BUILDING':
-            # check to see if this is the controlling task
-            if data['state'] == state and data.get('task_id', '') == task_id:
-                #the controlling task must have restarted (and called initBuild again)
-                return build_id
-            raise koji.GenericError, "Build already in progress (task %d)" % task_id
-            # TODO? - reclaim 'stale' builds (state=BUILDING and task_id inactive)
-        if st_desc in ('FAILED', 'CANCELED'):
-            #should be ok to replace
-            update = UpdateProcessor('build', clauses=['id=%(id)s'], values=data)
-            update.set(**dslice(data, ['state', 'task_id', 'owner', 'start_time', 'completion_time', 'epoch']))
-            update.rawset(create_event='get_event()')
-            update.execute()
-            builddir = koji.pathinfo.build(data)
-            if os.path.exists(builddir):
-                shutil.rmtree(builddir)
-            koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=state, new=data['state'], info=data)
-            return build_id
-        raise koji.GenericError, "Build already exists (id=%d, state=%s): %r" \
-            % (build_id, st_desc, data)
-    else:
-        koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=None, new=data['state'], info=data)
+    old_binfo = get_build(data)
+    if old_binfo:
+        recycle_build(old_binfo, data)
+        # Raises exception if there is a problem
+        return old_binfo['id']
+    #else
+    koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=None, new=data['state'], info=data)
 
     #insert the new data
     insert_data = dslice(data, ['pkg_id', 'version', 'release', 'epoch', 'state', 'volume_id',
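new_build now delegates all of the "existing build" handling to recycle_build,
defined in the next hunk. A hedged sketch of the decision flow, using hub internals
and a hypothetical data dict:

    # inside the hub, roughly:
    data = {'name': 'foo', 'version': '1.0', 'release': '1', 'state': 0}
    old = get_build(data)            # is the NVR already present?
    if old:
        recycle_build(old, data)     # raises koji.GenericError unless the old
                                     # build is FAILED/CANCELED *and* has no tag
                                     # history, rpms, or archives; on success the
                                     # old row is reset and its build id reused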
@@ -4498,6 +4578,69 @@ def new_build(data):
     return data['id']
 
 
+def recycle_build(old, data):
+    """Check to see if a build can be recycled and if so, update it"""
+
+    st_desc = koji.BUILD_STATES[old['state']]
+    if st_desc == 'BUILDING':
+        # check to see if this is the controlling task
+        if data['state'] == old['state'] and data.get('task_id', '') == old['task_id']:
+            #the controlling task must have restarted (and called initBuild again)
+            return
+        raise koji.GenericError("Build already in progress (task %(task_id)d)"
+                                % old)
+        # TODO? - reclaim 'stale' builds (state=BUILDING and task_id inactive)
+
+    if st_desc not in ('FAILED', 'CANCELED'):
+        raise koji.GenericError("Build already exists (id=%d, state=%s): %r"
+                                % (old['id'], st_desc, data))
+
+    # check for evidence of tag activity
+    query = QueryProcessor(columns=['tag_id'], tables=['tag_listing'],
+                           clauses = ['build_id = %(id)s'], values=old)
+    if query.execute():
+        raise koji.GenericError("Build already exists. Unable to recycle, "
+                                "has tag history")
+
+    # check for rpms or archives
+    query = QueryProcessor(columns=['id'], tables=['rpminfo'],
+                           clauses = ['build_id = %(id)s'], values=old)
+    if query.execute():
+        raise koji.GenericError("Build already exists. Unable to recycle, "
+                                "has rpm data")
+    query = QueryProcessor(columns=['id'], tables=['archiveinfo'],
+                           clauses = ['build_id = %(id)s'], values=old)
+    if query.execute():
+        raise koji.GenericError("Build already exists. Unable to recycle, "
+                                "has archive data")
+
+    # If we reach here, should be ok to replace
+
+    koji.plugin.run_callbacks('preBuildStateChange', attribute='state',
+                              old=old['state'], new=data['state'], info=data)
+
+    # If there is any old build type info, clear it
+    delete = """DELETE FROM maven_builds WHERE build_id = %(id)i"""
+    _dml(delete, old)
+    delete = """DELETE FROM win_builds WHERE build_id = %(id)i"""
+    _dml(delete, old)
+    delete = """DELETE FROM image_builds WHERE build_id = %(id)i"""
+    _dml(delete, old)
+    delete = """DELETE FROM build_types WHERE build_id = %(id)i"""
+    _dml(delete, old)
+
+    data['id'] = old['id']
+    update = UpdateProcessor('build', clauses=['id=%(id)s'], values=data)
+    update.set(**dslice(data, ['state', 'task_id', 'owner', 'start_time', 'completion_time', 'epoch']))
+    update.rawset(create_event='get_event()')
+    update.execute()
+    builddir = koji.pathinfo.build(data)
+    if os.path.exists(builddir):
+        shutil.rmtree(builddir)
+    koji.plugin.run_callbacks('postBuildStateChange', attribute='state',
+                              old=old['state'], new=data['state'], info=data)
+
+
 def check_noarch_rpms(basepath, rpms):
     """
     If rpms contains any noarch rpms with identical names,
@@ -4568,6 +4711,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)
     if build_id is None:
         build_id = new_build(build)
         binfo = get_build(build_id, strict=True)
+        new_typed_build(binfo, 'rpm')
     else:
         #build_id was passed in - sanity check
         binfo = get_build(build_id, strict=True)
@@ -4630,6 +4774,7 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None):
     if not buildinfo:
         # create a new build
         build_id = new_build(rpminfo)
+        # we add the rpm build type below
        buildinfo = get_build(build_id, strict=True)
     else:
         #figure it out from sourcerpm string
@@ -4655,6 +4800,10 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None):
             raise koji.GenericError, "srpm mismatch for %s: %s (expected %s)" \
                     % (fn, basename, srpmname)
 
+    # if we're adding an rpm to it, then this build is of rpm type
+    # harmless if build already has this type
+    new_typed_build(buildinfo, 'rpm')
+
     #add rpminfo entry
     rpminfo['id'] = _singleValue("""SELECT nextval('rpminfo_id_seq')""")
     rpminfo['build_id'] = buildinfo['id']
@@ -4812,6 +4961,25 @@ class CG_Importer(object):
         buildinfo['completion_time'] = \
             datetime.datetime.fromtimestamp(float(metadata['build']['end_time'])).isoformat(' ')
         self.buildinfo = buildinfo
+
+        # get typeinfo
+        b_extra = self.metadata['build'].get('extra', {})
+        typeinfo = b_extra.get('typeinfo', {})
+
+        # legacy types can be at top level of extra
+        for btype in ['maven', 'win', 'image']:
+            if btype not in b_extra:
+                continue
+            if btype in typeinfo:
+                # he says they've already got one
+                raise koji.GenericError('Duplicate typeinfo for %r' % btype)
+            typeinfo[btype] = b_extra[btype]
+
+        # sanity check
+        for btype in typeinfo:
+            lookup_name('btype', btype, strict=True)
+
+        self.typeinfo = typeinfo
         return buildinfo
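The typeinfo gathered above comes from the content generator metadata. A minimal
sketch of the relevant part of such metadata — hand-written for illustration, not
an official example; the 'container' type is hypothetical:

    metadata = {
        'build': {
            'name': 'foo', 'version': '1.0', 'release': '1',
            'extra': {
                'typeinfo': {
                    'container': {'arch': 'x86_64'},  # new-style type entry
                },
                # legacy maven/win/image info may instead appear at the top
                # level of extra; duplicating it under typeinfo is an error
            },
        },
        # ... buildroots, output, etc.
    }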
@@ -4820,14 +4988,23 @@ class CG_Importer(object):
         buildinfo = get_build(build_id, strict=True)
 
         # handle special build types
-        b_extra = self.metadata['build'].get('extra', {})
-        if 'maven' in b_extra:
-            new_maven_build(buildinfo, b_extra['maven'])
-        if 'win' in b_extra:
-            new_win_build(buildinfo, b_extra['win'])
-        if 'image' in b_extra:
-            # no extra info tracked at build level
-            new_image_build(buildinfo)
+        for btype in self.typeinfo:
+            tinfo = self.typeinfo[btype]
+            if btype == 'maven':
+                new_maven_build(buildinfo, tinfo)
+            elif btype == 'win':
+                new_win_build(buildinfo, tinfo)
+            elif btype == 'image':
+                # no extra info tracked at build level
+                new_image_build(buildinfo)
+            else:
+                new_typed_build(buildinfo, btype)
+
+        # rpm builds not required to have typeinfo
+        if 'rpm' not in self.typeinfo:
+            # if the build contains rpms then it has the rpm type
+            if [o for o in self.prepped_outputs if o['type'] == 'rpm']:
+                new_typed_build(buildinfo, 'rpm')
 
         self.buildinfo = buildinfo
         return buildinfo
@@ -5008,23 +5185,44 @@ class CG_Importer(object):
 
     def prep_archive(self, fileinfo):
-        # determine archive import type (maven/win/image/other)
+        # determine archive import type
         extra = fileinfo.get('extra', {})
         legacy_types = ['maven', 'win', 'image']
-        l_type = None
+        btype = None
         type_info = None
-        for key in legacy_types:
-            if key in extra:
-                if l_type is not None:
-                    raise koji.GenericError("Output file has multiple archive"
-                            "types: %(filename)s" % fileinfo)
-                l_type = key
-                type_info = extra[key]
-        fileinfo['hub.l_type'] = l_type
+        for key in extra:
+            if key not in legacy_types:
+                continue
+            if btype is not None:
+                raise koji.GenericError("Output file has multiple types: "
+                                        "%(filename)s" % fileinfo)
+            btype = key
+            type_info = extra[key]
+        for key in extra.get('typeinfo', {}):
+            if btype == key:
+                raise koji.GenericError("Duplicate typeinfo for: %r" % btype)
+            elif btype is not None:
+                raise koji.GenericError("Output file has multiple types: "
+                                        "%(filename)s" % fileinfo)
+            btype = key
+            type_info = extra['typeinfo'][key]
+
+        if btype is None:
+            raise koji.GenericError("No typeinfo for: %(filename)s" % fileinfo)
+
+        if btype not in self.typeinfo:
+            raise koji.GenericError('Output type %s not listed in build '
+                                    'types' % btype)
+
+        fileinfo['hub.btype'] = btype
         fileinfo['hub.type_info'] = type_info
 
-        if l_type == 'image':
-            components = fileinfo.get('components', [])
+        if 'components' in fileinfo:
+            if btype in ('maven', 'win'):
+                raise koji.GenericError("Component list not allowed for "
+                                        "archives of type %s" % btype)
+            # for new types, we trust the metadata
+            components = fileinfo['components']
             rpmlist, archives = self.match_components(components)
             # TODO - note presence of external components
             fileinfo['hub.rpmlist'] = rpmlist
@@ -5052,29 +5250,29 @@ class CG_Importer(object):
 
     def import_archive(self, buildinfo, brinfo, fileinfo):
         fn = fileinfo['hub.path']
-        l_type = fileinfo['hub.l_type']
+        btype = fileinfo['hub.btype']
         type_info = fileinfo['hub.type_info']
 
-        archiveinfo = import_archive_internal(fn, buildinfo, l_type, type_info, brinfo.id, fileinfo)
+        archiveinfo = import_archive_internal(fn, buildinfo, btype, type_info, brinfo.id, fileinfo)
 
-        if l_type == 'image':
+        if 'components' in fileinfo:
             self.import_components(archiveinfo['id'], fileinfo)
 
 
-    def import_components(self, image_id, fileinfo):
+    def import_components(self, archive_id, fileinfo):
         rpmlist = fileinfo['hub.rpmlist']
         archives = fileinfo['hub.archives']
 
-        insert = InsertProcessor('image_listing')
-        insert.set(image_id=image_id)
+        insert = InsertProcessor('archive_rpm_components')
+        insert.set(archive_id=archive_id)
         for rpminfo in rpmlist:
             insert.set(rpm_id=rpminfo['id'])
             insert.execute()
 
-        insert = InsertProcessor('image_archive_listing')
-        insert.set(image_id=image_id)
+        insert = InsertProcessor('archive_components')
+        insert.set(archive_id=archive_id)
         for archiveinfo in archives:
-            insert.set(archive_id=archiveinfo['id'])
+            insert.set(component_id=archiveinfo['id'])
             insert.execute()
 
@@ -5434,9 +5632,11 @@ def new_maven_build(build, maven_info):
                     (field, current_maven_info[field], maven_info[field])
     else:
         maven_info['build_id'] = build['id']
-        insert = """INSERT INTO maven_builds (build_id, group_id, artifact_id, version)
-        VALUES (%(build_id)i, %(group_id)s, %(artifact_id)s, %(version)s)"""
-        _dml(insert, maven_info)
+        data = dslice(maven_info, ['build_id', 'group_id', 'artifact_id', 'version'])
+        insert = InsertProcessor('maven_builds', data=data)
+        insert.execute()
+    # also add build_types entry
+    new_typed_build(build, 'maven')
 
 def new_win_build(build_info, win_info):
     """
@@ -5456,6 +5656,8 @@ def new_win_build(build_info, win_info):
         insert.set(build_id=build_id)
         insert.set(platform=win_info['platform'])
         insert.execute()
+    # also add build_types entry
+    new_typed_build(build_info, 'win')
 
 def new_image_build(build_info):
     """
@@ -5473,6 +5675,26 @@ def new_image_build(build_info):
         insert = InsertProcessor('image_builds')
         insert.set(build_id=build_info['id'])
         insert.execute()
+    # also add build_types entry
+    new_typed_build(build_info, 'image')
+
+
+def new_typed_build(build_info, btype):
+    """Mark build as a given btype"""
+
+    btype_id=lookup_name('btype', btype, strict=True)['id']
+    query = QueryProcessor(tables=('build_types',), columns=('build_id',),
+                           clauses=('build_id = %(build_id)i',
+                                    'btype_id = %(btype_id)i',),
+                           values={'build_id': build_info['id'],
+                                   'btype_id': btype_id})
+    result = query.executeOne()
+    if not result:
+        insert = InsertProcessor('build_types')
+        insert.set(build_id=build_info['id'])
+        insert.set(btype_id=btype_id)
+        insert.execute()
+
 
 def old_image_data(old_image_id):
     """Return old image data for given id"""
@@ -5588,15 +5810,15 @@ def import_old_image(old, name, version):
     archive_id = archives[0]['id']
     logger.debug('root archive id is %s' % archive_id)
     query = QueryProcessor(columns=['rpm_id'], tables=['imageinfo_listing'],
-                           clauses=['image_id=%(id)i'], values=old,
+                           clauses=['archive_id=%(id)i'], values=old,
                            opts={'asList': True})
     rpm_ids = [r[0] for r in query.execute()]
-    insert = InsertProcessor('image_listing')
-    insert.set(image_id=archive_id)
+    insert = InsertProcessor('archive_rpm_components')
+    insert.set(archive_id=archive_id)
     for rpm_id in rpm_ids:
         insert.set(rpm_id=rpm_id)
         insert.execute()
-    logger.info('updated image_listing')
+    logger.info('updated archive_rpm_components')
 
     # grab old logs
     old_log_dir = os.path.join(old['dir'], 'data', 'logs', old['arch'])
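new_typed_build (just above) is deliberately idempotent, which is why import_rpm
and the legacy new_*_build helpers can call it unconditionally. A sketch of the
effect, using hub internals and a hypothetical NVR:

    binfo = get_build('foo-1.0-1', strict=True)
    new_typed_build(binfo, 'maven')
    new_typed_build(binfo, 'maven')  # no-op: the existing row is found,
                                     # nothing is inserted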
@@ -5689,6 +5911,10 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
                     (filename, archiveinfo['checksum'], fileinfo['checksum']))
     archivetype = get_archive_type(filename, strict=True)
     archiveinfo['type_id'] = archivetype['id']
+    btype = lookup_name('btype', type, strict=False)
+    if btype is None:
+        raise koji.BuildError, 'unsupported archive type: %s' % type
+    archiveinfo['btype_id'] = btype['id']
 
     # cg extra data
     extra = fileinfo.get('extra', None)
@@ -5758,7 +5984,10 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
             _import_archive_file(filepath, imgdir)
         # import log files?
     else:
-        raise koji.BuildError, 'unsupported archive type: %s' % type
+        # new style type, no supplementary table
+        if not metadata_only:
+            destdir = koji.pathinfo.typedir(buildinfo, btype['name'])
+            _import_archive_file(filepath, destdir)
 
     archiveinfo = get_archive(archive_id, strict=True)
     koji.plugin.run_callbacks('postImport', type='archive', archive=archiveinfo, build=buildinfo,
@@ -6353,11 +6582,7 @@ def build_references(build_id, limit=None):
     The optional limit arg is used to limit the size of the buildroot references.
     """
-    #references (that matter):
-    #  tag_listing
-    #  buildroot_listing (via rpminfo)
-    #  buildroot_archives (via archiveinfo)
-    #  ?? rpmsigs (via rpminfo)
+
     ret = {}
 
     # find tags
@@ -6365,9 +6590,11 @@ def build_references(build_id, limit=None):
     WHERE build_id = %(build_id)i AND active = TRUE"""
     ret['tags'] = _multiRow(q, locals(), ('id', 'name'))
 
-    #we'll need the component rpm ids for the rest
+    #we'll need the component rpm and archive ids for the rest
     q = """SELECT id FROM rpminfo WHERE build_id=%(build_id)i"""
-    rpm_ids = _fetchMulti(q, locals())
+    build_rpm_ids = _fetchMulti(q, locals())
+    q = """SELECT id FROM archiveinfo WHERE build_id=%(build_id)i"""
+    build_archive_ids = _fetchMulti(q, locals())
 
     # find rpms whose buildroots we were in
     st_complete = koji.BUILD_STATES['COMPLETE']
@@ -6381,28 +6608,26 @@ def build_references(build_id, limit=None):
           AND build.state = %(st_complete)i"""
     if limit is not None:
         q += "\nLIMIT %(limit)i"
-    for (rpm_id,) in rpm_ids:
+    for (rpm_id,) in build_rpm_ids:
         for row in _multiRow(q, locals(), fields):
             idx.setdefault(row['id'], row)
         if limit is not None and len(idx) > limit:
             break
     ret['rpms'] = idx.values()
 
-    ret['images'] = []
-    # find images that contain the build rpms
-    fields = ['image_id']
-    clauses = ['image_listing.rpm_id = %(rpm_id)s']
-    # TODO: join in other tables to provide something more than image id
-    query = QueryProcessor(columns=fields, tables=['image_listing'], clauses=clauses,
+    ret['component_of'] = []
+    # find images/archives that contain the build rpms
+    fields = ['archive_id']
+    clauses = ['archive_rpm_components.rpm_id = %(rpm_id)s']
+    # TODO: join in other tables to provide something more than archive id
+    query = QueryProcessor(columns=fields, tables=['archive_rpm_components'], clauses=clauses,
                            opts={'asList': True})
-    for (rpm_id,) in rpm_ids:
+    for (rpm_id,) in build_rpm_ids:
         query.values = {'rpm_id': rpm_id}
-        image_ids = [i[0] for i in query.execute()]
-        ret['images'].extend(image_ids)
+        archive_ids = [i[0] for i in query.execute()]
+        ret['component_of'].extend(archive_ids)
 
     # find archives whose buildroots we were in
-    q = """SELECT id FROM archiveinfo WHERE build_id = %(build_id)i"""
-    archive_ids = _fetchMulti(q, locals())
     fields = ('id', 'type_id', 'type_name', 'build_id', 'filename')
     idx = {}
     q = """SELECT archiveinfo.id, archiveinfo.type_id, archivetypes.name, archiveinfo.build_id, archiveinfo.filename
@@ -6414,23 +6639,23 @@ def build_references(build_id, limit=None):
           AND build.state = %(st_complete)i"""
     if limit is not None:
         q += "\nLIMIT %(limit)i"
-    for (archive_id,) in archive_ids:
+    for (archive_id,) in build_archive_ids:
         for row in _multiRow(q, locals(), fields):
             idx.setdefault(row['id'], row)
         if limit is not None and len(idx) > limit:
             break
     ret['archives'] = idx.values()
 
-    # find images that contain the build archives
-    fields = ['image_id']
-    clauses = ['image_archive_listing.archive_id = %(archive_id)s']
-    # TODO: join in other tables to provide something more than image id
-    query = QueryProcessor(columns=fields, tables=['image_archive_listing'], clauses=clauses,
+    # find images/archives that contain the build archives
+    fields = ['archive_id']
+    clauses = ['archive_components.component_id = %(archive_id)s']
+    # TODO: join in other tables to provide something more than archive id
+    query = QueryProcessor(columns=fields, tables=['archive_components'], clauses=clauses,
                            opts={'asList': True})
-    for (archive_id,) in archive_ids:
+    for (archive_id,) in build_archive_ids:
         query.values = {'archive_id': archive_id}
-        image_ids = [i[0] for i in query.execute()]
-        ret['images'].extend(image_ids)
+        archive_ids = [i[0] for i in query.execute()]
+        ret['component_of'].extend(archive_ids)
 
     # find timestamp of most recent use in a buildroot
     query = QueryProcessor(
@@ -6440,7 +6665,7 @@ def build_references(build_id, limit=None):
         clauses=['buildroot_listing.rpm_id = %(rpm_id)s'],
         opts={'order': '-standard_buildroot.create_event', 'limit': 1})
     event_id = -1
-    for (rpm_id,) in rpm_ids:
+    for (rpm_id,) in build_rpm_ids:
         query.values = {'rpm_id': rpm_id}
         tmp_id = query.singleValue(strict=False)
         if tmp_id is not None and tmp_id > event_id:
@@ -6458,7 +6683,7 @@ def build_references(build_id, limit=None):
              ORDER BY standard_buildroot.create_event DESC
              LIMIT 1"""
     event_id = -1
-    for (archive_id,) in archive_ids:
+    for (archive_id,) in build_archive_ids:
         tmp_id = _singleValue(q, locals(), strict=False)
         if tmp_id is not None and tmp_id > event_id:
             event_id = tmp_id
@@ -6470,6 +6695,9 @@ def build_references(build_id, limit=None):
         if ret['last_used'] is None or last_archive_use > ret['last_used']:
             ret['last_used'] = last_archive_use
 
+    # set 'images' field for backwards compat
+    ret['images'] = ret['component_of']
+
     return ret
 
 def delete_build(build, strict=True, min_ref_age=604800):
@@ -6526,6 +6754,9 @@ def _delete_build(binfo):
     # build-related data:
     #   build KEEP (marked deleted)
     #   maven_builds KEEP
+    #   win_builds KEEP
+    #   image_builds KEEP
+    #   build_types KEEP
     #   task ??
     #   tag_listing REVOKE (versioned) (but should ideally be empty anyway)
     #   rpminfo KEEP
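Since build_references now reports containing archives generically, the old key is
kept as an alias so existing callers keep working — a quick hub-internal
illustration, assuming some existing build_id:

    refs = build_references(build_id)
    assert refs['images'] is refs['component_of']  # backwards-compat alias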
@@ -6565,17 +6796,13 @@ def reset_build(build):
     """Reset a build so that it can be reimported
 
-    WARNING: this function is potentially destructive. use with care.
+    WARNING: this function is highly destructive. use with care.
     nulls task_id
     sets state to CANCELED
-    clears data in rpminfo
-    removes rpminfo entries from any buildroot_listings [!]
-    clears data in archiveinfo, maven_info
-    removes archiveinfo entries from buildroot_archives
-    remove files related to the build
+    clears all referenced data in other tables, including buildroot and
+    archive component tables
 
-    note, we don't actually delete the build data, so tags
-    remain intact
+    after reset, only the build table entry is left
     """
     # Only an admin may do this
     context.session.assertPerm('admin')
@@ -6591,6 +6818,8 @@ def reset_build(build):
         _dml(delete, locals())
         delete = """DELETE FROM buildroot_listing WHERE rpm_id=%(rpm_id)i"""
         _dml(delete, locals())
+        delete = """DELETE FROM archive_rpm_components WHERE rpm_id=%(rpm_id)i"""
+        _dml(delete, locals())
     delete = """DELETE FROM rpminfo WHERE build_id=%(id)i"""
     _dml(delete, binfo)
     q = """SELECT id FROM archiveinfo WHERE build_id=%(id)i"""
@@ -6600,14 +6829,28 @@ def reset_build(build):
         _dml(delete, locals())
         delete = """DELETE FROM win_archives WHERE archive_id=%(archive_id)i"""
         _dml(delete, locals())
+        delete = """DELETE FROM image_archives WHERE archive_id=%(archive_id)i"""
+        _dml(delete, locals())
         delete = """DELETE FROM buildroot_archives WHERE archive_id=%(archive_id)i"""
         _dml(delete, locals())
+        delete = """DELETE FROM archive_rpm_components WHERE archive_id=%(archive_id)i"""
+        _dml(delete, locals())
+        delete = """DELETE FROM archive_components WHERE archive_id=%(archive_id)i"""
+        _dml(delete, locals())
+        delete = """DELETE FROM archive_components WHERE component_id=%(archive_id)i"""
+        _dml(delete, locals())
     delete = """DELETE FROM archiveinfo WHERE build_id=%(id)i"""
     _dml(delete, binfo)
     delete = """DELETE FROM maven_builds WHERE build_id = %(id)i"""
     _dml(delete, binfo)
     delete = """DELETE FROM win_builds WHERE build_id = %(id)i"""
     _dml(delete, binfo)
+    delete = """DELETE FROM image_builds WHERE build_id = %(id)i"""
+    _dml(delete, binfo)
+    delete = """DELETE FROM build_types WHERE build_id = %(id)i"""
+    _dml(delete, binfo)
+    delete = """DELETE FROM tag_listing WHERE build_id = %(id)i"""
+    _dml(delete, binfo)
     binfo['state'] = koji.BUILD_STATES['CANCELED']
     update = """UPDATE build SET state=%(state)i, task_id=NULL WHERE id=%(id)i"""
     _dml(update, binfo)
@@ -7912,15 +8155,15 @@ def importImageInternal(task_id, build_id, imgdata):
             rpm_ids.append(data['id'])
 
     # associate those RPMs with the image
-    q = """INSERT INTO image_listing (image_id,rpm_id)
-    VALUES (%(image_id)i,%(rpm_id)i)"""
+    q = """INSERT INTO archive_rpm_components (archive_id,rpm_id)
+    VALUES (%(archive_id)i,%(rpm_id)i)"""
     for archive in archives:
         sys.stderr.write('working on archive %s' % archive)
         if archive['filename'].endswith('xml'):
             continue
         sys.stderr.write('associating installed rpms with %s' % archive['id'])
         for rpm_id in rpm_ids:
-            _dml(q, {'image_id': archive['id'], 'rpm_id': rpm_id})
+            _dml(q, {'archive_id': archive['id'], 'rpm_id': rpm_id})
 
     koji.plugin.run_callbacks('postImport', type='image', image=imgdata,
                               fullpath=fullpath)
@@ -8794,6 +9037,7 @@ class RootExports(object):
     getMavenBuild = staticmethod(get_maven_build)
     getWinBuild = staticmethod(get_win_build)
     getImageBuild = staticmethod(get_image_build)
+    getBuildType = staticmethod(get_build_type)
     getArchiveTypes = staticmethod(get_archive_types)
     getArchiveType = staticmethod(get_archive_type)
     listArchives = staticmethod(list_archives)
@@ -8804,6 +9048,9 @@ class RootExports(object):
     listArchiveFiles = staticmethod(list_archive_files)
     getArchiveFile = staticmethod(get_archive_file)
 
+    listBTypes = staticmethod(list_btypes)
+    addBType = staticmethod(add_btype)
+
     def getChangelogEntries(self, buildID=None, taskID=None, filepath=None, author=None, before=None, after=None, queryOpts=None):
         """Get changelog entries for the build with the given ID,
            or for the rpm generated by the given task at the given path
@@ -9038,6 +9285,8 @@ class RootExports(object):
         fields = [('build.id', 'build_id'), ('build.version', 'version'), ('build.release', 'release'),
                   ('build.epoch', 'epoch'), ('build.state', 'state'),
                   ('build.completion_time', 'completion_time'), ('build.start_time', 'start_time'),
+                  ('build.source', 'source'),
+                  ('build.extra', 'extra'),
                   ('events.id', 'creation_event_id'), ('events.time', 'creation_time'), ('build.task_id', 'task_id'),
                   ('EXTRACT(EPOCH FROM events.time)', 'creation_ts'),
                   ('EXTRACT(EPOCH FROM build.start_time)', 'start_ts'),
@@ -9111,11 +9360,17 @@ class RootExports(object):
                 joins.append('image_builds ON build.id = image_builds.build_id')
                 fields.append(('image_builds.build_id', 'build_id'))
             else:
-                raise koji.GenericError, 'unsupported build type: %s' % type
+                btype = lookup_name('btype', type, strict=False)
+                if not btype:
+                    raise koji.GenericError, 'unsupported build type: %s' % type
+                btype_id = btype['id']
+                joins.append('build_types ON build.id = build_types.build_id '
+                             'AND btype_id = %(btype_id)s')
 
         query = QueryProcessor(columns=[pair[0] for pair in fields],
                                aliases=[pair[1] for pair in fields],
                                tables=tables, joins=joins, clauses=clauses,
+                               transform=_fix_extra_field,
                                values=locals(), opts=queryOpts)
 
         return query.iterate()
@@ -11088,10 +11343,13 @@ class HostExports(object):
                 os.symlink(dest, src)
 
     def initBuild(self, data):
-        """Create a stub build entry.
+        """Create a stub (rpm) build entry.
 
         This is done at the very beginning of the build to inform the
        system the build is underway.
+
+        This function is only called for rpm builds, other build types
+        have their own init function
         """
         host = Host()
         host.verify()
@@ -11102,7 +11360,10 @@ class HostExports(object):
         data['owner'] = task.getOwner()
         data['state'] = koji.BUILD_STATES['BUILDING']
         data['completion_time'] = None
-        return new_build(data)
+        build_id = new_build(data)
+        binfo = get_build(build_id, strict=True)
+        new_typed_build(binfo, 'rpm')
+        return build_id
 
     def completeBuild(self, task_id, build_id, srpm, rpms, brmap=None, logs=None):
         """Import final build contents into the database"""
diff --git a/koji/__init__.py b/koji/__init__.py
index 18100428..2252e4f6 100644
--- a/koji/__init__.py
+++ b/koji/__init__.py
@@ -1674,6 +1674,17 @@ class PathInfo(object):
         """Return the directory where the image for the build are stored"""
         return self.build(build) + '/images'
 
+    def typedir(self, build, btype):
+        """Return the directory where typed files for a build are stored"""
+        if btype == 'maven':
+            return self.mavenbuild(build)
+        elif btype == 'win':
+            return self.winbuild(build)
+        elif btype == 'image':
+            return self.imagebuild(build)
+        else:
+            return "%s/files/%s" % (self.build(build), btype)
+
     def rpm(self, rpminfo):
         """Return the path (relative to build_dir) where an rpm belongs"""
         return "%(arch)s/%(name)s-%(version)s-%(release)s.%(arch)s.rpm" % rpminfo
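typedir keeps the legacy layouts intact and sends everything else under
files/&lt;btype&gt;. A sketch with an assumed topdir and a hand-written build dict:

    import koji

    pathinfo = koji.PathInfo(topdir='/mnt/koji')
    build = {'name': 'foo', 'version': '1.0', 'release': '1',
             'volume_name': 'DEFAULT'}
    pathinfo.typedir(build, 'maven')      # same as pathinfo.mavenbuild(build)
    pathinfo.typedir(build, 'container')  # .../packages/foo/1.0/1/files/container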
diff --git a/tests/test_hub/test_add_btype.py b/tests/test_hub/test_add_btype.py
new file mode 100644
index 00000000..e64cc0f0
--- /dev/null
+++ b/tests/test_hub/test_add_btype.py
@@ -0,0 +1,45 @@
+import unittest
+import mock
+
+import koji
+import kojihub
+
+IP = kojihub.InsertProcessor
+
+
+class TestAddBType(unittest.TestCase):
+
+    @mock.patch('kojihub.list_btypes')
+    @mock.patch('kojihub.InsertProcessor')
+    def test_add_btype(self, InsertProcessor, list_btypes):
+        # Not sure why mock can't patch kojihub.context, so we do this
+        session = kojihub.context.session = mock.MagicMock()
+        mocks = [InsertProcessor, list_btypes, session]
+        # It seems MagicMock will not automatically handle attributes that
+        # start with "assert"
+        session.assertPerm = mock.MagicMock()
+
+        # expected case
+        list_btypes.return_value = None
+        insert = InsertProcessor.return_value
+        kojihub.add_btype('new_btype')
+        InsertProcessor.assert_called_once()
+        insert.execute.assert_called_once()
+
+        args, kwargs = InsertProcessor.call_args
+        ip = IP(*args, **kwargs)
+        self.assertEquals(ip.table, 'btype')
+        self.assertEquals(ip.data, {'name': 'new_btype'})
+        self.assertEquals(ip.rawdata, {})
+        session.assertPerm.assert_called_with('admin')
+
+        for m in mocks:
+            m.reset_mock()
+        session.assertPerm = mock.MagicMock()
+
+        # already exists
+        list_btypes.return_value = True
+        with self.assertRaises(koji.GenericError):
+            kojihub.add_btype('new_btype')
+        InsertProcessor.assert_not_called()
+        session.assertPerm.assert_called_with('admin')
diff --git a/tests/test_hub/test_get_build_type.py b/tests/test_hub/test_get_build_type.py
new file mode 100644
index 00000000..5dc87af9
--- /dev/null
+++ b/tests/test_hub/test_get_build_type.py
@@ -0,0 +1,49 @@
+import unittest
+import mock
+
+import koji
+import kojihub
+
+
+class TestGetBuildType(unittest.TestCase):
+
+    @mock.patch('kojihub.get_build')
+    @mock.patch('kojihub.QueryProcessor')
+    def test_no_build(self, QueryProcessor, get_build):
+        mocks = [QueryProcessor, get_build]
+        get_build.return_value = None
+
+        # strict on
+        kojihub.get_build_type('mytestbuild-1-1', strict=True)
+        QueryProcessor.assert_not_called()
+        get_build.assert_called_with('mytestbuild-1-1', strict=True)
+
+
+    @mock.patch('kojihub.get_maven_build')
+    @mock.patch('kojihub.get_win_build')
+    @mock.patch('kojihub.get_image_build')
+    @mock.patch('kojihub.get_build')
+    @mock.patch('kojihub.QueryProcessor')
+    def test_has_build(self, QueryProcessor, get_build, get_image_build,
+                       get_win_build, get_maven_build):
+        mocks = [x for x in locals().values() if x is not self]
+
+        typeinfo = {'maven': {'maven': 'foo'},
+                    'win': {'win': 'foo'},
+                    'image': {'image': 'foo'},
+                    'new_type': {'bar': 42}}
+        binfo = {'id' : 1, 'extra' : {'typeinfo': {'new_type': typeinfo['new_type']}}}
+        get_build.return_value = binfo
+        get_maven_build.return_value = typeinfo['maven']
+        get_win_build.return_value = typeinfo['win']
+        get_image_build.return_value = typeinfo['image']
+
+        query = QueryProcessor.return_value
+        query.execute.return_value = [['new_type']]
+
+        ret = kojihub.get_build_type('mytestbuild-1-1', strict=True)
+        assert ret == typeinfo
+        get_build.assert_called_with('mytestbuild-1-1', strict=True)
+        get_maven_build.assert_called_with(binfo['id'], strict=False)
+        get_win_build.assert_called_with(binfo['id'], strict=False)
+        get_image_build.assert_called_with(binfo['id'], strict=False)
diff --git a/tests/test_hub/test_import_build.py b/tests/test_hub/test_import_build.py
index b3387572..108f9112 100644
--- a/tests/test_hub/test_import_build.py
+++ b/tests/test_hub/test_import_build.py
@@ -53,12 +53,14 @@ class TestImportRPM(unittest.TestCase):
         with self.assertRaises(koji.GenericError):
             kojihub.import_rpm(self.filename)
 
+    @mock.patch('kojihub.new_typed_build')
     @mock.patch('kojihub._dml')
     @mock.patch('kojihub._singleValue')
     @mock.patch('kojihub.get_build')
     @mock.patch('koji.get_rpm_header')
     def test_import_rpm_completed_build(self, get_rpm_header, get_build,
-                                        _singleValue, _dml):
+                                        _singleValue, _dml,
+                                        new_typed_build):
         get_rpm_header.return_value = self.rpm_header_retval
         get_build.return_value = {
             'state': koji.BUILD_STATES['COMPLETE'],
@@ -103,12 +105,14 @@ class TestImportRPM(unittest.TestCase):
         }
         _dml.assert_called_once_with(statement, values)
 
+    @mock.patch('kojihub.new_typed_build')
    @mock.patch('kojihub._dml')
     @mock.patch('kojihub._singleValue')
     @mock.patch('kojihub.get_build')
     @mock.patch('koji.get_rpm_header')
     def test_import_rpm_completed_source_build(self, get_rpm_header, get_build,
-                                               _singleValue, _dml):
+                                               _singleValue, _dml,
+                                               new_typed_build):
         retval = copy.copy(self.rpm_header_retval)
         retval.update({
             'filename': 'name-version-release.arch.rpm',
@@ -189,6 +193,7 @@ class TestImportBuild(unittest.TestCase):
     def tearDown(self):
         shutil.rmtree(self.tempdir)
 
+    @mock.patch('kojihub.new_typed_build')
     @mock.patch('kojihub._dml')
     @mock.patch('kojihub._singleValue')
     @mock.patch('kojihub.get_build')
@@ -205,7 +210,8 @@ class TestImportBuild(unittest.TestCase):
                           new_package, context, query,
                           import_rpm, import_rpm_file,
                           rip_rpm_sighdr, add_rpm_sig,
-                          get_build, _singleValue, _dml):
+                          get_build, _singleValue, _dml,
+                          new_typed_build):
 
         rip_rpm_sighdr.return_value = (0, 0)
 
@@ -225,6 +231,16 @@ class TestImportBuild(unittest.TestCase):
             1106: 1,
         })
         get_rpm_header.return_value = retval
+        binfo = {
+            'state': koji.BUILD_STATES['COMPLETE'],
+            'name': 'name',
+            'version': 'version',
+            'release': 'release',
+            'id': 12345,
+        }
+        # get_build called once to check for existing,
+        # then later to get the build info
+        get_build.side_effect = [None, binfo]
 
         kojihub.import_build(self.src_filename, [self.filename])
diff --git a/tests/test_hub/test_import_image_internal.py b/tests/test_hub/test_import_image_internal.py
index 6e889004..b23f7282 100644
--- a/tests/test_hub/test_import_image_internal.py
+++ b/tests/test_hub/test_import_image_internal.py
@@ -105,7 +105,7 @@ class TestImportImageInternal(unittest.TestCase):
         self.assertEquals(len(cursor.execute.mock_calls), 1)
         expression, kwargs = cursor.execute.mock_calls[0][1]
         expression = " ".join(expression.split())
-        expected = 'INSERT INTO image_listing (image_id,rpm_id) ' + \
-            'VALUES (%(image_id)i,%(rpm_id)i)'
+        expected = 'INSERT INTO archive_rpm_components (archive_id,rpm_id) ' + \
+            'VALUES (%(archive_id)i,%(rpm_id)i)'
         self.assertEquals(expression, expected)
-        self.assertEquals(kwargs, {'image_id': 9, 'rpm_id': 6})
+        self.assertEquals(kwargs, {'archive_id': 9, 'rpm_id': 6})
diff --git a/tests/test_hub/test_list_btypes.py b/tests/test_hub/test_list_btypes.py
new file mode 100644
index 00000000..25a7ab48
--- /dev/null
+++ b/tests/test_hub/test_list_btypes.py
@@ -0,0 +1,72 @@
+import unittest
+import mock
+
+import koji
+import kojihub
+
+QP = kojihub.QueryProcessor
+
+
+class TestListBTypes(unittest.TestCase):
+
+    @mock.patch('kojihub.QueryProcessor')
+    def test_list_btypes(self, QueryProcessor):
+
+        # default query
+        query = QueryProcessor.return_value
+        query.execute.return_value = "return value"
+        ret = kojihub.list_btypes()
+        QueryProcessor.assert_called_once()
+        query.execute.assert_called_once()
+        self.assertEquals(ret, "return value")
+
+        args, kwargs = QueryProcessor.call_args
+        self.assertEquals(args, ())
+        qp = QP(**kwargs)
+        self.assertEquals(qp.tables, ['btype'])
+        self.assertEquals(qp.columns, ['id', 'name'])
+        self.assertEquals(qp.clauses, [])
+        self.assertEquals(qp.joins, None)
+
+        QueryProcessor.reset_mock()
+
+        # query by name
+        query = QueryProcessor.return_value
+        query.execute.return_value = "return value"
+        ret = kojihub.list_btypes({'name': 'rpm'})
+        QueryProcessor.assert_called_once()
+        query.execute.assert_called_once()
+        self.assertEquals(ret, "return value")
+
+        args, kwargs = QueryProcessor.call_args
+        self.assertEquals(args, ())
+        qp = QP(**kwargs)
+        self.assertEquals(qp.tables, ['btype'])
+        self.assertEquals(qp.columns, ['id', 'name'])
+        self.assertEquals(qp.clauses, ['btype.name = %(name)s'])
+        self.assertEquals(qp.values, {'name': 'rpm'})
+        self.assertEquals(qp.joins, None)
+
+        QueryProcessor.reset_mock()
+
+        # query by id, with opts
+        query = QueryProcessor.return_value
+        query.execute.return_value = "return value"
+        ret = kojihub.list_btypes({'id': 1}, {'order': 'id'})
+        QueryProcessor.assert_called_once()
+        query.execute.assert_called_once()
+        self.assertEquals(ret, "return value")
+
+        args, kwargs = QueryProcessor.call_args
+        self.assertEquals(args, ())
+        qp = QP(**kwargs)
+        self.assertEquals(qp.tables, ['btype'])
+        self.assertEquals(qp.columns, ['id', 'name'])
+        self.assertEquals(qp.clauses, ['btype.id = %(id)s'])
+        self.assertEquals(qp.values, {'id': 1})
+        self.assertEquals(qp.opts, {'order': 'id'})
+        self.assertEquals(qp.joins, None)
+
+        QueryProcessor.reset_mock()
+
+        # query by name
diff --git a/tests/test_hub/test_new_typed_build.py b/tests/test_hub/test_new_typed_build.py
new file mode 100644
index 00000000..9d6516cb
--- /dev/null
+++ b/tests/test_hub/test_new_typed_build.py
@@ -0,0 +1,39 @@
+import unittest
+import mock
+
+import koji
+import kojihub
+
+
+class TestNewTypedBuild(unittest.TestCase):
+
+    @mock.patch('kojihub.lookup_name')
+    @mock.patch('kojihub.QueryProcessor')
+    @mock.patch('kojihub.InsertProcessor')
+    def test_new_typed_build(self, InsertProcessor, QueryProcessor, lookup_name):
+
+        binfo = {'id': 1, 'foo': '137'}
+        btype = 'sometype'
+        btype_id = 99
+        lookup_name.return_value = {'id':99, 'name':btype}
+
+        # no current entry
+        query = QueryProcessor.return_value
+        query.executeOne.return_value = None
+        insert = InsertProcessor.return_value
+        kojihub.new_typed_build(binfo, btype)
+        QueryProcessor.assert_called_once()
+        query.executeOne.assert_called_once()
+        InsertProcessor.assert_called_once()
+        insert.execute.assert_called_once()
+
+        InsertProcessor.reset_mock()
+        QueryProcessor.reset_mock()
+
+        # current entry
+        query = QueryProcessor.return_value
+        query.executeOne.return_value = {'build_id':binfo['id']}
+        kojihub.new_typed_build(binfo, btype)
+        QueryProcessor.assert_called_once()
+        query.executeOne.assert_called_once()
+        InsertProcessor.assert_not_called()
diff --git a/tests/test_hub/test_recycle_build.py b/tests/test_hub/test_recycle_build.py
new file mode 100644
index 00000000..24d61bb5
--- /dev/null
+++ b/tests/test_hub/test_recycle_build.py
@@ -0,0 +1,147 @@
+import unittest
+import mock
+
+import koji
+import kojihub
+
+QP = kojihub.QueryProcessor
+UP = kojihub.UpdateProcessor
+
+class TestRecycleBuild():
+    # NOT a subclass of unittest.TestCase so that we can use generator
+    # methods
+
+    def setUp(self):
+        self.QueryProcessor = mock.patch('kojihub.QueryProcessor').start()
+        self.UpdateProcessor = mock.patch('kojihub.UpdateProcessor',
+                                          side_effect=self.getUpdate).start()
+        self._dml = mock.patch('kojihub._dml').start()
+        self.run_callbacks = mock.patch('koji.plugin.run_callbacks').start()
+        self.rmtree = mock.patch('shutil.rmtree').start()
+        self.exists = mock.patch('os.path.exists').start()
+        self.updates = []
+
+    def tearDown(self):
+        mock.patch.stopall()
+
+    def getUpdate(self, *args, **kwargs):
+        update = UP(*args, **kwargs)
+        update.execute = mock.MagicMock()
+        self.updates.append(update)
+        return update
+
+    # Basic old and new build infos
+    old = {'id': 2,
+           'state': 0,
+           'task_id': None,
+           'epoch': None,
+           'name': 'GConf2',
+           'nvr': 'GConf2-3.2.6-15.fc23',
+           'package_id': 2,
+           'package_name': 'GConf2',
+           'release': '15.fc23',
+           'version': '3.2.6',
+           'volume_id': 0,
+           'volume_name': 'DEFAULT'}
+    new = {'state': 0,
+           'name': 'GConf2',
+           'version': '3.2.6',
+           'release': '15.fc23',
+           'epoch': None,
+           'nvr': 'GConf2-3.2.6-15.fc23',
+           'completion_time': '2016-09-16',
+           'start_time': '2016-09-16',
+           'owner': 2}
+
+    def test_recycle_building(self):
+        new = self.new.copy()
+        old = self.old.copy()
+        old['state'] = new['state'] = koji.BUILD_STATES['BUILDING']
+        old['task_id'] = new['task_id'] = 137
+        kojihub.recycle_build(old, new)
+        self.UpdateProcessor.assert_not_called()
+        self.QueryProcessor.assert_not_called()
+        self._dml.assert_not_called()
+        self.run_callbacks.assert_not_called()
+
+    def test_recycle_building_bad(self):
+        new = self.new.copy()
+        old = self.old.copy()
+        old['state'] = new['state'] = koji.BUILD_STATES['BUILDING']
+        old['task_id'] = 137
+        new['task_id'] = 200
+        self.run_fail(old, new)
+        self.QueryProcessor.assert_not_called()
+
+    def test_recycle_states_good(self):
+        for state in 'FAILED', 'CANCELED':
+            yield self.check_recycle_states_good, koji.BUILD_STATES[state]
+
+    def check_recycle_states_good(self, state):
+        new = self.new.copy()
+        old = self.old.copy()
+        old['state'] = state
+        new['state'] = koji.BUILD_STATES['BUILDING']
+        old['task_id'] = 99
+        new['task_id'] = 137
+        query = self.QueryProcessor.return_value
+        query.execute.return_value = []
+        self.run_pass(old, new)
+
+    def run_pass(self, old, new):
+        kojihub.recycle_build(old, new)
+        self.UpdateProcessor.assert_called_once()
+        update = self.updates[0]
+        assert update.table == 'build'
+        for key in ['state', 'task_id', 'owner', 'start_time',
+                    'completion_time', 'epoch']:
+            assert update.data[key] == new[key]
+        assert update.rawdata == {'create_event': 'get_event()'}
+        assert update.clauses == ['id=%(id)s']
+        assert update.values['id'] == old['id']
+
+    def run_fail(self, old, new):
+        try:
+            kojihub.recycle_build(old, new)
+        except koji.GenericError:
+            pass
+        else:
+            raise Exception("expected koji.GenericError")
+        self.UpdateProcessor.assert_not_called()
+        self._dml.assert_not_called()
+        self.run_callbacks.assert_not_called()
+
+    def test_recycle_states_bad(self):
+        for state in 'BUILDING', 'COMPLETE', 'DELETED':
+            yield self.check_recycle_states_bad, koji.BUILD_STATES[state]
+
+    def check_recycle_states_bad(self, state):
+        new = self.new.copy()
+        old = self.old.copy()
+        old['state'] = state
+        new['state'] = koji.BUILD_STATES['BUILDING']
+        old['task_id'] = 99
+        new['task_id'] = 137
+        self.run_fail(old, new)
+        self.QueryProcessor.assert_not_called()
+
+    def test_recycle_query_bad(self):
+        vlists = [
+            [[], [], True],
+            [True, [], []],
+            [[], True, []],
+        ]
+        for values in vlists:
+            yield self.check_recycle_query_bad, values
+
+    def check_recycle_query_bad(self, values):
+        new = self.new.copy()
+        old = self.old.copy()
+        old['state'] = koji.BUILD_STATES['FAILED']
+        new['state'] = koji.BUILD_STATES['BUILDING']
+        old['task_id'] = 99
+        new['task_id'] = 137
+        query = self.QueryProcessor.return_value
+        query.execute.side_effect = values
+        self.run_fail(old, new)
diff --git a/www/kojiweb/buildinfo.chtml b/www/kojiweb/buildinfo.chtml
index 3936055c..40a34c3f 100644
--- a/www/kojiweb/buildinfo.chtml
+++ b/www/kojiweb/buildinfo.chtml
@@ -29,15 +29,15 @@
   Source$build['source']
   #end if
-  #if $mavenbuild
+  #if 'maven' in $typeinfo
-  Maven groupId$mavenbuild.group_id
+  Maven groupId$typeinfo.maven.group_id
-  Maven artifactId$mavenbuild.artifact_id
+  Maven artifactId$typeinfo.maven.artifact_id
-  Maven version$mavenbuild.version
+  Maven version$typeinfo.maven.version
   #end if
   #if $summary
@@ -159,44 +159,50 @@
   #end if
-  #if $archives
+  #for btype in $archiveIndex
+  #set $archivesByExt = $archiveIndex[btype]
+  #if not $archivesByExt
+  #continue
+  #end if
-  Archives
+  $btype.capitalize() Archives
-
-  #set $exts = $archivesByExt.keys()
-  #for ext in $exts
-
-
-
+
+  #end for
+  #end for
+  $ext
+
+  #set $exts = $archivesByExt.keys()
+  #for ext in $exts
+
+
+
-
-  #for $archive in $archivesByExt[$ext]
-
-
+
+  #for $archive in $archivesByExt[$ext]
+
+
-
-  #end for
-  #end for
-  $ext
   #if $task and $ext == $exts[0]
-  #if $mavenbuild
+  #if $btype == 'maven'
   (build logs)
-  #elif $winbuild
+  #elif $btype == 'win'
   (build logs)
-  #elif $imagebuild
+  #elif $btype == 'image'
   (build logs)
-  #end if
+  #else
+  (build logs)
+  #end if
   #end if
-
-
+
+  #if $archive.metadata_only
+  $archive.display (info)
+  #else
+  $archive.display (info) (download)
+  #end if
-
+
-  #end if
+  #end for

  #if $changelog
  Changelog
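The rewritten template iterates $archiveIndex, a two-level mapping built by buildinfo() in the index.py hunk further below: btype name, then file extension, then the decorated archive dicts. An illustrative (made-up) value showing the shape the template assumes:

    # illustration of the assumed archiveIndex shape, not data from the patch
    archiveIndex = {
        'maven': {
            'jar': [{'filename': 'foo-1.0.jar', 'display': 'foo-1.0.jar',
                     'dl_url': 'https://kojifiles.example.com/.../foo-1.0.jar',
                     'metadata_only': False}],
        },
        'image': {
            'qcow2': [{'filename': 'disk.qcow2', 'display': 'disk.qcow2',
                       'dl_url': 'https://kojifiles.example.com/.../disk.qcow2',
                       'metadata_only': False}],
        },
    }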
diff --git a/www/kojiweb/builds.chtml b/www/kojiweb/builds.chtml
index 337c466a..843955de 100644
--- a/www/kojiweb/builds.chtml
+++ b/www/kojiweb/builds.chtml
@@ -5,7 +5,7 @@
 #include "includes/header.chtml"
-  #if $latest then 'Latest ' else ''##if $state != None then $util.stateName($state).capitalize() + ' ' else ''##if $type == 'maven' then 'Maven ' else ''#Builds#if $package then ' of %s' % ($package.id, $package.name) else ''##if $prefix then ' starting with "%s"' % $prefix else ''##if $user then ' by %s' % ($user.id, $user.name) else ''##if $tag then ' in tag %s' % ($tag.id, $tag.name) else ''#
+  #if $latest then 'Latest ' else ''##if $state != None then $util.stateName($state).capitalize() + ' ' else ''##if $type then $type.capitalize() + ' ' else ''#Builds#if $package then ' of %s' % ($package.id, $package.name) else ''##if $prefix then ' starting with "%s"' % $prefix else ''##if $user then ' by %s' % ($user.id, $user.name) else ''##if $tag then ' in tag %s' % ($tag.id, $tag.name) else ''#
@@ -42,25 +42,18 @@
   #end for
-  #if $tag or $mavenEnabled or $winEnabled
-  #if $mavenEnabled or $winEnabled
-
-
-  #end if
   #if $tag
   #end if
-  #end if
+  Type:
+  Inherited:
@@ -72,7 +65,6 @@
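With $mavenEnabled/$winEnabled gone, the Type filter is driven by values['btypes'], which index.py below fills from server.listBTypes(). A hypothetical helper showing how the select options could be rendered from that list (not part of the patch; name and markup are assumptions):

    # hypothetical option rendering for the Type filter; 'btypes' is the
    # sorted name list that index.py passes to the template
    def type_options(btypes, selected):
        opts = ['<option value="all">all</option>']
        for name in btypes:
            sel = ' selected="selected"' if name == selected else ''
            opts.append('<option value="%s"%s>%s</option>' % (name, sel, name))
        return '\n'.join(opts)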
diff --git a/www/kojiweb/index.py b/www/kojiweb/index.py
index 420c606b..63c6be69 100644
--- a/www/kojiweb/index.py
+++ b/www/kojiweb/index.py
@@ -1088,6 +1088,8 @@ def externalrepoinfo(environ, extrepoID):
 def buildinfo(environ, buildID):
     values = _initValues(environ, 'Build Info', 'builds')
     server = _getServer(environ)
+    topurl = environ['koji.options']['KojiFilesURL']
+    pathinfo = koji.PathInfo(topdir=topurl)

     buildID = int(buildID)
@@ -1099,32 +1101,26 @@ def buildinfo(environ, buildID):
     tags.sort(_sortbyname)
     rpms = server.listBuildRPMs(build['id'])
     rpms.sort(_sortbyname)
-    mavenbuild = server.getMavenBuild(buildID)
-    winbuild = server.getWinBuild(buildID)
-    imagebuild = server.getImageBuild(buildID)
-    if mavenbuild:
-        archivetype = 'maven'
-    elif winbuild:
-        archivetype = 'win'
-    elif imagebuild:
-        archivetype = 'image'
-    else:
-        archivetype = None
-    archives = server.listArchives(build['id'], type=archivetype, queryOpts={'order': 'filename'})
-    archivesByExt = {}
-    topurl = environ['koji.options']['KojiFilesURL']
-    pathinfo = koji.PathInfo(topdir=topurl)
-    for archive in archives:
-        if mavenbuild:
-            archive['display'] = archive['filename']
-            archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build), pathinfo.mavenfile(archive)])
-        elif winbuild:
-            archive['display'] = pathinfo.winfile(archive)
-            archive['dl_url'] = '/'.join([pathinfo.winbuild(build), pathinfo.winfile(archive)])
-        elif imagebuild:
-            archive['display'] = archive['filename']
-            archive['dl_url'] = '/'.join([pathinfo.imagebuild(build), archive['filename']])
-        archivesByExt.setdefault(os.path.splitext(archive['filename'])[1][1:], []).append(archive)
+    typeinfo = server.getBuildType(buildID)
+    archiveIndex = {}
+    for btype in typeinfo:
+        archives = server.listArchives(build['id'], type=btype, queryOpts={'order': 'filename'})
+        idx = archiveIndex.setdefault(btype, {})
+        for archive in archives:
+            if btype == 'maven':
+                archive['display'] = archive['filename']
+                archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build), pathinfo.mavenfile(archive)])
+            elif btype == 'win':
+                archive['display'] = pathinfo.winfile(archive)
+                archive['dl_url'] = '/'.join([pathinfo.winbuild(build), pathinfo.winfile(archive)])
+            elif btype == 'image':
+                archive['display'] = archive['filename']
+                archive['dl_url'] = '/'.join([pathinfo.imagebuild(build), archive['filename']])
+            else:
+                archive['display'] = archive['filename']
+                archive['dl_url'] = '/'.join([pathinfo.typedir(build, btype), archive['filename']])
+            ext = os.path.splitext(archive['filename'])[1][1:]
+            idx.setdefault(ext, []).append(archive)

     rpmsByArch = {}
     debuginfos = []
@@ -1192,11 +1188,8 @@ def buildinfo(environ, buildID):
     values['tags'] = tags
     values['rpmsByArch'] = rpmsByArch
     values['task'] = task
-    values['mavenbuild'] = mavenbuild
-    values['winbuild'] = winbuild
-    values['imagebuild'] = imagebuild
-    values['archives'] = archives
-    values['archivesByExt'] = archivesByExt
+    values['typeinfo'] = typeinfo
+    values['archiveIndex'] = archiveIndex
     values['noarch_log_dest'] = noarch_log_dest

     if environ['koji.currentUser']:
@@ -1210,7 +1203,7 @@ def buildinfo(environ, buildID):
     values['start_time'] = build.get('start_time') or build['creation_time']
     # the build start time is not accurate for maven and win builds, get it from the
     # task start time instead
-    if mavenbuild or winbuild:
+    if 'maven' in typeinfo or 'win' in typeinfo:
         if task:
             values['start_time'] = task['start_time']
     if build['state'] == koji.BUILD_STATES['BUILDING']:
@@ -1271,13 +1264,17 @@ def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='
     values['prefix'] = prefix
     values['order'] = order
-    if type in ('maven', 'win', 'image'):
+
+    btypes = [b['name'] for b in server.listBTypes()]
+    btypes.sort()
+    if type in btypes:
         pass
     elif type == 'all':
        type = None
     else:
         type = None
     values['type'] = type
+    values['btypes'] = btypes

     if tag:
         inherited = int(inherited)
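The generic else branch in buildinfo() leans on pathinfo.typedir(build, btype) to locate archives for any btype beyond the legacy three. Only typedir's use, not its body, appears in this patch; the layout it implies is roughly this sketch:

    import koji

    # assumed fallback: legacy btypes keep their historical directories,
    # anything else lands under files/<btype>/ inside the build directory
    def typedir(pathinfo, build, btype):
        if btype == 'maven':
            return pathinfo.mavenbuild(build)
        elif btype == 'win':
            return pathinfo.winbuild(build)
        elif btype == 'image':
            return pathinfo.imagebuild(build)
        else:
            return '%s/files/%s' % (pathinfo.build(build), btype)

    # usage mirroring the else branch above:
    #   typedir(koji.PathInfo(topdir=topurl), build, 'some-new-btype')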