From c5db34a8e1f3751b2b6bbb279474d8141d58d47b Mon Sep 17 00:00:00 2001 From: Yuming Zhu Date: Thu, 27 Feb 2020 22:10:11 +0800 Subject: [PATCH] flake8: apply E501 with max-line-length=99 --- .flake8 | 3 +- builder/kojid | 387 ++++++++++++------ builder/mergerepos | 24 +- cli/koji | 6 +- cli/koji_cli/commands.py | 564 ++++++++++++++++---------- cli/koji_cli/lib.py | 32 +- hub/kojihub.py | 609 +++++++++++++++++++---------- hub/kojixmlrpc.py | 36 +- koji/__init__.py | 89 +++-- koji/auth.py | 10 +- koji/daemon.py | 102 +++-- koji/db.py | 3 +- koji/tasks.py | 46 ++- koji/util.py | 6 +- plugins/builder/runroot.py | 39 +- plugins/cli/runroot.py | 9 +- plugins/cli/save_failed_tree.py | 6 +- plugins/hub/save_failed_tree.py | 8 +- tests/test_cli/test_list_tagged.py | 2 +- util/koji-gc | 12 +- util/koji-shadow | 39 +- util/koji-sweep-db | 12 +- util/kojira | 31 +- vm/kojikamid.py | 38 +- vm/kojivmd | 63 +-- www/kojiweb/index.py | 173 +++++--- www/kojiweb/wsgi_publisher.py | 8 +- www/lib/kojiweb/util.py | 28 +- 28 files changed, 1574 insertions(+), 811 deletions(-) diff --git a/.flake8 b/.flake8 index 81c01bc5..932dbb02 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,7 @@ [flake8] -select = I,C,F,E1,E2,E3,E4,E502,E7 +select = I,C,F,E ignore = E266,E731 +max_line_length = 99 exclude = .git, __pycache__, diff --git a/builder/kojid b/builder/kojid index 6f194f74..26d5dcc7 100755 --- a/builder/kojid +++ b/builder/kojid @@ -273,7 +273,8 @@ class BuildRoot(object): for k in ('repoid', 'tag_name'): if hasattr(self, k): opts[k] = getattr(self, k) - for k in ('mockdir', 'topdir', 'topurl', 'topurls', 'packager', 'vendor', 'distribution', 'mockhost', 'yum_proxy', 'rpmbuild_timeout'): + for k in ('mockdir', 'topdir', 'topurl', 'topurls', 'packager', 'vendor', + 'distribution', 'mockhost', 'yum_proxy', 'rpmbuild_timeout'): if hasattr(self.options, k): opts[k] = getattr(self.options, k) opts['buildroot_id'] = self.id @@ -476,12 +477,14 @@ class BuildRoot(object): try: stat_info = os.stat(fpath) if not fd or stat_info.st_ino != inode or stat_info.st_size < size: - # either a file we haven't opened before, or mock replaced a file we had open with - # a new file and is writing to it, or truncated the file we're reading, - # but our fd is pointing to the previous location in the old file + # either a file we haven't opened before, or mock replaced a file we + # had open with a new file and is writing to it, or truncated the file + # we're reading, but our fd is pointing to the previous location in the + # old file if fd: self.logger.info('Rereading %s, inode: %s -> %s, size: %s -> %s' % - (fpath, inode, stat_info.st_ino, size, stat_info.st_size)) + (fpath, inode, stat_info.st_ino, size, + stat_info.st_size)) fd.close() fd = open(fpath, 'rb') logs[fname] = (fd, stat_info.st_ino, stat_info.st_size or size, fpath) @@ -596,8 +599,8 @@ class BuildRoot(object): def build_srpm(self, specfile, sourcedir, source_cmd): self.session.host.setBuildRootState(self.id, 'BUILDING') if source_cmd: - # call the command defined by source_cmd in the chroot so any required files not stored in - # the SCM can be retrieved + # call the command defined by source_cmd in the chroot so any required files not stored + # in the SCM can be retrieved chroot_sourcedir = sourcedir[len(self.rootdir()):] args = ['--no-clean', '--unpriv', '--cwd', chroot_sourcedir, '--chroot'] args.extend(source_cmd) @@ -633,7 +636,8 @@ class BuildRoot(object): self.session.host.updateBuildRootList(self.id, self.getPackageList()) if rv: self.expire() - raise 
koji.BuildError("error building package (arch %s), %s" % (arch, self._mockResult(rv))) + raise koji.BuildError("error building package (arch %s), %s" % + (arch, self._mockResult(rv))) def getPackageList(self): """Return a list of packages from the buildroot @@ -688,7 +692,8 @@ class BuildRoot(object): maven_files = [] for repofile in files: if koji.util.multi_fnmatch(repofile, self.options.maven_repo_ignore) or \ - koji.util.multi_fnmatch(os.path.join(relpath, repofile), self.options.maven_repo_ignore): + koji.util.multi_fnmatch(os.path.join(relpath, repofile), + self.options.maven_repo_ignore): continue if relpath == '' and repofile in ['scm-sources.zip', 'patches.zip']: # special-case the archives of the sources and patches, since we drop them in @@ -699,8 +704,10 @@ class BuildRoot(object): if maven_files: path_comps = relpath.split('/') if len(path_comps) < 3: - raise koji.BuildrootError('files found in unexpected path in local Maven repo, directory: %s, files: %s' % - (relpath, ', '.join([f['filename'] for f in maven_files]))) + raise koji.BuildrootError('files found in unexpected path in local Maven repo,' + ' directory: %s, files: %s' % + (relpath, + ', '.join([f['filename'] for f in maven_files]))) # extract the Maven info from the path within the local repo maven_info = {'version': path_comps[-1], 'artifact_id': path_comps[-2], @@ -712,8 +719,8 @@ class BuildRoot(object): def mavenBuild(self, sourcedir, outputdir, repodir, props=None, profiles=None, options=None, goals=None): self.session.host.setBuildRootState(self.id, 'BUILDING') - cmd = ['--no-clean', '--chroot', '--unpriv', '--cwd', sourcedir[len(self.rootdir()):], '--', - '/usr/bin/mvn', '-C'] + cmd = ['--no-clean', '--chroot', '--unpriv', '--cwd', sourcedir[len(self.rootdir()):], + '--', '/usr/bin/mvn', '-C'] if options: cmd.extend(options) if profiles: @@ -734,13 +741,15 @@ class BuildRoot(object): ignore_unknown = False if rv: ignore_unknown = True - self.session.host.updateMavenBuildRootList(self.id, self.task_id, self.getMavenPackageList(repodir), + self.session.host.updateMavenBuildRootList(self.id, self.task_id, + self.getMavenPackageList(repodir), ignore=self.getMavenPackageList(outputdir), project=True, ignore_unknown=ignore_unknown, extra_deps=self.deps) if rv: self.expire() - raise koji.BuildrootError('error building Maven package, %s' % self._mockResult(rv, logfile='root.log')) + raise koji.BuildrootError('error building Maven package, %s' % + self._mockResult(rv, logfile='root.log')) def markExternalRPMs(self, rpmlist): """Check rpms against pkgorigins and add external repo data to the external ones @@ -793,7 +802,8 @@ class BuildRoot(object): try: repodata = repoMDObject.RepoMD('ourrepo', fo) except BaseException: - raise koji.BuildError("Unable to parse repomd.xml file for %s" % os.path.join(repodir, self.br_arch)) + raise koji.BuildError("Unable to parse repomd.xml file for %s" % + os.path.join(repodir, self.br_arch)) data = repodata.getData('origin') pkgorigins = data.location[1] else: @@ -905,7 +915,8 @@ class ChainBuildTask(BaseTaskHandler): # if there are any nvrs to wait on, do so if nvrs: task_id = self.session.host.subtask(method='waitrepo', - arglist=[target_info['build_tag_name'], None, nvrs], + arglist=[ + target_info['build_tag_name'], None, nvrs], label="wait %i" % n_level, parent=self.id) self.wait(task_id, all=True, failany=True) @@ -1078,7 +1089,9 @@ class BuildTask(BaseTaskHandler): def getSRPMFromSRPM(self, src, build_tag, repo_id): # rebuild srpm in mock, so it gets correct disttag, rpm 
version, etc. task_id = self.session.host.subtask(method='rebuildSRPM', - arglist=[src, build_tag, {'repo_id': repo_id, 'scratch': self.opts.get('scratch')}], + arglist=[src, build_tag, { + 'repo_id': repo_id, + 'scratch': self.opts.get('scratch')}], label='srpm', parent=self.id) # wait for subtask to finish @@ -1093,7 +1106,9 @@ class BuildTask(BaseTaskHandler): def getSRPMFromSCM(self, url, build_tag, repo_id): # TODO - allow different ways to get the srpm task_id = self.session.host.subtask(method='buildSRPMFromSCM', - arglist=[url, build_tag, {'repo_id': repo_id, 'scratch': self.opts.get('scratch')}], + arglist=[url, build_tag, { + 'repo_id': repo_id, + 'scratch': self.opts.get('scratch')}], label='srpm', parent=self.id) # wait for subtask to finish @@ -1211,7 +1226,8 @@ class BuildTask(BaseTaskHandler): for arch in archlist: taskarch = self.choose_taskarch(arch, srpm, build_tag) subtasks[arch] = self.session.host.subtask(method='buildArch', - arglist=[srpm, build_tag, arch, keep_srpm, {'repo_id': repo_id}], + arglist=[srpm, build_tag, arch, + keep_srpm, {'repo_id': repo_id}], label=arch, parent=self.id, arch=taskarch) @@ -1430,7 +1446,8 @@ class BuildArchTask(BaseBuildTask): if len(srpm_files) == 0: raise koji.BuildError("no srpm files found for task %i" % self.id) if len(srpm_files) > 1: - raise koji.BuildError("multiple srpm files found for task %i: %s" % (self.id, srpm_files)) + raise koji.BuildError("multiple srpm files found for task %i: %s" % + (self.id, srpm_files)) # Run sanity checks. Any failures will throw a BuildError self.srpm_sanity_checks("%s/%s" % (resultdir, srpm_files[0])) @@ -1519,12 +1536,14 @@ class MavenTask(MultiPlatformTask): rpm_results = None spec_url = self.opts.get('specfile') if spec_url: - rpm_results = self.buildWrapperRPM(spec_url, self.build_task_id, target_info, build_info, repo_id) + rpm_results = self.buildWrapperRPM( + spec_url, self.build_task_id, target_info, build_info, repo_id) if self.opts.get('scratch'): self.session.host.moveMavenBuildToScratch(self.id, maven_results, rpm_results) else: - self.session.host.completeMavenBuild(self.id, self.build_id, maven_results, rpm_results) + self.session.host.completeMavenBuild( + self.id, self.build_id, maven_results, rpm_results) except (SystemExit, ServerExit, KeyboardInterrupt): # we do not trap these raise @@ -1537,7 +1556,8 @@ class MavenTask(MultiPlatformTask): if not self.opts.get('scratch') and not self.opts.get('skip_tag'): tag_task_id = self.session.host.subtask(method='tagBuild', - arglist=[dest_tag['id'], self.build_id, False, None, True], + arglist=[dest_tag['id'], + self.build_id, False, None, True], label='tag', parent=self.id, arch='noarch') @@ -1590,7 +1610,8 @@ class BuildMavenTask(BaseBuildTask): repo_info = self.session.repoInfo(repo_id, strict=True) event_id = repo_info['create_event'] - br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id)) + br_arch = self.find_arch('noarch', self.session.host.getHost( + ), self.session.getBuildConfig(build_tag['id'], event=event_id)) maven_opts = opts.get('jvm_options') if not maven_opts: maven_opts = [] @@ -1598,7 +1619,8 @@ class BuildMavenTask(BaseBuildTask): if opt.startswith('-Xmx'): break else: - # Give the JVM 2G to work with by default, if the build isn't specifying its own max. memory + # Give the JVM 2G to work with by default, if the build isn't specifying + # its own max. 
memory maven_opts.append('-Xmx2048m') buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, install_group='maven-build', setup_dns=True, repo_id=repo_id, @@ -1615,7 +1637,8 @@ class BuildMavenTask(BaseBuildTask): self.session.host.updateBuildRootList(buildroot.id, buildroot.getPackageList()) if rv: buildroot.expire() - raise koji.BuildrootError('error installing packages, %s' % buildroot._mockResult(rv, logfile='mock_output.log')) + raise koji.BuildrootError('error installing packages, %s' % + buildroot._mockResult(rv, logfile='mock_output.log')) # existence of symlink should be sufficient if not os.path.lexists('%s/usr/bin/mvn' % buildroot.rootdir()): @@ -1635,7 +1658,8 @@ class BuildMavenTask(BaseBuildTask): logfile = self.workdir + '/checkout.log' uploadpath = self.getUploadDir() - self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch')) + self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), + build_tag=build_tag, scratch=opts.get('scratch')) # Check out sources from the SCM sourcedir = scm.checkout(scmdir, self.session, uploadpath, logfile) self.run_callbacks("postSCMCheckout", @@ -1652,7 +1676,8 @@ class BuildMavenTask(BaseBuildTask): patchlog = self.workdir + '/patches.log' patch_scm = SCM(self.opts.get('patches')) patch_scm.assert_allowed(self.options.allowed_scms) - self.run_callbacks('preSCMCheckout', scminfo=patch_scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch')) + self.run_callbacks('preSCMCheckout', scminfo=patch_scm.get_info(), + build_tag=build_tag, scratch=opts.get('scratch')) # never try to check out a common/ dir when checking out patches patch_scm.use_common = False patchcheckoutdir = patch_scm.checkout(patchdir, self.session, uploadpath, patchlog) @@ -1673,10 +1698,14 @@ class BuildMavenTask(BaseBuildTask): raise koji.BuildError('no patches found at %s' % self.opts.get('patches')) patches.sort() for patch in patches: - cmd = ['/usr/bin/patch', '--verbose', '--no-backup-if-mismatch', '-d', sourcedir, '-p1', '-i', os.path.join(patchcheckoutdir, patch)] - ret = log_output(self.session, cmd[0], cmd, patchlog, uploadpath, logerror=1, append=1) + cmd = ['/usr/bin/patch', '--verbose', '--no-backup-if-mismatch', '-d', + sourcedir, '-p1', '-i', os.path.join(patchcheckoutdir, patch)] + ret = log_output(self.session, cmd[0], cmd, + patchlog, uploadpath, logerror=1, append=1) if ret: - raise koji.BuildError('error applying patches from %s, see patches.log for details' % self.opts.get('patches')) + raise koji.BuildError( + 'error applying patches from %s, see patches.log for details' % + self.opts.get('patches')) # Set ownership of the entire source tree to the mock user uid = pwd.getpwnam(self.options.mockuser)[2] @@ -1834,7 +1863,8 @@ class WrapperRPMTask(BaseBuildTask): artifact_name = os.path.basename(artifact_path) base, ext = os.path.splitext(artifact_name) if ext == '.log': - # Exclude log files for consistency with the output of listArchives() used below + # Exclude log files for consistency with the output of listArchives() used + # below continue relpath = os.path.join(self.pathinfo.task(task['id']), artifact_path)[1:] for volume in artifact_data[artifact_path]: @@ -1847,10 +1877,11 @@ class WrapperRPMTask(BaseBuildTask): # called as a top-level task to create wrapper rpms for an existing build # verify that the build is complete if not build['state'] == koji.BUILD_STATES['COMPLETE']: - raise koji.BuildError('cannot call wrapperRPM on a build that did not complete 
successfully') + raise koji.BuildError( + 'cannot call wrapperRPM on a build that did not complete successfully') - # get the list of files from the build instead of the task, because the task output directory may - # have already been cleaned up + # get the list of files from the build instead of the task, + # because the task output directory may have already been cleaned up if maven_info: build_artifacts = self.session.listArchives(buildID=build['id'], type='maven') elif win_info: @@ -1888,7 +1919,8 @@ class WrapperRPMTask(BaseBuildTask): assert False # pragma: no cover if not artifacts: - raise koji.BuildError('no output found for %s' % (task and koji.taskLabel(task) or koji.buildLabel(build))) + raise koji.BuildError('no output found for %s' % ( + task and koji.taskLabel(task) or koji.buildLabel(build))) values['artifacts'] = artifacts values['all_artifacts'] = all_artifacts @@ -1932,9 +1964,11 @@ class WrapperRPMTask(BaseBuildTask): repo_info = self.session.repoInfo(repo_id, strict=True) event_id = repo_info['create_event'] build_tag = self.session.getTag(build_target['build_tag'], strict=True) - br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id)) + br_arch = self.find_arch('noarch', self.session.host.getHost( + ), self.session.getBuildConfig(build_tag['id'], event=event_id)) - buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, install_group='wrapper-rpm-build', repo_id=repo_id) + buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, + install_group='wrapper-rpm-build', repo_id=repo_id) buildroot.workdir = self.workdir self.logger.debug("Initializing buildroot") buildroot.init() @@ -1942,7 +1976,8 @@ class WrapperRPMTask(BaseBuildTask): logfile = os.path.join(self.workdir, 'checkout.log') scmdir = buildroot.tmpdir() + '/scmroot' koji.ensuredir(scmdir) - self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch')) + self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), + build_tag=build_tag, scratch=opts.get('scratch')) specdir = scm.checkout(scmdir, self.session, self.getUploadDir(), logfile) self.run_callbacks("postSCMCheckout", scminfo=scm.get_info(), @@ -2002,7 +2037,8 @@ class WrapperRPMTask(BaseBuildTask): if len(srpms) == 0: raise koji.BuildError('no srpms found in %s' % buildroot.resultdir()) elif len(srpms) > 1: - raise koji.BuildError('multiple srpms found in %s: %s' % (buildroot.resultdir(), ', '.join(srpms))) + raise koji.BuildError('multiple srpms found in %s: %s' % + (buildroot.resultdir(), ', '.join(srpms))) else: srpm = srpms[0] @@ -2092,13 +2128,15 @@ class WrapperRPMTask(BaseBuildTask): relrpms = [uploaddir + '/' + r for r in rpms] rellogs = [uploaddir + '/' + l for l in logs] if opts.get('scratch'): - self.session.host.moveBuildToScratch(self.id, relsrpm, relrpms, {'noarch': rellogs}) + self.session.host.moveBuildToScratch( + self.id, relsrpm, relrpms, {'noarch': rellogs}) else: if opts.get('create_build'): brmap = dict.fromkeys([relsrpm] + relrpms, buildroot.id) try: self.session.host.completeBuild(self.id, self.new_build_id, - relsrpm, relrpms, brmap, {'noarch': rellogs}) + relsrpm, relrpms, brmap, + {'noarch': rellogs}) except (SystemExit, ServerExit, KeyboardInterrupt): raise except BaseException: @@ -2107,8 +2145,10 @@ class WrapperRPMTask(BaseBuildTask): if not opts.get('skip_tag'): tag_task_id = self.session.host.subtask(method='tagBuild', 
arglist=[build_target['dest_tag'], - self.new_build_id, False, None, True], - label='tag', parent=self.id, arch='noarch') + self.new_build_id, False, + None, True], + label='tag', parent=self.id, + arch='noarch') self.wait(tag_task_id) else: self.session.host.importWrapperRPMs(self.id, build['id'], results) @@ -2173,14 +2213,16 @@ class ChainMavenTask(MultiPlatformTask): if not opts.get('force'): # check for a duplicate build (a build performed with the # same scmurl and options) - dup_build = self.get_duplicate_build(dest_tag['name'], package, params, task_opts) + dup_build = self.get_duplicate_build( + dest_tag['name'], package, params, task_opts) # if we find one, mark the package as built and remove it from todo if dup_build: self.done[package] = dup_build['nvr'] for deps in todo.values(): deps.discard(package) del todo[package] - self.results.append('%s previously built from %s' % (dup_build['nvr'], task_url)) + self.results.append('%s previously built from %s' % + (dup_build['nvr'], task_url)) continue task_opts.update(dslice(opts, ['skip_tag', 'scratch'], strict=False)) @@ -2235,7 +2277,8 @@ class ChainMavenTask(MultiPlatformTask): self.done[package] = child['id'] break else: - raise koji.BuildError('could not find buildMaven subtask of %s' % task_id) + raise koji.BuildError( + 'could not find buildMaven subtask of %s' % task_id) self.results.append('%s built from %s by task %s' % (package, task_url, task_id)) else: @@ -2350,10 +2393,13 @@ class TagBuildTask(BaseTaskHandler): # XXX - add more post tests self.session.host.tagBuild(self.id, tag_id, build_id, force=force, fromtag=fromtag) - self.session.host.tagNotification(True, tag_id, fromtag, build_id, user_id, ignore_success) + self.session.host.tagNotification( + True, tag_id, fromtag, build_id, user_id, ignore_success) except Exception as e: exctype, value = sys.exc_info()[:2] - self.session.host.tagNotification(False, tag_id, fromtag, build_id, user_id, ignore_success, "%s: %s" % (exctype, value)) + self.session.host.tagNotification( + False, tag_id, fromtag, build_id, user_id, ignore_success, "%s: %s" % + (exctype, value)) raise e @@ -2367,11 +2413,14 @@ class BuildImageTask(MultiPlatformTask): if not opts.get('skip_tag') and not opts.get('scratch'): # Make sure package is on the list for this tag if pkg_cfg is None: - raise koji.BuildError("package (image) %s not in list for tag %s" % (name, target_info['dest_tag_name'])) + raise koji.BuildError("package (image) %s not in list for tag %s" % + (name, target_info['dest_tag_name'])) elif pkg_cfg['blocked']: - raise koji.BuildError("package (image) %s is blocked for tag %s" % (name, target_info['dest_tag_name'])) + raise koji.BuildError("package (image) %s is blocked for tag %s" % + (name, target_info['dest_tag_name'])) return self.session.host.initImageBuild(self.id, - dict(name=name, version=version, release=release, epoch=0)) + dict(name=name, version=version, release=release, + epoch=0)) def getRelease(self, name, ver): """return the next available release number for an N-V""" @@ -2399,7 +2448,9 @@ class BuildBaseImageTask(BuildImageTask): opts = {} if not ozif_enabled: - self.logger.error("ImageFactory features require the following dependencies: pykickstart, imagefactory, oz and possibly python-hashlib") + self.logger.error( + "ImageFactory features require the following dependencies: pykickstart, " + "imagefactory, oz and possibly python-hashlib") raise koji.ApplianceError('ImageFactory functions not available') # build image(s) @@ -2430,7 +2481,8 @@ class 
BuildBaseImageTask(BuildImageTask): canfail.append(subtasks[arch]) self.logger.debug("Got image subtasks: %r" % (subtasks)) self.logger.debug("Waiting on image subtasks (%s can fail)..." % canfail) - results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail) + results = self.wait(to_list(subtasks.values()), all=True, + failany=True, canfail=canfail) # if everything failed, fail even if all subtasks are in canfail self.logger.debug('subtask results: %r', results) @@ -2498,7 +2550,8 @@ class BuildBaseImageTask(BuildImageTask): # tag it if not opts.get('scratch') and not opts.get('skip_tag'): tag_task_id = self.session.host.subtask(method='tagBuild', - arglist=[target_info['dest_tag'], bld_info['id'], False, None, True], + arglist=[target_info['dest_tag'], + bld_info['id'], False, None, True], label='tag', parent=self.id, arch='noarch') self.wait(tag_task_id) @@ -2506,7 +2559,8 @@ class BuildBaseImageTask(BuildImageTask): report = '' if opts.get('scratch'): respath = ', '.join( - [os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()]) + [os.path.join(koji.pathinfo.work(), + koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()]) report += 'Scratch ' else: respath = koji.pathinfo.imagebuild(bld_info) @@ -2534,7 +2588,9 @@ class BuildApplianceTask(BuildImageTask): opts = {} if not image_enabled: - self.logger.error("Appliance features require the following dependencies: pykickstart, and possibly python-hashlib") + self.logger.error( + "Appliance features require the following dependencies: " + "pykickstart, and possibly python-hashlib") raise koji.ApplianceError('Appliance functions not available') # build image @@ -2547,9 +2603,11 @@ class BuildApplianceTask(BuildImageTask): bld_info = self.initImageBuild(name, version, release, target_info, opts) create_task_id = self.session.host.subtask(method='createAppliance', - arglist=[name, version, release, arch, target_info, build_tag, + arglist=[name, version, release, arch, + target_info, build_tag, repo_info, ksfile, opts], - label='appliance', parent=self.id, arch=arch) + label='appliance', parent=self.id, + arch=arch) results = self.wait(create_task_id) self.logger.info('image build task (%s) completed' % create_task_id) self.logger.info('results: %s' % results) @@ -2583,7 +2641,8 @@ class BuildApplianceTask(BuildImageTask): # tag it if not opts.get('scratch') and not opts.get('skip_tag'): tag_task_id = self.session.host.subtask(method='tagBuild', - arglist=[target_info['dest_tag'], bld_info['id'], False, None, True], + arglist=[target_info['dest_tag'], + bld_info['id'], False, None, True], label='tag', parent=self.id, arch='noarch') self.wait(tag_task_id) @@ -2632,7 +2691,8 @@ class BuildLiveCDTask(BuildImageTask): bld_info = self.initImageBuild(name, version, release, target_info, opts) create_task_id = self.session.host.subtask(method='createLiveCD', - arglist=[name, version, release, arch, target_info, build_tag, + arglist=[name, version, release, arch, + target_info, build_tag, repo_info, ksfile, opts], label='livecd', parent=self.id, arch=arch) results = self.wait(create_task_id) @@ -2668,7 +2728,8 @@ class BuildLiveCDTask(BuildImageTask): # tag it if necessary if not opts.get('scratch') and not opts.get('skip_tag'): tag_task_id = self.session.host.subtask(method='tagBuild', - arglist=[target_info['dest_tag'], bld_info['id'], False, None, True], + arglist=[target_info['dest_tag'], + bld_info['id'], False, None, True], label='tag', parent=self.id, arch='noarch') 
self.wait(tag_task_id) @@ -2726,8 +2787,8 @@ class BuildLiveMediaTask(BuildImageTask): canfail = [] for arch in arches: subtasks[arch] = self.subtask('createLiveMedia', - [name, version, release, arch, target_info, build_tag, - repo_info, ksfile, opts], + [name, version, release, arch, target_info, + build_tag, repo_info, ksfile, opts], label='livemedia %s' % arch, arch=arch) if arch in opts.get('optional_arches', []): canfail.append(subtasks[arch]) @@ -2735,7 +2796,8 @@ class BuildLiveMediaTask(BuildImageTask): self.logger.debug("Got image subtasks: %r", subtasks) self.logger.debug("Waiting on livemedia subtasks...") - results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail) + results = self.wait(to_list(subtasks.values()), all=True, + failany=True, canfail=canfail) # if everything failed, fail even if all subtasks are in canfail self.logger.debug('subtask results: %r', results) @@ -2805,14 +2867,16 @@ class BuildLiveMediaTask(BuildImageTask): # tag it if necessary if not opts.get('scratch') and not opts.get('skip_tag'): tag_task_id = self.session.host.subtask(method='tagBuild', - arglist=[target_info['dest_tag'], bld_info['id'], False, None, True], + arglist=[target_info['dest_tag'], + bld_info['id'], False, None, True], label='tag', parent=self.id, arch='noarch') self.wait(tag_task_id) # report the results if opts.get('scratch'): respath = ', '.join( - [os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()]) + [os.path.join(koji.pathinfo.work(), + koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()]) report = 'Scratch ' else: respath = koji.pathinfo.imagebuild(bld_info) @@ -2881,7 +2945,8 @@ class ImageTask(BaseTaskHandler): scm = SCM(self.opts['ksurl']) scm.assert_allowed(self.options.allowed_scms) logfile = os.path.join(self.workdir, 'checkout.log') - self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch')) + self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), + build_tag=build_tag, scratch=self.opts.get('scratch')) scmsrcdir = scm.checkout(scmdir, self.session, self.getUploadDir(), logfile) self.run_callbacks("postSCMCheckout", scminfo=scm.get_info(), @@ -2953,7 +3018,8 @@ class ImageTask(BaseTaskHandler): user_repos = user_repos.split(',') index = 0 for user_repo in user_repos: - self.ks.handler.repo.repoList.append(repo_class(baseurl=user_repo, name='koji-override-%i' % index)) + self.ks.handler.repo.repoList.append(repo_class( + baseurl=user_repo, name='koji-override-%i' % index)) index += 1 else: path_info = koji.PathInfo(topdir=self.options.topurl) @@ -2961,7 +3027,9 @@ class ImageTask(BaseTaskHandler): target_info['build_tag_name']) baseurl = '%s/%s' % (repopath, arch) self.logger.debug('BASEURL: %s' % baseurl) - self.ks.handler.repo.repoList.append(repo_class(baseurl=baseurl, name='koji-%s-%i' % (target_info['build_tag_name'], repo_info['id']))) + self.ks.handler.repo.repoList.append(repo_class( + baseurl=baseurl, name='koji-%s-%i' % (target_info['build_tag_name'], + repo_info['id']))) # inject url if provided if opts.get('install_tree_url'): self.ks.handler.url(url=opts['install_tree_url']) @@ -3065,7 +3133,8 @@ class ApplianceTask(ImageTask): return part.disk raise koji.ApplianceError('kickstart lacks a "/" mountpoint') - def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None): + def handler(self, name, version, release, arch, target_info, + build_tag, repo_info, ksfile, 
opts=None): if opts is None: opts = {} @@ -3099,7 +3168,8 @@ class ApplianceTask(ImageTask): self.uploadFile(os.path.join(broot.rootdir(), app_log[1:])) if rv: raise koji.ApplianceError( - "Could not create appliance: %s" % parseStatus(rv, 'appliance-creator') + "; see root.log or appliance.log for more information") + "Could not create appliance: %s" % parseStatus(rv, 'appliance-creator') + + "; see root.log or appliance.log for more information") # Find the results results = [] @@ -3221,7 +3291,8 @@ class LiveCDTask(ImageTask): return manifest - def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None): + def handler(self, name, version, release, arch, target_info, + build_tag, repo_info, ksfile, opts=None): if opts is None: opts = {} @@ -3251,7 +3322,8 @@ class LiveCDTask(ImageTask): self.uploadFile(os.path.join(broot.rootdir(), livecd_log[1:])) if rv: raise koji.LiveCDError( - 'Could not create LiveCD: %s' % parseStatus(rv, 'livecd-creator') + '; see root.log or livecd.log for more information') + 'Could not create LiveCD: %s' % parseStatus(rv, 'livecd-creator') + + '; see root.log or livecd.log for more information') # Find the resultant iso # The cwd of the livecd-creator process is tmpdir() in the chroot, so @@ -3263,7 +3335,8 @@ class LiveCDTask(ImageTask): if not isofile: isofile = afile else: - raise koji.LiveCDError('multiple .iso files found: %s and %s' % (isofile, afile)) + raise koji.LiveCDError( + 'multiple .iso files found: %s and %s' % (isofile, afile)) if not isofile: raise koji.LiveCDError('could not find iso file in chroot') isosrc = os.path.join(broot.tmpdir(), isofile) @@ -3407,7 +3480,8 @@ class LiveMediaTask(ImageTask): return manifest - def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None): + def handler(self, name, version, release, arch, target_info, + build_tag, repo_info, ksfile, opts=None): if opts is None: opts = {} @@ -3485,7 +3559,8 @@ class LiveMediaTask(ImageTask): if rv: raise koji.LiveMediaError( - 'Could not create LiveMedia: %s' % parseStatus(rv, 'livemedia-creator') + '; see root.log or livemedia-out.log for more information') + 'Could not create LiveMedia: %s' % parseStatus(rv, 'livemedia-creator') + + '; see root.log or livemedia-out.log for more information') # Find the resultant iso # The cwd of the livemedia-creator process is broot.tmpdir() in the chroot, so @@ -3498,7 +3573,8 @@ class LiveMediaTask(ImageTask): if not isofile: isofile = afile else: - raise koji.LiveMediaError('multiple .iso files found: %s and %s' % (isofile, afile)) + raise koji.LiveMediaError( + 'multiple .iso files found: %s and %s' % (isofile, afile)) if not isofile: raise koji.LiveMediaError('could not find iso file in chroot') isosrc = os.path.join(rootresultsdir, isofile) @@ -3561,7 +3637,8 @@ class OzImageTask(BaseTaskHandler): scm = SCM(self.opts['ksurl']) scm.assert_allowed(self.options.allowed_scms) logfile = os.path.join(self.workdir, 'checkout-%s.log' % self.arch) - self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch')) + self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), + build_tag=build_tag, scratch=self.opts.get('scratch')) scmsrcdir = scm.checkout(self.workdir, self.session, self.getUploadDir(), logfile) self.run_callbacks("postSCMCheckout", @@ -3732,10 +3809,12 @@ class OzImageTask(BaseTaskHandler): %s """ % (name, distname, distver, arch, inst_tree) - template += """ - rpm -qa --qf 
'%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}\n' - - """ + template += ("\n" + " rpm -qa --qf" + " '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5}," "%{BUILDTIME}\\n'\n" + " \n" + " ") # TODO: intelligently guess the size based on the kickstart file template += """ %s OS %sG -""" % (name, self.opts.get('disk_size')) +""" % (name, self.opts.get('disk_size')) # noqa: E501 return template def parseDistro(self, distro): @@ -3831,7 +3910,9 @@ class BaseImageTask(OzImageTask): Some image formats require others to be processed first, which is why we have to do this. raw files in particular may not be kept. """ - supported = ('raw', 'raw-xz', 'liveimg-squashfs', 'vmdk', 'qcow', 'qcow2', 'vdi', 'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt', 'vagrant-vmware-fusion', 'vagrant-hyperv', 'vpc', "tar-gz") + supported = ('raw', 'raw-xz', 'liveimg-squashfs', 'vmdk', 'qcow', 'qcow2', 'vdi', + 'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt', + 'vagrant-vmware-fusion', 'vagrant-hyperv', 'vpc', "tar-gz") for f in formats: if f not in supported: raise koji.ApplianceError('Invalid format: %s' % f) @@ -3945,7 +4026,8 @@ class BaseImageTask(OzImageTask): self.tlog.removeHandler(self.fhandler) self.uploadFile(self.ozlog) if 'No disk activity' in details: - details = 'Automated install failed or prompted for input. See the screenshot in the task results for more information.' + details = 'Automated install failed or prompted for input. ' \ + 'See the screenshot in the task results for more information.' raise koji.ApplianceError('Image status is %s: %s' % (status, details)) @@ -4108,8 +4190,8 @@ class BaseImageTask(OzImageTask): if format == 'vagrant-vmware-fusion': format = 'vsphere-ova' img_opts['vsphere_ova_format'] = 'vagrant-vmware-fusion' - # The initial disk image transform for VMWare Fusion/Workstation requires a "standard" VMDK - # not the stream oriented format used for VirtualBox or regular VMWare OVAs + # The initial disk image transform for VMWare Fusion/Workstation requires a "standard" + # VMDK, not the stream oriented format used for VirtualBox or regular VMWare OVAs img_opts['vsphere_vmdk_format'] = 'standard' fixed_params = ['vsphere_ova_format', 'vsphere_vmdk_format'] if format == 'vagrant-hyperv': format = 'hyperv-ova' img_opts['hyperv_ova_format'] = 'hyperv-vagrant' fixed_params = ['hyperv_ova_format'] targ = self._do_target_image(self.base_img.base_image.identifier, - format.replace('-ova', ''), img_opts=img_opts, fixed_params=fixed_params) + format.replace('-ova', ''), img_opts=img_opts, + fixed_params=fixed_params) targ2 = self._do_target_image(targ.target_image.identifier, 'OVA', img_opts=img_opts, fixed_params=fixed_params) return {'image': targ2.target_image.data} @@ -4166,7 +4249,9 @@ class BaseImageTask(OzImageTask): self._mergeFactoryParams(img_opts, fixed_params) self.logger.debug('img_opts_post_merge: %s' % img_opts) target = self.bd.builder_for_target_image(image_type, - image_id=base_id, template=None, parameters=img_opts) + image_id=base_id, + template=None, + parameters=img_opts) target.target_thread.join() self._checkImageState(target) return target @@ -4206,9 +4291,12 @@ class BaseImageTask(OzImageTask): self.getUploadDir(), logerror=1) return {'image': newimg} - def handler(self, name, version, release, arch, target_info, build_tag, repo_info, inst_tree, opts=None): + def 
handler(self, name, version, release, arch, target_info, + build_tag, repo_info, inst_tree, opts=None): if not ozif_enabled: - self.logger.error("ImageFactory features require the following dependencies: pykickstart, imagefactory, oz and possibly python-hashlib") + self.logger.error( + "ImageFactory features require the following dependencies: " + "pykickstart, imagefactory, oz and possibly python-hashlib") raise koji.ApplianceError('ImageFactory functions not available') if opts is None: @@ -4339,11 +4427,14 @@ class BuildIndirectionImageTask(OzImageTask): if not opts.get('skip_tag') and not opts.get('scratch'): # Make sure package is on the list for this tag if pkg_cfg is None: - raise koji.BuildError("package (image) %s not in list for tag %s" % (name, target_info['dest_tag_name'])) + raise koji.BuildError("package (image) %s not in list for tag %s" % + (name, target_info['dest_tag_name'])) elif pkg_cfg['blocked']: - raise koji.BuildError("package (image) %s is blocked for tag %s" % (name, target_info['dest_tag_name'])) + raise koji.BuildError("package (image) %s is blocked for tag %s" % + (name, target_info['dest_tag_name'])) return self.session.host.initImageBuild(self.id, - dict(name=name, version=version, release=release, epoch=0)) + dict(name=name, version=version, release=release, + epoch=0)) def getRelease(self, name, ver): """return the next available release number for an N-V""" @@ -4371,7 +4462,8 @@ class BuildIndirectionImageTask(OzImageTask): if fileurl: scm = SCM(fileurl) scm.assert_allowed(self.options.allowed_scms) - self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch')) + self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), + build_tag=build_tag, scratch=self.opts.get('scratch')) logfile = os.path.join(self.workdir, 'checkout.log') scmsrcdir = scm.checkout(self.workdir, self.session, self.getUploadDir(), logfile) @@ -4402,11 +4494,13 @@ class BuildIndirectionImageTask(OzImageTask): taskinfo = self.session.getTaskInfo(task_id) taskstate = koji.TASK_STATES[taskinfo['state']].lower() if taskstate != 'closed': - raise koji.BuildError("Input task (%d) must be in closed state - current state is (%s)" % + raise koji.BuildError("Input task (%d) must be in closed state" + " - current state is (%s)" % (task_id, taskstate)) taskmethod = taskinfo['method'] if taskmethod != "createImage": - raise koji.BuildError("Input task method must be 'createImage' - actual method (%s)" % + raise koji.BuildError("Input task method must be 'createImage'" + " - actual method (%s)" % (taskmethod)) result = self.session.getTaskResult(task_id) @@ -4424,7 +4518,9 @@ class BuildIndirectionImageTask(OzImageTask): tdl_full = os.path.join(task_dir, task_tdl) if not (os.path.isfile(diskimage_full) and os.path.isfile(tdl_full)): - raise koji.BuildError("Missing TDL or qcow2 image for task (%d) - possible expired scratch build" % (task_id)) + raise koji.BuildError( + "Missing TDL or qcow2 image for task (%d) - possible expired scratch build" % + (task_id)) # The sequence to recreate a valid persistent image is as follows # Create a new BaseImage object @@ -4445,7 +4541,10 @@ class BuildIndirectionImageTask(OzImageTask): return factory_base_image def _nvr_to_image(nvr, arch): - """ Take a build ID or NVR plus arch and turn it into an Image Factory Base Image object """ + """ + Take a build ID or NVR plus arch and turn it into + an Image Factory Base Image object + """ pim = PersistentImageManager.default_manager() build = 
self.session.getBuild(nvr) if not build: @@ -4471,7 +4570,8 @@ class BuildIndirectionImageTask(OzImageTask): tdl_full = os.path.join(builddir, build_tdl) if not (os.path.isfile(diskimage_full) and os.path.isfile(tdl_full)): - raise koji.BuildError("Missing TDL (%s) or qcow2 (%s) image for image (%s) - this should never happen" % + raise koji.BuildError("Missing TDL (%s) or qcow2 (%s) image for image (%s)" + " - this should never happen" % (build_tdl, build_diskimage, nvr)) # The sequence to recreate a valid persistent image is as follows @@ -4617,7 +4717,8 @@ class BuildIndirectionImageTask(OzImageTask): tlog.removeHandler(fhandler) self.uploadFile(ozlog) raise koji.ApplianceError('Image status is %s: %s' % - (target.target_image.status, target.target_image.status_detail)) + (target.target_image.status, + target.target_image.status_detail)) self.uploadFile(target.target_image.data, remoteName=os.path.basename(results_loc)) @@ -4644,7 +4745,8 @@ class BuildIndirectionImageTask(OzImageTask): # tag it if not opts.get('scratch') and not opts.get('skip_tag'): tag_task_id = self.session.host.subtask(method='tagBuild', - arglist=[target_info['dest_tag'], bld_info['id'], False, None, True], + arglist=[target_info['dest_tag'], + bld_info['id'], False, None, True], label='tag', parent=self.id, arch='noarch') self.wait(tag_task_id) @@ -4652,7 +4754,8 @@ class BuildIndirectionImageTask(OzImageTask): report = '' if opts.get('scratch'): respath = ', '.join( - [os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in [self.id]]) + [os.path.join(koji.pathinfo.work(), + koji.pathinfo.taskrelpath(tid)) for tid in [self.id]]) report += 'Scratch ' else: respath = koji.pathinfo.imagebuild(bld_info) @@ -4681,8 +4784,10 @@ class RebuildSRPM(BaseBuildTask): build_tag = self.session.getTag(build_tag, strict=True, event=event_id) rootopts = {'install_group': 'srpm-build', 'repo_id': repo_id} - br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id)) - broot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, **rootopts) + br_arch = self.find_arch('noarch', self.session.host.getHost( + ), self.session.getBuildConfig(build_tag['id'], event=event_id)) + broot = BuildRoot(self.session, self.options, + build_tag['id'], br_arch, self.id, **rootopts) broot.workdir = self.workdir self.logger.debug("Initializing buildroot") @@ -4720,7 +4825,8 @@ class RebuildSRPM(BaseBuildTask): release = koji.get_header_field(h, 'release') srpm_name = "%(name)s-%(version)s-%(release)s.src.rpm" % locals() if srpm_name != os.path.basename(srpm): - raise koji.BuildError('srpm name mismatch: %s != %s' % (srpm_name, os.path.basename(srpm))) + raise koji.BuildError('srpm name mismatch: %s != %s' % + (srpm_name, os.path.basename(srpm))) # upload srpm and return self.uploadFile(srpm) @@ -4784,12 +4890,15 @@ class BuildSRPMFromSCMTask(BaseBuildTask): rootopts = {'install_group': 'srpm-build', 'setup_dns': True, 'repo_id': repo_id} - if self.options.scm_credentials_dir is not None and os.path.isdir(self.options.scm_credentials_dir): + if self.options.scm_credentials_dir is not None and os.path.isdir( + self.options.scm_credentials_dir): rootopts['bind_opts'] = {'dirs': {self.options.scm_credentials_dir: '/credentials', }} # Force internal_dev_setup back to true because bind_opts is used to turn it off rootopts['internal_dev_setup'] = True - br_arch = self.find_arch('noarch', self.session.host.getHost(), 
self.session.getBuildConfig(build_tag['id'], event=event_id)) - broot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, **rootopts) + br_arch = self.find_arch('noarch', self.session.host.getHost( + ), self.session.getBuildConfig(build_tag['id'], event=event_id)) + broot = BuildRoot(self.session, self.options, + build_tag['id'], br_arch, self.id, **rootopts) broot.workdir = self.workdir self.logger.debug("Initializing buildroot") @@ -4803,7 +4912,8 @@ class BuildSRPMFromSCMTask(BaseBuildTask): logfile = self.workdir + '/checkout.log' uploadpath = self.getUploadDir() - self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch')) + self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), + build_tag=build_tag, scratch=opts.get('scratch')) # Check out spec file, etc. from SCM sourcedir = scm.checkout(scmdir, self.session, uploadpath, logfile) self.run_callbacks("postSCMCheckout", @@ -4855,7 +4965,8 @@ class BuildSRPMFromSCMTask(BaseBuildTask): release = koji.get_header_field(h, 'release') srpm_name = "%(name)s-%(version)s-%(release)s.src.rpm" % locals() if srpm_name != os.path.basename(srpm): - raise koji.BuildError('srpm name mismatch: %s != %s' % (srpm_name, os.path.basename(srpm))) + raise koji.BuildError('srpm name mismatch: %s != %s' % + (srpm_name, os.path.basename(srpm))) # upload srpm and return self.uploadFile(srpm) @@ -4898,13 +5009,16 @@ Status: %(status)s\r %(failure_info)s\r """ - def handler(self, recipients, is_successful, tag_info, from_info, build_info, user_info, ignore_success=None, failure_msg=''): + def handler(self, recipients, is_successful, tag_info, from_info, + build_info, user_info, ignore_success=None, failure_msg=''): if len(recipients) == 0: self.logger.debug('task %i: no recipients, not sending notifications', self.id) return if ignore_success and is_successful: - self.logger.debug('task %i: tag operation successful and ignore success is true, not sending notifications', self.id) + self.logger.debug( + 'task %i: tag operation successful and ignore success is true, ' + 'not sending notifications', self.id) return build = self.session.getBuild(build_info) @@ -4972,7 +5086,8 @@ class BuildNotificationTask(BaseTaskHandler): _taskWeight = 0.1 # XXX externalize these templates somewhere - subject_templ = """Package: %(build_nvr)s Tag: %(dest_tag)s Status: %(status)s Built by: %(build_owner)s""" + subject_templ = "Package: %(build_nvr)s Tag: %(dest_tag)s Status: %(status)s " \ + "Built by: %(build_owner)s" message_templ = \ """From: %(from_addr)s\r Subject: %(subject)s\r @@ -5073,7 +5188,8 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r return build_pkg_name = build['package_name'] - build_pkg_evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) + ':' or ''), build['version'], build['release']) + build_pkg_evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) + + ':' or ''), build['version'], build['release']) build_nvr = koji.buildLabel(build) build_id = build['id'] build_owner = build['owner_name'] @@ -5099,7 +5215,9 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r cancel_info = "\r\nCanceled by: %s" % canceler['name'] elif build['state'] == koji.BUILD_STATES['FAILED']: failure_data = task_data[task_id]['result'] - failed_hosts = ['%s (%s)' % (task['host'], task['arch']) for task in task_data.values() if task['host'] and task['state'] == 'failed'] + failed_hosts = ['%s (%s)' % (task['host'], task['arch']) + for task in task_data.values() + if task['host'] and 
task['state'] == 'failed'] failure_info = "\r\n%s (%d) failed on %s:\r\n %s" % (build_nvr, build_id, ', '.join(failed_hosts), failure_data) @@ -5142,9 +5260,11 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r output += "logs:\r\n" for (file_, volume) in task['logs']: if tasks[task_state] != 'closed': - output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file_, volume) + output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % ( + weburl, task['id'], file_, volume) else: - output += " %s\r\n" % '/'.join([buildurl, 'data', 'logs', task['build_arch'], file_]) + output += " %s\r\n" % '/'.join([buildurl, 'data', 'logs', + task['build_arch'], file_]) if task['rpms']: output += "rpms:\r\n" for file_ in task['rpms']: @@ -5152,11 +5272,13 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r if task['misc']: output += "misc:\r\n" for (file_, volume) in task['misc']: - output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file_, volume) + output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % ( + weburl, task['id'], file_, volume) output += "\r\n" output += "\r\n" - changelog = koji.util.formatChangelog(self.session.getChangelogEntries(build_id, queryOpts={'limit': 3})).replace("\n", "\r\n") + changelog = koji.util.formatChangelog(self.session.getChangelogEntries( + build_id, queryOpts={'limit': 3})).replace("\n", "\r\n") if changelog: changelog = "Changelog:\r\n%s" % changelog @@ -5464,7 +5586,8 @@ class createDistRepoTask(BaseTaskHandler): "sparc": ("sparcv9v", "sparcv9", "sparcv8", "sparc", "noarch"), "sparc64": ("sparc64v", "sparc64", "noarch"), "alpha": ("alphaev6", "alphaev56", "alphaev5", "alpha", "noarch"), - "arm": ("arm", "armv4l", "armv4tl", "armv5tel", "armv5tejl", "armv6l", "armv7l", "noarch"), + "arm": ("arm", "armv4l", "armv4tl", "armv5tel", "armv5tejl", "armv6l", "armv7l", + "noarch"), "armhfp": ("armv7hl", "armv7hnl", "noarch"), "aarch64": ("aarch64", "noarch"), "riscv64": ("riscv64", "noarch"), @@ -5926,7 +6049,8 @@ enabled=1 for a in self.compat[arch]: # note: self.compat includes noarch for non-src already rpm_iter, builds = self.session.listTaggedRPMS(tag_id, - event=opts['event'], arch=a, latest=opts['latest'], + event=opts['event'], arch=a, + latest=opts['latest'], inherit=opts['inherit'], rpmsigs=True) for build in builds: builddirs[build['id']] = koji.pathinfo.build(build) @@ -6105,9 +6229,12 @@ class WaitrepoTask(BaseTaskHandler): repo = self.session.getRepo(taginfo['id']) if repo and repo != last_repo: if builds: - if koji.util.checkForBuilds(self.session, taginfo['id'], builds, repo['create_event']): - self.logger.debug("Successfully waited %s for %s to appear in the %s repo" % - (koji.util.duration(start), koji.util.printList(nvrs), taginfo['name'])) + if koji.util.checkForBuilds( + self.session, taginfo['id'], builds, repo['create_event']): + self.logger.debug("Successfully waited %s for %s to appear " + "in the %s repo" % + (koji.util.duration(start), koji.util.printList(nvrs), + taginfo['name'])) return repo elif newer_than: if repo['create_ts'] > newer_than: @@ -6120,8 +6247,10 @@ class WaitrepoTask(BaseTaskHandler): if (time.time() - start) > (self.TIMEOUT * 60.0): if builds: - raise koji.GenericError("Unsuccessfully waited %s for %s to appear in the %s repo" % - (koji.util.duration(start), koji.util.printList(nvrs), taginfo['name'])) + raise koji.GenericError("Unsuccessfully waited %s for %s to appear " + "in the %s repo" % + (koji.util.duration(start), koji.util.printList(nvrs), + 
taginfo['name'])) else: raise koji.GenericError("Unsuccessfully waited %s for a new %s repo" % (koji.util.duration(start), taginfo['name'])) diff --git a/builder/mergerepos b/builder/mergerepos index 1c020702..a6c5a35d 100755 --- a/builder/mergerepos +++ b/builder/mergerepos @@ -61,7 +61,8 @@ MULTILIB_ARCHES = { def parse_args(args): """Parse our opts/args""" usage = """ - mergerepos: take 2 or more repositories and merge their metadata into a new repo using Koji semantics + mergerepos: take 2 or more repositories and merge their metadata into a new + repo using Koji semantics mergerepos --repo=url --repo=url --outputdir=/some/path""" @@ -74,7 +75,8 @@ def parse_args(args): parser.add_option("-a", "--arch", dest="arches", default=[], action="append", help="List of arches to include in the repo") parser.add_option("-b", "--blocked", default=None, - help="A file containing a list of srpm names to exclude from the merged repo") + help="A file containing a list of srpm names to exclude " + "from the merged repo") parser.add_option("--mode", default='koji', help="Select the merge mode") parser.add_option("-o", "--outputdir", default=None, help="Location to create the repository") @@ -175,18 +177,18 @@ class RepoMerge(object): For each package object, check if the srpm name has ever been seen before. If is has not, keep the package. If it has, check if the srpm name was first seen in the same repo as the current package. If so, keep the package from the srpm with the - highest NVR. If not, keep the packages from the first srpm we found, and delete packages from - all other srpms. + highest NVR. If not, keep the packages from the first srpm we found, and delete packages + from all other srpms. Packages with matching NVRs in multiple repos will be taken from the first repo. If the srpm name appears in the blocked package list, any packages generated from the srpm will be deleted from the package sack as well. - This method will also generate a file called "pkgorigins" and add it to the repo metadata. This - is a tab-separated map of package E:N-V-R.A to repo URL (as specified on the command-line). This - allows a package to be tracked back to its origin, even if the location field in the repodata does - not match the original repo location. + This method will also generate a file called "pkgorigins" and add it to the repo metadata. + This is a tab-separated map of package E:N-V-R.A to repo URL (as specified on the + command-line). This allows a package to be tracked back to its origin, even if the location + field in the repodata does not match the original repo location. 
""" if self.mode == 'simple': @@ -208,7 +210,8 @@ class RepoMerge(object): # to be using relative urls # XXX - kind of a hack, but yum leaves us little choice # force the pkg object to report a relative location - loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, attrib=True) + loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, + attrib=True) pkg._return_remote_location = make_const_func(loc) if pkg.sourcerpm in seen_srpms: # we're just looking at sourcerpms this pass and we've @@ -299,7 +302,8 @@ class RepoMerge(object): # to be using relative urls # XXX - kind of a hack, but yum leaves us little choice # force the pkg object to report a relative location - loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, attrib=True) + loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, + attrib=True) pkg._return_remote_location = make_const_func(loc) pkgorigins = os.path.join(self.yumbase.conf.cachedir, 'pkgorigins') diff --git a/cli/koji b/cli/koji index 545f3be8..c450074b 100755 --- a/cli/koji +++ b/cli/koji @@ -129,7 +129,8 @@ def get_options(): help=_("do not authenticate")) parser.add_option("--force-auth", action="store_true", default=False, help=_("authenticate even for read-only operations")) - parser.add_option("--authtype", help=_("force use of a type of authentication, options: noauth, ssl, password, or kerberos")) + parser.add_option("--authtype", help=_("force use of a type of authentication, options: " + "noauth, ssl, password, or kerberos")) parser.add_option("-d", "--debug", action="store_true", help=_("show debug output")) parser.add_option("--debug-xmlrpc", action="store_true", @@ -145,7 +146,8 @@ def get_options(): parser.add_option("--pkgurl", help=SUPPRESS_HELP) parser.add_option("--plugin-paths", metavar='PATHS', help=_("specify additional plugin paths (colon separated)")) - parser.add_option("--help-commands", action="store_true", default=False, help=_("list commands")) + parser.add_option("--help-commands", action="store_true", default=False, + help=_("list commands")) (options, args) = parser.parse_args() # load local config diff --git a/cli/koji_cli/commands.py b/cli/koji_cli/commands.py index f3a9cbf9..b591e97f 100644 --- a/cli/koji_cli/commands.py +++ b/cli/koji_cli/commands.py @@ -185,7 +185,8 @@ def handle_add_host(goptions, session, args): "[admin] Add a host" usage = _("usage: %prog add-host [options] <hostname> <arch> [<arch> ...]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--krb-principal", help=_("set a non-default kerberos principal for the host")) + parser.add_option("--krb-principal", + help=_("set a non-default kerberos principal for the host")) (options, args) = parser.parse_args(args) if len(args) < 2: parser.error(_("Please specify a hostname and at least one arch")) @@ -208,7 +209,8 @@ def handle_edit_host(options, session, args): "[admin] Edit a host" usage = _("usage: %prog edit-host <hostname> [<hostname> ...] 
[options]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--arches", help=_("Space or comma-separated list of supported architectures")) + parser.add_option("--arches", + help=_("Space or comma-separated list of supported architectures")) parser.add_option("--capacity", type="float", help=_("Capacity of this host")) parser.add_option("--description", metavar="DESC", help=_("Description of this host")) parser.add_option("--comment", help=_("A brief comment about this host")) @@ -358,7 +360,8 @@ def handle_add_pkg(goptions, session, args): if dsttag is None: print("No such tag: %s" % tag) sys.exit(1) - pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'])]) + pkglist = dict([(p['package_name'], p['package_id']) + for p in session.listPackages(tagID=dsttag['id'])]) to_add = [] for package in args[1:]: package_id = pkglist.get(package, None) @@ -381,7 +384,8 @@ def handle_block_pkg(goptions, session, args): "[admin] Block a package in the listing for tag" usage = _("usage: %prog block-pkg [options] <tag> <package> [<package> ...]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--force", action='store_true', default=False, help=_("Override blocks and owner if necessary")) + parser.add_option("--force", action='store_true', default=False, + help=_("Override blocks and owner if necessary")) (options, args) = parser.parse_args(args) if len(args) < 2: parser.error(_("Please specify a tag and at least one package")) @@ -392,7 +396,8 @@ def handle_block_pkg(goptions, session, args): if dsttag is None: print("No such tag: %s" % tag) return 1 - pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'], inherited=True)]) + pkglist = dict([(p['package_name'], p['package_id']) + for p in session.listPackages(tagID=dsttag['id'], inherited=True)]) ret = 0 for package in args[1:]: package_id = pkglist.get(package, None) @@ -429,7 +434,8 @@ def handle_remove_pkg(goptions, session, args): if dsttag is None: print("No such tag: %s" % tag) return 1 - pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'])]) + pkglist = dict([(p['package_name'], p['package_id']) + for p in session.listPackages(tagID=dsttag['id'])]) ret = 0 for package in args[1:]: package_id = pkglist.get(package, None) @@ -472,7 +478,8 @@ def handle_build(options, session, args): help=_("Run the build at a lower priority")) (build_opts, args) = parser.parse_args(args) if len(args) != 2: - parser.error(_("Exactly two arguments (a build target and a SCM URL or srpm file) are required")) + parser.error(_("Exactly two arguments (a build target and a SCM URL or srpm file) are " "required")) if build_opts.arch_override and not build_opts.scratch: parser.error(_("--arch_override is only allowed for --scratch builds")) activate_session(session, options) @@ -552,8 +559,10 @@ def handle_chain_build(options, session, args): # check that the destination tag is in the inheritance tree of the build tag # otherwise there is no way that a chain-build can work ancestors = session.getFullInheritance(build_target['build_tag']) - if dest_tag['id'] not in [build_target['build_tag']] + [ancestor['parent_id'] for ancestor in ancestors]: - print(_("Packages in destination tag %(dest_tag_name)s are not inherited by build tag %(build_tag_name)s" % build_target)) + if dest_tag['id'] not in [build_target['build_tag']] + \ + [ancestor['parent_id'] for ancestor in ancestors]: + print(_("Packages in destination 
tag %(dest_tag_name)s are not inherited by build tag " + "%(build_tag_name)s" % build_target)) print(_("Target %s is not usable for a chain-build" % build_target['name'])) return 1 @@ -582,7 +591,8 @@ def handle_chain_build(options, session, args): src_list.append(build_level) if len(src_list) < 2: - parser.error(_('You must specify at least one dependency between builds with : (colon)\nIf there are no dependencies, use the build command instead')) + parser.error(_('You must specify at least one dependency between builds with : (colon)\n' + 'If there are no dependencies, use the build command instead')) priority = None if build_opts.background: @@ -607,7 +617,8 @@ def handle_maven_build(options, session, args): usage += _("\n %prog maven-build --ini=CONFIG... [options] ") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--patches", action="store", metavar="URL", - help=_("SCM URL of a directory containing patches to apply to the sources before building")) + help=_("SCM URL of a directory containing patches to apply to the sources " + "before building")) parser.add_option("-G", "--goal", action="append", dest="goals", metavar="GOAL", default=[], help=_("Additional goal to run before \"deploy\"")) @@ -673,7 +684,8 @@ def handle_maven_build(options, session, args): parser.error(e.args[0]) opts = to_list(params.values())[0] if opts.pop('type', 'maven') != 'maven': - parser.error(_("Section %s does not contain a maven-build config") % to_list(params.keys())[0]) + parser.error(_("Section %s does not contain a maven-build config") % + to_list(params.keys())[0]) source = opts.pop('scmurl') else: source = args[1] @@ -704,16 +716,19 @@ def handle_wrapper_rpm(options, session, args): """[build] Build wrapper rpms for any archives associated with a build.""" usage = _("usage: %prog wrapper-rpm [options] ") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--create-build", action="store_true", help=_("Create a new build to contain wrapper rpms")) + parser.add_option("--create-build", action="store_true", + help=_("Create a new build to contain wrapper rpms")) parser.add_option("--ini", action="append", dest="inis", metavar="CONFIG", default=[], help=_("Pass build parameters via a .ini file")) parser.add_option("-s", "--section", help=_("Get build parameters from this section of the .ini")) - parser.add_option("--skip-tag", action="store_true", help=_("If creating a new build, don't tag it")) + parser.add_option("--skip-tag", action="store_true", + help=_("If creating a new build, don't tag it")) parser.add_option("--scratch", action="store_true", help=_("Perform a scratch build")) parser.add_option("--nowait", action="store_true", help=_("Don't wait on build")) - parser.add_option("--background", action="store_true", help=_("Run the build at a lower priority")) + parser.add_option("--background", action="store_true", + help=_("Run the build at a lower priority")) (build_opts, args) = parser.parse_args(args) if build_opts.inis: @@ -721,7 +736,8 @@ def handle_wrapper_rpm(options, session, args): parser.error(_("Exactly one argument (a build target) is required")) else: if len(args) < 3: - parser.error(_("You must provide a build target, a build ID or NVR, and a SCM URL to a specfile fragment")) + parser.error(_("You must provide a build target, a build ID or NVR, " + "and a SCM URL to a specfile fragment")) activate_session(session, options) target = args[0] @@ -733,7 +749,8 @@ def handle_wrapper_rpm(options, session, args): parser.error(e.args[0]) opts = 
to_list(params.values())[0] if opts.get('type') != 'wrapper': - parser.error(_("Section %s does not contain a wrapper-rpm config") % to_list(params.keys())[0]) + parser.error(_("Section %s does not contain a wrapper-rpm config") % + to_list(params.keys())[0]) url = opts['scmurl'] package = opts['buildrequires'][0] target_info = session.getBuildTarget(target, strict=True) @@ -852,7 +869,8 @@ def handle_call(goptions, session, args): usage = _("usage: %prog call [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--python", action="store_true", help=_("Use python syntax for values")) - parser.add_option("--kwargs", help=_("Specify keyword arguments as a dictionary (implies --python)")) + parser.add_option("--kwargs", + help=_("Specify keyword arguments as a dictionary (implies --python)")) parser.add_option("--json-output", action="store_true", help=_("Use JSON syntax for output")) (options, args) = parser.parse_args(args) if len(args) < 1: @@ -895,7 +913,8 @@ def anon_handle_mock_config(goptions, session, args): parser.add_option("--target", help=_("Create a mock config for a build target")) parser.add_option("--task", help=_("Duplicate the mock config of a previous task")) parser.add_option("--latest", action="store_true", help=_("use the latest redirect url")) - parser.add_option("--buildroot", help=_("Duplicate the mock config for the specified buildroot id")) + parser.add_option("--buildroot", + help=_("Duplicate the mock config for the specified buildroot id")) parser.add_option("--mockdir", default="/var/lib/mock", metavar="DIR", help=_("Specify mockdir")) parser.add_option("--topdir", metavar="DIR", @@ -1136,9 +1155,11 @@ def handle_import(goptions, session, args): "[admin] Import externally built RPMs into the database" usage = _("usage: %prog import [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--link", action="store_true", help=_("Attempt to hardlink instead of uploading")) + parser.add_option("--link", action="store_true", + help=_("Attempt to hardlink instead of uploading")) parser.add_option("--test", action="store_true", help=_("Don't actually import")) - parser.add_option("--create-build", action="store_true", help=_("Auto-create builds as needed")) + parser.add_option("--create-build", action="store_true", + help=_("Auto-create builds as needed")) parser.add_option("--src-epoch", help=_("When auto-creating builds, use this epoch")) (options, args) = parser.parse_args(args) if len(args) < 1: @@ -1284,7 +1305,8 @@ def handle_import_cg(goptions, session, args): parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--noprogress", action="store_true", help=_("Do not display progress of the upload")) - parser.add_option("--link", action="store_true", help=_("Attempt to hardlink instead of uploading")) + parser.add_option("--link", action="store_true", + help=_("Attempt to hardlink instead of uploading")) parser.add_option("--test", action="store_true", help=_("Don't actually import")) parser.add_option("--token", action="store", default=None, help=_("Build reservation token")) (options, args) = parser.parse_args(args) @@ -1443,7 +1465,8 @@ def handle_import_sig(goptions, session, args): parser.error(_("No such file: %s") % path) activate_session(session, goptions) for path in args: - data = koji.get_header_fields(path, ('name', 'version', 'release', 'arch', 'siggpg', 'sigpgp', 'sourcepackage')) + data = koji.get_header_fields(path, ('name', 'version', 'release', 'arch', 'siggpg', + 
'sigpgp', 'sourcepackage')) if data['sourcepackage']: data['arch'] = 'src' sigkey = data['siggpg'] @@ -1463,7 +1486,8 @@ def handle_import_sig(goptions, session, args): print("No such rpm in system: %(name)s-%(version)s-%(release)s.%(arch)s" % data) continue if rinfo.get('external_repo_id'): - print("Skipping external rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s" % rinfo) + print("Skipping external rpm: %(name)s-%(version)s-%(release)s.%(arch)s@" + "%(external_repo_name)s" % rinfo) continue sighdr = koji.rip_rpm_sighdr(path) previous = session.queryRPMSigs(rpm_id=rinfo['id'], sigkey=sigkey) @@ -1490,7 +1514,8 @@ def handle_write_signed_rpm(goptions, session, args): "[admin] Write signed RPMs to disk" usage = _("usage: %prog write-signed-rpm [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--all", action="store_true", help=_("Write out all RPMs signed with this key")) + parser.add_option("--all", action="store_true", + help=_("Write out all RPMs signed with this key")) parser.add_option("--buildid", help=_("Specify a build id rather than an n-v-r")) (options, args) = parser.parse_args(args) if len(args) < 1: @@ -1703,7 +1728,8 @@ def handle_prune_signed_copies(options, session, args): # we were still tagged here sometime before the cutoff if options.debug: print("Build %s had protected tag %s until %s" - % (nvr, tag_name, time.asctime(time.localtime(our_entry['revoke_ts'])))) + % (nvr, tag_name, + time.asctime(time.localtime(our_entry['revoke_ts'])))) is_protected = True break replaced_ts = None @@ -2062,7 +2088,8 @@ def handle_list_signed(goptions, session, args): rinfo = session.getRPM(rpm_info, strict=True) rpm_idx[rinfo['id']] = rinfo if rinfo.get('external_repo_id'): - parser.error(_("External rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s") % rinfo) + parser.error(_("External rpm: %(name)s-%(version)s-%(release)s.%(arch)s@" + "%(external_repo_name)s") % rinfo) qopts['rpm_id'] = rinfo['id'] if options.build: build = options.build @@ -2123,13 +2150,18 @@ def handle_import_archive(options, session, args): parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--noprogress", action="store_true", help=_("Do not display progress of the upload")) - parser.add_option("--create-build", action="store_true", help=_("Auto-create builds as needed")) - parser.add_option("--link", action="store_true", help=_("Attempt to hardlink instead of uploading")) - parser.add_option("--type", help=_("The type of archive being imported. Currently supported types: maven, win, image")) - parser.add_option("--type-info", help=_("Type-specific information to associate with the archives. " - "For Maven archives this should be a local path to a .pom file. " - "For Windows archives this should be relpath:platforms[:flags])) " - "Images need an arch")) + parser.add_option("--create-build", action="store_true", + help=_("Auto-create builds as needed")) + parser.add_option("--link", action="store_true", + help=_("Attempt to hardlink instead of uploading")) + parser.add_option("--type", + help=_("The type of archive being imported. " + "Currently supported types: maven, win, image")) + parser.add_option("--type-info", + help=_("Type-specific information to associate with the archives. " + "For Maven archives this should be a local path to a .pom file. 
" + "For Windows archives this should be relpath:platforms[:flags])) " + "Images need an arch")) (suboptions, args) = parser.parse_args(args) if not len(args) > 1: @@ -2297,11 +2329,14 @@ def anon_handle_latest_build(goptions, session, args): usage = _("usage: %prog latest-build [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--arch", help=_("List all of the latest packages for this arch")) - parser.add_option("--all", action="store_true", help=_("List all of the latest packages for this tag")) + parser.add_option("--all", action="store_true", + help=_("List all of the latest packages for this tag")) parser.add_option("--quiet", action="store_true", default=goptions.quiet, help=_("Do not print the header information")) parser.add_option("--paths", action="store_true", help=_("Show the file paths")) - parser.add_option("--type", help=_("Show builds of the given type only. Currently supported types: maven")) + parser.add_option("--type", + help=_("Show builds of the given type only. " + "Currently supported types: maven")) (options, args) = parser.parse_args(args) if len(args) == 0: parser.error(_("A tag name must be specified")) @@ -2338,20 +2373,24 @@ def anon_handle_latest_build(goptions, session, args): if options.type == 'maven': for x in data: x['path'] = pathinfo.mavenbuild(x) - fmt = "%(path)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s" + fmt = "%(path)-40s %(tag_name)-20s %(maven_group_id)-20s " \ + "%(maven_artifact_id)-20s %(owner_name)s" else: for x in data: x['path'] = pathinfo.build(x) fmt = "%(path)-40s %(tag_name)-20s %(owner_name)s" else: if options.type == 'maven': - fmt = "%(nvr)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s" + fmt = "%(nvr)-40s %(tag_name)-20s %(maven_group_id)-20s " \ + "%(maven_artifact_id)-20s %(owner_name)s" else: fmt = "%(nvr)-40s %(tag_name)-20s %(owner_name)s" if not options.quiet: if options.type == 'maven': - print("%-40s %-20s %-20s %-20s %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by")) - print("%s %s %s %s %s" % ("-" * 40, "-" * 20, "-" * 20, "-" * 20, "-" * 16)) + print("%-40s %-20s %-20s %-20s %s" % + ("Build", "Tag", "Group Id", "Artifact Id", "Built by")) + print("%s %s %s %s %s" % + ("-" * 40, "-" * 20, "-" * 20, "-" * 20, "-" * 16)) else: print("%-40s %-20s %s" % ("Build", "Tag", "Built by")) print("%s %s %s" % ("-" * 40, "-" * 20, "-" * 16)) @@ -2397,14 +2436,18 @@ def anon_handle_list_tagged(goptions, session, args): parser.add_option("--rpms", action="store_true", help=_("Show rpms instead of builds")) parser.add_option("--inherit", action="store_true", help=_("Follow inheritance")) parser.add_option("--latest", action="store_true", help=_("Only show the latest builds/rpms")) - parser.add_option("--latest-n", type='int', metavar="N", help=_("Only show the latest N builds/rpms")) + parser.add_option("--latest-n", type='int', metavar="N", + help=_("Only show the latest N builds/rpms")) parser.add_option("--quiet", action="store_true", default=goptions.quiet, help=_("Do not print the header information")) parser.add_option("--paths", action="store_true", help=_("Show the file paths")) parser.add_option("--sigs", action="store_true", help=_("Show signatures")) - parser.add_option("--type", help=_("Show builds of the given type only. Currently supported types: maven, win, image")) + parser.add_option("--type", + help=_("Show builds of the given type only. 
" + "Currently supported types: maven, win, image")) parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event")) - parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at last event before timestamp")) + parser.add_option("--ts", type='int', metavar="TIMESTAMP", + help=_("query at last event before timestamp")) parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo")) (options, args) = parser.parse_args(args) if len(args) == 0: @@ -2473,20 +2516,24 @@ def anon_handle_list_tagged(goptions, session, args): if options.type == 'maven': for x in data: x['path'] = pathinfo.mavenbuild(x) - fmt = "%(path)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s" + fmt = "%(path)-40s %(tag_name)-20s %(maven_group_id)-20s " \ + "%(maven_artifact_id)-20s %(owner_name)s" else: for x in data: x['path'] = pathinfo.build(x) fmt = "%(path)-40s %(tag_name)-20s %(owner_name)s" else: if options.type == 'maven': - fmt = "%(nvr)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s" + fmt = "%(nvr)-40s %(tag_name)-20s %(maven_group_id)-20s " \ + "%(maven_artifact_id)-20s %(owner_name)s" else: fmt = "%(nvr)-40s %(tag_name)-20s %(owner_name)s" if not options.quiet: if options.type == 'maven': - print("%-40s %-20s %-20s %-20s %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by")) - print("%s %s %s %s %s" % ("-" * 40, "-" * 20, "-" * 20, "-" * 20, "-" * 16)) + print("%-40s %-20s %-20s %-20s %s" % + ("Build", "Tag", "Group Id", "Artifact Id", "Built by")) + print("%s %s %s %s %s" % + ("-" * 40, "-" * 20, "-" * 20, "-" * 20, "-" * 16)) else: print("%-40s %-20s %s" % ("Build", "Tag", "Built by")) print("%s %s %s" % ("-" * 40, "-" * 20, "-" * 16)) @@ -2756,7 +2803,8 @@ def anon_handle_list_channels(goptions, session, args): if not options.quiet: print('Channel Enabled Ready Disbld Load Cap Perc') for channel in channels: - print("%(name)-15s %(enabled)6d %(ready)6d %(disabled)6d %(load)6d %(capacity)6d %(perc_load)6d%%" % channel) + print("%(name)-15s %(enabled)6d %(ready)6d %(disabled)6d %(load)6d %(capacity)6d " + "%(perc_load)6d%%" % channel) def anon_handle_list_hosts(goptions, session, args): @@ -2766,10 +2814,13 @@ def anon_handle_list_hosts(goptions, session, args): parser.add_option("--arch", action="append", default=[], help=_("Specify an architecture")) parser.add_option("--channel", help=_("Specify a channel")) parser.add_option("--ready", action="store_true", help=_("Limit to ready hosts")) - parser.add_option("--not-ready", action="store_false", dest="ready", help=_("Limit to not ready hosts")) + parser.add_option("--not-ready", action="store_false", dest="ready", + help=_("Limit to not ready hosts")) parser.add_option("--enabled", action="store_true", help=_("Limit to enabled hosts")) - parser.add_option("--not-enabled", action="store_false", dest="enabled", help=_("Limit to not enabled hosts")) - parser.add_option("--disabled", action="store_false", dest="enabled", help=_("Alias for --not-enabled")) + parser.add_option("--not-enabled", action="store_false", dest="enabled", + help=_("Limit to not enabled hosts")) + parser.add_option("--disabled", action="store_false", dest="enabled", + help=_("Alias for --not-enabled")) parser.add_option("--quiet", action="store_true", default=goptions.quiet, help=_("Do not print header information")) parser.add_option("--show-channels", action="store_true", help=_("Show host's channels")) @@ -2824,11 +2875,13 @@ def 
anon_handle_list_hosts(goptions, session, args): else: longest_host = 8 if not options.quiet: - hdr = "{hostname:<{longest_host}} Enb Rdy Load/Cap Arches Last Update".format(longest_host=longest_host, hostname='Hostname') + hdr = "{hostname:<{longest_host}} Enb Rdy Load/Cap Arches Last Update".format( + longest_host=longest_host, hostname='Hostname') if options.show_channels: hdr += " Channels" print(hdr) - mask = "%%(name)-%ss %%(enabled)-3s %%(ready)-3s %%(task_load)4.1f/%%(capacity)-4.1f %%(arches)-16s %%(update)-19s" % longest_host + mask = "%%(name)-%ss %%(enabled)-3s %%(ready)-3s %%(task_load)4.1f/%%(capacity)-4.1f " \ + "%%(arches)-16s %%(update)-19s" % longest_host if options.show_channels: mask += " %(channels)s" for host in hosts: @@ -2848,7 +2901,8 @@ def anon_handle_list_pkgs(goptions, session, args): parser.add_option("--show-blocked", action="store_true", help=_("Show blocked packages")) parser.add_option("--show-dups", action="store_true", help=_("Show superseded owners")) parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event")) - parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at last event before timestamp")) + parser.add_option("--ts", type='int', metavar="TIMESTAMP", + help=_("query at last event before timestamp")) parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo")) (options, args) = parser.parse_args(args) if len(args) != 0: @@ -3043,7 +3097,8 @@ def anon_handle_rpminfo(goptions, session, args): "[info] Print basic information about an RPM" usage = _("usage: %prog rpminfo [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--buildroots", action="store_true", help=_("show buildroots the rpm was used in")) + parser.add_option("--buildroots", action="store_true", + help=_("show buildroots the rpm was used in")) (options, args) = parser.parse_args(args) if len(args) < 1: parser.error(_("Please specify an RPM")) @@ -3071,10 +3126,13 @@ def anon_handle_rpminfo(goptions, session, args): print("External Repository: %(name)s [%(id)i]" % repo) print("External Repository url: %(url)s" % repo) else: - print("RPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(info))) + print("RPM Path: %s" % + os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(info))) print("SRPM: %(epoch)s%(name)s-%(version)s-%(release)s [%(id)d]" % buildinfo) - print("SRPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(buildinfo))) - print("Built: %s" % time.strftime('%a, %d %b %Y %H:%M:%S %Z', time.localtime(info['buildtime']))) + print("SRPM Path: %s" % + os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(buildinfo))) + print("Built: %s" % time.strftime('%a, %d %b %Y %H:%M:%S %Z', + time.localtime(info['buildtime']))) print("SIGMD5: %(payloadhash)s" % info) print("Size: %(size)s" % info) if not info.get('external_repo_id', 0): @@ -3087,7 +3145,8 @@ def anon_handle_rpminfo(goptions, session, args): else: br_info = session.getBuildroot(info['buildroot_id']) if br_info['br_type'] == koji.BR_TYPES['STANDARD']: - print("Buildroot: %(id)i (tag %(tag_name)s, arch %(arch)s, repo %(repo_id)i)" % br_info) + print("Buildroot: %(id)i (tag %(tag_name)s, arch %(arch)s, repo %(repo_id)i)" % + br_info) print("Build Host: %(host_name)s" % br_info) print("Build Task: %(task_id)i" % br_info) else: @@ -3110,7 +3169,8 @@ def anon_handle_buildinfo(goptions, session, args): "[info] Print basic information about a build" 
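Note: a minimal sketch of the doubled-%% technique used for the list-hosts mask above. The doubled percent survives the first %-interpolation and becomes the field spec for the second pass; 'width' and the sample dict are illustrative stand-ins, not values from the patch.

    # First interpolation consumes %s and leaves literal %(...)s specs behind.
    width = 10
    mask = "%%(name)-%ss %%(enabled)-3s" % width   # -> "%(name)-10s %(enabled)-3s"
    # Second interpolation formats an individual record dict.
    print(mask % {'name': 'builder01', 'enabled': 'yes'})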
usage = _("usage: %prog buildinfo [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--changelog", action="store_true", help=_("Show the changelog for the build")) + parser.add_option("--changelog", action="store_true", + help=_("Show the changelog for the build")) (options, args) = parser.parse_args(args) if len(args) < 1: parser.error(_("Please specify a build")) @@ -3162,7 +3222,8 @@ def anon_handle_buildinfo(goptions, session, args): print("Maven archives:") for archive in maven_archives: archives_seen.setdefault(archive['id'], 1) - print(os.path.join(koji.pathinfo.mavenbuild(info), koji.pathinfo.mavenfile(archive))) + print(os.path.join(koji.pathinfo.mavenbuild(info), + koji.pathinfo.mavenfile(archive))) win_archives = session.listArchives(buildID=info['id'], type='win') if win_archives: print("Windows archives:") @@ -3239,7 +3300,8 @@ def anon_handle_hostinfo(goptions, session, args): else: update = update[:update.find('.')] print("Last Update: %s" % update) - print("Channels: %s" % ' '.join([c['name'] for c in session.listChannels(hostID=info['id'])])) + print("Channels: %s" % ' '.join([c['name'] + for c in session.listChannels(hostID=info['id'])])) print("Active Buildroots:") states = {0: "INIT", 1: "WAITING", 2: "BUILDING"} rows = [('NAME', 'STATE', 'CREATION TIME')] @@ -3317,7 +3379,8 @@ def handle_clone_tag(goptions, session, args): dsttag = session.getTag(args[1]) if not srctag: parser.error(_("Unknown src-tag: %s" % args[0])) - if (srctag['locked'] and not options.force) or (dsttag and dsttag['locked'] and not options.force): + if (srctag['locked'] and not options.force) \ + or (dsttag and dsttag['locked'] and not options.force): parser.error(_("Error: You are attempting to clone from or to a tag which is locked.\n" "Please use --force if this is what you really want to do.")) @@ -3721,7 +3784,8 @@ def handle_clone_tag(goptions, session, args): for changes in chgpkglist: sys.stdout.write(pfmt % changes) sys.stdout.write('\n') - sys.stdout.write(bfmt % ('Action', 'From/To Package', 'Build(s)', 'State', 'Owner', 'From Tag')) + sys.stdout.write(bfmt % + ('Action', 'From/To Package', 'Build(s)', 'State', 'Owner', 'From Tag')) sys.stdout.write(bfmt % ('-' * 7, '-' * 28, '-' * 40, '-' * 10, '-' * 10, '-' * 10)) for changes in chgbldlist: sys.stdout.write(bfmt % changes) @@ -3808,7 +3872,8 @@ def handle_edit_target(goptions, session, args): return 1 targetInfo['dest_tag_name'] = options.dest_tag - session.editBuildTarget(targetInfo['orig_name'], targetInfo['name'], targetInfo['build_tag_name'], targetInfo['dest_tag_name']) + session.editBuildTarget(targetInfo['orig_name'], targetInfo['name'], + targetInfo['build_tag_name'], targetInfo['dest_tag_name']) def handle_remove_target(goptions, session, args): @@ -3924,11 +3989,13 @@ def anon_handle_list_tag_inheritance(goptions, session, args): "[info] Print the inheritance information for a tag" usage = _("usage: %prog list-tag-inheritance [options] ") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("--reverse", action="store_true", help=_("Process tag's children instead of its parents")) + parser.add_option("--reverse", action="store_true", + help=_("Process tag's children instead of its parents")) parser.add_option("--stop", help=_("Stop processing inheritance at this tag")) parser.add_option("--jump", help=_("Jump from one tag to another when processing inheritance")) parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event")) - 
parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at last event before timestamp")) + parser.add_option("--ts", type='int', metavar="TIMESTAMP", + help=_("query at last event before timestamp")) parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo")) (options, args) = parser.parse_args(args) if len(args) != 1: @@ -4036,7 +4103,8 @@ def anon_handle_list_tag_history(goptions, session, args): parser.add_option("--build", help=_("Only show data for a specific build")) parser.add_option("--package", help=_("Only show data for a specific package")) parser.add_option("--tag", help=_("Only show data for a specific tag")) - parser.add_option("--all", action="store_true", help=_("Allows listing the entire global history")) + parser.add_option("--all", action="store_true", + help=_("Allows listing the entire global history")) (options, args) = parser.parse_args(args) koji.util.deprecated("list-tag-history is deprecated and will be removed in a future version. " "See: https://pagure.io/koji/issue/836") @@ -4109,7 +4177,8 @@ def _print_histline(entry, **kwargs): if event_id != other[0]: bad_edit = "non-matching" if bad_edit: - print("Warning: unusual edit at event %i in table %s (%s)" % (event_id, table, bad_edit)) + print("Warning: unusual edit at event %i in table %s (%s)" % + (event_id, table, bad_edit)) # we'll simply treat them as separate events pprint.pprint(entry) pprint.pprint(edit) @@ -4333,28 +4402,36 @@ def anon_handle_list_history(goptions, session, args): parser.add_option("--build", help=_("Only show data for a specific build")) parser.add_option("--package", help=_("Only show data for a specific package")) parser.add_option("--tag", help=_("Only show data for a specific tag")) - parser.add_option("--editor", "--by", metavar="USER", help=_("Only show entries modified by user")) + parser.add_option("--editor", "--by", metavar="USER", + help=_("Only show entries modified by user")) parser.add_option("--user", help=_("Only show entries affecting a user")) parser.add_option("--permission", help=_("Only show entries relating to a given permission")) parser.add_option("--cg", help=_("Only show entries relating to a given permission")) - parser.add_option("--external-repo", "--erepo", help=_("Only show entries relating to a given external repo")) - parser.add_option("--build-target", "--target", help=_("Only show entries relating to a given build target")) + parser.add_option("--external-repo", "--erepo", + help=_("Only show entries relating to a given external repo")) + parser.add_option("--build-target", "--target", + help=_("Only show entries relating to a given build target")) parser.add_option("--group", help=_("Only show entries relating to a given group")) parser.add_option("--host", help=_("Only show entries related to given host")) parser.add_option("--channel", help=_("Only show entries related to given channel")) - parser.add_option("--before", metavar="TIMESTAMP", help=_("Only show entries before timestamp")) + parser.add_option("--before", metavar="TIMESTAMP", + help=_("Only show entries before timestamp")) parser.add_option("--after", metavar="TIMESTAMP", help=_("Only show entries after timestamp")) - parser.add_option("--before-event", metavar="EVENT_ID", type='int', help=_("Only show entries before event")) - parser.add_option("--after-event", metavar="EVENT_ID", type='int', help=_("Only show entries after event")) + parser.add_option("--before-event", metavar="EVENT_ID", type='int', + help=_("Only show entries before 
event")) + parser.add_option("--after-event", metavar="EVENT_ID", type='int', + help=_("Only show entries after event")) parser.add_option("--watch", action="store_true", help=_("Monitor history data")) - parser.add_option("--active", action='store_true', help=_("Only show entries that are currently active")) + parser.add_option("--active", action='store_true', + help=_("Only show entries that are currently active")) parser.add_option("--revoked", action='store_false', dest='active', help=_("Only show entries that are currently revoked")) parser.add_option("--context", action="store_true", help=_("Show related entries")) parser.add_option("-s", "--show", action="append", help=_("Show data from selected tables")) parser.add_option("-v", "--verbose", action="store_true", help=_("Show more detail")) parser.add_option("-e", "--events", action="store_true", help=_("Show event ids")) - parser.add_option("--all", action="store_true", help=_("Allows listing the entire global history")) + parser.add_option("--all", action="store_true", + help=_("Allows listing the entire global history")) (options, args) = parser.parse_args(args) if len(args) != 0: parser.error(_("This command takes no arguments")) @@ -4556,7 +4633,8 @@ def _do_parseTaskParams(session, method, task_id, topdir): if len(params) > 2: _handleOpts(lines, params[2]) elif method in ('createLiveCD', 'createAppliance', 'createLiveMedia'): - argnames = ['Name', 'Version', 'Release', 'Arch', 'Target Info', 'Build Tag', 'Repo', 'Kickstart File'] + argnames = ['Name', 'Version', 'Release', 'Arch', 'Target Info', 'Build Tag', 'Repo', + 'Kickstart File'] for n, v in zip(argnames, params): lines.append("%s: %s" % (n, v)) if len(params) > 8: @@ -4580,7 +4658,8 @@ def _do_parseTaskParams(session, method, task_id, topdir): lines.append("Old Repo ID: %i" % oldrepo['id']) lines.append("Old Repo Creation: %s" % koji.formatTimeLong(oldrepo['creation_time'])) if len(params) > 3: - lines.append("External Repos: %s" % ', '.join([ext['external_repo_name'] for ext in params[3]])) + lines.append("External Repos: %s" % + ', '.join([ext['external_repo_name'] for ext in params[3]])) elif method == 'tagNotification': destTag = session.getTag(params[2]) srcTag = None @@ -4604,7 +4683,8 @@ def _do_parseTaskParams(session, method, task_id, topdir): lines.append("Subtasks:") for subtask in params[1]: lines.append(" Method: %s" % subtask[0]) - lines.append(" Parameters: %s" % ", ".join([str(subparam) for subparam in subtask[1]])) + lines.append(" Parameters: %s" % + ", ".join([str(subparam) for subparam in subtask[1]])) if len(subtask) > 2 and subtask[2]: subopts = subtask[2] _handleOpts(lines, subopts, prefix=' ') @@ -4682,7 +4762,8 @@ def _printTaskInfo(session, task_id, topdir, level=0, recurse=True, verbose=True if buildroot_infos: print("%sBuildroots:" % indent) for root in buildroot_infos: - print("%s %s/%s-%d-%d/" % (indent, BUILDDIR, root['tag_name'], root['id'], root['repo_id'])) + print("%s %s/%s-%d-%d/" % + (indent, BUILDDIR, root['tag_name'], root['id'], root['repo_id'])) if logs: print("%sLog Files:" % indent) for log_path in logs: @@ -4707,7 +4788,8 @@ def anon_handle_taskinfo(goptions, session, args): """[info] Show information about a task""" usage = _("usage: %prog taskinfo [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) - parser.add_option("-r", "--recurse", action="store_true", help=_("Show children of this task as well")) + parser.add_option("-r", "--recurse", action="store_true", + help=_("Show children of this task as well")) 
parser.add_option("-v", "--verbose", action="store_true", help=_("Be verbose")) (options, args) = parser.parse_args(args) if len(args) < 1: @@ -4725,7 +4807,8 @@ def anon_handle_taginfo(goptions, session, args): usage = _("usage: %prog taginfo [options] [ ...]") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event")) - parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at last event before timestamp")) + parser.add_option("--ts", type='int', metavar="TIMESTAMP", + help=_("query at last event before timestamp")) parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo")) (options, args) = parser.parse_args(args) if len(args) < 1: @@ -4766,7 +4849,8 @@ def anon_handle_taginfo(goptions, session, args): print("Required permission: %r" % perms.get(perm_id, perm_id)) if session.mavenEnabled(): print("Maven support?: %s" % (info['maven_support'] and 'yes' or 'no')) - print("Include all Maven archives?: %s" % (info['maven_include_all'] and 'yes' or 'no')) + print("Include all Maven archives?: %s" % + (info['maven_include_all'] and 'yes' or 'no')) if 'extra' in info: print("Tag options:") for key in sorted(info['extra'].keys()): @@ -4788,7 +4872,8 @@ def anon_handle_taginfo(goptions, session, args): if event: print(" %s (%s)" % (target['name'], target['build_tag_name'])) else: - print(" %s (%s, %s)" % (target['name'], target['build_tag_name'], repos[target['build_tag']])) + print(" %s (%s, %s)" % + (target['name'], target['build_tag_name'], repos[target['build_tag']])) if build_targets: print("This tag is a buildroot for one or more targets") if not event: @@ -4817,8 +4902,10 @@ def handle_add_tag(goptions, session, args): parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--parent", help=_("Specify parent")) parser.add_option("--arches", help=_("Specify arches")) - parser.add_option("--maven-support", action="store_true", help=_("Enable creation of Maven repos for this tag")) - parser.add_option("--include-all", action="store_true", help=_("Include all packages in this tag when generating Maven repos")) + parser.add_option("--maven-support", action="store_true", + help=_("Enable creation of Maven repos for this tag")) + parser.add_option("--include-all", action="store_true", + help=_("Include all packages in this tag when generating Maven repos")) parser.add_option("-x", "--extra", action="append", default=[], metavar="key=value", help=_("Set tag extra option")) (options, args) = parser.parse_args(args) @@ -4856,10 +4943,15 @@ def handle_edit_tag(goptions, session, args): parser.add_option("--lock", action="store_true", help=_("Lock the tag")) parser.add_option("--unlock", action="store_true", help=_("Unlock the tag")) parser.add_option("--rename", help=_("Rename the tag")) - parser.add_option("--maven-support", action="store_true", help=_("Enable creation of Maven repos for this tag")) - parser.add_option("--no-maven-support", action="store_true", help=_("Disable creation of Maven repos for this tag")) - parser.add_option("--include-all", action="store_true", help=_("Include all packages in this tag when generating Maven repos")) - parser.add_option("--no-include-all", action="store_true", help=_("Do not include all packages in this tag when generating Maven repos")) + parser.add_option("--maven-support", action="store_true", + help=_("Enable creation of Maven repos for this tag")) + parser.add_option("--no-maven-support", action="store_true", + 
help=_("Disable creation of Maven repos for this tag")) + parser.add_option("--include-all", action="store_true", + help=_("Include all packages in this tag when generating Maven repos")) + parser.add_option("--no-include-all", action="store_true", + help=_("Do not include all packages in this tag when generating Maven " + "repos")) parser.add_option("-x", "--extra", action="append", default=[], metavar="key=value", help=_("Set tag extra option")) parser.add_option("-r", "--remove-extra", action="append", default=[], metavar="key", @@ -4947,7 +5039,8 @@ def handle_lock_tag(goptions, session, args): print(_("Tag %s: %s permission already required") % (tag['name'], perm)) continue elif options.test: - print(_("Would have set permission requirement %s for tag %s") % (perm, tag['name'])) + print(_("Would have set permission requirement %s for tag %s") % + (perm, tag['name'])) continue session.editTag2(tag['id'], perm=perm_id) @@ -5008,7 +5101,8 @@ def handle_add_tag_inheritance(goptions, session, args): (options, args) = parser.parse_args(args) if len(args) != 2: - parser.error(_("This command takes exctly two argument: a tag name or ID and that tag's new parent name or ID")) + parser.error(_("This command takes exctly two argument: a tag name or ID and that tag's " + "new parent name or ID")) activate_session(session, goptions) @@ -5026,12 +5120,14 @@ def handle_add_tag_inheritance(goptions, session, args): samePriority = [datum for datum in inheritanceData if datum['priority'] == priority] if sameParents and not options.force: - print(_("Error: You are attempting to add %s as %s's parent even though it already is %s's parent.") + print(_("Error: You are attempting to add %s as %s's parent even though it already is " + "%s's parent.") % (parent['name'], tag['name'], tag['name'])) print(_("Please use --force if this is what you really want to do.")) return if samePriority: - print(_("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority." % tag['name'])) + print(_("Error: There is already an active inheritance with that priority on %s, " + "please specify a different priority with --priority." 
         return

     new_data = {}
@@ -5064,7 +5160,8 @@ def handle_edit_tag_inheritance(goptions, session, args):
         parser.error(_("This command takes at least one argument: a tag name or ID"))

     if len(args) > 3:
-        parser.error(_("This command takes at most three argument: a tag name or ID, a parent tag name or ID, and a priority"))
+        parser.error(_("This command takes at most three arguments: a tag name or ID, "
+                       "a parent tag name or ID, and a priority"))

     activate_session(session, goptions)

@@ -5107,7 +5204,8 @@ def handle_edit_tag_inheritance(goptions, session, args):
     inheritanceData = session.getInheritanceData(tag['id'])
     samePriority = [datum for datum in inheritanceData if datum['priority'] == options.priority]
     if samePriority:
-        print(_("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority.") % tag['name'])
+        print(_("Error: There is already an active inheritance with that priority on %s, "
+                "please specify a different priority with --priority.") % tag['name'])
         return 1

     new_data = data.copy()
@@ -5144,7 +5242,8 @@ def handle_remove_tag_inheritance(goptions, session, args):
         parser.error(_("This command takes at least one argument: a tag name or ID"))

     if len(args) > 3:
-        parser.error(_("This command takes at most three argument: a tag name or ID, a parent tag name or ID, and a priority"))
+        parser.error(_("This command takes at most three arguments: a tag name or ID, a parent "
+                       "tag name or ID, and a priority"))

     activate_session(session, goptions)

@@ -5203,7 +5302,8 @@ def anon_handle_show_groups(goptions, session, args):
     parser.add_option("-x", "--expand", action="store_true", default=False,
                       help=_("Expand groups in comps format"))
     parser.add_option("--spec", action="store_true", help=_("Print build spec"))
-    parser.add_option("--show-blocked", action="store_true", dest="incl_blocked", help=_("Show blocked packages"))
+    parser.add_option("--show-blocked", action="store_true", dest="incl_blocked",
+                      help=_("Show blocked packages"))
     (options, args) = parser.parse_args(args)
     if len(args) != 1:
         parser.error(_("Incorrect number of arguments"))
@@ -5232,9 +5332,11 @@ def anon_handle_list_external_repos(goptions, session, args):
     parser.add_option("--id", type="int", help=_("Select by id"))
     parser.add_option("--tag", help=_("Select by tag"))
     parser.add_option("--used", action='store_true', help=_("List which tags use the repo(s)"))
-    parser.add_option("--inherit", action='store_true', help=_("Follow tag inheritance when selecting by tag"))
+    parser.add_option("--inherit", action='store_true',
+                      help=_("Follow tag inheritance when selecting by tag"))
     parser.add_option("--event", type='int', metavar="EVENT#", help=_("Query at event"))
-    parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("Query at last event before timestamp"))
+    parser.add_option("--ts", type='int', metavar="TIMESTAMP",
+                      help=_("Query at last event before timestamp"))
     parser.add_option("--repo", type='int', metavar="REPO#",
                       help=_("Query at event corresponding to (nonexternal) repo"))
     parser.add_option("--quiet", action="store_true", default=goptions.quiet,
@@ -5413,7 +5515,8 @@ def handle_remove_external_repo(goptions, session, args):
     if delete:
         # removing entirely
         if current_tags and not options.force:
-            print(_("Error: external repo %s used by tag(s): %s") % (repo, ', '.join(current_tags)))
+            print(_("Error: external repo %s used by tag(s): %s") %
+                  (repo, ', '.join(current_tags)))
             print(_("Use --force to remove anyway"))
             return 1
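Note: the spin-livecd and spin-livemedia hunks below also drop the explicit "+" between adjacent literals. Neighbouring string literals already merge at compile time, so the operator only added line-continuation noise. A hypothetical before/after sketch (the usage text here is invented for illustration):

    # Both forms yield the same constant; the second needs no "+" at each break.
    usage_old = ("usage: %prog example [options] " +
                 "more arguments here")
    usage_new = ("usage: %prog example [options] "
                 "more arguments here")
    assert usage_old == usage_new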
session.deleteExternalRepo(args[0]) @@ -5431,8 +5534,8 @@ def handle_spin_livecd(options, session, args): """[build] Create a live CD image given a kickstart file""" # Usage & option parsing. - usage = _("usage: %prog spin-livecd [options] " + - " ") + usage = _("usage: %prog spin-livecd [options] " + "") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--wait", action="store_true", help=_("Wait on the livecd creation, even if running in the background")) @@ -5449,8 +5552,8 @@ def handle_spin_livecd(options, session, args): parser.add_option("--scratch", action="store_true", help=_("Create a scratch LiveCD image")) parser.add_option("--repo", action="append", - help=_("Specify a repo that will override the repo used to install " + - "RPMs in the LiveCD. May be used multiple times. The " + + help=_("Specify a repo that will override the repo used to install " + "RPMs in the LiveCD. May be used multiple times. The " "build tag repo associated with the target is the default.")) parser.add_option("--release", help=_("Forcibly set the release field")) parser.add_option("--volid", help=_("Set the volume id")) @@ -5463,8 +5566,8 @@ def handle_spin_livecd(options, session, args): # Make sure the target and kickstart is specified. print('spin-livecd is deprecated and will be replaced with spin-livemedia') if len(args) != 5: - parser.error(_("Five arguments are required: a name, a version, an" + - " architecture, a build target, and a relative path to" + + parser.error(_("Five arguments are required: a name, a version, an" + " architecture, a build target, and a relative path to" " a kickstart file.")) if task_options.volid is not None and len(task_options.volid) > 32: parser.error(_('Volume ID has a maximum length of 32 characters')) @@ -5476,8 +5579,8 @@ def handle_spin_livemedia(options, session, args): """[build] Create a livemedia image given a kickstart file""" # Usage & option parsing. - usage = _("usage: %prog spin-livemedia [options] " + - " ") + usage = _("usage: %prog spin-livemedia [options] " + "") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--wait", action="store_true", help=_("Wait on the livemedia creation, even if running in the background")) @@ -5496,8 +5599,8 @@ def handle_spin_livemedia(options, session, args): parser.add_option("--scratch", action="store_true", help=_("Create a scratch LiveMedia image")) parser.add_option("--repo", action="append", - help=_("Specify a repo that will override the repo used to install " + - "RPMs in the LiveMedia. May be used multiple times. The " + + help=_("Specify a repo that will override the repo used to install " + "RPMs in the LiveMedia. May be used multiple times. 
The " "build tag repo associated with the target is the default.")) parser.add_option("--release", help=_("Forcibly set the release field")) parser.add_option("--volid", help=_("Set the volume id")) @@ -5507,7 +5610,8 @@ def handle_spin_livemedia(options, session, args): help=_("Do not attempt to tag package")) parser.add_option("--can-fail", action="store", dest="optional_arches", metavar="ARCH1,ARCH2,...", default="", - help=_("List of archs which are not blocking for build (separated by commas.")) + help=_("List of archs which are not blocking for build " + "(separated by commas.")) parser.add_option('--lorax_dir', metavar='DIR', help=_('The relative path to the lorax templates ' 'directory within the checkout of "lorax_url".')) @@ -5519,9 +5623,9 @@ def handle_spin_livemedia(options, session, args): # Make sure the target and kickstart is specified. if len(args) != 5: - parser.error(_("Five arguments are required: a name, a version, a" + - " build target, an architecture, and a relative path to" + - " a kickstart file.")) + parser.error(_("Five arguments are required: a name, a version, a " + "build target, an architecture, and a relative path to " + "a kickstart file.")) if task_options.lorax_url is not None and task_options.lorax_dir is None: parser.error(_('The "--lorax_url" option requires that "--lorax_dir" ' 'also be used.')) @@ -5536,8 +5640,8 @@ def handle_spin_appliance(options, session, args): """[build] Create an appliance given a kickstart file""" # Usage & option parsing - usage = _("usage: %prog spin-appliance [options] " + - " ") + usage = _("usage: %prog spin-appliance [options] " + "") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--wait", action="store_true", help=_("Wait on the appliance creation, even if running in the background")) @@ -5554,8 +5658,8 @@ def handle_spin_appliance(options, session, args): parser.add_option("--scratch", action="store_true", help=_("Create a scratch appliance")) parser.add_option("--repo", action="append", - help=_("Specify a repo that will override the repo used to install " + - "RPMs in the appliance. May be used multiple times. The " + + help=_("Specify a repo that will override the repo used to install " + "RPMs in the appliance. May be used multiple times. The " "build tag repo associated with the target is the default.")) parser.add_option("--release", help=_("Forcibly set the release field")) parser.add_option("--specfile", metavar="URL", @@ -5563,13 +5667,13 @@ def handle_spin_appliance(options, session, args): parser.add_option("--skip-tag", action="store_true", help=_("Do not attempt to tag package")) parser.add_option("--vmem", metavar="VMEM", default=None, - help=_("Set the amount of virtual memory in the appliance in MB, " + + help=_("Set the amount of virtual memory in the appliance in MB, " "default is 512")) parser.add_option("--vcpu", metavar="VCPU", default=None, - help=_("Set the number of virtual cpus in the appliance, " + + help=_("Set the number of virtual cpus in the appliance, " "default is 1")) parser.add_option("--format", metavar="DISK_FORMAT", default='raw', - help=_("Disk format, default is raw. Other options are qcow, " + + help=_("Disk format, default is raw. Other options are qcow, " "qcow2, and vmx.")) (task_options, args) = parser.parse_args(args) @@ -5577,20 +5681,20 @@ def handle_spin_appliance(options, session, args): # Make sure the target and kickstart is specified. 
print('spin-appliance is deprecated and will be replaced with image-build') if len(args) != 5: - parser.error(_("Five arguments are required: a name, a version, " + - "an architecture, a build target, and a relative path" + - " to a kickstart file.")) + parser.error(_("Five arguments are required: a name, a version, " + "an architecture, a build target, and a relative path " + "to a kickstart file.")) return _build_image(options, task_options, session, args, 'appliance') def handle_image_build_indirection(options, session, args): """[build] Create a disk image using other disk images via the Indirection plugin""" - usage = _("usage: %prog image-build-indirection [base_image] " + + usage = _("usage: %prog image-build-indirection [base_image] " "[utility_image] [indirection_build_template]") usage += _("\n %prog image-build --config \n") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--config", - help=_("Use a configuration file to define image-build options " + + help=_("Use a configuration file to define image-build options " "instead of command line options (they will be ignored).")) parser.add_option("--background", action="store_true", help=_("Run the image creation task at a lower priority")) @@ -5615,11 +5719,14 @@ def handle_image_build_indirection(options, session, args): parser.add_option("--utility-image-build", help=_("NVR or build ID of the utility image to be used")) parser.add_option("--indirection-template", - help=_("Name of the local file, or SCM file containing the template used to drive the indirection plugin")) + help=_("Name of the local file, or SCM file containing the template used to " + "drive the indirection plugin")) parser.add_option("--indirection-template-url", - help=_("SCM URL containing the template used to drive the indirection plugin")) + help=_("SCM URL containing the template used to drive the indirection " + "plugin")) parser.add_option("--results-loc", - help=_("Relative path inside the working space image where the results should be extracted from")) + help=_("Relative path inside the working space image where the results " + "should be extracted from")) parser.add_option("--scratch", action="store_true", help=_("Create a scratch image")) parser.add_option("--wait", action="store_true", @@ -5646,8 +5753,9 @@ def _build_image_indirection(options, task_opts, session, args): raise koji.GenericError(_("You must specify either a base-image task or build ID/NVR")) required_opts = ['name', 'version', 'arch', 'target', 'indirection_template', 'results_loc'] - optional_opts = ['indirection_template_url', 'scratch', 'utility_image_task', 'utility_image_build', - 'base_image_task', 'base_image_build', 'release', 'skip_tag'] + optional_opts = ['indirection_template_url', 'scratch', 'utility_image_task', + 'utility_image_build', 'base_image_task', 'base_image_build', 'release', + 'skip_tag'] missing = [] for opt in required_opts: @@ -5655,7 +5763,8 @@ def _build_image_indirection(options, task_opts, session, args): missing.append(opt) if len(missing) > 0: - print("Missing the following required options: %s" % ' '.join(['--%s' % o.replace('_', '-') for o in missing])) + print("Missing the following required options: %s" % + ' '.join(['--%s' % o.replace('_', '-') for o in missing])) raise koji.GenericError(_("Missing required options specified above")) activate_session(session, options) @@ -5690,7 +5799,8 @@ def _build_image_indirection(options, task_opts, session, args): if not task_opts.indirection_template_url: if not task_opts.scratch: # 
only scratch builds can omit indirection_template_url - raise koji.GenericError(_("Non-scratch builds must provide a URL for the indirection template")) + raise koji.GenericError( + _("Non-scratch builds must provide a URL for the indirection template")) templatefile = task_opts.indirection_template serverdir = unique_path('cli-image-indirection') session.uploadWrapper(templatefile, serverdir, callback=callback) @@ -5726,29 +5836,29 @@ def handle_image_build(options, session, args): 'vsphere-ova', 'vagrant-virtualbox', 'vagrant-libvirt', 'vagrant-vmware-fusion', 'vagrant-hyperv', 'docker', 'raw-xz', 'liveimg-squashfs', 'tar-gz') - usage = _("usage: %prog image-build [options] " + + usage = _("usage: %prog image-build [options] " " [ ...]") usage += _("\n %prog image-build --config \n") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--background", action="store_true", help=_("Run the image creation task at a lower priority")) parser.add_option("--config", - help=_("Use a configuration file to define image-build options " + + help=_("Use a configuration file to define image-build options " "instead of command line options (they will be ignored).")) parser.add_option("--disk-size", default=10, help=_("Set the disk device size in gigabytes")) parser.add_option("--distro", - help=_("specify the RPM based distribution the image will be based " + - "on with the format RHEL-X.Y, CentOS-X.Y, SL-X.Y, or Fedora-NN. " + - "The packages for the Distro you choose must have been built " + + help=_("specify the RPM based distribution the image will be based " + "on with the format RHEL-X.Y, CentOS-X.Y, SL-X.Y, or Fedora-NN. " + "The packages for the Distro you choose must have been built " "in this system.")) parser.add_option("--format", default=[], action="append", - help=_("Convert results to one or more formats " + - "(%s), this option may be used " % ', '.join(formats) + - "multiple times. By default, specifying this option will " + - "omit the raw disk image (which is 10G in size) from the " + - "build results. If you really want it included with converted " + - "images, pass in 'raw' as an option.")) + help=_("Convert results to one or more formats " + "(%s), this option may be used " + "multiple times. By default, specifying this option will " + "omit the raw disk image (which is 10G in size) from the " + "build results. If you really want it included with converted " + "images, pass in 'raw' as an option.") % ', '.join(formats)) parser.add_option("--kickstart", help=_("Path to a local kickstart file")) parser.add_option("--ksurl", metavar="SCMURL", help=_("The URL to the SCM containing the kickstart file")) @@ -5759,17 +5869,17 @@ def handle_image_build(options, session, args): parser.add_option("--nowait", action="store_false", dest="wait", help=_("Don't wait on image creation")) parser.add_option("--ova-option", action="append", - help=_("Override a value in the OVA description XML. Provide a value " + + help=_("Override a value in the OVA description XML. Provide a value " "in a name=value format, such as 'ovf_memory_mb=6144'")) parser.add_option("--factory-parameter", nargs=2, action="append", - help=_("Pass a parameter to Image Factory. The results are highly specific " + - "to the image format being created. This is a two argument parameter " + + help=_("Pass a parameter to Image Factory. The results are highly specific " + "to the image format being created. This is a two argument parameter " "that can be specified an arbitrary number of times. 
For example: " "--factory-parameter docker_cmd '[ \"/bin/echo Hello World\" ]'")) parser.add_option("--release", help=_("Forcibly set the release field")) parser.add_option("--repo", action="append", - help=_("Specify a repo that will override the repo used to install " + - "RPMs in the image. May be used multiple times. The " + + help=_("Specify a repo that will override the repo used to install " + "RPMs in the image. May be used multiple times. The " "build tag repo associated with the target is the default.")) parser.add_option("--scratch", action="store_true", help=_("Create a scratch image")) @@ -5777,7 +5887,8 @@ def handle_image_build(options, session, args): help=_("Do not attempt to tag package")) parser.add_option("--can-fail", action="store", dest="optional_arches", metavar="ARCH1,ARCH2,...", default="", - help=_("List of archs which are not blocking for build (separated by commas.")) + help=_("List of archs which are not blocking for build " + "(separated by commas.")) parser.add_option("--specfile", metavar="URL", help=_("SCM URL to spec file fragment to use to generate wrapper RPMs")) parser.add_option("--wait", action="store_true", @@ -5826,14 +5937,14 @@ def handle_image_build(options, session, args): else: if len(args) < 5: - parser.error(_("At least five arguments are required: a name, " + - "a version, a build target, a URL to an " + + parser.error(_("At least five arguments are required: a name, " + "a version, a build target, a URL to an " "install tree, and 1 or more architectures.")) if not task_options.ksurl and not task_options.kickstart: parser.error(_('You must specify --kickstart')) if not task_options.distro: parser.error( - _("You must specify --distro. Examples: Fedora-16, RHEL-6.4, " + + _("You must specify --distro. Examples: Fedora-16, RHEL-6.4, " "SL-6.4 or CentOS-6.4")) return _build_image_oz(options, task_options, session, args) @@ -5994,17 +6105,17 @@ def handle_win_build(options, session, args): usage = _("usage: %prog win-build [options] ") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--winspec", metavar="URL", - help=_("SCM URL to retrieve the build descriptor from. " + - "If not specified, the winspec must be in the root directory " + + help=_("SCM URL to retrieve the build descriptor from. 
" + "If not specified, the winspec must be in the root directory " "of the source repository.")) parser.add_option("--patches", metavar="URL", - help=_("SCM URL of a directory containing patches to apply " + + help=_("SCM URL of a directory containing patches to apply " "to the sources before building")) parser.add_option("--cpus", type="int", - help=_("Number of cpus to allocate to the build VM " + + help=_("Number of cpus to allocate to the build VM " "(requires admin access)")) parser.add_option("--mem", type="int", - help=_("Amount of memory (in megabytes) to allocate to the build VM " + + help=_("Amount of memory (in megabytes) to allocate to the build VM " "(requires admin access)")) parser.add_option("--static-mac", action="store_true", help=_("Retain the original MAC address when cloning the VM")) @@ -6025,7 +6136,8 @@ def handle_win_build(options, session, args): help=_("Do not print the task information"), default=options.quiet) (build_opts, args) = parser.parse_args(args) if len(args) != 3: - parser.error(_("Exactly three arguments (a build target, a SCM URL, and a VM name) are required")) + parser.error( + _("Exactly three arguments (a build target, a SCM URL, and a VM name) are required")) activate_session(session, options) target = args[0] if target.lower() == "none" and build_opts.repo_id: @@ -6121,10 +6233,12 @@ def handle_cancel(goptions, session, args): def handle_set_task_priority(goptions, session, args): "[admin] Set task priority" - usage = _("usage: %prog set-task-priority [options] --priority= [ ...]") + usage = _("usage: %prog set-task-priority [options] --priority= " + "[ ...]") parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--priority", type="int", help=_("New priority")) - parser.add_option("--recurse", action="store_true", default=False, help=_("Change priority of child tasks as well")) + parser.add_option("--recurse", action="store_true", default=False, + help=_("Change priority of child tasks as well")) (options, args) = parser.parse_args(args) if len(args) == 0: parser.error(_("You must specify at least one task id")) @@ -6213,7 +6327,8 @@ def handle_set_pkg_owner_global(goptions, session, args): parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--verbose", action='store_true', help=_("List changes")) parser.add_option("--test", action='store_true', help=_("Test mode")) - parser.add_option("--old-user", "--from", action="store", help=_("Only change ownership for packages belonging to this user")) + parser.add_option("--old-user", "--from", action="store", + help=_("Only change ownership for packages belonging to this user")) (options, args) = parser.parse_args(args) if options.old_user: if len(args) < 1: @@ -6256,11 +6371,13 @@ def handle_set_pkg_owner_global(goptions, session, args): else: if options.test: print("Would have changed owner for %s in tag %s: %s -> %s" - % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name'])) + % (entry['package_name'], entry['tag_name'], entry['owner_name'], + user['name'])) continue if options.verbose: print("Changing owner for %s in tag %s: %s -> %s" - % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name'])) + % (entry['package_name'], entry['tag_name'], entry['owner_name'], + user['name'])) session.packageListSetOwner(entry['tag_id'], entry['package_name'], user['id']) @@ -6311,8 +6428,9 @@ def anon_handle_watch_logs(goptions, session, args): usage = _("usage: %prog watch-logs [options] [ ...]") parser = 
     parser = OptionParser(usage=get_usage_str(usage))
     parser.add_option("--log", help=_("Watch only a specific log"))
-    parser.add_option("--mine", action="store_true", help=_("Watch logs for "
-        "all your tasks, task_id arguments are forbidden in this case."))
+    parser.add_option("--mine", action="store_true",
+                      help=_("Watch logs for all your tasks, task_id arguments are forbidden in "
+                             "this case."))
     parser.add_option("--follow", action="store_true", help=_("Follow spawned child tasks"))
     (options, args) = parser.parse_args(args)
     activate_session(session, goptions)
@@ -6376,7 +6494,9 @@ def handle_tag_build(opts, session, args):
     parser.add_option("--nowait", action="store_true", help=_("Do not wait on task"))
     (options, args) = parser.parse_args(args)
     if len(args) < 2:
-        parser.error(_("This command takes at least two arguments: a tag name/ID and one or more package n-v-r's"))
+        parser.error(
+            _("This command takes at least two arguments: a tag name/ID and one or more package "
+              "n-v-r's"))
     activate_session(session, opts)
     tasks = []
     for pkg in args[1:]:
@@ -6398,13 +6518,18 @@ def handle_move_build(opts, session, args):
     parser = OptionParser(usage=get_usage_str(usage))
     parser.add_option("--force", action="store_true", help=_("force operation"))
     parser.add_option("--nowait", action="store_true", help=_("do not wait on tasks"))
-    parser.add_option("--all", action="store_true", help=_("move all instances of a package, <pkg>'s are package names"))
+    parser.add_option("--all", action="store_true",
+                      help=_("move all instances of a package, <pkg>'s are package names"))
     (options, args) = parser.parse_args(args)
     if len(args) < 3:
         if options.all:
-            parser.error(_("This command, with --all, takes at least three arguments: two tags and one or more package names"))
+            parser.error(
+                _("This command, with --all, takes at least three arguments: two tags and one or "
+                  "more package names"))
         else:
-            parser.error(_("This command takes at least three arguments: two tags and one or more package n-v-r's"))
+            parser.error(
+                _("This command takes at least three arguments: two tags and one or more package "
+                  "n-v-r's"))
     activate_session(session, opts)
     tasks = []
     builds = []
@@ -6442,8 +6567,10 @@ def handle_untag_build(goptions, session, args):
     "[bind] Remove a tag from one or more builds"
     usage = _("usage: %prog untag-build [options] <tag> <pkg> [<pkg> ...]")
     parser = OptionParser(usage=get_usage_str(usage))
-    parser.add_option("--all", action="store_true", help=_("untag all versions of the package in this tag"))
+    parser.add_option("--all", action="store_true",
+                      help=_("untag all versions of the package in this tag"))
-    parser.add_option("--non-latest", action="store_true", help=_("untag all versions of the package in this tag except the latest"))
+    parser.add_option("--non-latest", action="store_true",
+                      help=_("untag all versions of the package in this tag except the latest"))
     parser.add_option("-n", "--test", action="store_true", help=_("test mode"))
     parser.add_option("-v", "--verbose", action="store_true", help=_("print details"))
     parser.add_option("--force", action="store_true", help=_("force operation"))
@@ -6452,7 +6579,9 @@ def handle_untag_build(goptions, session, args):
     if len(args) < 1:
         parser.error(_("Please specify a tag"))
     elif len(args) < 2:
-        parser.error(_("This command takes at least two arguments: a tag name/ID and one or more package n-v-r's"))
+        parser.error(
+            _("This command takes at least two arguments: a tag name/ID and one or more package "
+              "n-v-r's"))
     activate_session(session, goptions)
     tag = session.getTag(args[0])
     if not tag:
@@ -6526,8 +6655,11 @@ def anon_handle_download_build(options, session, args):
     parser = OptionParser(usage=get_usage_str(usage))
     parser.add_option("--arch", "-a", dest="arches", metavar="ARCH", action="append", default=[],
                       help=_("Only download packages for this arch (may be used multiple times)"))
-    parser.add_option("--type", help=_("Download archives of the given type, rather than rpms (maven, win, or image)"))
+    parser.add_option("--type",
+                      help=_("Download archives of the given type, rather than rpms "
+                             "(maven, win, or image)"))
-    parser.add_option("--latestfrom", dest="latestfrom", help=_("Download the latest build from this tag"))
+    parser.add_option("--latestfrom", dest="latestfrom",
+                      help=_("Download the latest build from this tag"))
     parser.add_option("--debuginfo", action="store_true", help=_("Also download -debuginfo rpms"))
     parser.add_option("--task-id", action="store_true", help=_("Interperet id as a task id"))
     parser.add_option("--rpm", action="store_true", help=_("Download the given rpm"))
@@ -6562,7 +6694,8 @@ def anon_handle_download_build(options, session, args):
     if suboptions.latestfrom:
         # We want the latest build, not a specific build
         try:
-            builds = session.listTagged(suboptions.latestfrom, latest=True, package=build, type=suboptions.type)
+            builds = session.listTagged(suboptions.latestfrom, latest=True, package=build,
+                                        type=suboptions.type)
         except koji.GenericError as data:
             print("Error finding latest build: %s" % data)
             return 1
@@ -6630,7 +6763,8 @@ def anon_handle_download_build(options, session, args):
         rpms = session.listRPMs(buildID=info['id'], arches=arches)
         if not rpms:
             if arches:
-                print("No %s packages available for %s" % (" or ".join(arches), koji.buildLabel(info)))
+                print("No %s packages available for %s" %
+                      (" or ".join(arches), koji.buildLabel(info)))
             else:
                 print("No packages available for %s" % koji.buildLabel(info))
             return 1
@@ -6706,7 +6840,8 @@ def anon_handle_download_logs(options, session, args):
             offset = 0
             try:
                 while contents:
-                    contents = session.downloadTaskOutput(task_id, filename, offset=offset, size=blocksize, volume=volume)
+                    contents = session.downloadTaskOutput(task_id, filename, offset=offset,
+                                                          size=blocksize, volume=volume)
                     offset += len(contents)
                     if contents:
                         fd.write(contents)
@@ -6774,7 +6909,8 @@ def anon_handle_download_task(options, session, args):
     parser = OptionParser(usage=get_usage_str(usage))
     parser.add_option("--arch", dest="arches", metavar="ARCH", action="append", default=[],
                       help=_("Only download packages for this arch (may be used multiple times)"))
-    parser.add_option("--logs", dest="logs", action="store_true", default=False, help=_("Also download build logs"))
+    parser.add_option("--logs", dest="logs", action="store_true", default=False,
+                      help=_("Also download build logs"))
     parser.add_option("--topurl", metavar="URL", default=options.topurl,
                       help=_("URL under which Koji files are accessible"))
     parser.add_option("--noprogress", action="store_true",
@@ -6854,7 +6990,8 @@ def anon_handle_download_task(options, session, args):
         if '..' in filename:
             error(_('Invalid file name: %s') % filename)
         url = '%s/%s/%s' % (pathinfo.work(volume), pathinfo.taskrelpath(task["id"]), filename)
-        download_file(url, new_filename, suboptions.quiet, suboptions.noprogress, len(downloads), number)
+        download_file(url, new_filename, suboptions.quiet, suboptions.noprogress, len(downloads),
+                      number)


 def anon_handle_wait_repo(options, session, args):
@@ -6862,10 +6999,16 @@
     usage = _("usage: %prog wait-repo [options] <tag>")
     parser = OptionParser(usage=get_usage_str(usage))
     parser.add_option("--build", metavar="NVR", dest="builds", action="append", default=[],
-                      help=_("Check that the given build is in the newly-generated repo (may be used multiple times)"))
+                      help=_("Check that the given build is in the newly-generated repo "
+                             "(may be used multiple times)"))
-    parser.add_option("--target", action="store_true", help=_("Interpret the argument as a build target name"))
+    parser.add_option("--target", action="store_true",
+                      help=_("Interpret the argument as a build target name"))
-    parser.add_option("--timeout", type="int", help=_("Amount of time to wait (in minutes) before giving up (default: 120)"), default=120)
+    parser.add_option("--timeout", type="int", default=120,
+                      help=_("Amount of time to wait (in minutes) before giving up "
+                             "(default: 120)"))
-    parser.add_option("--quiet", action="store_true", help=_("Suppress output, success or failure will be indicated by the return value only"), default=options.quiet)
+    parser.add_option("--quiet", action="store_true", default=options.quiet,
+                      help=_("Suppress output, success or failure will be indicated by the return "
+                             "value only"))
     (suboptions, args) = parser.parse_args(args)

     start = time.time()
@@ -6906,24 +7049,30 @@
     else:
         present_nvr = [x["nvr"] for x in data][0]
         if present_nvr != "%s-%s-%s" % (nvr["name"], nvr["version"], nvr["release"]):
-            print("Warning: nvr %s-%s-%s is not current in tag %s\n latest build in %s is %s" % (nvr["name"], nvr["version"], nvr["release"], tag, tag, present_nvr))
+            print(
+                "Warning: nvr %s-%s-%s is not current in tag %s\n latest build in %s is %s" %
+                (nvr["name"], nvr["version"], nvr["release"], tag, tag, present_nvr))

     last_repo = None
     repo = session.getRepo(tag_id)

     while True:
         if builds and repo and repo != last_repo:
-            if koji.util.checkForBuilds(session, tag_id, builds, repo['create_event'], latest=True):
+            if koji.util.checkForBuilds(session, tag_id, builds, repo['create_event'],
+                                        latest=True):
                 if not suboptions.quiet:
-                    print("Successfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag))
+                    print("Successfully waited %s for %s to appear in the %s repo" %
+                          (koji.util.duration(start), koji.util.printList(suboptions.builds), tag))
                 return

         if (time.time() - start) >= (suboptions.timeout * 60.0):
             if not suboptions.quiet:
                 if builds:
-                    print("Unsuccessfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag))
+                    print("Unsuccessfully waited %s for %s to appear in the %s repo" %
+                          (koji.util.duration(start), koji.util.printList(suboptions.builds), tag))
                 else:
-                    print("Unsuccessfully waited %s for a new %s repo" % (koji.util.duration(start), tag))
+                    print("Unsuccessfully waited %s for a new %s repo" %
+                          (koji.util.duration(start), tag))
             return 1

         time.sleep(options.poll_interval)
@@ -6933,7 +7082,8 @@ def anon_handle_wait_repo(options, session, args):
     if not builds:
         if repo != last_repo:
             if not suboptions.quiet:
-                print("Successfully waited %s for a new %s repo" % (koji.util.duration(start), tag))
+                print("Successfully waited %s for a new %s repo" %
+                      (koji.util.duration(start), tag))
             return
@@ -6941,11 +7091,14 @@ def handle_regen_repo(options, session, args):
     "[admin] Force a repo to be regenerated"
     usage = _("usage: %prog regen-repo [options] <tag>")
     parser = OptionParser(usage=get_usage_str(usage))
-    parser.add_option("--target", action="store_true", help=_("Interpret the argument as a build target name"))
+    parser.add_option("--target", action="store_true",
+                      help=_("Interpret the argument as a build target name"))
     parser.add_option("--nowait", action="store_true", help=_("Don't wait on for regen to finish"))
     parser.add_option("--debuginfo", action="store_true", help=_("Include debuginfo rpms in repo"))
-    parser.add_option("--source", "--src", action="store_true", help=_("Include source rpms in each of repos"))
+    parser.add_option("--source", "--src", action="store_true",
+                      help=_("Include source rpms in each of repos"))
-    parser.add_option("--separate-source", "--separate-src", action="store_true", help=_("Include source rpms in separate src repo"))
+    parser.add_option("--separate-source", "--separate-src", action="store_true",
+                      help=_("Include source rpms in separate src repo"))
     (suboptions, args) = parser.parse_args(args)
     if len(args) == 0:
         parser.error(_("A tag name must be specified"))
@@ -7006,8 +7159,8 @@ def handle_dist_repo(options, session, args):
                       help=_('For RPMs not signed with a desired key, fall back to the '
                              'primary copy'))
     parser.add_option("-a", "--arch", action='append', default=[],
-                      help=_("Indicate an architecture to consider. The default is all " +
-                             "architectures associated with the given tag. This option may " +
+                      help=_("Indicate an architecture to consider. The default is all "
+                             "architectures associated with the given tag. This option may "
                              "be specified multiple times."))
     parser.add_option("--with-src", action='store_true', help='Also generate a src repo')
     parser.add_option("--split-debuginfo", action='store_true', default=False,
@@ -7035,7 +7188,8 @@ def handle_dist_repo(options, session, args):
     parser.add_option('--zck', action='store_true', default=False,
                       help=_('Generate zchunk files as well as the standard repodata'))
     parser.add_option('--zck-dict-dir', action='store', default=None,
-                      help=_('Directory containing compression dictionaries for use by zchunk (on builder)'))
+                      help=_('Directory containing compression dictionaries for use by zchunk '
+                             '(on builder)'))
     task_opts, args = parser.parse_args(args)
     if len(args) < 1:
         parser.error(_('You must provide a tag to generate the repo from'))
@@ -7210,7 +7364,11 @@ def anon_handle_list_notifications(goptions, session, args):
         user_id = None

     mask = "%(id)6s %(tag)-25s %(package)-25s %(email)-20s %(success)-12s"
-    headers = {'id': 'ID', 'tag': 'Tag', 'package': 'Package', 'email': 'E-mail', 'success': 'Success-only'}
+    headers = {'id': 'ID',
+               'tag': 'Tag',
+               'package': 'Package',
+               'email': 'E-mail',
+               'success': 'Success-only'}
     head = mask % headers
     notifications = session.getBuildNotifications(user_id)
     if notifications:
@@ -7300,7 +7458,8 @@ def handle_add_notification(goptions, session, args):

 def handle_remove_notification(goptions, session, args):
     "[monitor] Remove user's notifications"
-    usage = _("usage: %prog remove-notification [options] <notification_id> [<notification_id> ...]")
+    usage = _("usage: %prog remove-notification [options] <notification_id> "
+              "[<notification_id> ...]")
     parser = OptionParser(usage=get_usage_str(usage))

     (options, args) = parser.parse_args(args)
@@ -7422,7 +7581,7 @@ def handle_block_notification(goptions, session, args):
         tag_id = None

     for block in session.getBuildNotificationBlocks(user_id):
-        if (block['package_id'] == package_id and block['tag_id'] == tag_id):
+        if block['package_id'] == package_id and block['tag_id'] == tag_id:
             parser.error('Notification already exists.')

     session.createNotificationBlock(user_id, package_id, tag_id)
@@ -7430,7 +7589,8 @@

 def handle_unblock_notification(goptions, session, args):
     "[monitor] Unblock user's notification"
-    usage = _("usage: %prog unblock-notification [options] <notification_id> [<notification_id> ...]")
+    usage = _("usage: %prog unblock-notification [options] <notification_id> "
+              "[<notification_id> ...]")
     parser = OptionParser(usage=get_usage_str(usage))

     (options, args) = parser.parse_args(args)
diff --git a/cli/koji_cli/lib.py b/cli/koji_cli/lib.py
index dc4b663f..c2cc6c8e 100644
--- a/cli/koji_cli/lib.py
+++ b/cli/koji_cli/lib.py
@@ -112,7 +112,8 @@ def ensure_connection(session):
     except requests.exceptions.ConnectionError:
         error(_("Error: Unable to connect to server"))
     if ret != koji.API_VERSION:
-        warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
+        warn(_("WARNING: The server is at API version %d and "
+               "the client is at %d" % (ret, koji.API_VERSION)))


 def print_task_headers():
@@ -194,7 +195,8 @@ class TaskWatcher(object):
             laststate = last['state']
             if laststate != state:
                 if not self.quiet:
-                    print("%s: %s -> %s" % (self.str(), self.display_state(last), self.display_state(self.info)))
+                    print("%s: %s -> %s" % (self.str(), self.display_state(last),
+                                            self.display_state(self.info)))
                 return True
             return False
         else:
@@ -277,9 +279,9 @@ def watch_tasks(session, tasklist, quiet=False, poll_interval=60, ki_handler=Non
         tlist = ['%s: %s' % (t.str(), t.display_state(t.info))
                  for t in tasks.values() if not t.is_done()]
         print(
-            """Tasks still running. You can continue to watch with the '%s watch-task' command.
-Running Tasks:
-%s""" % (progname, '\n'.join(tlist)))
+            "Tasks still running. You can continue to watch with the"
+            " '%s watch-task' command.\n"
+            "Running Tasks:\n%s" % (progname, '\n'.join(tlist)))
     sys.stdout.flush()
     rv = 0
     try:
@@ -302,7 +304,8 @@ Running Tasks:
             for child in session.getTaskChildren(task_id):
                 child_id = child['id']
                 if child_id not in tasks.keys():
-                    tasks[child_id] = TaskWatcher(child_id, session, task.level + 1, quiet=quiet)
+                    tasks[child_id] = TaskWatcher(child_id, session, task.level + 1,
+                                                  quiet=quiet)
                     tasks[child_id].update()
                     # If we found new children, go through the list again,
                     # in case they have children also
@@ -370,7 +373,8 @@ def watch_logs(session, tasklist, opts, poll_interval):
                 if (log, volume) not in taskoffsets:
                     taskoffsets[(log, volume)] = 0

-                contents = session.downloadTaskOutput(task_id, log, taskoffsets[(log, volume)], 16384, volume=volume)
+                contents = session.downloadTaskOutput(task_id, log, taskoffsets[(log, volume)],
+                                                      16384, volume=volume)
                 taskoffsets[(log, volume)] += len(contents)
                 if contents:
                     currlog = "%d:%s:%s:" % (task_id, volume, log)
@@ -452,7 +456,9 @@ def _progress_callback(uploaded, total, piece, time, total_time):
         speed = _format_size(float(total) / float(total_time)) + "/sec"

     # write formated string and flush
-    sys.stdout.write("[% -36s] % 4s % 8s % 10s % 14s\r" % ('=' * (int(percent_done * 36)), percent_done_str, elapsed, data_done, speed))
+    sys.stdout.write("[% -36s] % 4s % 8s % 10s % 14s\r" % ('=' * (int(percent_done * 36)),
+                                                           percent_done_str, elapsed, data_done,
+                                                           speed))
     sys.stdout.flush()
@@ -520,7 +526,8 @@ def _download_progress(download_t, download_d):
         percent_done_str = "%3d%%" % (percent_done * 100)
     data_done = _format_size(download_d)

-    sys.stdout.write("[% -36s] % 4s % 10s\r" % ('=' * (int(percent_done * 36)), percent_done_str, data_done))
+    sys.stdout.write("[% -36s] % 4s % 10s\r" % ('=' * (int(percent_done * 36)), percent_done_str,
+                                                data_done))
     sys.stdout.flush()
@@ -560,13 +567,16 @@ def activate_session(session, options):
     elif options.authtype == "ssl" or os.path.isfile(options.cert) and options.authtype is None:
         # authenticate using SSL client cert
         session.ssl_login(options.cert, None, options.serverca, proxyuser=runas)
-    elif options.authtype == "password" or getattr(options, 'user', None) and options.authtype is None:
+    elif options.authtype == "password" \
+            or getattr(options, 'user', None) \
+            and options.authtype is None:
         # authenticate using user/password
         session.login()
     elif options.authtype == "kerberos" or has_krb_creds() and options.authtype is None:
         try:
             if getattr(options, 'keytab', None) and getattr(options, 'principal', None):
-                session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=runas)
+                session.krb_login(principal=options.principal, keytab=options.keytab,
+                                  proxyuser=runas)
             else:
                 session.krb_login(proxyuser=runas)
         except socket.error as e:
diff --git a/hub/kojihub.py b/hub/kojihub.py
index 4fc9d351..b350cca7 100644
--- a/hub/kojihub.py
+++ b/hub/kojihub.py
@@ -636,10 +636,12 @@ def make_task(method, arglist, **opts):
     opts['request'] = koji.xmlrpcplus.dumps(tuple(arglist), methodname=method)
     opts['state'] = koji.TASK_STATES['FREE']
     opts['method'] = method
-    koji.plugin.run_callbacks('preTaskStateChange', attribute='state', old=None, new='FREE', info=opts)
+    koji.plugin.run_callbacks(
+        'preTaskStateChange', attribute='state', old=None, new='FREE', info=opts)
     # stick it in the database
-    idata = dslice(opts, ['state', 'owner', 'method', 'request', 'priority', 'parent', 'label', 'channel_id', 'arch'])
+    idata = dslice(opts, ['state', 'owner', 'method', 'request', 'priority', 'parent', 'label',
+                          'channel_id', 'arch'])
     if opts.get('assign'):
         idata['state'] = koji.TASK_STATES['ASSIGNED']
         idata['host_id'] = opts['assign']
@@ -647,7 +649,8 @@ def make_task(method, arglist, **opts):
     insert.execute()
     task_id = _singleValue("SELECT currval('task_id_seq')", strict=True)
     opts['id'] = task_id
-    koji.plugin.run_callbacks('postTaskStateChange', attribute='state', old=None, new='FREE', info=opts)
+    koji.plugin.run_callbacks(
+        'postTaskStateChange', attribute='state', old=None, new='FREE', info=opts)
     return task_id
@@ -660,8 +663,8 @@ def eventCondition(event, table=None):
     if event is None:
         return """(%(table)sactive = TRUE)""" % locals()
     elif isinstance(event, six.integer_types):
-        return """(%(table)screate_event <= %(event)d AND ( %(table)srevoke_event IS NULL OR %(event)d < %(table)srevoke_event ))""" \
-            % locals()
+        return "(%(table)screate_event <= %(event)d AND ( %(table)srevoke_event IS NULL OR " \
+               "%(event)d < %(table)srevoke_event ))" % locals()
     else:
         raise koji.GenericError("Invalid event: %r" % event)
@@ -681,7 +684,8 @@ def readGlobalInheritance(event=None):

 def readInheritanceData(tag_id, event=None):
     c = context.cnx.cursor()
-    fields = ('parent_id', 'name', 'priority', 'maxdepth', 'intransitive', 'noconfig', 'pkg_filter')
+    fields = ('parent_id', 'name', 'priority', 'maxdepth', 'intransitive', 'noconfig',
+              'pkg_filter')
     q = """SELECT %s FROM tag_inheritance JOIN tag ON parent_id = id
     WHERE %s AND tag_id = %%(tag_id)i
     ORDER BY priority
@@ -697,7 +701,8 @@ def readInheritanceData(tag_id, event=None):

 def readDescendantsData(tag_id, event=None):
     c = context.cnx.cursor()
-    fields = ('tag_id', 'parent_id', 'name', 'priority', 'maxdepth', 'intransitive', 'noconfig', 'pkg_filter')
+    fields = ('tag_id', 'parent_id', 'name', 'priority', 'maxdepth', 'intransitive', 'noconfig',
+              'pkg_filter')
     q = """SELECT %s FROM tag_inheritance JOIN tag ON tag_id = id
     WHERE %s AND parent_id = %%(tag_id)i
     ORDER BY priority
@@ -778,7 +783,8 @@ def _writeInheritanceData(tag_id, changes, clear=False):
                 continue
             # oops, duplicate entries for a single priority
             dup_ids = [link['parent_id'] for link in dups]
-            raise koji.GenericError("Inheritance priorities must be unique (pri %s: %r )" % (pri, dup_ids))
+            raise koji.GenericError("Inheritance priorities must be unique (pri %s: %r )" %
+                                    (pri, dup_ids))
     for parent_id, link in six.iteritems(data):
         if not link.get('is_update'):
             continue
@@ -809,11 +815,13 @@ def readFullInheritance(tag_id, event=None, reverse=False, stops=None, jumps=Non
     if jumps is None:
         jumps = {}
     order = []
-    readFullInheritanceRecurse(tag_id, event, order, stops, {}, {}, 0, None, False, [], reverse, jumps)
+    readFullInheritanceRecurse(tag_id, event, order, stops, {}, {}, 0, None, False, [], reverse,
+                               jumps)
     return order


-def readFullInheritanceRecurse(tag_id, event, order, prunes, top, hist, currdepth, maxdepth, noconfig, pfilter, reverse, jumps):
+def readFullInheritanceRecurse(tag_id, event, order, prunes, top, hist, currdepth, maxdepth,
+                               noconfig, pfilter, reverse, jumps):
     if maxdepth is not None and maxdepth < 1:
         return
     # note: maxdepth is relative to where we are, but currdepth is absolute from
@@ -905,7 +913,8 @@ def readFullInheritanceRecurse(tag_id, event, order, prunes, top, hist, currdept
         if link['intransitive'] and reverse:
             # add link, but don't follow it
             continue
-        readFullInheritanceRecurse(id, event, order, prunes, top, hist, currdepth, nextdepth, noconfig, filter, reverse, jumps)
+        readFullInheritanceRecurse(id, event, order, prunes, top, hist, currdepth, nextdepth,
+                                   noconfig, filter, reverse, jumps)

 # tag-package operations
 #       add
@@ -953,7 +962,8 @@ def _pkglist_add(tag_id, pkg_id, owner, block, extra_arches):
     _pkglist_owner_add(tag_id, pkg_id, owner)


-def pkglist_add(taginfo, pkginfo, owner=None, block=None, extra_arches=None, force=False, update=False):
+def pkglist_add(taginfo, pkginfo, owner=None, block=None, extra_arches=None, force=False,
+                update=False):
     """Add to (or update) package list for tag"""
     return _direct_pkglist_add(taginfo, pkginfo, owner, block, extra_arches, force, update,
                                policy=True)
@@ -1064,9 +1074,11 @@ def _direct_pkglist_remove(taginfo, pkginfo, force=False, policy=False):
         if not (force and context.session.hasPerm('admin')):
             assert_policy('package_list', policy_data)
     user = get_user(context.session.user_id)
-    koji.plugin.run_callbacks('prePackageListChange', action='remove', tag=tag, package=pkg, user=user)
+    koji.plugin.run_callbacks(
+        'prePackageListChange', action='remove', tag=tag, package=pkg, user=user)
     _pkglist_remove(tag['id'], pkg['id'])
-    koji.plugin.run_callbacks('postPackageListChange', action='remove', tag=tag, package=pkg, user=user)
+    koji.plugin.run_callbacks(
+        'postPackageListChange', action='remove', tag=tag, package=pkg, user=user)


 def pkglist_block(taginfo, pkginfo, force=False):
@@ -1094,7 +1106,8 @@ def pkglist_unblock(taginfo, pkginfo, force=False):
     if not (force and context.session.hasPerm('admin')):
         assert_policy('package_list', policy_data)
     user = get_user(context.session.user_id)
-    koji.plugin.run_callbacks('prePackageListChange', action='unblock', tag=tag, package=pkg, user=user)
+    koji.plugin.run_callbacks(
+        'prePackageListChange', action='unblock', tag=tag, package=pkg, user=user)
     tag_id = tag['id']
     pkg_id = pkg['id']
     pkglist = readPackageList(tag_id, pkgID=pkg_id, inherit=True)
@@ -1114,7 +1127,8 @@ def pkglist_unblock(taginfo, pkginfo, force=False):
     pkglist = readPackageList(tag_id, pkgID=pkg_id, inherit=True)
     if pkg_id not in pkglist or pkglist[pkg_id]['blocked']:
         _pkglist_add(tag_id, pkg_id, previous['owner_id'], False, previous['extra_arches'])
-    koji.plugin.run_callbacks('postPackageListChange', action='unblock', tag=tag, package=pkg, user=user)
+    koji.plugin.run_callbacks(
+        'postPackageListChange', action='unblock', tag=tag, package=pkg, user=user)


 def pkglist_setowner(taginfo, pkginfo, owner, force=False):
@@ -1127,7 +1141,8 @@ def pkglist_setarches(taginfo, pkginfo, arches, force=False):
     pkglist_add(taginfo, pkginfo, extra_arches=arches, force=force, update=True)


-def readPackageList(tagID=None, userID=None, pkgID=None, event=None, inherit=False, with_dups=False):
+def readPackageList(tagID=None, userID=None, pkgID=None, event=None, inherit=False,
+                    with_dups=False):
     """Returns the package list for the specified tag or user.
     One of (tagID,userID,pkgID) must be specified
@@ -1268,7 +1283,8 @@ def list_tags(build=None, package=None, perms=True, queryOpts=None):
         packageinfo = lookup_package(package)
         if not packageinfo:
             raise koji.GenericError('invalid package: %s' % package)
-        fields.extend(['users.id', 'users.name', 'tag_packages.blocked', 'tag_packages.extra_arches'])
+        fields.extend(
+            ['users.id', 'users.name', 'tag_packages.blocked', 'tag_packages.extra_arches'])
         aliases.extend(['owner_id', 'owner_name', 'blocked', 'extra_arches'])
         joins.append('tag_packages ON tag.id = tag_packages.tag_id')
         clauses.append('tag_packages.active = true')
@@ -1286,7 +1302,8 @@ def list_tags(build=None, package=None, perms=True, queryOpts=None):
     return query.iterate()


-def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, owner=None, type=None):
+def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, owner=None,
+                     type=None):
     """Returns a list of builds for specified tag

     set inherit=True to follow inheritance
@@ -1314,7 +1331,8 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None,
     # these values are used for each iteration
     fields = [('tag.id', 'tag_id'), ('tag.name', 'tag_name'), ('build.id', 'id'),
               ('build.id', 'build_id'), ('build.version', 'version'), ('build.release', 'release'),
-              ('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'),
+              ('build.epoch', 'epoch'), ('build.state', 'state'),
+              ('build.completion_time', 'completion_time'),
               ('build.start_time', 'start_time'),
               ('build.task_id', 'task_id'),
               ('events.id', 'creation_event_id'), ('events.time', 'creation_time'),
@@ -1358,7 +1376,8 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None,
           JOIN volume ON volume.id = build.volume_id
     WHERE %s AND tag_id=%%(tagid)s
         AND build.state=%%(st_complete)i
-    """ % (', '.join([pair[0] for pair in fields]), type_join, eventCondition(event, 'tag_listing'))
+    """ % (', '.join([pair[0] for pair in fields]), type_join,
+           eventCondition(event, 'tag_listing'))
     if package:
         q += """AND package.name = %(package)s
         """
@@ -1393,7 +1412,8 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None,
     return builds


-def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, latest=True, rpmsigs=False, owner=None, type=None):
+def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, latest=True,
+                   rpmsigs=False, owner=None, type=None):
     """Returns a list of rpms for specified tag

     set inherit=True to follow inheritance
@@ -1410,7 +1430,8 @@ def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, late
         # (however, it is fairly quick)
         taglist += [link['parent_id'] for link in readFullInheritance(tag, event)]

-    builds = readTaggedBuilds(tag, event=event, inherit=inherit, latest=latest, package=package, owner=owner, type=type)
+    builds = readTaggedBuilds(tag, event=event, inherit=inherit, latest=latest, package=package,
+                              owner=owner, type=type)

     # index builds
     build_idx = dict([(b['build_id'], b) for b in builds])
@@ -1505,7 +1526,8 @@ def readTaggedArchives(tag, package=None, event=None, inherit=False, latest=True
         taglist += [link['parent_id'] for link in readFullInheritance(tag, event)]

     # If type == 'maven', we require that both the build *and* the archive have Maven metadata
-    builds = readTaggedBuilds(tag, event=event, inherit=inherit, latest=latest, package=package, type=type)
+    builds = readTaggedBuilds(tag, event=event, inherit=inherit, latest=latest, package=package,
+                              type=type)

     # index builds
     build_idx = dict([(b['build_id'], b) for b in builds])
@@ -1699,7 +1721,8 @@ def _untag_build(tag, build, user_id=None, strict=True, force=False):

 def _direct_untag_build(tag, build, user, strict=True, force=False):
     """Directly untag a build. No access check or value lookup."""
-    koji.plugin.run_callbacks('preUntag', tag=tag, build=build, user=user, force=force, strict=strict)
+    koji.plugin.run_callbacks(
+        'preUntag', tag=tag, build=build, user=user, force=force, strict=strict)
     values = {'tag_id': tag['id'], 'build_id': build['id']}
     update = UpdateProcessor('tag_listing', values=values,
                              clauses=['tag_id=%(tag_id)i', 'build_id=%(build_id)i'])
@@ -1708,7 +1731,8 @@ def _direct_untag_build(tag, build, user, strict=True, force=False):
     if count == 0 and strict:
         nvr = "%(name)s-%(version)s-%(release)s" % build
         raise koji.TagError("build %s not in tag %s" % (nvr, tag['name']))
-    koji.plugin.run_callbacks('postUntag', tag=tag, build=build, user=user, force=force, strict=strict)
+    koji.plugin.run_callbacks(
+        'postUntag', tag=tag, build=build, user=user, force=force, strict=strict)


 # tag-group operations
@@ -1909,7 +1933,9 @@ def _grp_pkg_add(taginfo, grpinfo, pkg_name, block, force, **opts):
     opts['blocked'] = block
     # revoke old entry (if present)
     update = UpdateProcessor('group_package_listing', values=opts,
-                             clauses=['group_id=%(group_id)s', 'tag_id=%(tag_id)s', 'package=%(package)s'])
+                             clauses=['group_id=%(group_id)s',
+                                      'tag_id=%(tag_id)s',
+                                      'package=%(package)s'])
     update.make_revoke()
     update.execute()
     # add new entry
@@ -1934,7 +1960,9 @@ def _grp_pkg_remove(taginfo, grpinfo, pkg_name, force):
     tag_id = get_tag_id(taginfo, strict=True)
     grp_id = get_group_id(grpinfo, strict=True)
     update = UpdateProcessor('group_package_listing', values=locals(),
-                             clauses=['package=%(pkg_name)s', 'tag_id=%(tag_id)s', 'group_id = %(grp_id)s'])
+                             clauses=['package=%(pkg_name)s',
+                                      'tag_id=%(tag_id)s',
+                                      'group_id = %(grp_id)s'])
     update.make_revoke()
     update.execute()
@@ -2033,7 +2061,9 @@ def _grp_req_add(taginfo, grpinfo, reqinfo, block, force, **opts):
     opts['blocked'] = block
     # revoke old entry (if present)
     update = UpdateProcessor('group_req_listing', values=opts,
-                             clauses=['group_id=%(group_id)s', 'tag_id=%(tag_id)s', 'req_id=%(req_id)s'])
+                             clauses=['group_id=%(group_id)s',
+                                      'tag_id=%(tag_id)s',
+                                      'req_id=%(req_id)s'])
     update.make_revoke()
     update.execute()
     # add new entry
@@ -2059,7 +2089,9 @@ def _grp_req_remove(taginfo, grpinfo, reqinfo, force):
     grp_id = get_group_id(grpinfo, strict=True)
     req_id = get_group_id(reqinfo, strict=True)
     update = UpdateProcessor('group_req_listing', values=locals(),
-                             clauses=['req_id=%(req_id)s', 'tag_id=%(tag_id)s', 'group_id = %(grp_id)s'])
+                             clauses=['req_id=%(req_id)s',
+                                      'tag_id=%(tag_id)s',
+                                      'group_id = %(grp_id)s'])
     update.make_revoke()
     update.execute()
@@ -2184,7 +2216,8 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True
     return groups


-def readTagGroups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True, incl_blocked=False):
+def readTagGroups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True,
+                  incl_blocked=False):
     """Return group data for the tag with blocked entries removed

     Also scrubs data into an xmlrpc-safe format (no integer keys)
@@ -2199,12 +2232,14 @@ def readTagGroups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True,
             if incl_blocked:
                 group['packagelist'] = to_list(group['packagelist'].values())
             else:
-                group['packagelist'] = [x for x in group['packagelist'].values() if not x['blocked']]
+                group['packagelist'] = [x for x in group['packagelist'].values()
+                                        if not x['blocked']]
         if 'grouplist' in group:
             if incl_blocked:
                 group['grouplist'] = to_list(group['grouplist'].values())
             else:
-                group['grouplist'] = [x for x in group['grouplist'].values() if not x['blocked']]
+                group['grouplist'] = [x for x in group['grouplist'].values()
+                                      if not x['blocked']]
     # filter blocked entries and collapse to a list
     if incl_blocked:
         return groups
@@ -2245,7 +2280,8 @@ def add_host_to_channel(hostname, channel_name, create=False):
     channels = list_channels(host_id)
     for channel in channels:
         if channel['id'] == channel_id:
-            raise koji.GenericError('host %s is already subscribed to the %s channel' % (hostname, channel_name))
+            raise koji.GenericError('host %s is already subscribed to the %s channel' %
+                                    (hostname, channel_name))
     insert = InsertProcessor('host_channels')
     insert.set(host_id=host_id, channel_id=channel_id)
     insert.make_create()
@@ -2268,7 +2304,8 @@ def remove_host_from_channel(hostname, channel_name):
             found = True
             break
     if not found:
-        raise koji.GenericError('host %s is not subscribed to the %s channel' % (hostname, channel_name))
+        raise koji.GenericError('host %s is not subscribed to the %s channel' %
+                                (hostname, channel_name))

     values = {'host_id': host_id, 'channel_id': channel_id}
     clauses = ['host_id = %(host_id)i AND channel_id = %(channel_id)i']
@@ -2501,8 +2538,9 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None, with_separa
     logger = logging.getLogger("koji.hub.repo_init")
     state = koji.REPO_INIT
     tinfo = get_tag(tag, strict=True, event=event)
-    koji.plugin.run_callbacks('preRepoInit', tag=tinfo, with_src=with_src, with_debuginfo=with_debuginfo,
-                              event=event, repo_id=None, with_separate_src=with_separate_src)
+    koji.plugin.run_callbacks('preRepoInit', tag=tinfo, with_src=with_src,
+                              with_debuginfo=with_debuginfo, event=event, repo_id=None,
+                              with_separate_src=with_separate_src)
     tag_id = tinfo['id']
     repo_arches = {}
     if with_separate_src:
@@ -2639,8 +2677,9 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None, with_separa
     for artifact_dir, artifacts in six.iteritems(artifact_dirs):
         _write_maven_repo_metadata(artifact_dir, artifacts)

-    koji.plugin.run_callbacks('postRepoInit', tag=tinfo, with_src=with_src, with_debuginfo=with_debuginfo,
-                              event=event, repo_id=repo_id, with_separate_src=with_separate_src)
+    koji.plugin.run_callbacks('postRepoInit', tag=tinfo, with_src=with_src,
+                              with_debuginfo=with_debuginfo, event=event, repo_id=repo_id,
+                              with_separate_src=with_separate_src)
     return [repo_id, event_id]
@@ -3188,13 +3227,15 @@ def lookup_build_target(info, strict=False, create=False):
     return lookup_name('build_target', info, strict, create)


-def create_tag(name, parent=None, arches=None, perm=None, locked=False, maven_support=False, maven_include_all=False, extra=None):
+def create_tag(name, parent=None, arches=None, perm=None, locked=False, maven_support=False,
+               maven_include_all=False, extra=None):
     """Create a new tag"""
     context.session.assertPerm('tag')
     return _create_tag(name, parent, arches, perm, locked, maven_support, maven_include_all, extra)


-def _create_tag(name, parent=None, arches=None, perm=None, locked=False, maven_support=False, maven_include_all=False, extra=None):
+def _create_tag(name, parent=None, arches=None, perm=None, locked=False, maven_support=False,
+                maven_include_all=False, extra=None):
     """Create a new tag, without access check"""
     max_name_length = 256
@@ -3416,7 +3457,8 @@ WHERE id = %(tagID)i"""
     if 'remove_extra' in kwargs:
         for removed in kwargs['remove_extra']:
             if removed in kwargs['extra']:
-                raise koji.GenericError("Can not both add/update and remove tag-extra: '%s'" % removed)
+                raise koji.GenericError("Can not both add/update and remove tag-extra: '%s'" %
+                                        removed)
     for key in kwargs['extra']:
         value = kwargs['extra'][key]
         if key not in tag['extra'] or tag['extra'][key] != value:
@@ -3426,7 +3468,8 @@ WHERE id = %(tagID)i"""
                 'value': json.dumps(kwargs['extra'][key]),
             }
             # revoke old entry, if any
-            update = UpdateProcessor('tag_extra', values=data, clauses=['tag_id = %(tag_id)i', 'key=%(key)s'])
+            update = UpdateProcessor('tag_extra', values=data, clauses=['tag_id = %(tag_id)i',
+                                                                        'key=%(key)s'])
             update.make_revoke()
             update.execute()
             # add new entry
@@ -3438,14 +3481,16 @@ WHERE id = %(tagID)i"""
     if 'remove_extra' in kwargs:
         ne = [e for e in kwargs['remove_extra'] if e not in tag['extra']]
         if ne:
-            raise koji.GenericError("Tag: %s doesn't have extra: %s" % (tag['name'], ', '.join(ne)))
+            raise koji.GenericError("Tag: %s doesn't have extra: %s" %
+                                    (tag['name'], ', '.join(ne)))
         for key in kwargs['remove_extra']:
             data = {
                 'tag_id': tag['id'],
                 'key': key,
             }
             # revoke old entry
-            update = UpdateProcessor('tag_extra', values=data, clauses=['tag_id = %(tag_id)i', 'key=%(key)s'])
+            update = UpdateProcessor('tag_extra', values=data, clauses=['tag_id = %(tag_id)i',
+                                                                        'key=%(key)s'])
             update.make_revoke()
             update.execute()
@@ -3576,7 +3621,8 @@ def edit_external_repo(info, name=None, url=None):
         existing_id = _singleValue("""SELECT id FROM external_repo WHERE name = %(name)s""",
                                    locals(), strict=False)
         if existing_id is not None:
-            raise koji.GenericError('name "%s" is already taken by external repo %i' % (name, existing_id))
+            raise koji.GenericError('name "%s" is already taken by external repo %i' %
+                                    (name, existing_id))
         rename = """UPDATE external_repo SET name = %(name)s WHERE id = %(repo_id)i"""
         _dml(rename, locals())
@@ -3714,7 +3760,8 @@ def get_tag_external_repos(tag_info=None, repo_info=None, event=None):
     }
     columns, aliases = zip(*fields.items())

-    clauses = [eventCondition(event, table='tag_external_repos'), eventCondition(event, table='external_repo_config')]
+    clauses = [eventCondition(event, table='tag_external_repos'),
+               eventCondition(event, table='external_repo_config')]
     if tag_info:
         tag = get_tag(tag_info, strict=True, event=event)
         tag_id = tag['id']
@@ -4035,10 +4082,13 @@ def get_build(buildInfo, strict=False):
     fields = (('build.id', 'id'), ('build.version', 'version'), ('build.release', 'release'),
               ('build.id', 'build_id'),
-              ('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'),
+              ('build.epoch', 'epoch'), ('build.state', 'state'),
+              ('build.completion_time', 'completion_time'),
               ('build.start_time', 'start_time'),
-              ('build.task_id', 'task_id'), ('events.id', 'creation_event_id'), ('events.time', 'creation_time'),
-              ('package.id', 'package_id'), ('package.name', 'package_name'), ('package.name', 'name'),
+              ('build.task_id', 'task_id'),
+              ('events.id', 'creation_event_id'), ('events.time', 'creation_time'),
+              ('package.id', 'package_id'), ('package.name', 'package_name'),
+              ('package.name', 'name'),
               ('volume.id', 'volume_id'), ('volume.name', 'volume_name'),
               ("package.name || '-' || build.version || '-' || build.release", 'nvr'),
               ('EXTRACT(EPOCH FROM events.time)', 'creation_ts'),
@@ -4248,7 +4298,8 @@ def get_rpm(rpminfo, strict=False, multi=False):
     return ret

-def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID=None, hostID=None, arches=None, queryOpts=None):
+def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID=None, hostID=None,
+              arches=None, queryOpts=None):
     """List RPMS.  If buildID, imageID and/or buildrootID are specified,
     restrict the list of RPMs to only those RPMs that are part of that
     build, or were built in that buildroot.  If componentBuildrootID is specified,
@@ -4311,7 +4362,8 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID
         joins.append('archive_rpm_components ON rpminfo.id = archive_rpm_components.rpm_id')

     if hostID is not None:
-        joins.append('standard_buildroot ON rpminfo.buildroot_id = standard_buildroot.buildroot_id')
+        joins.append(
+            'standard_buildroot ON rpminfo.buildroot_id = standard_buildroot.buildroot_id')
         clauses.append('standard_buildroot.host_id = %(hostID)i')
     if arches is not None:
         if isinstance(arches, (list, tuple)):
@@ -4474,17 +4526,19 @@ def add_btype(name):
     insert.execute()


-def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hostID=None, type=None,
-                  filename=None, size=None, checksum=None, typeInfo=None, queryOpts=None, imageID=None,
-                  archiveID=None, strict=False):
+def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hostID=None,
+                  type=None, filename=None, size=None, checksum=None, typeInfo=None,
+                  queryOpts=None, imageID=None, archiveID=None, strict=False):
     """
     Retrieve information about archives.
     If buildID is not null it will restrict the list to archives built by the build with that ID.
-    If buildrootID is not null it will restrict the list to archives built in the buildroot with that ID.
-    If componentBuildrootID is not null it will restrict the list to archives that were present in the
-    buildroot with that ID.
+    If buildrootID is not null it will restrict the list to archives built in the buildroot with
+    that ID.
+    If componentBuildrootID is not null it will restrict the list to archives that were present in
+    the buildroot with that ID.
     If hostID is not null it will restrict the list to archives built on the host with that ID.
-    If filename, size, and/or checksum are not null it will filter the results to entries matching the provided values.
+    If filename, size, and/or checksum are not null it will filter the results to entries matching
+    the provided values.

     Returns a list of maps containing the following keys:
@@ -4578,7 +4632,8 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
         values['imageID'] = imageID
         joins.append('archive_components ON archiveinfo.id = archive_components.component_id')
     if hostID is not None:
-        joins.append('standard_buildroot on archiveinfo.buildroot_id = standard_buildroot.buildroot_id')
+        joins.append(
+            'standard_buildroot on archiveinfo.buildroot_id = standard_buildroot.buildroot_id')
         clauses.append('standard_buildroot.host_id = %(host_id)i')
         values['host_id'] = hostID
         fields.append(['standard_buildroot.host_id', 'host_id'])
@@ -5135,7 +5190,10 @@ def edit_host(hostInfo, **kw):
     update.make_revoke()
     update.execute()

-    insert = InsertProcessor('host_config', data=dslice(host, ('arches', 'capacity', 'description', 'comment', 'enabled')))
+    insert = InsertProcessor('host_config',
+                             data=dslice(host,
+                                         ('arches', 'capacity', 'description', 'comment',
+                                          'enabled')))
     insert.set(host_id=host['id'])
     for change in changes:
         insert.set(**{change: kw[change]})
@@ -5171,7 +5229,8 @@ def get_channel(channelInfo, strict=False):
     return _singleRow(query, locals(), fields, strict)


-def query_buildroots(hostID=None, tagID=None, state=None, rpmID=None, archiveID=None, taskID=None, buildrootID=None, queryOpts=None):
+def query_buildroots(hostID=None, tagID=None, state=None, rpmID=None, archiveID=None, taskID=None,
+                     buildrootID=None, queryOpts=None):
     """Return a list of matching buildroots

     Optional args:
@@ -5204,16 +5263,21 @@ def query_buildroots(hostID=None, tagID=None, state=None, rpmID=None, archiveID=
               ('EXTRACT(EPOCH FROM create_events.time)', 'create_ts'),
               ('retire_events.id', 'retire_event_id'), ('retire_events.time', 'retire_event_time'),
               ('EXTRACT(EPOCH FROM retire_events.time)', 'retire_ts'),
-              ('repo_create.id', 'repo_create_event_id'), ('repo_create.time', 'repo_create_event_time')]
+              ('repo_create.id', 'repo_create_event_id'),
+              ('repo_create.time', 'repo_create_event_time')]

     tables = ['buildroot']
-    joins = ['LEFT OUTER JOIN standard_buildroot ON standard_buildroot.buildroot_id = buildroot.id',
-             'LEFT OUTER JOIN content_generator ON buildroot.cg_id = content_generator.id',
+    joins = ['LEFT OUTER JOIN standard_buildroot '
+             'ON standard_buildroot.buildroot_id = buildroot.id',
+             'LEFT OUTER JOIN content_generator '
+             'ON buildroot.cg_id = content_generator.id',
              'LEFT OUTER JOIN host ON host.id = standard_buildroot.host_id',
              'LEFT OUTER JOIN repo ON repo.id = standard_buildroot.repo_id',
              'LEFT OUTER JOIN tag ON tag.id = repo.tag_id',
-             'LEFT OUTER JOIN events AS create_events ON create_events.id = standard_buildroot.create_event',
-             'LEFT OUTER JOIN events AS retire_events ON standard_buildroot.retire_event = retire_events.id',
+             'LEFT OUTER JOIN events AS create_events ON '
+             'create_events.id = standard_buildroot.create_event',
+             'LEFT OUTER JOIN events AS retire_events ON '
+             'standard_buildroot.retire_event = retire_events.id',
              'LEFT OUTER JOIN events AS repo_create ON repo_create.id = repo.create_event']

     clauses = []
@@ -5399,7 +5463,8 @@ def _set_build_volume(binfo, volinfo, strict=True):
             shutil.copytree(olddir, newdir, symlinks=True)

     # Second, update the db
-    koji.plugin.run_callbacks('preBuildStateChange', attribute='volume_id', old=old_binfo['volume_id'], new=volinfo['id'], info=binfo)
+    koji.plugin.run_callbacks('preBuildStateChange', attribute='volume_id',
+                              old=old_binfo['volume_id'], new=volinfo['id'], info=binfo)
     update = UpdateProcessor('build', clauses=['id=%(id)i'], values=binfo)
     update.set(volume_id=volinfo['id'])
     update.execute()
@@ -5422,7 +5487,8 @@ def _set_build_volume(binfo, volinfo, strict=True):
         relpath = os.path.relpath(newdir, os.path.dirname(basedir))
         os.symlink(relpath, basedir)

-    koji.plugin.run_callbacks('postBuildStateChange', attribute='volume_id', old=old_binfo['volume_id'], new=volinfo['id'], info=binfo)
+    koji.plugin.run_callbacks('postBuildStateChange', attribute='volume_id',
+                              old=old_binfo['volume_id'], new=volinfo['id'], info=binfo)


 def ensure_volume_symlink(binfo):
@@ -5573,18 +5639,21 @@ def new_build(data, strict=False):
             recycle_build(old_binfo, data)
             # Raises exception if there is a problem
             return old_binfo['id']
-    koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=None, new=data['state'], info=data)
+    koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=None,
+                              new=data['state'], info=data)

     # insert the new data
     insert_data = dslice(data, ['pkg_id', 'version', 'release', 'epoch', 'state', 'volume_id',
-                                'task_id', 'owner', 'start_time', 'completion_time', 'source', 'extra'])
+                                'task_id', 'owner', 'start_time', 'completion_time', 'source',
+                                'extra'])
     if 'cg_id' in data:
         insert_data['cg_id'] = data['cg_id']
     data['id'] = insert_data['id'] = _singleValue("SELECT nextval('build_id_seq')")
     insert = InsertProcessor('build', data=insert_data)
     insert.execute()
     new_binfo = get_build(data['id'], strict=True)
-    koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=None, new=data['state'], info=new_binfo)
+    koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=None,
+                              new=data['state'], info=new_binfo)
     # return build_id
     return data['id']
@@ -5759,10 +5828,13 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)
     binfo = get_build(build_id, strict=True)
     st_complete = koji.BUILD_STATES['COMPLETE']
     st_old = binfo['state']
-    koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_complete, info=binfo)
+    koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old,
+                              new=st_complete, info=binfo)
     for key in ('name', 'version', 'release', 'epoch', 'task_id'):
         if build[key] != binfo[key]:
-            raise koji.GenericError("Unable to complete build: %s mismatch (build: %s, rpm: %s)" % (key, binfo[key], build[key]))
+            raise koji.GenericError(
+                "Unable to complete build: %s mismatch (build: %s, rpm: %s)" %
+                (key, binfo[key], build[key]))
     if binfo['state'] != koji.BUILD_STATES['BUILDING']:
         raise koji.GenericError("Unable to complete build: state is %s"
                                 % koji.BUILD_STATES[binfo['state']])
@@ -5773,7 +5845,8 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)
     update.set(volume_id=build['volume_id'])
     update.execute()
     binfo = get_build(build_id, strict=True)
-    koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_complete, info=binfo)
+    koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old,
+                              new=st_complete, info=binfo)

     # now to handle the individual rpms
     for relpath in [srpm] + rpms:
@@ -6143,7 +6216,8 @@ class CG_Importer(object):
         metadata = self.metadata
         if metadata['build'].get('build_id'):
             if len(self.cgs) != 1:
-                raise koji.GenericError("Reserved builds can handle only single content generator.")
+                raise koji.GenericError(
+                    "Reserved builds can handle only single content generator.")
             cg_id = list(self.cgs)[0]
             build_id = metadata['build']['build_id']
             buildinfo = get_build(build_id, strict=True)
@@ -6257,7 +6331,8 @@ class CG_Importer(object):
         source = self.buildinfo.get('source')
         st_complete = koji.BUILD_STATES['COMPLETE']
         st_old = old_info['state']
-        koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_complete, info=old_info)
+        koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old,
+                                  new=st_complete, info=old_info)
         update = UpdateProcessor('build', clauses=['id=%(build_id)s'], values=self.buildinfo)
         update.set(state=st_complete, extra=extra, owner=owner, source=source)
         if self.buildinfo.get('volume_id'):
@@ -6267,7 +6342,8 @@ class CG_Importer(object):
         update.execute()
         buildinfo = get_build(build_id, strict=True)
         clear_reservation(build_id)
-        koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_complete, info=buildinfo)
+        koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old,
+                                  new=st_complete, info=buildinfo)

         return buildinfo
@@ -6396,14 +6472,16 @@ class CG_Importer(object):
                 if archive['checksum'] == comp['checksum']:
                     return archive
         # else
-        logger.error("Failed to match archive %(filename)s (size %(filesize)s, sum %(checksum)s", comp)
+        logger.error("Failed to match archive %(filename)s (size %(filesize)s, sum %(checksum)s",
+                     comp)
         if type_mismatches:
             logger.error("Match failed with %i type mismatches", type_mismatches)
         # TODO: allow external archives
         # XXX - this is a temporary workaround until we can better track external refs
         logger.warning("IGNORING unmatched archive: %r", comp)
         return None
-        # raise koji.GenericError("No match: %(filename)s (size %(filesize)s, sum %(checksum)s" % comp)
+        # raise koji.GenericError("No match: %(filename)s (size %(filesize)s, sum %(checksum)s" %
+        #                         comp)

     def match_kojifile(self, comp):
         """Look up the file by archive id and sanity check the other data"""
@@ -6437,13 +6515,15 @@ class CG_Importer(object):
         if fileinfo.get('metadata_only', False):
             self.metadata_only = True
         workdir = koji.pathinfo.work()
-        path = joinpath(workdir, self.directory, fileinfo.get('relpath', ''), fileinfo['filename'])
+        path = joinpath(workdir, self.directory, fileinfo.get('relpath', ''),
+                        fileinfo['filename'])
         fileinfo['hub.path'] = path

         filesize = os.path.getsize(path)
         if filesize != fileinfo['filesize']:
-            raise koji.GenericError("File size %s for %s (expected %s) doesn't match. Corrupted upload?" %
-                                    (filesize, fileinfo['filename'], fileinfo['filesize']))
+            raise koji.GenericError(
+                "File size %s for %s (expected %s) doesn't match. Corrupted upload?" %
+                (filesize, fileinfo['filename'], fileinfo['filesize']))

         # checksum
         if fileinfo['checksum_type'] != 'md5':
@@ -6459,11 +6539,13 @@ class CG_Importer(object):
                 m.update(contents)
             if fileinfo['checksum'] != m.hexdigest():
                 raise koji.GenericError("File checksum mismatch for %s: %s != %s" %
-                                        (fileinfo['filename'], fileinfo['checksum'], m.hexdigest()))
+                                        (fileinfo['filename'], fileinfo['checksum'],
+                                         m.hexdigest()))
             fileinfo['hub.checked_md5'] = True

         if fileinfo['buildroot_id'] not in self.br_prep:
-            raise koji.GenericError("Missing buildroot metadata for id %(buildroot_id)r" % fileinfo)
+            raise koji.GenericError("Missing buildroot metadata for id %(buildroot_id)r" %
+                                    fileinfo)
         if fileinfo['type'] not in ['rpm', 'log']:
             self.prep_archive(fileinfo)
         if fileinfo['type'] == 'rpm':
@@ -6992,7 +7074,8 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
     be any non-rpm filetype supported by Koji.

     filepath: full path to the archive file
-    buildinfo: dict of information about the build to associate the archive with (as returned by getBuild())
+    buildinfo: dict of information about the build to associate the archive with
+               (as returned by getBuild())
     type: type of the archive being imported.  Currently supported archive types: maven, win, image
     typeInfo: dict of type-specific information
     buildroot_id: the id of the buildroot the archive was built in (may be None)
@@ -7081,14 +7164,16 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
             pom_maveninfo = koji.pom_to_maven_info(pom_info)
             # sanity check: Maven info from pom must match the user-supplied typeInfo
             if koji.mavenLabel(pom_maveninfo) != koji.mavenLabel(typeInfo):
-                raise koji.BuildError('Maven info from .pom file (%s) does not match user-supplied typeInfo (%s)' %
-                                      (koji.mavenLabel(pom_maveninfo), koji.mavenLabel(typeInfo)))
+                raise koji.BuildError(
+                    'Maven info from .pom file (%s) does not match user-supplied typeInfo (%s)' %
+                    (koji.mavenLabel(pom_maveninfo), koji.mavenLabel(typeInfo)))
             # sanity check: the filename of the pom file must match <artifact_id>-<version>.pom
             if filename != '%(artifact_id)s-%(version)s.pom' % typeInfo:
                 raise koji.BuildError('Maven info (%s) is not consistent with pom filename (%s)' %
                                       (koji.mavenLabel(typeInfo), filename))
-        insert = InsertProcessor('maven_archives', data=dslice(typeInfo, ('group_id', 'artifact_id', 'version')))
+        insert = InsertProcessor('maven_archives',
+                                 data=dslice(typeInfo, ('group_id', 'artifact_id', 'version')))
         insert.set(archive_id=archive_id)
         insert.execute()
@@ -7151,7 +7236,8 @@ def _import_archive_file(filepath, destdir):
     if os.path.exists(final_path):
         raise koji.GenericError("Error importing archive file, %s already exists" % final_path)
     if os.path.islink(filepath) or not os.path.isfile(filepath):
-        raise koji.GenericError("Error importing archive file, %s is not a regular file" % filepath)
+        raise koji.GenericError("Error importing archive file, %s is not a regular file" %
+                                filepath)
     move_and_symlink(filepath, final_path, create_dir=True)
@@ -7234,7 +7320,8 @@ def add_rpm_sig(an_rpm, sighdr):
     koji.ensuredir(os.path.dirname(sigpath))
     with open(sigpath, 'wb') as fo:
         fo.write(sighdr)
-    koji.plugin.run_callbacks('postRPMSign', sigkey=sigkey, sighash=sighash, build=binfo, rpm=rinfo)
+    koji.plugin.run_callbacks('postRPMSign',
+                              sigkey=sigkey, sighash=sighash, build=binfo, rpm=rinfo)


 def _scan_sighdr(sighdr, fn):
@@ -7411,8 +7498,10 @@ def query_history(tables=None, **kwargs):
         'user_perms': ['user_id', 'perm_id'],
         'user_groups': ['user_id', 'group_id'],
         'cg_users': ['user_id', 'cg_id'],
-        'tag_inheritance': ['tag_id', 'parent_id', 'priority', 'maxdepth', 'intransitive', 'noconfig', 'pkg_filter'],
-        'tag_config': ['tag_id', 'arches', 'perm_id', 'locked', 'maven_support', 'maven_include_all'],
+        'tag_inheritance': ['tag_id', 'parent_id', 'priority', 'maxdepth', 'intransitive',
+                            'noconfig', 'pkg_filter'],
+        'tag_config': ['tag_id', 'arches', 'perm_id', 'locked', 'maven_support',
+                       'maven_include_all'],
         'tag_extra': ['tag_id', 'key', 'value'],
         'build_target_config': ['build_target_id', 'build_tag', 'dest_tag'],
         'external_repo_config': ['external_repo_id', 'url'],
@@ -7422,10 +7511,11 @@ def query_history(tables=None, **kwargs):
         'tag_listing': ['build_id', 'tag_id'],
         'tag_packages': ['package_id', 'tag_id', 'blocked', 'extra_arches'],
         'tag_package_owners': ['package_id', 'tag_id', 'owner'],
-        'group_config': ['group_id', 'tag_id', 'blocked', 'exported', 'display_name', 'is_default', 'uservisible',
-                         'description', 'langonly', 'biarchonly'],
+        'group_config': ['group_id', 'tag_id', 'blocked', 'exported', 'display_name', 'is_default',
+                         'uservisible', 'description', 'langonly', 'biarchonly'],
         'group_req_listing': ['group_id', 'tag_id', 'req_id', 'blocked', 'type', 'is_metapkg'],
-        'group_package_listing': ['group_id', 'tag_id', 'package', 'blocked', 'type', 'basearchonly', 'requires'],
+        'group_package_listing': ['group_id', 'tag_id', 'package', 'blocked', 'type',
+                                  'basearchonly', 'requires'],
     }
     name_joins = {
         # joins triggered by table fields for name lookup
@@ -7480,7 +7570,8 @@ def query_history(tables=None, **kwargs):
                 if join_as == tbl:
                     joins.append('LEFT OUTER JOIN %s ON %s = %s.id' % (tbl, field, tbl))
                 else:
-                    joins.append('LEFT OUTER JOIN %s AS %s ON %s = %s.id' % (tbl, join_as, field, join_as))
+                    joins.append('LEFT OUTER JOIN %s AS %s ON %s = %s.id' %
+                                 (tbl, join_as, field, join_as))
             elif field == 'build_id':
                 # special case
                 fields.update({
@@ -7603,7 +7694,8 @@ def query_history(tables=None, **kwargs):
                 clauses.append('ev1.time > %(after)s OR ev2.time > %(after)s')
                 fields['ev1.time > %(after)s'] = '_created_after'
                 fields['ev2.time > %(after)s'] = '_revoked_after'
-                # clauses.append('EXTRACT(EPOCH FROM ev1.time) > %(after)s OR EXTRACT(EPOCH FROM ev2.time) > %(after)s')
+                # clauses.append('EXTRACT(EPOCH FROM ev1.time) > %(after)s OR '
+                #                'EXTRACT(EPOCH FROM ev2.time) > %(after)s')
            elif arg == 'afterEvent':
                 data['afterEvent'] = value
                 c_test = '%s.create_event > %%(afterEvent)i' % table
@@ -7616,7 +7708,8 @@ def query_history(tables=None, **kwargs):
                     value = datetime.datetime.fromtimestamp(value).isoformat(' ')
                 data['before'] = value
                 clauses.append('ev1.time < %(before)s OR ev2.time < %(before)s')
-                # clauses.append('EXTRACT(EPOCH FROM ev1.time) < %(before)s OR EXTRACT(EPOCH FROM ev2.time) < %(before)s')
+                # clauses.append('EXTRACT(EPOCH FROM ev1.time) < %(before)s OR '
+                #                'EXTRACT(EPOCH FROM ev2.time) < %(before)s')
                 fields['ev1.time < %(before)s'] = '_created_before'
                 fields['ev2.time < %(before)s'] = '_revoked_before'
             elif arg == 'beforeEvent':
@@ -7750,7 +7843,8 @@ def build_references(build_id, limit=None, lazy=False):
     st_complete = koji.BUILD_STATES['COMPLETE']
     fields = ('id', 'name', 'version', 'release', 'arch', 'build_id')
     idx = {}
-    q = """SELECT rpminfo.id, rpminfo.name, rpminfo.version, rpminfo.release, rpminfo.arch, rpminfo.build_id
+    q = """SELECT
+        rpminfo.id, rpminfo.name, rpminfo.version, rpminfo.release, rpminfo.arch, rpminfo.build_id
     FROM rpminfo, build
     WHERE
         rpminfo.buildroot_id IN (
@@ -7795,7 +7889,8 @@ def build_references(build_id, limit=None, lazy=False):
     # find archives whose buildroots we were in
     fields = ('id', 'type_id', 'type_name', 'build_id', 'filename')
     idx = {}
-    q = """SELECT archiveinfo.id, archiveinfo.type_id, archivetypes.name, archiveinfo.build_id, archiveinfo.filename
+    q = """SELECT archiveinfo.id, archiveinfo.type_id, archivetypes.name, archiveinfo.build_id,
+        archiveinfo.filename
     FROM buildroot_archives
         JOIN archiveinfo ON archiveinfo.buildroot_id = buildroot_archives.buildroot_id
         JOIN build ON archiveinfo.build_id = build.id
@@ -7897,11 +7992,13 @@ def delete_build(build, strict=True, min_ref_age=604800):
             return False
     if refs.get('archives'):
         if strict:
-            raise koji.GenericError("Cannot delete build, used in archive buildroots: %s" % refs['archives'])
+            raise koji.GenericError("Cannot delete build, used in archive buildroots: %s" %
+                                    refs['archives'])
         return False
     if refs.get('component_of'):
         if strict:
raise koji.GenericError("Cannot delete build, used as component of: %r" % refs['component_of']) + raise koji.GenericError("Cannot delete build, used as component of: %r" % + refs['component_of']) return False if refs.get('last_used'): age = time.time() - refs['last_used'] @@ -7935,7 +8032,8 @@ def _delete_build(binfo): # files on disk: DELETE st_deleted = koji.BUILD_STATES['DELETED'] st_old = binfo['state'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_deleted, info=binfo) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='state', old=st_old, new=st_deleted, info=binfo) build_id = binfo['id'] q = """SELECT id FROM rpminfo WHERE build_id=%(build_id)i""" rpm_ids = _fetchMulti(q, locals()) @@ -7952,7 +8050,8 @@ def _delete_build(binfo): if os.path.exists(builddir): koji.util.rmtree(builddir) binfo = get_build(build_id, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_deleted, info=binfo) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='state', old=st_old, new=st_deleted, info=binfo) def reset_build(build): @@ -7973,7 +8072,9 @@ def reset_build(build): # nothing to do return st_old = binfo['state'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=koji.BUILD_STATES['CANCELED'], info=binfo) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='state', old=st_old, new=koji.BUILD_STATES['CANCELED'], + info=binfo) q = """SELECT id FROM rpminfo WHERE build_id=%(id)i""" ids = _fetchMulti(q, binfo) for (rpm_id,) in ids: @@ -8022,7 +8123,9 @@ def reset_build(build): if os.path.exists(builddir): koji.util.rmtree(builddir) binfo = get_build(build, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=koji.BUILD_STATES['CANCELED'], info=binfo) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='state', old=st_old, new=koji.BUILD_STATES['CANCELED'], + info=binfo) def cancel_build(build_id, cancel_task=True): @@ -8043,7 +8146,8 @@ def cancel_build(build_id, cancel_task=True): if build['state'] != st_building: return False st_old = build['state'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_canceled, info=build) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='state', old=st_old, new=st_canceled, info=build) update = """UPDATE build SET state = %(st_canceled)i, completion_time = NOW() WHERE id = %(build_id)i AND state = %(st_building)i""" @@ -8062,7 +8166,8 @@ def cancel_build(build_id, cancel_task=True): _dml(delete, {'build_id': build_id}) build = get_build(build_id, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_canceled, info=build) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='state', old=st_old, new=st_canceled, info=build) return True @@ -8179,7 +8284,8 @@ def get_notification_recipients(build, tag_id, state): return list(set(emails)) -def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_success=False, failure_msg=''): +def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_success=False, + failure_msg=''): if context.opts.get('DisableNotifications'): return if is_successful: @@ -8203,7 +8309,9 @@ def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_s recipients[email] = 1 recipients_uniq = to_list(recipients.keys()) if len(recipients_uniq) > 0 and not (is_successful 
and ignore_success):
-        task_id = make_task('tagNotification', [recipients_uniq, is_successful, tag_id, from_id, build_id, user_id, ignore_success, failure_msg])
+        task_id = make_task('tagNotification',
+                            [recipients_uniq, is_successful, tag_id, from_id, build_id, user_id,
+                             ignore_success, failure_msg])
         return task_id
     return None
@@ -8842,8 +8950,10 @@ SELECT %(col_str)s
         return query

     def __repr__(self):
-        return '<QueryProcessor: columns=%r, aliases=%r, tables=%r, joins=%r, clauses=%r, values=%r, opts=%r>' % \
-            (self.columns, self.aliases, self.tables, self.joins, self.clauses, self.values, self.opts)
+        return '<QueryProcessor: columns=%r, aliases=%r, tables=%r, joins=%r, clauses=%r, ' \
+               'values=%r, opts=%r>' % \
+            (self.columns, self.aliases, self.tables, self.joins, self.clauses, self.values,
+             self.opts)

     def _seqtostr(self, seq, sep=', ', sort=False):
         if seq:
@@ -9580,7 +9690,9 @@ def check_policy(name, data, default='deny', strict=False):
            reason = reason.lower()
    lastrule = ruleset.last_rule()
    if context.opts.get('KojiDebug', False):
-        logger.error("policy %(name)s gave %(result)s, reason: %(reason)s, last rule: %(lastrule)s", locals())
+        logger.error(
+            "policy %(name)s gave %(result)s, reason: %(reason)s, last rule: %(lastrule)s",
+            locals())
    if result == 'allow':
        return True, reason
    if result != 'deny':
@@ -9684,7 +9796,8 @@ def importImageInternal(task_id, build_id, imgdata):
         if os.path.exists(final_path):
             raise koji.GenericError("Error importing build log. %s already exists." % final_path)
         if os.path.islink(logsrc) or not os.path.isfile(logsrc):
-            raise koji.GenericError("Error importing build log. %s is not a regular file." % logsrc)
+            raise koji.GenericError("Error importing build log. %s is not a regular file." %
+                                    logsrc)
         move_and_symlink(logsrc, final_path, create_dir=True)

     # record all of the RPMs installed in the image(s)
@@ -9812,8 +9925,9 @@ class RootExports(object):
         build: The build to generate wrapper rpms for.  Must be in the COMPLETE state and have no
                rpms already associated with it.
         url: SCM URL to a specfile fragment
-        target: The build target to use when building the wrapper rpm.  The build_tag of the target will
-            be used to populate the buildroot in which the rpms are built.
+        target: The build target to use when building the wrapper rpm.
+                The build_tag of the target will be used to populate the buildroot in which the
+                rpms are built.
         priority: the amount to increase (or decrease) the task priority, relative
                   to the default priority; higher values mean lower priority; only
                   admins have the right to specify a negative priority here
@@ -9830,7 +9944,8 @@

         build = self.getBuild(build, strict=True)
         if list_rpms(build['id']) and not (opts.get('scratch') or opts.get('create_build')):
-            raise koji.PreBuildError('wrapper rpms for %s have already been built' % koji.buildLabel(build))
+            raise koji.PreBuildError('wrapper rpms for %s have already been built' %
+                                     koji.buildLabel(build))
         build_target = self.getBuildTarget(target)
         if not build_target:
             raise koji.PreBuildError('no such build target: %s' % target)
@@ -9951,7 +10066,8 @@ class RootExports(object):
            taskOpts['priority'] = koji.PRIO_DEFAULT + priority

        if 'scratch' not in opts and 'indirection_template_url' not in opts:
-            raise koji.ActionNotAllowed('Non-scratch builds must provide url for the indirection template')
+            raise koji.ActionNotAllowed(
+                'Non-scratch builds must provide url for the indirection template')

        if 'arch' in opts:
            taskOpts['arch'] = opts['arch']
@@ -10212,7 +10328,9 @@ class RootExports(object):
         given ID."""
         if '..'
in fileName: raise koji.GenericError('Invalid file name: %s' % fileName) - filePath = '%s/%s/%s' % (koji.pathinfo.work(volume), koji.pathinfo.taskrelpath(taskID), fileName) + filePath = '%s/%s/%s' % (koji.pathinfo.work(volume), + koji.pathinfo.taskrelpath(taskID), + fileName) filePath = os.path.normpath(filePath) if not os.path.isfile(filePath): raise koji.GenericError('no file "%s" output by task %i' % (fileName, taskID)) @@ -10264,7 +10382,8 @@ class RootExports(object): filepath: path to the archive file (relative to the Koji workdir) buildinfo: information about the build to associate the archive with - May be a string (NVR), integer (buildID), or dict (containing keys: name, version, release) + May be a string (NVR), integer (buildID), or dict (containing keys: name, + version, release) type: type of the archive being imported. Currently supported archive types: maven, win typeInfo: dict of type-specific information """ @@ -10602,12 +10721,16 @@ class RootExports(object): for build in build_list: policy_data['build'] = build['id'] assert_policy('tag', policy_data) - # XXX - we're running this check twice, here and in host.tagBuild (called by the task) + # XXX - we're running this check twice, here and in host.tagBuild (called by the + # task) wait_on = [] tasklist = [] for build in build_list: - task_id = make_task('dependantTask', [wait_on, [['tagBuild', [tag2_id, build['id'], force, tag1_id], {'priority': 15}]]]) + task_id = make_task('dependantTask', + [wait_on, [['tagBuild', + [tag2_id, build['id'], force, tag1_id], + {'priority': 15}]]]) wait_on = [task_id] log_error("\nMade Task: %s\n" % task_id) tasklist.append(task_id) @@ -10644,11 +10767,11 @@ class RootExports(object): - author: only return changelogs with a matching author - before: only return changelogs from before the given date (in UTC) - (a datetime object, a string in the 'YYYY-MM-DD HH24:MI:SS format, or integer seconds - since the epoch) + (a datetime object, a string in the 'YYYY-MM-DD HH24:MI:SS format, or integer + seconds since the epoch) - after: only return changelogs from after the given date (in UTC) - (a datetime object, a string in the 'YYYY-MM-DD HH24:MI:SS format, or integer seconds - since the epoch) + (a datetime object, a string in the 'YYYY-MM-DD HH24:MI:SS format, or integer + seconds since the epoch) - queryOpts: query options used by the QueryProcessor - strict: if srpm doesn't exist raise an error, otherwise return empty list @@ -10716,7 +10839,8 @@ class RootExports(object): results = [] - fields = koji.get_header_fields(srpm_path, ['changelogtime', 'changelogname', 'changelogtext']) + fields = koji.get_header_fields(srpm_path, + ['changelogtime', 'changelogname', 'changelogtext']) for (cltime, clname, cltext) in zip(fields['changelogtime'], fields['changelogname'], fields['changelogtext']): cldate = datetime.datetime.fromtimestamp(cltime).isoformat(' ') @@ -10733,7 +10857,10 @@ class RootExports(object): if queryOpts.get('asList'): results.append([cldate, clname, cltext]) else: - results.append({'date': cldate, 'date_ts': cltime, 'author': clname, 'text': cltext}) + results.append({'date': cldate, + 'date_ts': cltime, + 'author': clname, + 'text': cltext}) results = _applyQueryOpts(results, queryOpts) return koji.fixEncodingRecurse(results, remove_nonprintable=True) @@ -10800,32 +10927,40 @@ class RootExports(object): raise koji.GenericError("Finished task's priority can't be updated") task.setPriority(priority, recurse=recurse) - def listTagged(self, tag, event=None, inherit=False, 
prefix=None, latest=False, package=None, owner=None, type=None): + def listTagged(self, tag, event=None, inherit=False, prefix=None, latest=False, package=None, + owner=None, type=None): """List builds tagged with tag""" # lookup tag id tag = get_tag(tag, strict=True, event=event)['id'] - results = readTaggedBuilds(tag, event, inherit=inherit, latest=latest, package=package, owner=owner, type=type) + results = readTaggedBuilds(tag, event, inherit=inherit, latest=latest, package=package, + owner=owner, type=type) if prefix: prefix = prefix.lower() - results = [build for build in results if build['package_name'].lower().startswith(prefix)] + results = [build for build in results + if build['package_name'].lower().startswith(prefix)] return results - def listTaggedRPMS(self, tag, event=None, inherit=False, latest=False, package=None, arch=None, rpmsigs=False, owner=None, type=None): + def listTaggedRPMS(self, tag, event=None, inherit=False, latest=False, package=None, arch=None, + rpmsigs=False, owner=None, type=None): """List rpms and builds within tag""" # lookup tag id tag = get_tag(tag, strict=True, event=event)['id'] - return readTaggedRPMS(tag, event=event, inherit=inherit, latest=latest, package=package, arch=arch, rpmsigs=rpmsigs, owner=owner, type=type) + return readTaggedRPMS(tag, event=event, inherit=inherit, latest=latest, package=package, + arch=arch, rpmsigs=rpmsigs, owner=owner, type=type) - def listTaggedArchives(self, tag, event=None, inherit=False, latest=False, package=None, type=None): + def listTaggedArchives(self, tag, event=None, inherit=False, latest=False, package=None, + type=None): """List archives and builds within a tag""" # lookup tag id tag = get_tag(tag, strict=True, event=event)['id'] - return readTaggedArchives(tag, event=event, inherit=inherit, latest=latest, package=package, type=type) + return readTaggedArchives(tag, event=event, inherit=inherit, latest=latest, + package=package, type=type) def listBuilds(self, packageID=None, userID=None, taskID=None, prefix=None, state=None, volumeID=None, source=None, createdBefore=None, createdAfter=None, - completeBefore=None, completeAfter=None, type=None, typeInfo=None, queryOpts=None): + completeBefore=None, completeAfter=None, type=None, typeInfo=None, + queryOpts=None): """Return a list of builds that match the given parameters Filter parameters @@ -10899,16 +11034,20 @@ class RootExports(object): If no builds match, an empty list is returned. 
""" - fields = [('build.id', 'build_id'), ('build.version', 'version'), ('build.release', 'release'), - ('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'), + fields = [('build.id', 'build_id'), ('build.version', 'version'), + ('build.release', 'release'), + ('build.epoch', 'epoch'), ('build.state', 'state'), + ('build.completion_time', 'completion_time'), ('build.start_time', 'start_time'), ('build.source', 'source'), ('build.extra', 'extra'), - ('events.id', 'creation_event_id'), ('events.time', 'creation_time'), ('build.task_id', 'task_id'), + ('events.id', 'creation_event_id'), ('events.time', 'creation_time'), + ('build.task_id', 'task_id'), ('EXTRACT(EPOCH FROM events.time)', 'creation_ts'), ('EXTRACT(EPOCH FROM build.start_time)', 'start_ts'), ('EXTRACT(EPOCH FROM build.completion_time)', 'completion_ts'), - ('package.id', 'package_id'), ('package.name', 'package_name'), ('package.name', 'name'), + ('package.id', 'package_id'), ('package.name', 'package_name'), + ('package.name', 'name'), ('volume.id', 'volume_id'), ('volume.name', 'volume_name'), ("package.name || '-' || build.version || '-' || build.release", 'nvr'), ('users.id', 'owner_id'), ('users.name', 'owner_name')] @@ -11007,7 +11146,8 @@ class RootExports(object): if not isinstance(tag, six.integer_types): # lookup tag id tag = get_tag_id(tag, strict=True) - return readTaggedRPMS(tag, package=package, arch=arch, event=event, inherit=True, latest=True, rpmsigs=rpmsigs, type=type) + return readTaggedRPMS(tag, package=package, arch=arch, event=event, inherit=True, + latest=True, rpmsigs=rpmsigs, type=type) def getLatestMavenArchives(self, tag, event=None, inherit=True): """Return a list of the latest Maven archives in the tag, as of the given event @@ -11163,7 +11303,8 @@ class RootExports(object): results = [] - for dep_name in ['REQUIRE', 'PROVIDE', 'CONFLICT', 'OBSOLETE', 'SUGGEST', 'ENHANCE', 'SUPPLEMENT', 'RECOMMEND']: + for dep_name in ['REQUIRE', 'PROVIDE', 'CONFLICT', 'OBSOLETE', 'SUGGEST', 'ENHANCE', + 'SUPPLEMENT', 'RECOMMEND']: dep_id = getattr(koji, 'DEP_' + dep_name) if depType is None or depType == dep_id: fields = koji.get_header_fields(rpm_path, [dep_name + 'NAME', @@ -11175,7 +11316,8 @@ class RootExports(object): if queryOpts.get('asList'): results.append([name, version, flags, dep_id]) else: - results.append({'name': name, 'version': version, 'flags': flags, 'type': dep_id}) + results.append( + {'name': name, 'version': version, 'flags': flags, 'type': dep_id}) return _applyQueryOpts(results, queryOpts) @@ -11204,13 +11346,15 @@ class RootExports(object): results = [] hdr = koji.get_rpm_header(rpm_path) fields = koji.get_header_fields(hdr, ['filenames', 'filemd5s', 'filesizes', 'fileflags', - 'fileusername', 'filegroupname', 'filemtimes', 'filemodes']) + 'fileusername', 'filegroupname', 'filemtimes', + 'filemodes']) digest_algo = koji.util.filedigestAlgo(hdr) - for (name, digest, size, flags, user, group, mtime, mode) in zip(fields['filenames'], fields['filemd5s'], - fields['filesizes'], fields['fileflags'], - fields['fileusername'], fields['filegroupname'], - fields['filemtimes'], fields['filemodes']): + for (name, digest, size, flags, user, group, mtime, mode) \ + in zip(fields['filenames'], fields['filemd5s'], + fields['filesizes'], fields['fileflags'], + fields['fileusername'], fields['filegroupname'], + fields['filemtimes'], fields['filemodes']): if queryOpts.get('asList'): results.append([name, digest, size, flags, digest_algo, user, group, mtime, mode]) 
else: @@ -11261,7 +11405,8 @@ class RootExports(object): hdr = koji.get_rpm_header(rpm_path) # use filemd5s for backward compatibility fields = koji.get_header_fields(hdr, ['filenames', 'filemd5s', 'filesizes', 'fileflags', - 'fileusername', 'filegroupname', 'filemtimes', 'filemodes']) + 'fileusername', 'filegroupname', 'filemtimes', + 'filemodes']) digest_algo = koji.util.filedigestAlgo(hdr) i = 0 @@ -11347,7 +11492,8 @@ class RootExports(object): getPackage = staticmethod(lookup_package) - def listPackages(self, tagID=None, userID=None, pkgID=None, prefix=None, inherited=False, with_dups=False, event=None, queryOpts=None): + def listPackages(self, tagID=None, userID=None, pkgID=None, prefix=None, inherited=False, + with_dups=False, event=None, queryOpts=None): """List if tagID and/or userID is specified, limit the list to packages belonging to the given user or with the given tag. @@ -11391,7 +11537,8 @@ class RootExports(object): if prefix: prefix = prefix.lower() - results = [package for package in results if package['package_name'].lower().startswith(prefix)] + results = [package for package in results + if package['package_name'].lower().startswith(prefix)] return _applyQueryOpts(results, queryOpts) @@ -11452,7 +11599,8 @@ class RootExports(object): perm = lookup_perm(permission, strict=(not create), create=create) perm_id = perm['id'] if perm['name'] in koji.auth.get_user_perms(user_id): - raise koji.GenericError('user %s already has permission: %s' % (userinfo, perm['name'])) + raise koji.GenericError('user %s already has permission: %s' % + (userinfo, perm['name'])) insert = InsertProcessor('user_perms') insert.set(user_id=user_id, perm_id=perm_id) insert.make_create() @@ -11465,7 +11613,8 @@ class RootExports(object): perm = lookup_perm(permission, strict=True) perm_id = perm['id'] if perm['name'] not in koji.auth.get_user_perms(user_id): - raise koji.GenericError('user %s does not have permission: %s' % (userinfo, perm['name'])) + raise koji.GenericError('user %s does not have permission: %s' % + (userinfo, perm['name'])) update = UpdateProcessor('user_perms', values=locals(), clauses=["user_id = %(user_id)i", "perm_id = %(perm_id)i"]) update.make_revoke() @@ -11584,7 +11733,8 @@ class RootExports(object): else: id = get_tag_id(tag, strict=True) - fields = ['repo.id', 'repo.state', 'repo.create_event', 'events.time', 'EXTRACT(EPOCH FROM events.time)', 'repo.dist'] + fields = ['repo.id', 'repo.state', 'repo.create_event', 'events.time', + 'EXTRACT(EPOCH FROM events.time)', 'repo.dist'] aliases = ['id', 'state', 'create_event', 'creation_time', 'create_ts', 'dist'] joins = ['events ON repo.create_event = events.id'] clauses = ['repo.tag_id = %(id)i'] @@ -11628,7 +11778,8 @@ class RootExports(object): for task_id in task_ids: logger.debug("Cancelling distRepo task %d" % task_id) Task(task_id).cancel(recurse=True) - return make_task('distRepo', [tag, repo_id, keys, task_opts], priority=15, channel='createrepo') + return make_task('distRepo', [tag, repo_id, keys, task_opts], + priority=15, channel='createrepo') def newRepo(self, tag, event=None, src=False, debuginfo=False, separate_src=False): """Create a newRepo task. 
returns task id""" @@ -11758,7 +11909,8 @@ class RootExports(object): owner[int|list]: limit to tasks owned by the user with the given ID not_owner[int|list]: limit to tasks not owned by the user with the given ID host_id[int|list]: limit to tasks running on the host with the given ID - not_host_id[int|list]: limit to tasks running on the hosts with IDs other than the given ID + not_host_id[int|list]: limit to tasks running on the hosts with IDs other than the + given ID channel_id[int|list]: limit to tasks in the specified channel not_channel_id[int|list]: limit to tasks not in the specified channel parent[int|list]: limit to tasks with the given parent @@ -11954,7 +12106,9 @@ class RootExports(object): args = task.getRequest() channel = get_channel(taskInfo['channel_id'], strict=True) - return make_task(taskInfo['method'], args, arch=taskInfo['arch'], channel=channel['name'], priority=taskInfo['priority']) + return make_task(taskInfo['method'], args, + arch=taskInfo['arch'], channel=channel['name'], + priority=taskInfo['priority']) def addHost(self, hostname, arches, krb_principal=None): """ @@ -11986,7 +12140,8 @@ class RootExports(object): krb_principal=krb_principal) # host entry hostID = _singleValue("SELECT nextval('host_id_seq')", strict=True) - insert = "INSERT INTO host (id, user_id, name) VALUES (%(hostID)i, %(userID)i, %(hostname)s)" + insert = "INSERT INTO host (id, user_id, name) VALUES (%(hostID)i, %(userID)i, " \ + "%(hostname)s)" _dml(insert, dslice(locals(), ('hostID', 'userID', 'hostname'))) insert = InsertProcessor('host_config') @@ -12017,7 +12172,8 @@ class RootExports(object): renameChannel = staticmethod(rename_channel) removeChannel = staticmethod(remove_channel) - def listHosts(self, arches=None, channelID=None, ready=None, enabled=None, userID=None, queryOpts=None): + def listHosts(self, arches=None, channelID=None, ready=None, enabled=None, userID=None, + queryOpts=None): """Get a list of hosts. "arches" is a list of string architecture names, e.g. ['i386', 'ppc64']. If one of the arches associated with a given host appears in the list, it will be included in the results. 
If "ready" and "enabled" @@ -12152,11 +12308,15 @@ class RootExports(object): userid = userinfo['id'] buildid = buildinfo['id'] owner_id_old = buildinfo['owner_id'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='owner_id', old=owner_id_old, new=userid, info=buildinfo) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='owner_id', old=owner_id_old, new=userid, + info=buildinfo) q = """UPDATE build SET owner=%(userid)i WHERE id=%(buildid)i""" _dml(q, locals()) buildinfo = get_build(build, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='owner_id', old=owner_id_old, new=userid, info=buildinfo) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='owner_id', old=owner_id_old, new=userid, + info=buildinfo) def setBuildTimestamp(self, build, ts): """Set the completion time for a build @@ -12176,20 +12336,23 @@ class RootExports(object): elif not isinstance(ts, NUMERIC_TYPES): raise koji.GenericError("Invalid type for timestamp") ts_old = buildinfo['completion_ts'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='completion_ts', old=ts_old, new=ts, info=buildinfo) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='completion_ts', old=ts_old, new=ts, info=buildinfo) buildid = buildinfo['id'] q = """UPDATE build SET completion_time=TIMESTAMP 'epoch' AT TIME ZONE 'utc' + '%(ts)f seconds'::interval WHERE id=%%(buildid)i""" % locals() _dml(q, locals()) buildinfo = get_build(build, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='completion_ts', old=ts_old, new=ts, info=buildinfo) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='completion_ts', old=ts_old, new=ts, info=buildinfo) def count(self, methodName, *args, **kw): """Execute the XML-RPC method with the given name and count the results. - A method return value of None will return O, a return value of type "list", "tuple", or "dict" - will return len(value), and a return value of any other type will return 1. An invalid - methodName will raise an AttributeError, and invalid arguments will raise a TypeError.""" + A method return value of None will return O, a return value of type "list", "tuple", or + "dict" will return len(value), and a return value of any other type will return 1. An + invalid methodName will raise an AttributeError, and invalid arguments will raise a + TypeError.""" result = getattr(self, methodName)(*args, **kw) if result is None: return 0 @@ -12463,7 +12626,8 @@ class RootExports(object): be replaced with "%". If matchType is "regexp", no changes will be made.""" if matchType == 'glob': - return terms.replace('\\', '\\\\').replace('_', r'\_').replace('?', '_').replace('*', '%') + return terms.replace( + '\\', '\\\\').replace('_', r'\_').replace('?', '_').replace('*', '%') else: return terms @@ -12523,24 +12687,29 @@ class RootExports(object): joins = [] if type == 'build': joins.append('package ON build.pkg_id = package.id') - clause = "package.name || '-' || build.version || '-' || build.release %s %%(terms)s" % oper + clause = "package.name || '-' || build.version || '-' || build.release %s %%(terms)s" \ + % oper cols = ('build.id', "package.name || '-' || build.version || '-' || build.release") elif type == 'rpm': - clause = "name || '-' || version || '-' || release || '.' || arch || '.rpm' %s %%(terms)s" % oper + clause = "name || '-' || version || '-' || release || '.' || arch || '.rpm' %s " \ + "%%(terms)s" % oper cols = ('id', "name || '-' || version || '-' || release || '.' 
|| arch || '.rpm'") elif type == 'tag': joins.append('tag_config ON tag.id = tag_config.tag_id') clause = 'tag_config.active = TRUE and name %s %%(terms)s' % oper elif type == 'target': - joins.append('build_target_config ON build_target.id = build_target_config.build_target_id') + joins.append('build_target_config ' + 'ON build_target.id = build_target_config.build_target_id') clause = 'build_target_config.active = TRUE and name %s %%(terms)s' % oper elif type == 'maven': cols = ('id', 'filename') joins.append('maven_archives ON archiveinfo.id = maven_archives.archive_id') clause = "archiveinfo.filename %s %%(terms)s or maven_archives.group_id || '-' || " \ - "maven_archives.artifact_id || '-' || maven_archives.version %s %%(terms)s" % (oper, oper) + "maven_archives.artifact_id || '-' || maven_archives.version %s %%(terms)s" \ + % (oper, oper) elif type == 'win': - cols = ('id', "trim(leading '/' from win_archives.relpath || '/' || archiveinfo.filename)") + cols = ('id', + "trim(leading '/' from win_archives.relpath || '/' || archiveinfo.filename)") joins.append('win_archives ON archiveinfo.id = win_archives.archive_id') clause = "archiveinfo.filename %s %%(terms)s or win_archives.relpath || '/' || " \ "archiveinfo.filename %s %%(terms)s" % (oper, oper) @@ -12725,7 +12894,8 @@ class BuildRoot(object): ) query = QueryProcessor(columns=[f[0] for f in fields], aliases=[f[1] for f in fields], tables=['buildroot_listing'], - joins=["rpminfo ON rpm_id = rpminfo.id", "external_repo ON external_repo_id = external_repo.id"], + joins=["rpminfo ON rpm_id = rpminfo.id", + "external_repo ON external_repo_id = external_repo.id"], clauses=["buildroot_listing.buildroot_id = %(brootid)i"], values=locals()) return query.execute() @@ -12882,12 +13052,15 @@ class Host(object): update.execute() elif tasks: # wait on specified subtasks - update = UpdateProcessor('task', clauses=['id IN %(tasks)s', 'parent=%(parent)s'], values=locals()) + update = UpdateProcessor('task', clauses=['id IN %(tasks)s', 'parent=%(parent)s'], + values=locals()) update.set(awaited=True) update.execute() # clear awaited flag on any other child tasks update = UpdateProcessor('task', values=locals(), - clauses=['id NOT IN %(tasks)s', 'parent=%(parent)s', 'awaited=true']) + clauses=['id NOT IN %(tasks)s', + 'parent=%(parent)s', + 'awaited=true']) update.set(awaited=False) update.execute() else: @@ -13360,7 +13533,8 @@ class HostExports(object): st_old = build_info['state'] st_complete = koji.BUILD_STATES['COMPLETE'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_complete, info=build_info) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='state', old=st_old, new=st_complete, info=build_info) update = UpdateProcessor('build', clauses=['id=%(build_id)i'], values={'build_id': build_id}) @@ -13370,7 +13544,8 @@ class HostExports(object): update.set(volume_id=build_info['volume_id']) update.execute() build_info = get_build(build_id, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_complete, info=build_info) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='state', old=st_old, new=st_complete, info=build_info) # send email build_notification(task_id, build_id) @@ -13487,7 +13662,8 @@ class HostExports(object): # update build state st_complete = koji.BUILD_STATES['COMPLETE'] st_old = build_info['state'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_complete, info=build_info) + 
koji.plugin.run_callbacks('preBuildStateChange', + attribute='state', old=st_old, new=st_complete, info=build_info) update = UpdateProcessor('build', clauses=['id=%(build_id)i'], values={'build_id': build_id}) update.set(state=st_complete) @@ -13496,7 +13672,8 @@ class HostExports(object): update.rawset(completion_time='now()') update.execute() build_info = get_build(build_id, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_complete, info=build_info) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='state', old=st_old, new=st_complete, info=build_info) # send email build_notification(task_id, build_id) @@ -13531,12 +13708,14 @@ class HostExports(object): build_info = get_build(build_id, strict=True) if build_info['state'] != koji.BUILD_STATES['COMPLETE']: - raise koji.GenericError('cannot import wrapper rpms for %s: build state is %s, not complete' % - (koji.buildLabel(build_info), koji.BUILD_STATES[build_info['state']].lower())) + raise koji.GenericError( + 'cannot import wrapper rpms for %s: build state is %s, not complete' % + (koji.buildLabel(build_info), koji.BUILD_STATES[build_info['state']].lower())) if list_rpms(buildID=build_info['id']): # don't allow overwriting of already-imported wrapper RPMs - raise koji.GenericError('wrapper rpms for %s have already been imported' % koji.buildLabel(build_info)) + raise koji.GenericError('wrapper rpms for %s have already been imported' % + koji.buildLabel(build_info)) _import_wrapper(task.id, build_info, rpm_results) @@ -13613,7 +13792,8 @@ class HostExports(object): raise koji.BuildError('unsupported file type: %s' % relpath) filepath = joinpath(task_dir, relpath) metadata['relpath'] = os.path.dirname(relpath) - import_archive(filepath, build_info, 'win', metadata, buildroot_id=results['buildroot_id']) + import_archive(filepath, build_info, 'win', metadata, + buildroot_id=results['buildroot_id']) # move the logs to their final destination for relpath in results['logs']: @@ -13632,7 +13812,8 @@ class HostExports(object): # update build state st_old = build_info['state'] st_complete = koji.BUILD_STATES['COMPLETE'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_complete, info=build_info) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='state', old=st_old, new=st_complete, info=build_info) update = UpdateProcessor('build', clauses=['id=%(build_id)i'], values={'build_id': build_id}) update.set(state=st_complete) @@ -13641,7 +13822,8 @@ class HostExports(object): update.rawset(completion_time='now()') update.execute() build_info = get_build(build_id, strict=True) - koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_complete, info=build_info) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='state', old=st_old, new=st_complete, info=build_info) # send email build_notification(task_id, build_id) @@ -13658,7 +13840,8 @@ class HostExports(object): st_failed = koji.BUILD_STATES['FAILED'] buildinfo = get_build(build_id, strict=True) st_old = buildinfo['state'] - koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_failed, info=buildinfo) + koji.plugin.run_callbacks('preBuildStateChange', + attribute='state', old=st_old, new=st_failed, info=buildinfo) query = """SELECT state, completion_time FROM build @@ -13679,7 +13862,8 @@ class HostExports(object): WHERE id = %(build_id)i""" _dml(update, locals()) buildinfo = get_build(build_id, strict=True) 
- koji.plugin.run_callbacks('postBuildStateChange', attribute='state', old=st_old, new=st_failed, info=buildinfo) + koji.plugin.run_callbacks('postBuildStateChange', + attribute='state', old=st_old, new=st_failed, info=buildinfo) build_notification(task_id, build_id) def tagBuild(self, task_id, tag, build, force=False, fromtag=None): @@ -13742,12 +13926,14 @@ class HostExports(object): _import_wrapper(rpm_results['task_id'], get_build(build_id, strict=True), rpm_results) - def tagNotification(self, is_successful, tag_id, from_id, build_id, user_id, ignore_success=False, failure_msg=''): + def tagNotification(self, is_successful, tag_id, from_id, build_id, user_id, + ignore_success=False, failure_msg=''): """Create a tag notification message. Handles creation of tagNotification tasks for hosts.""" host = Host() host.verify() - tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_success, failure_msg) + tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_success, + failure_msg) def checkPolicy(self, name, data, default='deny', strict=False): host = Host() @@ -13845,7 +14031,9 @@ class HostExports(object): archive['artifact_id'], {}).setdefault( archive['version'], archive['build_id']) if idx_build != archive['build_id']: - logger.error("Found multiple builds for %(group_id)s:%(artifact_id)s:%(version)s. Current build: %(build_id)i", archive) + logger.error( + "Found multiple builds for %(group_id)s:%(artifact_id)s:%(version)s. " + "Current build: %(build_id)i", archive) logger.error("Indexed build id was %i", idx_build) if not ignore: @@ -13885,9 +14073,19 @@ class HostExports(object): archive['artifact_id'], {}).setdefault( archive['version'], archive['build_id']) if idx_build != archive['build_id']: - logger.error("Overriding build for %(group_id)s:%(artifact_id)s:%(version)s.", archive) - logger.error("Current build is %s, new build is %s.", idx_build, archive['build_id']) - maven_build_index[archive['group_id']][archive['artifact_id']][archive['version']] = archive['build_id'] + logger.error( + "Overriding build for %(group_id)s:%(artifact_id)s:%(version)s.", + archive) + logger.error( + "Current build is %s, new build is %s.", + idx_build, archive['build_id']) + maven_build_index[ + archive['group_id'] + ][ + archive['artifact_id'] + ][ + archive['version'] + ] = archive['build_id'] ignore.extend(task_deps.values()) @@ -13938,23 +14136,29 @@ class HostExports(object): pass else: if not ignore_unknown: - logger.error("Unknown file for %(group_id)s:%(artifact_id)s:%(version)s", maven_info) + logger.error("Unknown file for %(group_id)s:%(artifact_id)s:%(version)s", + maven_info) if build_id: build = get_build(build_id) logger.error("g:a:v supplied by build %(nvr)s", build) - logger.error("Build supplies %i archives: %r", len(build_archives), to_list(build_archives.keys())) + logger.error("Build supplies %i archives: %r", + len(build_archives), to_list(build_archives.keys())) if tag_archive: - logger.error("Size mismatch, br: %i, db: %i", fileinfo['size'], tag_archive['size']) - raise koji.BuildrootError('Unknown file in build environment: %s, size: %s' % - ('%s/%s' % (fileinfo['path'], fileinfo['filename']), fileinfo['size'])) + logger.error("Size mismatch, br: %i, db: %i", + fileinfo['size'], tag_archive['size']) + raise koji.BuildrootError( + 'Unknown file in build environment: %s, size: %s' % + ('%s/%s' % (fileinfo['path'], fileinfo['filename']), fileinfo['size'])) return br.updateArchiveList(archives, project) - def repoInit(self, tag, 
with_src=False, with_debuginfo=False, event=None, with_separate_src=False): + def repoInit(self, tag, with_src=False, with_debuginfo=False, event=None, + with_separate_src=False): """Initialize a new repo for tag""" host = Host() host.verify() - return repo_init(tag, with_src=with_src, with_debuginfo=with_debuginfo, event=event, with_separate_src=with_separate_src) + return repo_init(tag, with_src=with_src, with_debuginfo=with_debuginfo, event=event, + with_separate_src=with_separate_src) def repoDone(self, repo_id, data, expire=False): """Finalize a repo @@ -14175,7 +14379,8 @@ def get_upload_path(reldir, name, create=False, volume=None): if os.path.exists(u_fn): user_id = int(open(u_fn, 'r').read()) if context.session.user_id != user_id: - raise koji.GenericError("Invalid upload directory, not owner: %s" % orig_reldir) + raise koji.GenericError("Invalid upload directory, not owner: %s" % + orig_reldir) else: with open(u_fn, 'w') as fo: fo.write(str(context.session.user_id)) diff --git a/hub/kojixmlrpc.py b/hub/kojixmlrpc.py index cab7edf9..8f2c2025 100644 --- a/hub/kojixmlrpc.py +++ b/hub/kojixmlrpc.py @@ -155,7 +155,9 @@ class HandlerRegistry(object): if x == 0 and func.__code__.co_varnames[x] == "self": continue if func.__defaults__ and func.__code__.co_argcount - x <= len(func.__defaults__): - args.append((func.__code__.co_varnames[x], func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)])) + args.append( + (func.__code__.co_varnames[x], + func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)])) else: args.append(func.__code__.co_varnames[x]) return args @@ -317,10 +319,11 @@ class ModXMLRPCRequestHandler(object): if self.logger.isEnabledFor(logging.INFO): rusage = resource.getrusage(resource.RUSAGE_SELF) - self.logger.info("Completed method %s for session %s (#%s): %f seconds, rss %s, stime %f", - method, context.session.id, context.session.callnum, - time.time() - start, - rusage.ru_maxrss, rusage.ru_stime) + self.logger.info( + "Completed method %s for session %s (#%s): %f seconds, rss %s, stime %f", + method, context.session.id, context.session.callnum, + time.time() - start, + rusage.ru_maxrss, rusage.ru_stime) return ret @@ -344,8 +347,11 @@ class ModXMLRPCRequestHandler(object): faultCode = getattr(exc_type, 'faultCode', 1) faultString = ', '.join(exc_value.args) trace = traceback.format_exception(*sys.exc_info()) - # traceback is not part of the multicall spec, but we include it for debugging purposes - results.append({'faultCode': faultCode, 'faultString': faultString, 'traceback': trace}) + # traceback is not part of the multicall spec, + # but we include it for debugging purposes + results.append({'faultCode': faultCode, + 'faultString': faultString, + 'traceback': trace}) else: results.append([result]) @@ -438,7 +444,9 @@ def load_config(environ): ['VerbosePolicy', 'boolean', False], ['LogLevel', 'string', 'WARNING'], - ['LogFormat', 'string', '%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'], + ['LogFormat', 'string', + '%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s ' + '%(name)s: %(message)s'], ['MissingPolicyOk', 'boolean', True], ['EnableMaven', 'boolean', False], @@ -660,7 +668,8 @@ def load_scripts(environ): def get_memory_usage(): pagesize = resource.getpagesize() - statm = [pagesize * int(y) // 1024 for y in "".join(open("/proc/self/statm").readlines()).strip().split()] + statm = [pagesize * int(y) // 1024 + for y in 
"".join(open("/proc/self/statm").readlines()).strip().split()] size, res, shr, text, lib, data, dirty = statm return res - shr @@ -713,7 +722,8 @@ def application(environ, start_response): ('Allow', 'POST'), ] start_response('405 Method Not Allowed', headers) - response = "Method Not Allowed\nThis is an XML-RPC server. Only POST requests are accepted." + response = "Method Not Allowed\n" \ + "This is an XML-RPC server. Only POST requests are accepted." if six.PY3: response = response.encode() headers = [ @@ -767,7 +777,11 @@ def application(environ, start_response): paramstr = repr(getattr(context, 'params', 'UNKNOWN')) if len(paramstr) > 120: paramstr = paramstr[:117] + "..." - h.logger.warning("Memory usage of process %d grew from %d KiB to %d KiB (+%d KiB) processing request %s with args %s" % (os.getpid(), memory_usage_at_start, memory_usage_at_end, memory_usage_at_end - memory_usage_at_start, context.method, paramstr)) + h.logger.warning( + "Memory usage of process %d grew from %d KiB to %d KiB (+%d KiB) processing " + "request %s with args %s" % + (os.getpid(), memory_usage_at_start, memory_usage_at_end, + memory_usage_at_end - memory_usage_at_start, context.method, paramstr)) h.logger.debug("Returning %d bytes after %f seconds", len(response), time.time() - start) finally: diff --git a/koji/__init__.py b/koji/__init__.py index cef9af78..82b92c1d 100644 --- a/koji/__init__.py +++ b/koji/__init__.py @@ -75,7 +75,7 @@ try: from OpenSSL.SSL import Error as SSL_Error except Exception: # pragma: no cover # the hub imports koji, and sometimes this import fails there - # see: https://cryptography.io/en/latest/faq/#starting-cryptography-using-mod-wsgi-produces-an-internalerror-during-a-call-in-register-osrandom-engine + # see: https://cryptography.io/en/latest/faq/#starting-cryptography-using-mod-wsgi-produces-an-internalerror-during-a-call-in-register-osrandom-engine # noqa: E501 # unfortunately the workaround at the above link does not always work, so # we ignore it here pass @@ -1270,7 +1270,8 @@ def parse_pom(path=None, contents=None): fd.close() if not contents: - raise GenericError('either a path to a pom file or the contents of a pom file must be specified') + raise GenericError( + 'either a path to a pom file or the contents of a pom file must be specified') # A common problem is non-UTF8 characters in XML files, so we'll convert the string first @@ -1287,7 +1288,8 @@ def parse_pom(path=None, contents=None): for field in fields: if field not in util.to_list(values.keys()): - raise GenericError('could not extract %s from POM: %s' % (field, (path or ''))) + raise GenericError('could not extract %s from POM: %s' % + (field, (path or ''))) return values @@ -1649,7 +1651,8 @@ name=build # The following macro values cannot be overridden by tag options macros['%_topdir'] = '%s/build' % config_opts['chroothome'] macros['%_host_cpu'] = opts.get('target_arch', arch) - macros['%_host'] = '%s-%s' % (opts.get('target_arch', arch), opts.get('mockhost', 'koji-linux-gnu')) + macros['%_host'] = '%s-%s' % (opts.get('target_arch', arch), + opts.get('mockhost', 'koji-linux-gnu')) parts = ["""# Auto-generated by the Koji build system """] @@ -1681,7 +1684,9 @@ name=build if bind_opts: for key in bind_opts.keys(): for mnt_src, mnt_dest in six.iteritems(bind_opts.get(key)): - parts.append("config_opts['plugin_conf']['bind_mount_opts'][%r].append((%r, %r))\n" % (key, mnt_src, mnt_dest)) + parts.append( + "config_opts['plugin_conf']['bind_mount_opts'][%r].append((%r, %r))\n" % + (key, mnt_src, mnt_dest)) 
parts.append("\n") for key in sorted(macros): @@ -1886,7 +1891,8 @@ def read_config(profile_name, user_config=None): try: result[name] = int(value) except ValueError: - raise ConfigurationError("value for %s config option must be a valid integer" % name) + raise ConfigurationError( + "value for %s config option must be a valid integer" % name) else: result[name] = value @@ -2030,7 +2036,8 @@ def read_config_files(config_files, raw=False): class PathInfo(object): # ASCII numbers and upper- and lower-case letter for use in tmpdir() - ASCII_CHARS = [chr(i) for i in list(range(48, 58)) + list(range(65, 91)) + list(range(97, 123))] + ASCII_CHARS = [chr(i) + for i in list(range(48, 58)) + list(range(65, 91)) + list(range(97, 123))] def __init__(self, topdir=None): self._topdir = topdir @@ -2053,10 +2060,12 @@ class PathInfo(object): def build(self, build): """Return the directory where a build belongs""" - return self.volumedir(build.get('volume_name')) + ("/packages/%(name)s/%(version)s/%(release)s" % build) + return self.volumedir(build.get('volume_name')) + \ + ("/packages/%(name)s/%(version)s/%(release)s" % build) def mavenbuild(self, build): - """Return the directory where the Maven build exists in the global store (/mnt/koji/packages)""" + """Return the directory where the Maven build exists in the global store + (/mnt/koji/packages)""" return self.build(build) + '/maven' def mavenrepo(self, maveninfo): @@ -2137,7 +2146,8 @@ class PathInfo(object): """Return a path to a unique directory under work()/tmp/""" tmp = None while tmp is None or os.path.exists(tmp): - tmp = self.work(volume) + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS) for dummy in '123456']) + tmp = self.work(volume) + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS) + for dummy in '123456']) return tmp def scratch(self): @@ -2781,9 +2791,9 @@ class ClientSession(object): # basically, we want to retry on most errors, with a few exceptions # - faults (this means the call completed and failed) # - SystemExit, KeyboardInterrupt - # note that, for logged-in sessions the server should tell us (via a RetryError fault) - # if the call cannot be retried. For non-logged-in sessions, all calls should be read-only - # and hence retryable. + # note that, for logged-in sessions the server should tell us (via a RetryError + # fault) if the call cannot be retried. For non-logged-in sessions, all calls + # should be read-only and hence retryable. except Fault as fault: # try to convert the fault to a known exception err = convertFault(fault) @@ -2792,13 +2802,14 @@ class ClientSession(object): secs = self.opts.get('offline_retry_interval', interval) self.logger.debug("Server offline. Retrying in %i seconds", secs) time.sleep(secs) - # reset try count - this isn't a typical error, this is a running server - # correctly reporting an outage + # reset try count - this isn't a typical error, this is a running + # server correctly reporting an outage tries = 0 continue raise err except (SystemExit, KeyboardInterrupt): - # (depending on the python version, these may or may not be subclasses of Exception) + # (depending on the python version, these may or may not be subclasses of + # Exception) raise except Exception as e: tb_str = ''.join(traceback.format_exception(*sys.exc_info())) @@ -2809,8 +2820,9 @@ class ClientSession(object): raise if not self.logged_in: - # in the past, non-logged-in sessions did not retry. For compatibility purposes - # this behavior is governed by the anon_retry opt. 
+ # in the past, non-logged-in sessions did not retry. + # For compatibility purposes this behavior is governed by the anon_retry + # opt. if not self.opts.get('anon_retry', False): raise @@ -2822,7 +2834,8 @@ class ClientSession(object): # otherwise keep retrying if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(tb_str) - self.logger.info("Try #%s for call %s (%s) failed: %s", tries, self.callnum, name, e) + self.logger.info("Try #%s for call %s (%s) failed: %s", + tries, self.callnum, name, e) if tries > 1: # first retry is immediate, after that we honor retry_interval time.sleep(interval) @@ -2864,7 +2877,8 @@ class ClientSession(object): transaction. """ if not self.multicall: - raise GenericError('ClientSession.multicall must be set to True before calling multiCall()') + raise GenericError( + 'ClientSession.multicall must be set to True before calling multiCall()') self.multicall = False if len(self._calls) == 0: return [] @@ -2896,7 +2910,8 @@ class ClientSession(object): return self.__dict__['_apidoc'] return VirtualMethod(self._callMethod, name, self) - def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=False, volume=None): + def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None, + overwrite=False, volume=None): if blocksize is None: blocksize = self.opts.get('upload_blocksize', 1048576) @@ -2930,7 +2945,8 @@ class ClientSession(object): hexdigest = util.adler32_constructor(chunk).hexdigest() full_chksum.update(chunk) if result['size'] != len(chunk): - raise GenericError("server returned wrong chunk size: %s != %s" % (result['size'], len(chunk))) + raise GenericError("server returned wrong chunk size: %s != %s" % + (result['size'], len(chunk))) if result['hexdigest'] != hexdigest: raise GenericError('upload checksum failed: %s != %s' % (result['hexdigest'], hexdigest)) @@ -2957,9 +2973,11 @@ class ClientSession(object): if problems and result['hexdigest'] != full_chksum.hexdigest(): raise GenericError("Uploaded file has wrong checksum: %s/%s, %s != %s" % (path, name, result['hexdigest'], full_chksum.hexdigest())) - self.logger.debug("Fast upload: %s complete. %i bytes in %.1f seconds", localfile, size, t2) + self.logger.debug("Fast upload: %s complete. 
%i bytes in %.1f seconds", + localfile, size, t2) - def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False, volume=None): + def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False, + volume=None): """prep a rawUpload call""" if not self.logged_in: raise ActionNotAllowed("you must be logged in to upload") @@ -2989,7 +3007,8 @@ class ClientSession(object): request = chunk return handler, headers, request - def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=True, volume=None): + def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None, + overwrite=True, volume=None): """upload a file in chunks using the uploadFile call""" if blocksize is None: blocksize = self.opts.get('upload_blocksize', 1048576) @@ -3044,7 +3063,8 @@ class ClientSession(object): tries = 0 while True: if debug: - self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" % (path, name, sz, digest, offset)) + self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" % + (path, name, sz, digest, offset)) if self.callMethod('uploadFile', path, name, sz, digest, offset, data, **volopts): break if tries <= retries: @@ -3063,9 +3083,11 @@ class ClientSession(object): if t2 <= 0: t2 = 1 if debug: - self.logger.debug("Uploaded %d bytes in %f seconds (%f kbytes/sec)" % (size, t1, size / t1 / 1024.0)) + self.logger.debug("Uploaded %d bytes in %f seconds (%f kbytes/sec)" % + (size, t1, size / t1 / 1024.0)) if debug: - self.logger.debug("Total: %d bytes in %f seconds (%f kbytes/sec)" % (ofs, t2, ofs / t2 / 1024.0)) + self.logger.debug("Total: %d bytes in %f seconds (%f kbytes/sec)" % + (ofs, t2, ofs / t2 / 1024.0)) if callback: callback(ofs, totalsize, size, t1, t2) fo.close() @@ -3281,8 +3303,8 @@ class DBHandler(logging.Handler): cursor.execute(command, data) cursor.close() # self.cnx.commit() - # XXX - committing here is most likely wrong, but we need to set commit_pending or something - # ...and this is really the wrong place for that + # XXX - committing here is most likely wrong, but we need to set commit_pending or + # something...and this is really the wrong place for that except BaseException: self.handleError(record) @@ -3583,7 +3605,9 @@ def add_file_logger(logger, fn): def add_stderr_logger(logger): handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] {%(process)d} %(name)s:%(lineno)d %(message)s')) + handler.setFormatter( + logging.Formatter( + '%(asctime)s [%(levelname)s] {%(process)d} %(name)s:%(lineno)d %(message)s')) handler.setLevel(logging.DEBUG) logging.getLogger(logger).addHandler(handler) @@ -3612,7 +3636,8 @@ def add_mail_logger(logger, addr): return addresses = addr.split(',') handler = logging.handlers.SMTPHandler("localhost", - "%s@%s" % (pwd.getpwuid(os.getuid())[0], socket.getfqdn()), + "%s@%s" % (pwd.getpwuid(os.getuid())[0], + socket.getfqdn()), addresses, "%s: error notice" % socket.getfqdn()) handler.setFormatter(logging.Formatter('%(pathname)s:%(lineno)d [%(levelname)s] %(message)s')) diff --git a/koji/auth.py b/koji/auth.py index 3a74a7cd..1a3eef6e 100644 --- a/koji/auth.py +++ b/koji/auth.py @@ -334,7 +334,8 @@ class Session(object): # Successfully authenticated via Kerberos, now log in if proxyuser: - proxyprincs = [princ.strip() for princ in context.opts.get('ProxyPrincipals', '').split(',')] + proxyprincs = [princ.strip() + for princ in context.opts.get('ProxyPrincipals', '').split(',')] if cprinc.name in proxyprincs: login_principal = 
proxyuser
         else:
@@ -408,12 +409,15 @@ class Session(object):
             authtype = koji.AUTHTYPE_GSSAPI
         else:
             if context.environ.get('SSL_CLIENT_VERIFY') != 'SUCCESS':
-                raise koji.AuthError('could not verify client: %s' % context.environ.get('SSL_CLIENT_VERIFY'))
+                raise koji.AuthError('could not verify client: %s' %
+                                     context.environ.get('SSL_CLIENT_VERIFY'))
             name_dn_component = context.opts.get('DNUsernameComponent', 'CN')
             username = context.environ.get('SSL_CLIENT_S_DN_%s' % name_dn_component)
             if not username:
-                raise koji.AuthError('unable to get user information (%s) from client certificate' % name_dn_component)
+                raise koji.AuthError(
+                    'unable to get user information (%s) from client certificate' %
+                    name_dn_component)
             client_dn = context.environ.get('SSL_CLIENT_S_DN')
             authtype = koji.AUTHTYPE_SSL
diff --git a/koji/daemon.py b/koji/daemon.py
index c3adad7e..ad425d92 100644
--- a/koji/daemon.py
+++ b/koji/daemon.py
@@ -110,8 +110,9 @@ def fast_incremental_upload(session, fname, fd, path, retries, logger):
             break


-def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, append=0, chroot=None, env=None):
-    """Run command with output redirected. If chroot is not None, chroot to the directory specified
+def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, append=0,
+               chroot=None, env=None):
+    """Run command with output redirected. If chroot is not None, chroot to the directory specified
     before running the command."""
     pid = os.fork()
     fd = None
@@ -287,11 +288,13 @@ class SCM(object):
         elif len(userhost) > 2:
             raise koji.GenericError('Invalid username@hostname specified: %s' % netloc)
         if not netloc:
-            raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the netloc element.' % self.url)
+            raise koji.GenericError(
+                'Unable to parse SCM URL: %s . Could not find the netloc element.' % self.url)

         # check for empty path before we apply normpath
         if not path:
-            raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the path element.' % self.url)
+            raise koji.GenericError(
+                'Unable to parse SCM URL: %s . Could not find the path element.' % self.url)

         path = os.path.normpath(path)
@@ -306,14 +309,19 @@ class SCM(object):
             # any such url should have already been caught by is_scm_url
             raise koji.GenericError('Invalid SCM URL. Path should begin with /: %s) ')

-        # check for validity: params should be empty, query may be empty, everything else should be populated
+        # check for validity: params should be empty, query may be empty, everything else should be
+        # populated
         if params:
-            raise koji.GenericError('Unable to parse SCM URL: %s . Params element %s should be empty.' % (self.url, params))
+            raise koji.GenericError(
+                'Unable to parse SCM URL: %s . Params element %s should be empty.' %
+                (self.url, params))
         if not scheme:  # pragma: no cover
             # should not happen because of is_scm_url check earlier
-            raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the scheme element.' % self.url)
+            raise koji.GenericError(
+                'Unable to parse SCM URL: %s . Could not find the scheme element.' % self.url)
         if not fragment:
-            raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the fragment element.' % self.url)
+            raise koji.GenericError(
+                'Unable to parse SCM URL: %s . Could not find the fragment element.' % self.url)

         # return parsed values
         return (scheme, user, netloc, path, query, fragment)
@@ -356,7 +364,8 @@ class SCM(object):
         for allowed_scm in allowed.split():
             scm_tuple = allowed_scm.split(':')
             if len(scm_tuple) < 2:
-                self.logger.warn('Ignoring incorrectly formatted SCM host:repository: %s' % allowed_scm)
+                self.logger.warn('Ignoring incorrectly formatted SCM host:repository: %s' %
+                                 allowed_scm)
                 continue
             host_pat = scm_tuple[0]
             repo_pat = scm_tuple[1]
@@ -378,11 +387,13 @@ class SCM(object):
                     if scm_tuple[3]:
                         self.source_cmd = scm_tuple[3].split(',')
                     else:
-                        # there was nothing after the trailing :, so they don't want to run a source_cmd at all
+                        # there was nothing after the trailing :,
+                        # so they don't want to run a source_cmd at all
                         self.source_cmd = None
                 break
         if not is_allowed:
-            raise koji.BuildError('%s:%s is not in the list of allowed SCMs' % (self.host, self.repository))
+            raise koji.BuildError(
+                '%s:%s is not in the list of allowed SCMs' % (self.host, self.repository))

     def checkout(self, scmdir, session=None, uploadpath=None, logfile=None):
         """
@@ -416,16 +427,20 @@ class SCM(object):
                          (self.scmtype, ' '.join(cmd), os.path.basename(logfile)))

         if self.scmtype == 'CVS':
-            pserver = ':pserver:%s@%s:%s' % ((self.user or 'anonymous'), self.host, self.repository)
-            module_checkout_cmd = ['cvs', '-d', pserver, 'checkout', '-r', self.revision, self.module]
+            pserver = ':pserver:%s@%s:%s' % ((self.user or 'anonymous'), self.host,
+                                             self.repository)
+            module_checkout_cmd = ['cvs', '-d', pserver, 'checkout', '-r', self.revision,
+                                   self.module]
             common_checkout_cmd = ['cvs', '-d', pserver, 'checkout', 'common']

         elif self.scmtype == 'CVS+SSH':
             if not self.user:
-                raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
+                raise koji.BuildError(
+                    'No user specified for repository access scheme: %s' % self.scheme)

             cvsserver = ':ext:%s@%s:%s' % (self.user, self.host, self.repository)
-            module_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', '-r', self.revision, self.module]
+            module_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', '-r', self.revision,
+                                   self.module]
             common_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', 'common']

             env = {'CVS_RSH': 'ssh'}
@@ -453,14 +468,16 @@ class SCM(object):
             update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
             update_checkout_dir = sourcedir

-            # self.module may be empty, in which case the specfile should be in the top-level directory
+            # self.module may be empty, in which case the specfile should be in the top-level
+            # directory
             if self.module:
                 # Treat the module as a directory inside the git repository
                 sourcedir = '%s/%s' % (sourcedir, self.module)

         elif self.scmtype == 'GIT+SSH':
             if not self.user:
-                raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
+                raise koji.BuildError(
+                    'No user specified for repository access scheme: %s' % self.scheme)
             gitrepo = 'git+ssh://%s@%s%s' % (self.user, self.host, self.repository)
             commonrepo = os.path.dirname(gitrepo) + '/common'
             checkout_path = os.path.basename(self.repository)
@@ -481,7 +498,8 @@ class SCM(object):
             update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
             update_checkout_dir = sourcedir

-            # self.module may be empty, in which case the specfile should be in the top-level directory
+            # self.module may be empty, in which case the specfile should be in the top-level
+            # directory
             if self.module:
                 # Treat the module as a directory inside the git repository
                 sourcedir = '%s/%s' % (sourcedir, self.module)
@@ -492,15 +510,18 @@ class SCM(object):
                 scheme = scheme.split('+')[1]

             svnserver = '%s%s%s' % (scheme, self.host, self.repository)
-            module_checkout_cmd = ['svn', 'checkout', '-r', self.revision, '%s/%s' % (svnserver, self.module), self.module]
+            module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
+                                   '%s/%s' % (svnserver, self.module), self.module]
             common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]

         elif self.scmtype == 'SVN+SSH':
             if not self.user:
-                raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
+                raise koji.BuildError(
+                    'No user specified for repository access scheme: %s' % self.scheme)

             svnserver = 'svn+ssh://%s@%s%s' % (self.user, self.host, self.repository)
-            module_checkout_cmd = ['svn', 'checkout', '-r', self.revision, '%s/%s' % (svnserver, self.module), self.module]
+            module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
+                                   '%s/%s' % (svnserver, self.module), self.module]
             common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]

         else:
@@ -513,8 +534,10 @@ class SCM(object):
         # Currently only required for GIT checkouts
         # Run the command in the directory the source was checked out into
         if self.scmtype.startswith('GIT') and globals().get('KOJIKAMID'):
-            _run(['git', 'config', 'core.autocrlf', 'true'], chdir=update_checkout_dir, fatal=True)
-            _run(['git', 'config', 'core.safecrlf', 'true'], chdir=update_checkout_dir, fatal=True)
+            _run(['git', 'config', 'core.autocrlf', 'true'],
+                 chdir=update_checkout_dir, fatal=True)
+            _run(['git', 'config', 'core.safecrlf', 'true'],
+                 chdir=update_checkout_dir, fatal=True)
         _run(update_checkout_cmd, chdir=update_checkout_dir, fatal=True)

         if self.use_common and not globals().get('KOJIKAMID'):
@@ -583,7 +606,8 @@ class TaskManager(object):

     def registerHandler(self, entry):
         """register and index task handler"""
-        if isinstance(entry, type(koji.tasks.BaseTaskHandler)) and issubclass(entry, koji.tasks.BaseTaskHandler):
+        if isinstance(entry, type(koji.tasks.BaseTaskHandler)) and \
+                issubclass(entry, koji.tasks.BaseTaskHandler):
             for method in entry.Methods:
                 self.handlers[method] = entry
@@ -638,7 +662,9 @@ class TaskManager(object):
             # task not running - expire the buildroot
             # TODO - consider recycling hooks here (with strong sanity checks)
             self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br)
-            self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, to_list(self.tasks.keys())))
+            self.logger.debug(
+                "Buildroot task: %r, Current tasks: %r" %
+                (task_id, to_list(self.tasks.keys())))
             self.session.host.setBuildRootState(id, st_expired)
             continue
         if nolocal:
@@ -678,7 +704,8 @@ class TaskManager(object):
             if not task:
                 self.logger.warn("%s: invalid task %s" % (desc, br['task_id']))
                 continue
-            if (task['state'] == koji.TASK_STATES['FAILED'] and age < self.options.failed_buildroot_lifetime):
+            if task['state'] == koji.TASK_STATES['FAILED'] and \
+                    age < self.options.failed_buildroot_lifetime:
                 # XXX - this could be smarter
                 # keep buildroots for failed tasks around for a little while
                 self.logger.debug("Keeping failed buildroot: %s" % desc)
@@ -1004,7 +1031,9 @@ class TaskManager(object):
             self.logger.info('%s (pid %i, taskID %i) is running' % (execname, pid, task_id))
         else:
             if signaled:
-                self.logger.info('%s (pid %i, taskID %i) was killed by signal %i' % (execname, pid, task_id, sig))
+                self.logger.info(
+                    '%s (pid %i, taskID %i) was killed by signal %i' %
+                    (execname, pid, task_id, sig))
             else:
                 self.logger.info('%s (pid %i, taskID %i) exited' % (execname, pid, task_id))
             return True
@@ -1041,7 +1070,8 @@ class TaskManager(object):
         if not os.path.isfile(proc_path):
             return None
         proc_file = open(proc_path)
-        procstats = [not field.isdigit() and field or int(field) for field in proc_file.read().split()]
+        procstats = [not field.isdigit() and field or int(field)
+                     for field in proc_file.read().split()]
         proc_file.close()

         cmd_path = '/proc/%i/cmdline' % pid
@@ -1084,9 +1114,9 @@ class TaskManager(object):
         while parents:
             for ppid in parents[:]:
                 for procstats in statsByPPID.get(ppid, []):
-                    # get the /proc entries with ppid as their parent, and append their pid to the list,
-                    # then recheck for their children
-                    # pid is the 0th field, ppid is the 3rd field
+                    # get the /proc entries with ppid as their parent, and append their pid to the
+                    # list, then recheck for their children pid is the 0th field, ppid is the 3rd
+                    # field
                     pids.append((procstats[0], procstats[1]))
                     parents.append(procstats[0])
                 parents.remove(ppid)
@@ -1154,7 +1184,8 @@ class TaskManager(object):
         availableMB = available // 1024 // 1024
         self.logger.debug("disk space available in '%s': %i MB", br_path, availableMB)
         if availableMB < self.options.minspace:
-            self.status = "Insufficient disk space at %s: %i MB, %i MB required" % (br_path, availableMB, self.options.minspace)
+            self.status = "Insufficient disk space at %s: %i MB, %i MB required" % \
+                (br_path, availableMB, self.options.minspace)
             self.logger.warn(self.status)
             return False
         return True
@@ -1189,7 +1220,9 @@ class TaskManager(object):
             return False
         if self.task_load > self.hostdata['capacity']:
             self.status = "Over capacity"
-            self.logger.info("Task load (%.2f) exceeds capacity (%.2f)" % (self.task_load, self.hostdata['capacity']))
+            self.logger.info(
+                "Task load (%.2f) exceeds capacity (%.2f)" %
+                (self.task_load, self.hostdata['capacity']))
             return False
         if len(self.tasks) >= self.options.maxjobs:
             # This serves as a backup to the capacity check and prevents
@@ -1238,7 +1271,8 @@ class TaskManager(object):
             self.logger.warn('Error during host check')
             self.logger.warn(''.join(traceback.format_exception(*sys.exc_info())))
         if not valid_host:
-            self.logger.info('Skipping task %s (%s) due to host check', task['id'], task['method'])
+            self.logger.info(
+                'Skipping task %s (%s) due to host check', task['id'], task['method'])
             return False
         data = self.session.host.openTask(task['id'])
         if data is None:
diff --git a/koji/db.py b/koji/db.py
index f7911a9b..29042ada 100644
--- a/koji/db.py
+++ b/koji/db.py
@@ -110,7 +110,8 @@ class CursorWrapper:
         try:
             return quote(operation, parameters)
         except Exception:
-            self.logger.exception('Unable to quote query:\n%s\nParameters: %s', operation, parameters)
+            self.logger.exception(
+                'Unable to quote query:\n%s\nParameters: %s', operation, parameters)
             return "INVALID QUERY"

     def preformat(self, sql, params):
diff --git a/koji/tasks.py b/koji/tasks.py
index 440eaa04..7ceac2f0 100644
--- a/koji/tasks.py
+++ b/koji/tasks.py
@@ -154,10 +154,14 @@ LEGACY_SIGNATURES = {
         [['tag', 'newer_than', 'nvrs'], None, None, (None, None)],
     ],
     'createLiveMedia': [
-        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
+        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
+          'opts'],
+         None, None, (None,)],
     ],
     'createAppliance': [
-        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
+        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
+          'opts'],
+         None, None, (None,)],
     ],
     'livecd': [
         [['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],
@@ -190,7 +194,9 @@ LEGACY_SIGNATURES = {
         [['spec_url', 'build_target', 'build', 'task', 'opts'], None, None, (None,)],
     ],
     'createLiveCD': [
-        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
+        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
+          'opts'],
+         None, None, (None,)],
     ],
     'appliance': [
         [['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],
@@ -199,19 +205,25 @@ LEGACY_SIGNATURES = {
         [['name', 'version', 'arches', 'target', 'inst_tree', 'opts'], None, None, (None,)],
     ],
     'tagBuild': [
-        [['tag_id', 'build_id', 'force', 'fromtag', 'ignore_success'], None, None, (False, None, False)],
+        [['tag_id', 'build_id', 'force', 'fromtag', 'ignore_success'],
+         None, None, (False, None, False)],
     ],
     'chainmaven': [
         [['builds', 'target', 'opts'], None, None, (None,)],
     ],
     'newRepo': [
-        [['tag', 'event', 'src', 'debuginfo', 'separate_src'], None, None, (None, False, False, False)],
+        [['tag', 'event', 'src', 'debuginfo', 'separate_src'],
+         None, None, (None, False, False, False)],
     ],
     'createImage': [
-        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'inst_tree', 'opts'], None, None, (None,)],
+        [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info',
+          'inst_tree', 'opts'],
+         None, None, (None,)],
    ],
    'tagNotification': [
-        [['recipients', 'is_successful', 'tag_info', 'from_info', 'build_info', 'user_info', 'ignore_success', 'failure_msg'], None, None, (None, '')],
+        [['recipients', 'is_successful', 'tag_info', 'from_info', 'build_info', 'user_info',
+          'ignore_success', 'failure_msg'],
+         None, None, (None, '')],
     ],
     'buildArch': [
         [['pkg', 'root', 'arch', 'keep_srpm', 'opts'], None, None, (None,)],
@@ -253,7 +265,9 @@ LEGACY_SIGNATURES = {
         [['options'], None, None, (None,)],
     ],
     'runroot': [
-        [['root', 'arch', 'command', 'keep', 'packages', 'mounts', 'repo_id', 'skip_setarch', 'weight', 'upload_logs', 'new_chroot'], None, None, (False, [], [], None, False, None, None, False)],
+        [['root', 'arch', 'command', 'keep', 'packages', 'mounts', 'repo_id', 'skip_setarch',
+          'weight', 'upload_logs', 'new_chroot'],
+         None, None, (False, [], [], None, False, None, None, False)],
     ],
     'distRepo': [
         [['tag', 'repo_id', 'keys', 'task_opts'], None, None, None],
@@ -400,7 +414,9 @@ class BaseTaskHandler(object):
                         self.session.getTaskResult(task)
                     checked.add(task)
                 except (koji.GenericError, six.moves.xmlrpc_client.Fault):
-                    self.logger.info("task %s failed or was canceled, cancelling unfinished tasks" % task)
+                    self.logger.info(
+                        "task %s failed or was canceled, cancelling unfinished tasks" %
+                        task)
                     self.session.cancelTaskChildren(self.id)
                     # reraise the original error now, rather than waiting for
                     # an error in taskWaitResults()
@@ -743,8 +759,10 @@ class RestartHostsTask(BaseTaskHandler):
         my_tasks = None
         for host in hosts:
             # note: currently task assignments bypass channel restrictions
-            task1 = self.subtask('restart', [host], assign=host['id'], label="restart %i" % host['id'])
-            task2 = self.subtask('restartVerify', [task1, host], assign=host['id'], label="sleep %i" % host['id'])
+            task1 = self.subtask('restart', [host],
+                                 assign=host['id'], label="restart %i" % host['id'])
+            task2 = self.subtask('restartVerify', [task1, host],
+                                 assign=host['id'], label="sleep %i" % host['id'])
             subtasks.append(task1)
             subtasks.append(task2)
             if host['id'] == this_host:
@@ -790,8 +808,10 @@ class DependantTask(BaseTaskHandler):

         subtasks = []
         for task in task_list:
-            # **((len(task)>2 and task[2]) or {}) expands task[2] into opts if it exists, allows for things like 'priority=15'
-            task_id = self.session.host.subtask(method=task[0], arglist=task[1], parent=self.id, **((len(task) > 2 and task[2]) or {}))
+            # **((len(task)>2 and task[2]) or {}) expands task[2] into opts if it exists, allows
+            # for things like 'priority=15'
+            task_id = self.session.host.subtask(method=task[0], arglist=task[1], parent=self.id,
+                                                **((len(task) > 2 and task[2]) or {}))
             if task_id:
                 subtasks.append(task_id)
         if subtasks:
diff --git a/koji/util.py b/koji/util.py
index 919f5929..745362d1 100644
--- a/koji/util.py
+++ b/koji/util.py
@@ -54,7 +54,8 @@ def deprecated(message):


 def _changelogDate(cldate):
-    return time.strftime('%a %b %d %Y', time.strptime(koji.formatTime(cldate), '%Y-%m-%d %H:%M:%S'))
+    return time.strftime('%a %b %d %Y',
+                         time.strptime(koji.formatTime(cldate), '%Y-%m-%d %H:%M:%S'))


 def formatChangelog(entries):
@@ -813,7 +814,8 @@ def parse_maven_param(confs, chain=False, scratch=False, section=None):
         else:
             raise ValueError("Section %s does not exist in: %s" % (section, ', '.join(confs)))
     elif len(builds) > 1:
-        raise ValueError("Multiple sections in: %s, you must specify the section" % ', '.join(confs))
+        raise ValueError(
+            "Multiple sections in: %s, you must specify the section" % ', '.join(confs))
     return builds
diff --git a/plugins/builder/runroot.py b/plugins/builder/runroot.py
index 31552c0e..d2671925 100644
--- a/plugins/builder/runroot.py
+++ b/plugins/builder/runroot.py
@@ -47,7 +47,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
             options.append(o)
         rel_path = path[len(mount_data['mountpoint']):]
         rel_path = rel_path[1:] if rel_path.startswith('/') else rel_path
-        res = (os.path.join(mount_data['path'], rel_path), path, mount_data['fstype'], ','.join(options))
+        res = (os.path.join(mount_data['path'], rel_path), path, mount_data['fstype'],
+               ','.join(options))
         return res

     def _read_config(self):
@@ -94,11 +95,15 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
         except six.moves.configparser.NoOptionError:
             raise koji.GenericError("bad config: missing options in %s section" % section_name)

-        for path in self.config['default_mounts'] + self.config['safe_roots'] + [x[0] for x in self.config['path_subs']]:
+        for path in self.config['default_mounts'] + self.config['safe_roots'] + \
+                [x[0] for x in self.config['path_subs']]:
             if not path.startswith('/'):
-                raise koji.GenericError("bad config: all paths (default_mounts, safe_roots, path_subs) needs to be absolute: %s" % path)
+                raise koji.GenericError(
+                    "bad config: all paths (default_mounts, safe_roots, path_subs) needs to be "
+                    "absolute: %s" % path)

-    def handler(self, root, arch, command, keep=False, packages=[], mounts=[], repo_id=None, skip_setarch=False, weight=None, upload_logs=None, new_chroot=None):
+    def handler(self, root, arch, command, keep=False, packages=[], mounts=[], repo_id=None,
+                skip_setarch=False, weight=None, upload_logs=None, new_chroot=None):
         """Create a buildroot and run a command (as root) inside of it

         Command may be a string or a list.
@@ -141,15 +146,19 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
                     break
             else:
                 # no overlap
-                raise koji.BuildError("host does not match tag arches: %s (%s)" % (root, tag_arches))
+                raise koji.BuildError(
+                    "host does not match tag arches: %s (%s)" % (root, tag_arches))
         else:
             br_arch = arch
         if repo_id:
             repo_info = self.session.repoInfo(repo_id, strict=True)
             if repo_info['tag_name'] != root:
-                raise koji.BuildError("build tag (%s) does not match repo tag (%s)" % (root, repo_info['tag_name']))
+                raise koji.BuildError(
+                    "build tag (%s) does not match repo tag (%s)" % (root, repo_info['tag_name']))
             if repo_info['state'] not in (koji.REPO_STATES['READY'], koji.REPO_STATES['EXPIRED']):
-                raise koji.BuildError("repos in the %s state may not be used by runroot" % koji.REPO_STATES[repo_info['state']])
+                raise koji.BuildError(
+                    "repos in the %s state may not be used by runroot" %
+                    koji.REPO_STATES[repo_info['state']])
         else:
             repo_info = self.session.getRepo(root)
             if not repo_info:
@@ -186,12 +195,15 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
             cmdstr = ' '.join(["'%s'" % arg.replace("'", r"'\''") for arg in command])
         # A nasty hack to put command output into its own file until mock can be
         # patched to do something more reasonable than stuff everything into build.log
-        cmdargs = ['/bin/sh', '-c', "{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; exit ${PIPESTATUS[0]}" % cmdstr]
+        cmdargs = ['/bin/sh', '-c',
+                   "{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; exit "
+                   "${PIPESTATUS[0]}" % cmdstr]

         # always mount /mnt/redhat (read-only)
         # always mount /mnt/iso (read-only)
         # also need /dev bind mount
-        self.do_mounts(rootdir, [self._get_path_params(x) for x in self.config['default_mounts']])
+        self.do_mounts(rootdir,
+                       [self._get_path_params(x) for x in self.config['default_mounts']])
         self.do_extra_mounts(rootdir, mounts)
         mock_cmd = ['chroot']
         if new_chroot:
@@ -199,7 +211,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
         elif new_chroot is False:  # None -> no option added
             mock_cmd.append('--old-chroot')
         if skip_setarch:
-            # we can't really skip it, but we can set it to the current one instead of of the chroot one
+            # we can't really skip it, but we can set it to the current one instead of of the
+            # chroot one
             myarch = platform.uname()[5]
             mock_cmd.extend(['--arch', myarch])
         mock_cmd.append('--')
@@ -279,7 +292,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
             cmd = ['mount', '-t', type, '-o', opts, dev, mpoint]
             self.logger.info("Mount command: %r" % cmd)
             koji.ensuredir(mpoint)
-            status = log_output(self.session, cmd[0], cmd, logfile, uploadpath, logerror=True, append=True)
+            status = log_output(self.session, cmd[0], cmd, logfile, uploadpath,
+                                logerror=True, append=True)
             if not isSuccess(status):
                 error = koji.GenericError("Unable to mount %s: %s" %
                                           (mpoint, parseStatus(status, cmd)))
@@ -306,7 +320,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
         failed = []
         self.logger.info("Unmounting (runroot): %s" % mounts)
         for dir in mounts:
-            proc = subprocess.Popen(["umount", "-l", dir], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            proc = subprocess.Popen(["umount", "-l", dir],
+                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             if proc.wait() != 0:
                 output = proc.stdout.read()
                 output += proc.stderr.read()
diff --git a/plugins/cli/runroot.py b/plugins/cli/runroot.py
index 7c9a16a2..f31e950c 100644
--- a/plugins/cli/runroot.py
+++ b/plugins/cli/runroot.py
@@ -22,8 +22,10 @@ def handle_runroot(options, session, args):
     usage += _("\n(Specify the --help global option for a list of other help options)")
     parser = OptionParser(usage=usage)
     parser.disable_interspersed_args()
-    parser.add_option("-p", "--package", action="append", default=[], help=_("make sure this package is in the chroot"))
-    parser.add_option("-m", "--mount", action="append", default=[], help=_("mount this directory read-write in the chroot"))
+    parser.add_option("-p", "--package", action="append", default=[],
+                      help=_("make sure this package is in the chroot"))
+    parser.add_option("-m", "--mount", action="append", default=[],
+                      help=_("mount this directory read-write in the chroot"))
     parser.add_option("--skip-setarch", action="store_true", default=False,
                       help=_("bypass normal setarch in the chroot"))
     parser.add_option("-w", "--weight", type='int', help=_("set task weight"))
@@ -39,7 +41,8 @@ def handle_runroot(options, session, args):
     parser.add_option("--repo-id", type="int", help=_("ID of the repo to use"))
     parser.add_option("--nowait", action="store_false", dest="wait", default=True,
                       help=_("Do not wait on task"))
-    parser.add_option("--watch", action="store_true", help=_("Watch task instead of printing runroot.log"))
+    parser.add_option("--watch", action="store_true",
+                      help=_("Watch task instead of printing runroot.log"))
     parser.add_option("--quiet", action="store_true", default=options.quiet,
                       help=_("Do not print the task information"))
diff --git a/plugins/cli/save_failed_tree.py b/plugins/cli/save_failed_tree.py
index 7bfee70d..91dca02f 100644
--- a/plugins/cli/save_failed_tree.py
+++ b/plugins/cli/save_failed_tree.py
@@ -14,7 +14,8 @@ def handle_save_failed_tree(options, session, args):
     usage += _("\n(Specify the --help global option for a list of other help options)")
     parser = OptionParser(usage=usage)
     parser.add_option("-f", "--full", action="store_true", default=False,
-                      help=_("Download whole tree, if not specified, only builddir will be downloaded"))
+                      help=_("Download whole tree, if not specified, "
+                             "only builddir will be downloaded"))
     parser.add_option("-t", "--task", action="store_const", dest="mode", const="task",
                       default="task", help=_("Treat ID as a task ID (the default)"))
@@ -69,4 +70,5 @@ def handle_save_failed_tree(options, session, args):
         return
     else:
         session.logout()
-        return watch_tasks(session, [task_id], quiet=opts.quiet, poll_interval=options.poll_interval)
+        return watch_tasks(session, [task_id],
+                           quiet=opts.quiet, poll_interval=options.poll_interval)
diff --git a/plugins/hub/save_failed_tree.py b/plugins/hub/save_failed_tree.py
index beb18e68..00792234 100644
--- a/plugins/hub/save_failed_tree.py
+++ b/plugins/hub/save_failed_tree.py
@@ -40,10 +40,12 @@ def saveFailedTree(buildrootID, full=False, **opts):
     taskID = brinfo['task_id']
     task_info = kojihub.Task(taskID).getInfo()
     if task_info['state'] != koji.TASK_STATES['FAILED']:
-        raise koji.PreBuildError("Task %s has not failed. Only failed tasks can upload their buildroots." % taskID)
+        raise koji.PreBuildError(
+            "Task %s has not failed. Only failed tasks can upload their buildroots." % taskID)
     elif allowed_methods != '*' and task_info['method'] not in allowed_methods:
-        raise koji.PreBuildError("Only %s tasks can upload their buildroots (Task %s is %s)." %
-                                 (', '.join(allowed_methods), task_info['id'], task_info['method']))
+        raise koji.PreBuildError(
+            "Only %s tasks can upload their buildroots (Task %s is %s)." %
+            (', '.join(allowed_methods), task_info['id'], task_info['method']))
     elif task_info["owner"] != context.session.user_id and not context.session.hasPerm('admin'):
         raise koji.ActionNotAllowed("Only owner of failed task or 'admin' can run this task.")
     elif not kojihub.get_host(task_info['host_id'])['enabled']:
diff --git a/tests/test_cli/test_list_tagged.py b/tests/test_cli/test_list_tagged.py
index dd20fdc1..0bf81b8f 100644
--- a/tests/test_cli/test_list_tagged.py
+++ b/tests/test_cli/test_list_tagged.py
@@ -251,7 +251,7 @@ Options:
   --quiet               Do not print the header information
   --paths               Show the file paths
   --sigs                Show signatures
-  --type=TYPE           Show builds of the given type only. Currently supported
+  --type=TYPE           Show builds of the given type only. Currently supported
                         types: maven, win, image
   --event=EVENT#        query at event
   --ts=TIMESTAMP        query at last event before timestamp
diff --git a/util/koji-gc b/util/koji-gc
index ea9fa899..74fd0b81 100755
--- a/util/koji-gc
+++ b/util/koji-gc
@@ -364,7 +364,8 @@ def ensure_connection(session):
     except requests.exceptions.ConnectionError:
         error(_("Error: Unable to connect to server"))
     if ret != koji.API_VERSION:
-        warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
+        warn(_("WARNING: The server is at API version %d and the client is at %d" %
+               (ret, koji.API_VERSION)))


 def has_krb_creds():
@@ -394,7 +395,8 @@ def activate_session(session):
     elif has_krb_creds() or (options.keytab and options.principal):
         try:
             if options.keytab and options.principal:
-                session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
+                session.krb_login(principal=options.principal, keytab=options.keytab,
+                                  proxyuser=options.runas)
             else:
                 session.krb_login(proxyuser=options.runas)
         except krbV.Krb5Error as e:
@@ -503,7 +505,8 @@ def handle_trash():
             continue
         if refs.get('archives'):
             if options.debug:
-                print("[%i/%i] Build has %i archive references: %s" % (i, N, len(refs['archives']), nvr))
+                print("[%i/%i] Build has %i archive references: %s" %
+                      (i, N, len(refs['archives']), nvr))
                 # pprint.pprint(refs['archives'])
             continue
         if refs.get('component_of'):
@@ -941,7 +944,8 @@ def handle_prune():
                 else:
                     print("Untagging build %s from %s" % (nvr, tagname))
                     try:
-                        session.untagBuildBypass(taginfo['id'], entry['build_id'], force=bypass)
+                        session.untagBuildBypass(taginfo['id'], entry['build_id'],
+                                                 force=bypass)
                         untagged.setdefault(nvr, {})[tagname] = 1
                     except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
                         print("Warning: untag operation failed: %s" % e)
diff --git a/util/koji-shadow b/util/koji-shadow
index e388551c..55490e41 100755
--- a/util/koji-shadow
+++ b/util/koji-shadow
@@ -145,13 +145,15 @@ def get_options():
     parser.add_option("--rules-ignorelist",
                       help=_("Rules: list of packages to ignore"))
     parser.add_option("--rules-excludelist",
-                      help=_("Rules: list of packages to are excluded using ExcludeArch or ExclusiveArch"))
+                      help=_("Rules: list of packages to are excluded using ExcludeArch or "
+                             "ExclusiveArch"))
     parser.add_option("--rules-includelist",
                       help=_("Rules: list of packages to always include"))
     parser.add_option("--rules-protectlist",
                       help=_("Rules: list of package names to never replace"))
     parser.add_option("--tag-build", action="store_true", default=False,
-                      help=_("tag successful builds into the tag we are building, default is to not tag"))
+                      help=_("tag successful builds into the tag we are building, default is to "
+                             "not tag"))
     parser.add_option("--logfile",
                       help=_("file where everything gets logged"))
     parser.add_option("--arches",
@@ -298,14 +300,16 @@ def activate_session(session):

     if os.path.isfile(options.auth_cert):
         # authenticate using SSL client cert
-        session.ssl_login(cert=options.auth_cert, serverca=options.serverca, proxyuser=options.runas)
+        session.ssl_login(cert=options.auth_cert, serverca=options.serverca,
+                          proxyuser=options.runas)
     elif options.user:
         # authenticate using user/password
         session.login()
     elif krbV:
         try:
             if options.keytab and options.principal:
-                session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
+                session.krb_login(principal=options.principal, keytab=options.keytab,
+                                  proxyuser=options.runas)
             else:
                 session.krb_login(proxyuser=options.runas)
         except krbV.Krb5Error as e:
@@ -537,12 +541,14 @@ class TrackedBuild(object):
                 # each buildroot had this as a base package
                 base.append(name)
         if len(tags) > 1:
-            log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, to_list(tags.keys())))
+            log("Warning: found multiple buildroot tags for %s: %s" %
+                (self.nvr, to_list(tags.keys())))
             counts = sorted([(n, tag) for tag, n in six.iteritems(tags)])
             tag = counts[-1][1]
         else:
             tag = to_list(tags.keys())[0]
-        # due bugs in used tools mainline koji instance could store empty buildroot infos for builds
+        # due bugs in used tools mainline koji instance could store empty buildroot infos for
+        # builds
         if len(builds) == 0:
             self.setState("noroot")
         self.deps = builds
@@ -655,7 +661,8 @@ class BuildTracker(object):
             return -1

     def newerBuild(self, build, tag):
-        # XXX: secondary arches need a policy to say if we have newer build localy it will be the substitute
+        # XXX: secondary arches need a policy to say if we have newer build localy it will be the
+        # substitute
         localBuilds = session.listTagged(tag, inherit=True, package=str(build.name))
         newer = None
         parentevr = (str(build.epoch), build.version, build.release)
@@ -664,14 +671,16 @@ class BuildTracker(object):
             latestevr = (str(b['epoch']), b['version'], b['release'])
             newestRPM = self.rpmvercmp(parentevr, latestevr)
             if options.debug:
-                log("remote evr: %s \nlocal evr: %s \nResult: %s" % (parentevr, latestevr, newestRPM))
+                log("remote evr: %s \nlocal evr: %s \nResult: %s" %
+                    (parentevr, latestevr, newestRPM))
             if newestRPM == -1:
                 newer = b
             else:
                 break  # the local is newer
         if newer is not None:
-            info = session.getBuild("%s-%s-%s" % (str(newer['name']), newer['version'], newer['release']))
+            info = session.getBuild("%s-%s-%s" %
+                                    (str(newer['name']), newer['version'], newer['release']))
             if info:
                 build = LocalBuild(info)
                 self.substitute_idx[parentnvr] = build
@@ -751,7 +760,8 @@ class BuildTracker(object):
                 if depth > 0:
                     log("%sDep replaced: %s->%s" % (head, build.nvr, replace))
                 return build
-        if options.prefer_new and (depth > 0) and (tag is not None) and not (build.state == "common"):
+        if options.prefer_new and (depth > 0) and (tag is not None) and \
+                not (build.state == "common"):
             latestBuild = self.newerBuild(build, tag)
             if latestBuild is not None:
                 build.substitute = latestBuild.nvr
@@ -875,7 +885,8 @@ class BuildTracker(object):
                 finally:
                     os.umask(old_umask)
             else:
-                # TODO - would be possible, using uploadFile directly, to upload without writing locally.
+                # TODO - would be possible, using uploadFile directly,
+                # to upload without writing locally.
                 # for now, though, just use uploadWrapper
                 koji.ensuredir(options.workpath)
                 dst = "%s/%s" % (options.workpath, fn)
@@ -1053,7 +1064,8 @@ class BuildTracker(object):
             session.groupListAdd(taginfo['id'], 'build', force=True)
             # using force in case group is blocked. This shouldn't be the case, but...
             for pkg_name in drop_pkgs:
-                # in principal, our tag should not have inheritance, so the remove call is the right thing
+                # in principal, our tag should not have inheritance,
+                # so the remove call is the right thing
                 session.groupPackageListRemove(taginfo['id'], 'build', pkg_name)
             for pkg_name in add_pkgs:
                 session.groupPackageListAdd(taginfo['id'], 'build', pkg_name)
@@ -1278,7 +1290,8 @@ def main(args):
             logfile = None
     if logfile is not None:
         log("logging to %s" % filename)
-        os.write(logfile, "\n\n========================================================================\n")
+        os.write(logfile,
+                 "\n\n========================================================================\n")

     if options.build:
         binfo = remote.getBuild(options.build, strict=True)
diff --git a/util/koji-sweep-db b/util/koji-sweep-db
index 8714e857..48da7041 100755
--- a/util/koji-sweep-db
+++ b/util/koji-sweep-db
@@ -36,7 +36,8 @@ def clean_reservations(cursor, vacuum, test, age):


 def clean_notification_tasks(cursor, vacuum, test, age):
-    q = " FROM task WHERE method = 'build' AND completion_time < NOW() - '%s days'::interval" % int(age)
+    q = " FROM task WHERE method = 'build' AND completion_time < NOW() - '%s days'::interval" % \
+        int(age)
     if options.verbose:
         cursor.execute("SELECT COUNT(*) " + q)
         rows = cursor.fetchall()[0][0]
@@ -95,7 +96,8 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
         return

     # delete standard buildroots
-    cursor.execute("DELETE FROM standard_buildroot WHERE task_id IN (SELECT task_id FROM temp_scratch_tasks)")
+    cursor.execute(
+        "DELETE FROM standard_buildroot WHERE task_id IN (SELECT task_id FROM temp_scratch_tasks)")

     # delete tasks finally
     cursor.execute("DELETE FROM task WHERE id IN (SELECT task_id FROM temp_scratch_tasks)")
@@ -106,7 +108,8 @@ def clean_scratch_tasks(cursor, vacuum, test, age):


 def clean_buildroots(cursor, vacuum, test):
-    q = " FROM buildroot WHERE cg_id IS NULL AND id NOT IN (SELECT buildroot_id FROM standard_buildroot)"
+    q = " FROM buildroot " \
+        "WHERE cg_id IS NULL AND id NOT IN (SELECT buildroot_id FROM standard_buildroot)"

     if options.verbose:
         cursor.execute("SELECT COUNT(*) " + q)
@@ -206,7 +209,8 @@ if __name__ == "__main__":
         clean_sessions(cursor, options.vacuum, options.test, options.sessions_age)
         clean_reservations(cursor, options.vacuum, options.test, options.reservations_age)
     if options.tag_notifications:
-        clean_notification_tasks(cursor, options.vacuum, options.test, age=options.tag_notifications_age)
+        clean_notification_tasks(cursor, options.vacuum, options.test,
+                                 age=options.tag_notifications_age)
     if options.scratch:
         clean_scratch_tasks(cursor, options.vacuum, options.test, age=options.scratch_age)
     if options.buildroots:
diff --git a/util/kojira b/util/kojira
index 3931bb94..8ad82ce4 100755
--- a/util/kojira
+++ b/util/kojira
@@ -269,7 +269,8 @@ class RepoManager(object):
         self._local.session = value

     def printState(self):
-        self.logger.debug('Tracking %i repos, %i child processes', len(self.repos), len(self.delete_pids))
+        self.logger.debug('Tracking %i repos, %i child processes',
+                          len(self.repos), len(self.delete_pids))
         for tag_id, task_id in six.iteritems(self.tasks):
             self.logger.debug("Tracking task %s for tag %s", task_id, tag_id)
         for pid, desc in six.iteritems(self.delete_pids):
@@ -348,8 +349,9 @@ class RepoManager(object):
             if repo:
                 # we're already tracking it
                 if repo.state != data['state']:
-                    self.logger.info('State changed for repo %s: %s -> %s'
-                                     % (repo_id, koji.REPO_STATES[repo.state], koji.REPO_STATES[data['state']]))
+                    self.logger.info(
+                        'State changed for repo %s: %s -> %s',
+                        repo_id, koji.REPO_STATES[repo.state], koji.REPO_STATES[data['state']])
                     repo.state = data['state']
             else:
                 self.logger.info('Found repo %s, state=%s'
@@ -357,7 +359,7 @@ class RepoManager(object):
                 repo = ManagedRepo(self, data)
                 self.repos[repo_id] = repo
                 if not getTag(self.session, repo.tag_id) and not repo.expired():
-                    self.logger.info('Tag %d for repo %d disappeared, expiring.' % (repo.tag_id, repo_id))
+                    self.logger.info('Tag %d for repo %d disappeared, expiring.', repo.tag_id, repo_id)
                     repo.expire()
         if len(self.repos) > len(repodata):
             # This shouldn't normally happen, but might if someone else calls
@@ -491,20 +493,23 @@ class RepoManager(object):
                         self.logger.debug("did not expect %s; age: %s", repodir, age)
                     if age > max_age:
-                        self.logger.info("Removing unexpected directory (no such repo): %s", repodir)
+                        self.logger.info(
+                            "Removing unexpected directory (no such repo): %s", repodir)
                         if symlink:
                             os.unlink(repodir)
                         else:
                             self.rmtree(repodir)
                     continue
                 if rinfo['tag_name'] != tag:
-                    self.logger.warn("Tag name mismatch (rename?): %s vs %s", tag, rinfo['tag_name'])
+                    self.logger.warn(
+                        "Tag name mismatch (rename?): %s vs %s", tag, rinfo['tag_name'])
                     continue
                 if rinfo['state'] in (koji.REPO_DELETED, koji.REPO_PROBLEM):
                     age = time.time() - max(rinfo['create_ts'], dir_ts)
                     self.logger.debug("potential removal candidate: %s; age: %s" % (repodir, age))
                     if age > max_age:
-                        logger.info("Removing stray repo (state=%s): %s" % (koji.REPO_STATES[rinfo['state']], repodir))
+                        logger.info("Removing stray repo (state=%s): %s",
+                                    koji.REPO_STATES[rinfo['state']], repodir)
                         if symlink:
                             os.unlink(repodir)
                         else:
@@ -622,11 +627,12 @@ class RepoManager(object):
                 tstate = koji.TASK_STATES[tinfo['state']]
                 tag_id = self.tasks[task_id]['tag_id']
                 if tstate == 'CLOSED':
-                    self.logger.info("Finished: newRepo task %s for tag %s" % (task_id, tag_id))
+                    self.logger.info("Finished: newRepo task %s for tag %s", task_id, tag_id)
                     self.recent_tasks[task_id] = time.time()
                     del self.tasks[task_id]
                 elif tstate in ('CANCELED', 'FAILED'):
-                    self.logger.info("Problem: newRepo task %s for tag %s is %s" % (task_id, tag_id, tstate))
+                    self.logger.info(
+                        "Problem: newRepo task %s for tag %s is %s", task_id, tag_id, tstate)
                     self.recent_tasks[task_id] = time.time()
                     del self.tasks[task_id]
                 else:
@@ -635,7 +641,8 @@ class RepoManager(object):

         # also check other newRepo tasks
         repo_tasks = self.session.listTasks(opts={'method': 'newRepo',
-                                                  'state': ([koji.TASK_STATES[s] for s in ('FREE', 'OPEN')])})
+                                                  'state': ([koji.TASK_STATES[s]
+                                                             for s in ('FREE', 'OPEN')])})
         others = [t for t in repo_tasks if t['id'] not in self.tasks]
         for tinfo in others:
             if tinfo['id'] not in self.other_tasks:
@@ -947,8 +954,8 @@ def get_options():
                 'max_delete_processes', 'max_repo_tasks_maven',
                 'delete_batch_size', 'dist_repo_lifetime',
                 'sleeptime', 'recent_tasks_lifetime')
-    str_opts = ('topdir', 'server', 'user', 'password', 'logfile', 'principal', 'keytab', 'krbservice',
-                'cert', 'ca', 'serverca', 'debuginfo_tags',
+    str_opts = ('topdir', 'server', 'user', 'password', 'logfile', 'principal', 'keytab',
+                'krbservice', 'cert', 'ca', 'serverca', 'debuginfo_tags',
                 'source_tags', 'separate_source_tags', 'ignore_tags')  # FIXME: remove ca here
     bool_opts = ('verbose', 'debug', 'ignore_stray_repos', 'offline_retry',
                  'krb_rdns', 'krb_canon_host', 'no_ssl_verify')
diff --git a/vm/kojikamid.py b/vm/kojikamid.py
index a2f159e5..d03d9ada 100755
--- a/vm/kojikamid.py
+++ b/vm/kojikamid.py
@@ -183,17 +183,20 @@ class WindowsBuild(object):
     def checkout(self):
         """Checkout sources, winspec, and patches, and apply patches"""
         src_scm = SCM(self.source_url)  # noqa: F821
-        self.source_dir = src_scm.checkout(ensuredir(os.path.join(self.workdir, 'source')))  # noqa: F821
+        self.source_dir = src_scm.checkout(
+            ensuredir(os.path.join(self.workdir, 'source')))  # noqa: F821
         self.zipDir(self.source_dir, os.path.join(self.workdir, 'sources.zip'))
         if 'winspec' in self.task_opts:
             spec_scm = SCM(self.task_opts['winspec'])  # noqa: F821
-            self.spec_dir = spec_scm.checkout(ensuredir(os.path.join(self.workdir, 'spec')))  # noqa: F821
+            self.spec_dir = spec_scm.checkout(
+                ensuredir(os.path.join(self.workdir, 'spec')))  # noqa: F821
             self.zipDir(self.spec_dir, os.path.join(self.workdir, 'spec.zip'))
         else:
             self.spec_dir = self.source_dir
         if 'patches' in self.task_opts:
             patch_scm = SCM(self.task_opts['patches'])  # noqa: F821
-            self.patches_dir = patch_scm.checkout(ensuredir(os.path.join(self.workdir, 'patches')))  # noqa: F821
+            self.patches_dir = patch_scm.checkout(
+                ensuredir(os.path.join(self.workdir, 'patches')))  # noqa: F821
             self.zipDir(self.patches_dir, os.path.join(self.workdir, 'patches.zip'))
             self.applyPatches(self.source_dir, self.patches_dir)
         self.virusCheck(self.workdir)
@@ -207,7 +210,8 @@ class WindowsBuild(object):
             raise BuildError('no patches found at %s' % patchdir)  # noqa: F821
         patches.sort()
         for patch in patches:
-            cmd = ['/bin/patch', '--verbose', '-d', sourcedir, '-p1', '-i', os.path.join(patchdir, patch)]
+            cmd = ['/bin/patch', '--verbose', '-d', sourcedir, '-p1', '-i',
+                   os.path.join(patchdir, patch)]
             run(cmd, fatal=True)

     def loadConfig(self):
@@ -241,7 +245,8 @@ class WindowsBuild(object):
         # absolute paths, or without a path in which case it is searched for
         # on the PATH.
         if conf.has_option('building', 'preinstalled'):
-            self.preinstalled.extend([e.strip() for e in conf.get('building', 'preinstalled').split('\n') if e])
+            self.preinstalled.extend(
+                [e.strip() for e in conf.get('building', 'preinstalled').split('\n') if e])

         # buildrequires and provides are multi-valued (space-separated)
         for br in conf.get('building', 'buildrequires').split():
@@ -336,7 +341,8 @@ class WindowsBuild(object):
         with open(destpath, 'w') as destfile:
             offset = 0
             while True:
-                encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576, brtype)
+                encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576,
+                                              brtype)
                 if not encoded:
                     break
                 data = base64.b64decode(encoded)
@@ -349,9 +355,11 @@ class WindowsBuild(object):
         if 'checksum_type' in fileinfo:
             digest = checksum.hexdigest()
             if fileinfo['checksum'] != digest:
-                raise BuildError('checksum validation failed for %s, %s (computed) != %s (provided)' %  # noqa: F821
-                                 (destpath, digest, fileinfo['checksum']))
-            self.logger.info('Retrieved %s (%s bytes, %s: %s)', destpath, offset, checksum_type, digest)
+                raise BuildError(  # noqa: F821
+                    'checksum validation failed for %s, %s (computed) != %s (provided)' %
+                    (destpath, digest, fileinfo['checksum']))
+            self.logger.info(
+                'Retrieved %s (%s bytes, %s: %s)', destpath, offset, checksum_type, digest)
         else:
             self.logger.info('Retrieved %s (%s bytes)', destpath, offset)
@@ -409,7 +417,8 @@ class WindowsBuild(object):

     def cmdBuild(self):
         """Do the build: run the execute line(s) with cmd.exe"""
-        tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat', dir='/cygdrive/c/Windows/Temp')
+        tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat',
+                                          dir='/cygdrive/c/Windows/Temp')
         script = os.fdopen(tmpfd, 'w')
         for attr in ['source_dir', 'spec_dir', 'patches_dir']:
             val = getattr(self, attr)
@@ -630,7 +639,8 @@ def get_mgmt_server():
     # supported by python/cygwin/Windows
     task_port = server.getPort(macaddr)
     logger.debug('found task-specific port %s', task_port)
-    return six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (gateway, task_port), allow_none=True)
+    return six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (gateway, task_port),
+                                               allow_none=True)


 def get_options():
@@ -641,8 +651,10 @@ def get_options():
     """
     parser = OptionParser(usage=usage)
     parser.add_option('-d', '--debug', action='store_true', help='Log debug statements')
-    parser.add_option('-i', '--install', action='store_true', help='Install this daemon as a service', default=False)
-    parser.add_option('-u', '--uninstall', action='store_true', help='Uninstall this daemon if it was installed previously as a service', default=False)
+    parser.add_option('-i', '--install', action='store_true', default=False,
+                      help='Install this daemon as a service')
+    parser.add_option('-u', '--uninstall', action='store_true', default=False,
+                      help='Uninstall this daemon if it was installed previously as a service')
     (options, args) = parser.parse_args()
     return options
diff --git a/vm/kojivmd b/vm/kojivmd
index ef9fa2b4..3981783d 100755
--- a/vm/kojivmd
+++ b/vm/kojivmd
@@ -269,9 +269,11 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):

     def __init__(self, addr, port):
         if sys.version_info[:2] <= (2, 4):
-            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port), logRequests=False)
+            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port),
+                                                                logRequests=False)
         else:
-            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port), logRequests=False,
+            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port),
+                                                                logRequests=False,
                                                                 allow_none=True)
         self.logger = logging.getLogger('koji.vm.DaemonXMLRPCServer')
         self.socket.settimeout(5)
@@ -307,7 +309,8 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):
                 else:
                     response = self._dispatch(method, params)
                     response = (response,)
-                response = six.moves.xmlrpc_client.dumps(response, methodresponse=1, allow_none=True)
+                response = six.moves.xmlrpc_client.dumps(response,
+                                                         methodresponse=1, allow_none=True)
             except six.moves.xmlrpc_client.Fault as fault:
                 response = six.moves.xmlrpc_client.dumps(fault)
             except BaseException:
@@ -369,7 +372,9 @@ class WinBuildTask(MultiPlatformTask):
             task_opts = koji.util.dslice(opts, ['timeout', 'cpus', 'mem', 'static_mac'], strict=False)
             task_id = self.session.host.subtask(method='vmExec',
-                                                arglist=[name, [source_url, build_tag['name'], subopts], task_opts],
+                                                arglist=[name,
+                                                         [source_url, build_tag['name'], subopts],
+                                                         task_opts],
                                                 label=name[:255], parent=self.id)
             results = self.wait(task_id)[task_id]
@@ -379,7 +384,8 @@ class WinBuildTask(MultiPlatformTask):
         if not opts.get('scratch'):
             build_info = koji.util.dslice(results, ['name', 'version', 'release', 'epoch'])
             build_info['package_name'] = build_info['name']
-            pkg_cfg = self.session.getPackageConfig(dest_tag['id'], build_info['name'], event=event_id)
+            pkg_cfg = self.session.getPackageConfig(dest_tag['id'], build_info['name'],
+                                                    event=event_id)
             if not opts.get('skip_tag'):
                 # Make sure package is on the list for this tag
                 if pkg_cfg is None:
@@ -397,8 +403,8 @@ class WinBuildTask(MultiPlatformTask):
         rpm_results = None
         spec_url = opts.get('specfile')
         if spec_url:
-            rpm_results = self.buildWrapperRPM(spec_url, task_id, target_info, build_info, repo_id,
-                                               channel='default')
+            rpm_results = self.buildWrapperRPM(spec_url, task_id, target_info, build_info,
+                                               repo_id, channel='default')

         if opts.get('scratch'):
             self.session.host.moveWinBuildToScratch(self.id, results, rpm_results)
@@ -436,8 +442,8 @@ class VMExecTask(BaseTaskHandler):

     def __init__(self, *args, **kw):
         super(VMExecTask, self).__init__(*args, **kw)
-        self.task_manager = six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (self.options.privaddr, self.options.portbase),
-                                                                allow_none=True)
+        self.task_manager = six.moves.xmlrpc_client.ServerProxy(
+            'http://%s:%s/' % (self.options.privaddr, self.options.portbase), allow_none=True)
         self.port = None
         self.server = None
         self.task_info = None
@@ -451,13 +457,16 @@ class VMExecTask(BaseTaskHandler):
     def mkqcow2(self, clone_name, source_disk, disk_num):
         new_name = clone_name + '-disk-' + str(disk_num) + self.QCOW2_EXT
         new_path = os.path.join(self.options.imagedir, new_name)
-        cmd = ['/usr/bin/qemu-img', 'create', '-f', 'qcow2', '-o', 'backing_file=%s' % source_disk, new_path]
-        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
+        cmd = ['/usr/bin/qemu-img', 'create', '-f', 'qcow2', '-o', 'backing_file=%s' % source_disk,
+               new_path]
+        proc = subprocess.Popen(cmd,
+                                stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
         output, dummy = proc.communicate()
         ret = proc.wait()
         if ret:
-            raise koji.BuildError('unable to create qcow2 image, "%s" returned %s; output was: %s' %
-                                  (' '.join(cmd), ret, output))
+            raise koji.BuildError(
+                'unable to create qcow2 image, "%s" returned %s; output was: %s' %
+                (' '.join(cmd), ret, output))
         vm_user = pwd.getpwnam(self.options.vmuser)
         os.chown(new_path, vm_user.pw_uid, vm_user.pw_gid)
         return new_path
@@ -708,14 +717,17 @@ class VMExecTask(BaseTaskHandler):
                 hdr = koji.get_rpm_header(localpath)
                 payloadhash = koji.hex_string(koji.get_header_field(hdr, 'sigmd5'))
                 if fileinfo['payloadhash'] != payloadhash:
-                    raise koji.BuildError("Downloaded rpm %s doesn't match checksum (expected: %s, got %s)" % (
-                        os.path.basename(fileinfo['localpath']),
-                        fileinfo['payloadhash'], payloadhash))
+                    raise koji.BuildError(
+                        "Downloaded rpm %s doesn't match checksum (expected: %s, got %s)" %
+                        (os.path.basename(fileinfo['localpath']),
+                         fileinfo['payloadhash'],
+                         payloadhash))
                 if not koji.util.check_sigmd5(localpath):
                     raise koji.BuildError("Downloaded rpm %s doesn't match sigmd5" %
                                           os.path.basename(fileinfo['localpath']))
             else:
-                self.verifyChecksum(localpath, fileinfo['checksum'], koji.CHECKSUM_TYPES[fileinfo['checksum_type']])
+                self.verifyChecksum(localpath, fileinfo['checksum'],
+                                    koji.CHECKSUM_TYPES[fileinfo['checksum_type']])

         return open(localpath, 'r')
@@ -796,8 +808,9 @@ class VMExecTask(BaseTaskHandler):
         if sum.hexdigest() == checksum:
             return True
         else:
-            raise koji.BuildError('%s checksum validation failed for %s, %s (computed) != %s (provided)' %
-                                  (algo, local_path, sum.hexdigest(), checksum))
+            raise koji.BuildError(
+                '%s checksum validation failed for %s, %s (computed) != %s (provided)' %
+                (algo, local_path, sum.hexdigest(), checksum))

     def closeTask(self, output):
         self.output = output
@@ -879,8 +892,9 @@ class VMExecTask(BaseTaskHandler):
                 if mins > timeout:
                     vm.destroy()
                     self.server.server_close()
-                    raise koji.BuildError('Task did not complete after %.2f minutes, VM %s has been destroyed' %
-                                          (mins, clone_name))
+                    raise koji.BuildError(
+                        'Task did not complete after %.2f minutes, VM %s has been destroyed' %
+                        (mins, clone_name))
             else:
                 vm.destroy()
                 self.server.server_close()
@@ -913,7 +927,9 @@ class VMTaskManager(TaskManager):
             if macaddr in self.macaddrs:
                 raise koji.PreBuildError('duplicate MAC address: %s' % macaddr)
             self.macaddrs[macaddr] = (vm_name, task_id, port)
-            self.logger.info('registered MAC address %s for VM %s (task ID %s, port %s)', macaddr, vm_name, task_id, port)
+            self.logger.info(
+                'registered MAC address %s for VM %s (task ID %s, port %s)',
+                macaddr, vm_name, task_id, port)
             return True
         finally:
             self.macaddr_lock.release()
@@ -964,7 +980,8 @@ class VMTaskManager(TaskManager):
         availableMB = available // 1024 // 1024
         self.logger.debug('disk space available in %s: %i MB', self.options.imagedir, availableMB)
         if availableMB < self.options.minspace:
-            self.status = 'Insufficient disk space: %i MB, %i MB required' % (availableMB, self.options.minspace)
+            self.status = 'Insufficient disk space: %i MB, %i MB required' % \
+                (availableMB, self.options.minspace)
             self.logger.warn(self.status)
             return False
         return True
diff --git a/www/kojiweb/index.py b/www/kojiweb/index.py
index eb4e8816..f62e96c8 100644
--- a/www/kojiweb/index.py
+++ b/www/kojiweb/index.py
@@ -154,9 +154,12 @@ def _assertLogin(environ):
                 raise koji.AuthError('could not login %s via SSL' % environ['koji.currentLogin'])
         elif options['WebPrincipal']:
             if not _krbLogin(environ, environ['koji.session'], environ['koji.currentLogin']):
-                raise koji.AuthError('could not login using principal: %s' % environ['koji.currentLogin'])
+                raise koji.AuthError(
+                    'could not login using principal: %s' % environ['koji.currentLogin'])
         else:
-            raise koji.AuthError('KojiWeb is incorrectly configured for authentication, contact the system administrator')
+            raise koji.AuthError(
+                'KojiWeb is incorrectly configured for authentication, '
+                'contact the system administrator')

         # verify a valid authToken was passed in to avoid CSRF
         authToken = environ['koji.form'].getfirst('a', '')
@@ -168,7 +171,8 @@ def _assertLogin(environ):
             # their authToken is likely expired
             # send them back to the page that brought them here so they
             # can re-click the link with a valid authToken
-            _redirectBack(environ, page=None, forceSSL=(_getBaseURL(environ).startswith('https://')))
+            _redirectBack(environ, page=None,
+                          forceSSL=(_getBaseURL(environ).startswith('https://')))
             assert False  # pragma: no cover
     else:
         _redirect(environ, 'login')
@@ -188,7 +192,8 @@ def _getServer(environ):
     if environ['koji.currentLogin']:
         environ['koji.currentUser'] = session.getUser(environ['koji.currentLogin'])
         if not environ['koji.currentUser']:
-            raise koji.AuthError('could not get user for principal: %s' % environ['koji.currentLogin'])
+            raise koji.AuthError(
+                'could not get user for principal: %s' % environ['koji.currentLogin'])
         _setUserCookie(environ, environ['koji.currentLogin'])
     else:
         environ['koji.currentUser'] = None
@@ -271,7 +276,9 @@ def login(environ, page=None):
     elif options['WebPrincipal']:
         principal = environ.get('REMOTE_USER')
         if not principal:
-            raise koji.AuthError('configuration error: mod_auth_gssapi should have performed authentication before presenting this page')
+            raise koji.AuthError(
+                'configuration error: mod_auth_gssapi should have performed authentication before '
+                'presenting this page')

         if not _krbLogin(environ, session, principal):
             raise koji.AuthError('could not login using principal: %s' % principal)
@@ -279,7 +286,9 @@ def login(environ, page=None):
         username = principal
         authlogger.info('Successful Kerberos authentication by %s', username)
     else:
-        raise koji.AuthError('KojiWeb is incorrectly configured for authentication, contact the system administrator')
+        raise koji.AuthError(
+            'KojiWeb is incorrectly configured for authentication, contact the system '
+            'administrator')

     _setUserCookie(environ, username)
     # To protect the session cookie, we must forceSSL
@@ -322,8 +331,10 @@ def index(environ, packageOrder='package_name', packageStart=None):
         values['order'] = '-id'

     if user:
-        kojiweb.util.paginateResults(server, values, 'listPackages', kw={'userID': user['id'], 'with_dups': True},
-                                     start=packageStart, dataName='packages', prefix='package', order=packageOrder, pageSize=10)
+        kojiweb.util.paginateResults(server, values, 'listPackages',
+                                     kw={'userID': user['id'], 'with_dups': True},
+                                     start=packageStart, dataName='packages', prefix='package',
+                                     order=packageOrder, pageSize=10)

         notifs = server.getBuildNotifications(user['id'])
         notifs.sort(key=lambda x: x['id'])
@@ -480,12 +491,16 @@ _TASKS = ['build',
           'livemedia',
           'createLiveMedia']

 # Tasks that can exist without a parent
-_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'chainmaven', 'wrapperRPM', 'winbuild', 'newRepo', 'distRepo', 'tagBuild', 'tagNotification', 'waitrepo', 'livecd', 'appliance', 'image', 'livemedia']
+_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'chainmaven', 'wrapperRPM',
+                   'winbuild', 'newRepo', 'distRepo', 'tagBuild', 'tagNotification', 'waitrepo',
+                   'livecd', 'appliance', 'image', 'livemedia']
 # Tasks that can have children
-_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'chainmaven', 'winbuild', 'newRepo', 'distRepo', 'wrapperRPM', 'livecd', 'appliance', 'image', 'livemedia']
+_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'chainmaven', 'winbuild', 'newRepo', 'distRepo',
+                 'wrapperRPM', 'livecd', 'appliance', 'image', 'livemedia']


-def tasks(environ, owner=None, state='active', view='tree', method='all', hostID=None, channelID=None, start=None, order='-id'):
+def tasks(environ, owner=None, state='active', view='tree', method='all', hostID=None,
+          channelID=None, start=None, order='-id'):
     values = _initValues(environ, 'Tasks', 'tasks')
     server = _getServer(environ)
@@ -539,7 +554,9 @@ def tasks(environ, owner=None, state='active', view='tree', method='all', hostID
             opts['parent'] = None

     if state == 'active':
-        opts['state'] = [koji.TASK_STATES['FREE'], koji.TASK_STATES['OPEN'], koji.TASK_STATES['ASSIGNED']]
+        opts['state'] = [koji.TASK_STATES['FREE'],
+                         koji.TASK_STATES['OPEN'],
+                         koji.TASK_STATES['ASSIGNED']]
     elif state == 'all':
         pass
     else:
@@ -830,7 +847,8 @@ def _chunk_file(server, environ, taskID, name, offset, size, volume):
         chunk_size = 1048576
         if remaining < chunk_size:
             chunk_size = remaining
-        content = server.downloadTaskOutput(taskID, name, offset=offset, size=chunk_size, volume=volume)
+        content = server.downloadTaskOutput(taskID, name,
+                                            offset=offset, size=chunk_size, volume=volume)
         if not content:
             break
         yield content
@@ -863,7 +881,8 @@ def tags(environ, start=None, order=None, childID=None):
 _PREFIX_CHARS = [chr(char) for char in list(range(48, 58)) + list(range(97, 123))]


-def packages(environ, tagID=None, userID=None, order='package_name', start=None, prefix=None, inherited='1'):
+def packages(environ, tagID=None, userID=None, order='package_name', start=None, prefix=None,
+             inherited='1'):
     values = _initValues(environ, 'Packages', 'packages')
     server = _getServer(environ)
     tag = None
@@ -890,7 +909,10 @@ def packages(environ, tagID=None, userID=None, order='package_name', start=None,
     values['inherited'] = inherited

     kojiweb.util.paginateMethod(server, values, 'listPackages',
-                                kw={'tagID': tagID, 'userID': userID, 'prefix': prefix, 'inherited': bool(inherited)},
+                                kw={'tagID': tagID,
+                                    'userID': userID,
+                                    'prefix': prefix,
+                                    'inherited': bool(inherited)},
                                 start=start, dataName='packages', prefix='package', order=order)

     values['chars'] = _PREFIX_CHARS
     return _genHTML(environ, 'packages.chtml')


-def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='-completion_time', buildStart=None):
+def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='-completion_time',
+                buildStart=None):
     values = _initValues(environ, 'Package Info', 'packages')
     server = _getServer(environ)
@@ -916,12 +939,14 @@ def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='
     kojiweb.util.paginateMethod(server, values, 'listTags', kw={'package': package['id']},
                                 start=tagStart, dataName='tags', prefix='tag', order=tagOrder)
     kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'packageID': package['id']},
-                                start=buildStart, dataName='builds', prefix='build', order=buildOrder)
+                                start=buildStart, dataName='builds', prefix='build',
+                                order=buildOrder)

     return _genHTML(environ, 'packageinfo.chtml')


-def taginfo(environ, tagID, all='0', packageOrder='package_name', packageStart=None, buildOrder='-completion_time', buildStart=None, childID=None):
+def taginfo(environ, tagID, all='0', packageOrder='package_name', packageStart=None,
+            buildOrder='-completion_time', buildStart=None, childID=None):
     values = _initValues(environ, 'Tag Info', 'tags')
     server = _getServer(environ)
@@ -1115,7 +1140,9 @@ def tagparent(environ, tagID, parentID, action):
         elif len(inheritanceData) == 1:
             values['inheritanceData'] = inheritanceData[0]
         else:
-            raise koji.GenericError('tag %i has tag %i listed as a parent more than once' % (tag['id'], parent['id']))
+            raise koji.GenericError(
+                'tag %i has tag %i listed as a parent more than once' %
+                (tag['id'], parent['id']))

         return _genHTML(environ, 'tagparent.chtml')
     elif action == 'remove':
@@ -1174,7 +1201,8 @@ def buildinfo(environ, buildID):
         for archive in archives:
             if btype == 'maven':
                 archive['display'] = archive['filename']
-                archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build), pathinfo.mavenfile(archive)])
+                archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build),
+                                              pathinfo.mavenfile(archive)])
             elif btype == 'win':
                 archive['display'] = pathinfo.winfile(archive)
                 archive['dl_url'] = '/'.join([pathinfo.winbuild(build), pathinfo.winfile(archive)])
@@ -1210,7 +1238,8 @@ def buildinfo(environ, buildID):
     # get the summary, description, and changelogs from the built srpm
     # if the build is not yet complete
     if build['state'] != koji.BUILD_STATES['COMPLETE']:
-        srpm_tasks = server.listTasks(opts={'parent': task['id'], 'method': 'buildSRPMFromSCM'})
+        srpm_tasks = server.listTasks(opts={'parent': task['id'],
+                                            'method': 'buildSRPMFromSCM'})
         if srpm_tasks:
             srpm_task = srpm_tasks[0]
             if srpm_task['state'] == koji.TASK_STATES['CLOSED']:
@@ -1220,12 +1249,14 @@ def buildinfo(environ, buildID):
                         srpm_path = output
                         break
                 if srpm_path:
-                    srpm_headers = server.getRPMHeaders(taskID=srpm_task['id'], filepath=srpm_path,
+                    srpm_headers = server.getRPMHeaders(taskID=srpm_task['id'],
+                                                        filepath=srpm_path,
                                                         headers=['summary', 'description'])
                     if srpm_headers:
                         values['summary'] = koji.fixEncoding(srpm_headers['summary'])
                         values['description'] = koji.fixEncoding(srpm_headers['description'])
-                    changelog = server.getChangelogEntries(taskID=srpm_task['id'], filepath=srpm_path)
+                    changelog = server.getChangelogEntries(taskID=srpm_task['id'],
+                                                           filepath=srpm_path)
                     if changelog:
                         values['changelog'] = changelog
     else:
@@ -1276,7 +1307,8 @@ def buildinfo(environ, buildID):
     return _genHTML(environ, 'buildinfo.chtml')


-def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='-build_id', start=None, prefix=None, inherited='1', latest='1', type=None):
+def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='-build_id',
+           start=None, prefix=None, inherited='1', latest='1', type=None):
     values = _initValues(environ, 'Builds', 'builds')
     server = _getServer(environ)
@@ -1344,15 +1376,20 @@ def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='
     if tag:
         # don't need to consider 'state' here, since only completed builds would be tagged
-        kojiweb.util.paginateResults(server, values, 'listTagged', kw={'tag': tag['id'], 'package': (package and package['name'] or None),
-                                                                       'owner': (user and user['name'] or None),
-                                                                       'type': type,
-                                                                       'inherit': bool(inherited), 'latest': bool(latest), 'prefix': prefix},
+        kojiweb.util.paginateResults(server, values, 'listTagged',
+                                     kw={'tag': tag['id'],
+                                         'package': (package and package['name'] or None),
+                                         'owner': (user and user['name'] or None),
+                                         'type': type,
+                                         'inherit': bool(inherited), 'latest': bool(latest),
+                                         'prefix': prefix},
                                      start=start, dataName='builds', prefix='build', order=order)
     else:
-        kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'userID': (user and user['id'] or None), 'packageID': (package and package['id'] or None),
-                                                                      'type': type,
-                                                                      'state': state, 'prefix': prefix},
+        kojiweb.util.paginateMethod(server, values, 'listBuilds',
+                                    kw={'userID': (user and user['id'] or None),
+                                        'packageID': (package and package['id'] or None),
+                                        'type': type,
+                                        'state': state, 'prefix': prefix},
                                     start=start, dataName='builds', prefix='build', order=order)

     values['chars'] = _PREFIX_CHARS
@@ -1380,7 +1417,8 @@ def users(environ, order='name', start=None, prefix=None):
     return _genHTML(environ, 'users.chtml')


-def userinfo(environ, userID, packageOrder='package_name', packageStart=None, buildOrder='-completion_time', buildStart=None):
+def userinfo(environ, userID, packageOrder='package_name', packageStart=None,
+             buildOrder='-completion_time', buildStart=None):
     values = _initValues(environ, 'User Info', 'users')
     server = _getServer(environ)
@@ -1392,18 +1430,23 @@ def userinfo(environ, userID, packageOrder='package_name', packageStart=None, bu
     values['user'] = user
     values['userID'] = userID

-    values['taskCount'] = server.listTasks(opts={'owner': user['id'], 'parent': None}, queryOpts={'countOnly': True})
+    values['taskCount'] = server.listTasks(opts={'owner': user['id'], 'parent': None},
+                                           queryOpts={'countOnly': True})

-    kojiweb.util.paginateResults(server, values, 'listPackages', kw={'userID': user['id'], 'with_dups': True},
-                                 start=packageStart, dataName='packages', prefix='package', order=packageOrder, pageSize=10)
+    kojiweb.util.paginateResults(server, values, 'listPackages',
+                                 kw={'userID': user['id'], 'with_dups': True},
+                                 start=packageStart, dataName='packages', prefix='package',
+                                 order=packageOrder, pageSize=10)

     kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'userID': user['id']},
-                                start=buildStart, dataName='builds', prefix='build', order=buildOrder, pageSize=10)
+                                start=buildStart, dataName='builds', prefix='build',
+                                order=buildOrder, pageSize=10)

     return _genHTML(environ, 'userinfo.chtml')


-def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-id', buildrootStart=None):
+def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-id',
+            buildrootStart=None):
     values = _initValues(environ, 'RPM Info', 'builds')
     server = _getServer(environ)
@@ -1441,8 +1484,11 @@ def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-i
         values['summary'] = koji.fixEncoding(headers.get('summary'))
         values['description'] = koji.fixEncoding(headers.get('description'))
         values['license'] = koji.fixEncoding(headers.get('license'))
-    buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots', kw={'rpmID': rpm['id']},
-                                             start=buildrootStart, dataName='buildroots', prefix='buildroot',
+    buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots',
+                                             kw={'rpmID': rpm['id']},
+                                             start=buildrootStart,
+                                             dataName='buildroots',
+                                             prefix='buildroot',
                                              order=buildrootOrder)

     values['rpmID'] = rpmID
@@ -1457,7 +1503,8 @@ def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-i
     return _genHTML(environ, 'rpminfo.chtml')


-def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootOrder='-id', buildrootStart=None):
+def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootOrder='-id',
+                buildrootStart=None):
     values = _initValues(environ, 'Archive Info', 'builds')
     server = _getServer(environ)
@@ -1476,8 +1523,11 @@ def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootO
     builtInRoot = server.getBuildroot(archive['buildroot_id'])
     kojiweb.util.paginateMethod(server, values, 'listArchiveFiles', args=[archive['id']],
args=[archive['id']], start=fileStart, dataName='files', prefix='file', order=fileOrder) - buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots', kw={'archiveID': archive['id']}, - start=buildrootStart, dataName='buildroots', prefix='buildroot', + buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots', + kw={'archiveID': archive['id']}, + start=buildrootStart, + dataName='buildroots', + prefix='buildroot', order=buildrootOrder) values['title'] = archive['filename'] + ' | Archive Info' @@ -1491,7 +1541,8 @@ def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootO values['builtInRoot'] = builtInRoot values['buildroots'] = buildroots values['show_rpm_components'] = server.listRPMs(imageID=archive['id'], queryOpts={'limit': 1}) - values['show_archive_components'] = server.listArchives(imageID=archive['id'], queryOpts={'limit': 1}) + values['show_archive_components'] = server.listArchives(imageID=archive['id'], + queryOpts={'limit': 1}) return _genHTML(environ, 'archiveinfo.chtml') @@ -1604,7 +1655,8 @@ def hostinfo(environ, hostID=None, userID=None): channels = server.listChannels(host['id']) channels.sort(key=_sortbyname) buildroots = server.listBuildroots(hostID=host['id'], - state=[state[1] for state in koji.BR_STATES.items() if state[0] != 'EXPIRED']) + state=[state[1] for state in koji.BR_STATES.items() + if state[0] != 'EXPIRED']) buildroots.sort(key=lambda x: x['create_event_time'], reverse=True) values['host'] = host @@ -1718,7 +1770,8 @@ def channelinfo(environ, channelID): return _genHTML(environ, 'channelinfo.chtml') -def buildrootinfo(environ, buildrootID, builtStart=None, builtOrder=None, componentStart=None, componentOrder=None): +def buildrootinfo(environ, buildrootID, builtStart=None, builtOrder=None, componentStart=None, + componentOrder=None): values = _initValues(environ, 'Buildroot Info', 'hosts') server = _getServer(environ) @@ -1807,11 +1860,15 @@ def archivelist(environ, type, buildrootID=None, imageID=None, start=None, order raise koji.GenericError('unknown buildroot ID: %i' % buildrootID) if type == 'component': - kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'componentBuildrootID': buildroot['id']}, - start=start, dataName='archives', prefix='archive', order=order) + kojiweb.util.paginateMethod(server, values, 'listArchives', + kw={'componentBuildrootID': buildroot['id']}, + start=start, dataName='archives', prefix='archive', + order=order) elif type == 'built': - kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'buildrootID': buildroot['id']}, - start=start, dataName='archives', prefix='archive', order=order) + kojiweb.util.paginateMethod(server, values, 'listArchives', + kw={'buildrootID': buildroot['id']}, + start=start, dataName='archives', prefix='archive', + order=order) else: raise koji.GenericError('unrecognized type of archivelist') elif imageID is not None: @@ -1820,7 +1877,8 @@ def archivelist(environ, type, buildrootID=None, imageID=None, start=None, order # If/When future image types are supported, add elifs here if needed. 
if type == 'image': kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'imageID': imageID}, - start=start, dataName='archives', prefix='archive', order=order) + start=start, dataName='archives', prefix='archive', + order=order) else: raise koji.GenericError('unrecognized type of archivelist') else: @@ -2155,9 +2213,12 @@ def buildsbystatus(environ, days='7'): server.multicall = True # use taskID=-1 to filter out builds with a null task_id (imported rather than built in koji) - server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['COMPLETE'], taskID=-1, queryOpts={'countOnly': True}) - server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['FAILED'], taskID=-1, queryOpts={'countOnly': True}) - server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['CANCELED'], taskID=-1, queryOpts={'countOnly': True}) + server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['COMPLETE'], taskID=-1, + queryOpts={'countOnly': True}) + server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['FAILED'], taskID=-1, + queryOpts={'countOnly': True}) + server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['CANCELED'], taskID=-1, + queryOpts={'countOnly': True}) [[numSucceeded], [numFailed], [numCanceled]] = server.multiCall() values['numSucceeded'] = numSucceeded @@ -2298,7 +2359,8 @@ def recentbuilds(environ, user=None, tag=None, package=None): packageObj = server.getPackage(package) if tagObj is not None: - builds = server.listTagged(tagObj['id'], inherit=True, package=(packageObj and packageObj['name'] or None), + builds = server.listTagged(tagObj['id'], inherit=True, + package=(packageObj and packageObj['name'] or None), owner=(userObj and userObj['name'] or None)) builds.sort(key=kojiweb.util.sortByKeyFuncNoneGreatest('completion_time'), reverse=True) builds = builds[:20] @@ -2408,7 +2470,8 @@ def search(environ, start=None, order=None): values['order'] = order results = kojiweb.util.paginateMethod(server, values, 'search', args=(terms, type, match), - start=start, dataName='results', prefix='result', order=order) + start=start, dataName='results', prefix='result', + order=order) if not start and len(results) == 1: # if we found exactly one result, skip the result list and redirect to the info page # (you're feeling lucky) diff --git a/www/kojiweb/wsgi_publisher.py b/www/kojiweb/wsgi_publisher.py index 96cecc8b..97477ec1 100644 --- a/www/kojiweb/wsgi_publisher.py +++ b/www/kojiweb/wsgi_publisher.py @@ -96,7 +96,9 @@ class Dispatcher(object): ['LibPath', 'string', '/usr/share/koji-web/lib'], ['LogLevel', 'string', 'WARNING'], - ['LogFormat', 'string', '%(msecs)d [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'], + ['LogFormat', 'string', + '%(msecs)d [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s ' + '%(name)s: %(message)s'], ['Tasks', 'list', []], ['ToplevelTasks', 'list', []], @@ -227,7 +229,9 @@ class Dispatcher(object): raise URLNotFound # parse form args data = {} - fs = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ.copy(), keep_blank_values=True) + fs = cgi.FieldStorage(fp=environ['wsgi.input'], + environ=environ.copy(), + keep_blank_values=True) for field in fs.list: if field.filename: val = field diff --git a/www/lib/kojiweb/util.py b/www/lib/kojiweb/util.py index d5692983..e42a6e53 100644 --- a/www/lib/kojiweb/util.py +++ b/www/lib/kojiweb/util.py @@ -65,7 +65,8 @@ def _initValues(environ, title='Build System Info', 
pageID='summary'):
     themeCache.clear()
     themeInfo.clear()
     themeInfo['name'] = environ['koji.options'].get('KojiTheme', None)
-    themeInfo['staticdir'] = environ['koji.options'].get('KojiStaticDir', '/usr/share/koji-web/static')
+    themeInfo['staticdir'] = environ['koji.options'].get('KojiStaticDir',
+                                                         '/usr/share/koji-web/static')
 
     environ['koji.values'] = values
 
@@ -227,9 +228,11 @@ def sortImage(template, sortKey, orderVar='order'):
     """
     orderVal = template.getVar(orderVar)
     if orderVal == sortKey:
-        return '<img src="%s" class="sort" alt="ascending sort"/>' % themePath("images/gray-triangle-up.gif")
+        return '<img src="%s" class="sort" alt="ascending sort"/>' % \
+            themePath("images/gray-triangle-up.gif")
     elif orderVal == '-' + sortKey:
-        return '<img src="%s" class="sort" alt="descending sort"/>' % themePath("images/gray-triangle-down.gif")
+        return '<img src="%s" class="sort" alt="descending sort"/>' % \
+            themePath("images/gray-triangle-down.gif")
     else:
         return ''
 
@@ -283,7 +286,8 @@ def sortByKeyFuncNoneGreatest(key):
     return internal_key
 
 
-def paginateList(values, data, start, dataName, prefix=None, order=None, noneGreatest=False, pageSize=50):
+def paginateList(values, data, start, dataName, prefix=None, order=None, noneGreatest=False,
+                 pageSize=50):
     """
     Slice the 'data' list into one page worth. Start at offset
     'start' and limit the total number of pages to pageSize
@@ -317,8 +321,9 @@ def paginateList(values, data, start, dataName, prefix=None, order=None, noneGre
 
 def paginateMethod(server, values, methodName, args=None, kw=None,
                    start=None, dataName=None, prefix=None, order=None, pageSize=50):
-    """Paginate the results of the method with the given name when called with the given args and kws.
-    The method must support the queryOpts keyword parameter, and pagination is done in the database."""
+    """Paginate the results of the method with the given name when called with the given args and
+    kws. The method must support the queryOpts keyword parameter, and pagination is done in the
+    database."""
     if args is None:
         args = []
     if kw is None:
@@ -346,10 +351,10 @@ def paginateResults(server, values, methodName, args=None, kw=None,
 
 def paginateResults(server, values, methodName, args=None, kw=None,
                     start=None, dataName=None, prefix=None, order=None, pageSize=50):
-    """Paginate the results of the method with the given name when called with the given args and kws.
-    This method should only be used when then method does not support the queryOpts command (because
-    the logic used to generate the result list prevents filtering/ordering from being done in the database).
-    The method must return a list of maps."""
+    """Paginate the results of the method with the given name when called with the given args and
+    kws. This method should only be used when the method does not support the queryOpts command
+    (because the logic used to generate the result list prevents filtering/ordering from being done
+    in the database). The method must return a list of maps."""
     if args is None:
         args = []
     if kw is None:
@@ -390,7 +395,8 @@ def _populateValues(values, dataName, prefix, data, totalRows, start, count, pag
     totalPages = int(totalRows // pageSize)
     if totalRows % pageSize > 0:
         totalPages += 1
-    pages = [page for page in range(0, totalPages) if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]
+    pages = [page for page in range(0, totalPages)
+             if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]
     values[(prefix and prefix + 'Pages') or 'pages'] = pages