flake8: apply E501 with max-line-length=99

This commit is contained in:
Yuming Zhu 2020-02-27 22:10:11 +08:00
parent f1ba6cefd7
commit c5db34a8e1
28 changed files with 1574 additions and 811 deletions

View file

@ -1,6 +1,7 @@
[flake8]
select = I,C,F,E1,E2,E3,E4,E502,E7
select = I,C,F,E
ignore = E266,E731
max_line_length = 99
exclude =
.git,
__pycache__,

View file

@ -273,7 +273,8 @@ class BuildRoot(object):
for k in ('repoid', 'tag_name'):
if hasattr(self, k):
opts[k] = getattr(self, k)
for k in ('mockdir', 'topdir', 'topurl', 'topurls', 'packager', 'vendor', 'distribution', 'mockhost', 'yum_proxy', 'rpmbuild_timeout'):
for k in ('mockdir', 'topdir', 'topurl', 'topurls', 'packager', 'vendor',
'distribution', 'mockhost', 'yum_proxy', 'rpmbuild_timeout'):
if hasattr(self.options, k):
opts[k] = getattr(self.options, k)
opts['buildroot_id'] = self.id
@ -476,12 +477,14 @@ class BuildRoot(object):
try:
stat_info = os.stat(fpath)
if not fd or stat_info.st_ino != inode or stat_info.st_size < size:
# either a file we haven't opened before, or mock replaced a file we had open with
# a new file and is writing to it, or truncated the file we're reading,
# but our fd is pointing to the previous location in the old file
# either a file we haven't opened before, or mock replaced a file we
# had open with a new file and is writing to it, or truncated the file
# we're reading, but our fd is pointing to the previous location in the
# old file
if fd:
self.logger.info('Rereading %s, inode: %s -> %s, size: %s -> %s' %
(fpath, inode, stat_info.st_ino, size, stat_info.st_size))
(fpath, inode, stat_info.st_ino, size,
stat_info.st_size))
fd.close()
fd = open(fpath, 'rb')
logs[fname] = (fd, stat_info.st_ino, stat_info.st_size or size, fpath)
@ -596,8 +599,8 @@ class BuildRoot(object):
def build_srpm(self, specfile, sourcedir, source_cmd):
self.session.host.setBuildRootState(self.id, 'BUILDING')
if source_cmd:
# call the command defined by source_cmd in the chroot so any required files not stored in
# the SCM can be retrieved
# call the command defined by source_cmd in the chroot so any required files not stored
# in the SCM can be retrieved
chroot_sourcedir = sourcedir[len(self.rootdir()):]
args = ['--no-clean', '--unpriv', '--cwd', chroot_sourcedir, '--chroot']
args.extend(source_cmd)
@ -633,7 +636,8 @@ class BuildRoot(object):
self.session.host.updateBuildRootList(self.id, self.getPackageList())
if rv:
self.expire()
raise koji.BuildError("error building package (arch %s), %s" % (arch, self._mockResult(rv)))
raise koji.BuildError("error building package (arch %s), %s" %
(arch, self._mockResult(rv)))
def getPackageList(self):
"""Return a list of packages from the buildroot
@ -688,7 +692,8 @@ class BuildRoot(object):
maven_files = []
for repofile in files:
if koji.util.multi_fnmatch(repofile, self.options.maven_repo_ignore) or \
koji.util.multi_fnmatch(os.path.join(relpath, repofile), self.options.maven_repo_ignore):
koji.util.multi_fnmatch(os.path.join(relpath, repofile),
self.options.maven_repo_ignore):
continue
if relpath == '' and repofile in ['scm-sources.zip', 'patches.zip']:
# special-case the archives of the sources and patches, since we drop them in
@ -699,8 +704,10 @@ class BuildRoot(object):
if maven_files:
path_comps = relpath.split('/')
if len(path_comps) < 3:
raise koji.BuildrootError('files found in unexpected path in local Maven repo, directory: %s, files: %s' %
(relpath, ', '.join([f['filename'] for f in maven_files])))
raise koji.BuildrootError('files found in unexpected path in local Maven repo,'
' directory: %s, files: %s' %
(relpath,
', '.join([f['filename'] for f in maven_files])))
# extract the Maven info from the path within the local repo
maven_info = {'version': path_comps[-1],
'artifact_id': path_comps[-2],
@ -712,8 +719,8 @@ class BuildRoot(object):
def mavenBuild(self, sourcedir, outputdir, repodir,
props=None, profiles=None, options=None, goals=None):
self.session.host.setBuildRootState(self.id, 'BUILDING')
cmd = ['--no-clean', '--chroot', '--unpriv', '--cwd', sourcedir[len(self.rootdir()):], '--',
'/usr/bin/mvn', '-C']
cmd = ['--no-clean', '--chroot', '--unpriv', '--cwd', sourcedir[len(self.rootdir()):],
'--', '/usr/bin/mvn', '-C']
if options:
cmd.extend(options)
if profiles:
@ -734,13 +741,15 @@ class BuildRoot(object):
ignore_unknown = False
if rv:
ignore_unknown = True
self.session.host.updateMavenBuildRootList(self.id, self.task_id, self.getMavenPackageList(repodir),
self.session.host.updateMavenBuildRootList(self.id, self.task_id,
self.getMavenPackageList(repodir),
ignore=self.getMavenPackageList(outputdir),
project=True, ignore_unknown=ignore_unknown,
extra_deps=self.deps)
if rv:
self.expire()
raise koji.BuildrootError('error building Maven package, %s' % self._mockResult(rv, logfile='root.log'))
raise koji.BuildrootError('error building Maven package, %s' %
self._mockResult(rv, logfile='root.log'))
def markExternalRPMs(self, rpmlist):
"""Check rpms against pkgorigins and add external repo data to the external ones
@ -793,7 +802,8 @@ class BuildRoot(object):
try:
repodata = repoMDObject.RepoMD('ourrepo', fo)
except BaseException:
raise koji.BuildError("Unable to parse repomd.xml file for %s" % os.path.join(repodir, self.br_arch))
raise koji.BuildError("Unable to parse repomd.xml file for %s" %
os.path.join(repodir, self.br_arch))
data = repodata.getData('origin')
pkgorigins = data.location[1]
else:
@ -905,7 +915,8 @@ class ChainBuildTask(BaseTaskHandler):
# if there are any nvrs to wait on, do so
if nvrs:
task_id = self.session.host.subtask(method='waitrepo',
arglist=[target_info['build_tag_name'], None, nvrs],
arglist=[
target_info['build_tag_name'], None, nvrs],
label="wait %i" % n_level,
parent=self.id)
self.wait(task_id, all=True, failany=True)
@ -1078,7 +1089,9 @@ class BuildTask(BaseTaskHandler):
def getSRPMFromSRPM(self, src, build_tag, repo_id):
# rebuild srpm in mock, so it gets correct disttag, rpm version, etc.
task_id = self.session.host.subtask(method='rebuildSRPM',
arglist=[src, build_tag, {'repo_id': repo_id, 'scratch': self.opts.get('scratch')}],
arglist=[src, build_tag, {
'repo_id': repo_id,
'scratch': self.opts.get('scratch')}],
label='srpm',
parent=self.id)
# wait for subtask to finish
@ -1093,7 +1106,9 @@ class BuildTask(BaseTaskHandler):
def getSRPMFromSCM(self, url, build_tag, repo_id):
# TODO - allow different ways to get the srpm
task_id = self.session.host.subtask(method='buildSRPMFromSCM',
arglist=[url, build_tag, {'repo_id': repo_id, 'scratch': self.opts.get('scratch')}],
arglist=[url, build_tag, {
'repo_id': repo_id,
'scratch': self.opts.get('scratch')}],
label='srpm',
parent=self.id)
# wait for subtask to finish
@ -1211,7 +1226,8 @@ class BuildTask(BaseTaskHandler):
for arch in archlist:
taskarch = self.choose_taskarch(arch, srpm, build_tag)
subtasks[arch] = self.session.host.subtask(method='buildArch',
arglist=[srpm, build_tag, arch, keep_srpm, {'repo_id': repo_id}],
arglist=[srpm, build_tag, arch,
keep_srpm, {'repo_id': repo_id}],
label=arch,
parent=self.id,
arch=taskarch)
@ -1430,7 +1446,8 @@ class BuildArchTask(BaseBuildTask):
if len(srpm_files) == 0:
raise koji.BuildError("no srpm files found for task %i" % self.id)
if len(srpm_files) > 1:
raise koji.BuildError("multiple srpm files found for task %i: %s" % (self.id, srpm_files))
raise koji.BuildError("multiple srpm files found for task %i: %s" %
(self.id, srpm_files))
# Run sanity checks. Any failures will throw a BuildError
self.srpm_sanity_checks("%s/%s" % (resultdir, srpm_files[0]))
@ -1519,12 +1536,14 @@ class MavenTask(MultiPlatformTask):
rpm_results = None
spec_url = self.opts.get('specfile')
if spec_url:
rpm_results = self.buildWrapperRPM(spec_url, self.build_task_id, target_info, build_info, repo_id)
rpm_results = self.buildWrapperRPM(
spec_url, self.build_task_id, target_info, build_info, repo_id)
if self.opts.get('scratch'):
self.session.host.moveMavenBuildToScratch(self.id, maven_results, rpm_results)
else:
self.session.host.completeMavenBuild(self.id, self.build_id, maven_results, rpm_results)
self.session.host.completeMavenBuild(
self.id, self.build_id, maven_results, rpm_results)
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
@ -1537,7 +1556,8 @@ class MavenTask(MultiPlatformTask):
if not self.opts.get('scratch') and not self.opts.get('skip_tag'):
tag_task_id = self.session.host.subtask(method='tagBuild',
arglist=[dest_tag['id'], self.build_id, False, None, True],
arglist=[dest_tag['id'],
self.build_id, False, None, True],
label='tag',
parent=self.id,
arch='noarch')
@ -1590,7 +1610,8 @@ class BuildMavenTask(BaseBuildTask):
repo_info = self.session.repoInfo(repo_id, strict=True)
event_id = repo_info['create_event']
br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
br_arch = self.find_arch('noarch', self.session.host.getHost(
), self.session.getBuildConfig(build_tag['id'], event=event_id))
maven_opts = opts.get('jvm_options')
if not maven_opts:
maven_opts = []
@ -1598,7 +1619,8 @@ class BuildMavenTask(BaseBuildTask):
if opt.startswith('-Xmx'):
break
else:
# Give the JVM 2G to work with by default, if the build isn't specifying its own max. memory
# Give the JVM 2G to work with by default, if the build isn't specifying
# its own max. memory
maven_opts.append('-Xmx2048m')
buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id,
install_group='maven-build', setup_dns=True, repo_id=repo_id,
@ -1615,7 +1637,8 @@ class BuildMavenTask(BaseBuildTask):
self.session.host.updateBuildRootList(buildroot.id, buildroot.getPackageList())
if rv:
buildroot.expire()
raise koji.BuildrootError('error installing packages, %s' % buildroot._mockResult(rv, logfile='mock_output.log'))
raise koji.BuildrootError('error installing packages, %s' %
buildroot._mockResult(rv, logfile='mock_output.log'))
# existence of symlink should be sufficient
if not os.path.lexists('%s/usr/bin/mvn' % buildroot.rootdir()):
@ -1635,7 +1658,8 @@ class BuildMavenTask(BaseBuildTask):
logfile = self.workdir + '/checkout.log'
uploadpath = self.getUploadDir()
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
build_tag=build_tag, scratch=opts.get('scratch'))
# Check out sources from the SCM
sourcedir = scm.checkout(scmdir, self.session, uploadpath, logfile)
self.run_callbacks("postSCMCheckout",
@ -1652,7 +1676,8 @@ class BuildMavenTask(BaseBuildTask):
patchlog = self.workdir + '/patches.log'
patch_scm = SCM(self.opts.get('patches'))
patch_scm.assert_allowed(self.options.allowed_scms)
self.run_callbacks('preSCMCheckout', scminfo=patch_scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
self.run_callbacks('preSCMCheckout', scminfo=patch_scm.get_info(),
build_tag=build_tag, scratch=opts.get('scratch'))
# never try to check out a common/ dir when checking out patches
patch_scm.use_common = False
patchcheckoutdir = patch_scm.checkout(patchdir, self.session, uploadpath, patchlog)
@ -1673,10 +1698,14 @@ class BuildMavenTask(BaseBuildTask):
raise koji.BuildError('no patches found at %s' % self.opts.get('patches'))
patches.sort()
for patch in patches:
cmd = ['/usr/bin/patch', '--verbose', '--no-backup-if-mismatch', '-d', sourcedir, '-p1', '-i', os.path.join(patchcheckoutdir, patch)]
ret = log_output(self.session, cmd[0], cmd, patchlog, uploadpath, logerror=1, append=1)
cmd = ['/usr/bin/patch', '--verbose', '--no-backup-if-mismatch', '-d',
sourcedir, '-p1', '-i', os.path.join(patchcheckoutdir, patch)]
ret = log_output(self.session, cmd[0], cmd,
patchlog, uploadpath, logerror=1, append=1)
if ret:
raise koji.BuildError('error applying patches from %s, see patches.log for details' % self.opts.get('patches'))
raise koji.BuildError(
'error applying patches from %s, see patches.log for details' %
self.opts.get('patches'))
# Set ownership of the entire source tree to the mock user
uid = pwd.getpwnam(self.options.mockuser)[2]
@ -1834,7 +1863,8 @@ class WrapperRPMTask(BaseBuildTask):
artifact_name = os.path.basename(artifact_path)
base, ext = os.path.splitext(artifact_name)
if ext == '.log':
# Exclude log files for consistency with the output of listArchives() used below
# Exclude log files for consistency with the output of listArchives() used
# below
continue
relpath = os.path.join(self.pathinfo.task(task['id']), artifact_path)[1:]
for volume in artifact_data[artifact_path]:
@ -1847,10 +1877,11 @@ class WrapperRPMTask(BaseBuildTask):
# called as a top-level task to create wrapper rpms for an existing build
# verify that the build is complete
if not build['state'] == koji.BUILD_STATES['COMPLETE']:
raise koji.BuildError('cannot call wrapperRPM on a build that did not complete successfully')
raise koji.BuildError(
'cannot call wrapperRPM on a build that did not complete successfully')
# get the list of files from the build instead of the task, because the task output directory may
# have already been cleaned up
# get the list of files from the build instead of the task,
# because the task output directory may have already been cleaned up
if maven_info:
build_artifacts = self.session.listArchives(buildID=build['id'], type='maven')
elif win_info:
@ -1888,7 +1919,8 @@ class WrapperRPMTask(BaseBuildTask):
assert False # pragma: no cover
if not artifacts:
raise koji.BuildError('no output found for %s' % (task and koji.taskLabel(task) or koji.buildLabel(build)))
raise koji.BuildError('no output found for %s' % (
task and koji.taskLabel(task) or koji.buildLabel(build)))
values['artifacts'] = artifacts
values['all_artifacts'] = all_artifacts
@ -1932,9 +1964,11 @@ class WrapperRPMTask(BaseBuildTask):
repo_info = self.session.repoInfo(repo_id, strict=True)
event_id = repo_info['create_event']
build_tag = self.session.getTag(build_target['build_tag'], strict=True)
br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
br_arch = self.find_arch('noarch', self.session.host.getHost(
), self.session.getBuildConfig(build_tag['id'], event=event_id))
buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, install_group='wrapper-rpm-build', repo_id=repo_id)
buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id,
install_group='wrapper-rpm-build', repo_id=repo_id)
buildroot.workdir = self.workdir
self.logger.debug("Initializing buildroot")
buildroot.init()
@ -1942,7 +1976,8 @@ class WrapperRPMTask(BaseBuildTask):
logfile = os.path.join(self.workdir, 'checkout.log')
scmdir = buildroot.tmpdir() + '/scmroot'
koji.ensuredir(scmdir)
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
build_tag=build_tag, scratch=opts.get('scratch'))
specdir = scm.checkout(scmdir, self.session, self.getUploadDir(), logfile)
self.run_callbacks("postSCMCheckout",
scminfo=scm.get_info(),
@ -2002,7 +2037,8 @@ class WrapperRPMTask(BaseBuildTask):
if len(srpms) == 0:
raise koji.BuildError('no srpms found in %s' % buildroot.resultdir())
elif len(srpms) > 1:
raise koji.BuildError('multiple srpms found in %s: %s' % (buildroot.resultdir(), ', '.join(srpms)))
raise koji.BuildError('multiple srpms found in %s: %s' %
(buildroot.resultdir(), ', '.join(srpms)))
else:
srpm = srpms[0]
@ -2092,13 +2128,15 @@ class WrapperRPMTask(BaseBuildTask):
relrpms = [uploaddir + '/' + r for r in rpms]
rellogs = [uploaddir + '/' + l for l in logs]
if opts.get('scratch'):
self.session.host.moveBuildToScratch(self.id, relsrpm, relrpms, {'noarch': rellogs})
self.session.host.moveBuildToScratch(
self.id, relsrpm, relrpms, {'noarch': rellogs})
else:
if opts.get('create_build'):
brmap = dict.fromkeys([relsrpm] + relrpms, buildroot.id)
try:
self.session.host.completeBuild(self.id, self.new_build_id,
relsrpm, relrpms, brmap, {'noarch': rellogs})
relsrpm, relrpms, brmap,
{'noarch': rellogs})
except (SystemExit, ServerExit, KeyboardInterrupt):
raise
except BaseException:
@ -2107,8 +2145,10 @@ class WrapperRPMTask(BaseBuildTask):
if not opts.get('skip_tag'):
tag_task_id = self.session.host.subtask(method='tagBuild',
arglist=[build_target['dest_tag'],
self.new_build_id, False, None, True],
label='tag', parent=self.id, arch='noarch')
self.new_build_id, False,
None, True],
label='tag', parent=self.id,
arch='noarch')
self.wait(tag_task_id)
else:
self.session.host.importWrapperRPMs(self.id, build['id'], results)
@ -2173,14 +2213,16 @@ class ChainMavenTask(MultiPlatformTask):
if not opts.get('force'):
# check for a duplicate build (a build performed with the
# same scmurl and options)
dup_build = self.get_duplicate_build(dest_tag['name'], package, params, task_opts)
dup_build = self.get_duplicate_build(
dest_tag['name'], package, params, task_opts)
# if we find one, mark the package as built and remove it from todo
if dup_build:
self.done[package] = dup_build['nvr']
for deps in todo.values():
deps.discard(package)
del todo[package]
self.results.append('%s previously built from %s' % (dup_build['nvr'], task_url))
self.results.append('%s previously built from %s' %
(dup_build['nvr'], task_url))
continue
task_opts.update(dslice(opts, ['skip_tag', 'scratch'], strict=False))
@ -2235,7 +2277,8 @@ class ChainMavenTask(MultiPlatformTask):
self.done[package] = child['id']
break
else:
raise koji.BuildError('could not find buildMaven subtask of %s' % task_id)
raise koji.BuildError(
'could not find buildMaven subtask of %s' % task_id)
self.results.append('%s built from %s by task %s' %
(package, task_url, task_id))
else:
@ -2350,10 +2393,13 @@ class TagBuildTask(BaseTaskHandler):
# XXX - add more post tests
self.session.host.tagBuild(self.id, tag_id, build_id, force=force, fromtag=fromtag)
self.session.host.tagNotification(True, tag_id, fromtag, build_id, user_id, ignore_success)
self.session.host.tagNotification(
True, tag_id, fromtag, build_id, user_id, ignore_success)
except Exception as e:
exctype, value = sys.exc_info()[:2]
self.session.host.tagNotification(False, tag_id, fromtag, build_id, user_id, ignore_success, "%s: %s" % (exctype, value))
self.session.host.tagNotification(
False, tag_id, fromtag, build_id, user_id, ignore_success, "%s: %s" %
(exctype, value))
raise e
@ -2367,11 +2413,14 @@ class BuildImageTask(MultiPlatformTask):
if not opts.get('skip_tag') and not opts.get('scratch'):
# Make sure package is on the list for this tag
if pkg_cfg is None:
raise koji.BuildError("package (image) %s not in list for tag %s" % (name, target_info['dest_tag_name']))
raise koji.BuildError("package (image) %s not in list for tag %s" %
(name, target_info['dest_tag_name']))
elif pkg_cfg['blocked']:
raise koji.BuildError("package (image) %s is blocked for tag %s" % (name, target_info['dest_tag_name']))
raise koji.BuildError("package (image) %s is blocked for tag %s" %
(name, target_info['dest_tag_name']))
return self.session.host.initImageBuild(self.id,
dict(name=name, version=version, release=release, epoch=0))
dict(name=name, version=version, release=release,
epoch=0))
def getRelease(self, name, ver):
"""return the next available release number for an N-V"""
@ -2399,7 +2448,9 @@ class BuildBaseImageTask(BuildImageTask):
opts = {}
if not ozif_enabled:
self.logger.error("ImageFactory features require the following dependencies: pykickstart, imagefactory, oz and possibly python-hashlib")
self.logger.error(
"ImageFactory features require the following dependencies: pykickstart, "
"imagefactory, oz and possibly python-hashlib")
raise koji.ApplianceError('ImageFactory functions not available')
# build image(s)
@ -2430,7 +2481,8 @@ class BuildBaseImageTask(BuildImageTask):
canfail.append(subtasks[arch])
self.logger.debug("Got image subtasks: %r" % (subtasks))
self.logger.debug("Waiting on image subtasks (%s can fail)..." % canfail)
results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail)
results = self.wait(to_list(subtasks.values()), all=True,
failany=True, canfail=canfail)
# if everything failed, fail even if all subtasks are in canfail
self.logger.debug('subtask results: %r', results)
@ -2498,7 +2550,8 @@ class BuildBaseImageTask(BuildImageTask):
# tag it
if not opts.get('scratch') and not opts.get('skip_tag'):
tag_task_id = self.session.host.subtask(method='tagBuild',
arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
arglist=[target_info['dest_tag'],
bld_info['id'], False, None, True],
label='tag', parent=self.id, arch='noarch')
self.wait(tag_task_id)
@ -2506,7 +2559,8 @@ class BuildBaseImageTask(BuildImageTask):
report = ''
if opts.get('scratch'):
respath = ', '.join(
[os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
[os.path.join(koji.pathinfo.work(),
koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
report += 'Scratch '
else:
respath = koji.pathinfo.imagebuild(bld_info)
@ -2534,7 +2588,9 @@ class BuildApplianceTask(BuildImageTask):
opts = {}
if not image_enabled:
self.logger.error("Appliance features require the following dependencies: pykickstart, and possibly python-hashlib")
self.logger.error(
"Appliance features require the following dependencies: "
"pykickstart, and possibly python-hashlib")
raise koji.ApplianceError('Appliance functions not available')
# build image
@ -2547,9 +2603,11 @@ class BuildApplianceTask(BuildImageTask):
bld_info = self.initImageBuild(name, version, release,
target_info, opts)
create_task_id = self.session.host.subtask(method='createAppliance',
arglist=[name, version, release, arch, target_info, build_tag,
arglist=[name, version, release, arch,
target_info, build_tag,
repo_info, ksfile, opts],
label='appliance', parent=self.id, arch=arch)
label='appliance', parent=self.id,
arch=arch)
results = self.wait(create_task_id)
self.logger.info('image build task (%s) completed' % create_task_id)
self.logger.info('results: %s' % results)
@ -2583,7 +2641,8 @@ class BuildApplianceTask(BuildImageTask):
# tag it
if not opts.get('scratch') and not opts.get('skip_tag'):
tag_task_id = self.session.host.subtask(method='tagBuild',
arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
arglist=[target_info['dest_tag'],
bld_info['id'], False, None, True],
label='tag', parent=self.id, arch='noarch')
self.wait(tag_task_id)
@ -2632,7 +2691,8 @@ class BuildLiveCDTask(BuildImageTask):
bld_info = self.initImageBuild(name, version, release,
target_info, opts)
create_task_id = self.session.host.subtask(method='createLiveCD',
arglist=[name, version, release, arch, target_info, build_tag,
arglist=[name, version, release, arch,
target_info, build_tag,
repo_info, ksfile, opts],
label='livecd', parent=self.id, arch=arch)
results = self.wait(create_task_id)
@ -2668,7 +2728,8 @@ class BuildLiveCDTask(BuildImageTask):
# tag it if necessary
if not opts.get('scratch') and not opts.get('skip_tag'):
tag_task_id = self.session.host.subtask(method='tagBuild',
arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
arglist=[target_info['dest_tag'],
bld_info['id'], False, None, True],
label='tag', parent=self.id, arch='noarch')
self.wait(tag_task_id)
@ -2726,8 +2787,8 @@ class BuildLiveMediaTask(BuildImageTask):
canfail = []
for arch in arches:
subtasks[arch] = self.subtask('createLiveMedia',
[name, version, release, arch, target_info, build_tag,
repo_info, ksfile, opts],
[name, version, release, arch, target_info,
build_tag, repo_info, ksfile, opts],
label='livemedia %s' % arch, arch=arch)
if arch in opts.get('optional_arches', []):
canfail.append(subtasks[arch])
@ -2735,7 +2796,8 @@ class BuildLiveMediaTask(BuildImageTask):
self.logger.debug("Got image subtasks: %r", subtasks)
self.logger.debug("Waiting on livemedia subtasks...")
results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail)
results = self.wait(to_list(subtasks.values()), all=True,
failany=True, canfail=canfail)
# if everything failed, fail even if all subtasks are in canfail
self.logger.debug('subtask results: %r', results)
@ -2805,14 +2867,16 @@ class BuildLiveMediaTask(BuildImageTask):
# tag it if necessary
if not opts.get('scratch') and not opts.get('skip_tag'):
tag_task_id = self.session.host.subtask(method='tagBuild',
arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
arglist=[target_info['dest_tag'],
bld_info['id'], False, None, True],
label='tag', parent=self.id, arch='noarch')
self.wait(tag_task_id)
# report the results
if opts.get('scratch'):
respath = ', '.join(
[os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
[os.path.join(koji.pathinfo.work(),
koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
report = 'Scratch '
else:
respath = koji.pathinfo.imagebuild(bld_info)
@ -2881,7 +2945,8 @@ class ImageTask(BaseTaskHandler):
scm = SCM(self.opts['ksurl'])
scm.assert_allowed(self.options.allowed_scms)
logfile = os.path.join(self.workdir, 'checkout.log')
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch'))
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
build_tag=build_tag, scratch=self.opts.get('scratch'))
scmsrcdir = scm.checkout(scmdir, self.session, self.getUploadDir(), logfile)
self.run_callbacks("postSCMCheckout",
scminfo=scm.get_info(),
@ -2953,7 +3018,8 @@ class ImageTask(BaseTaskHandler):
user_repos = user_repos.split(',')
index = 0
for user_repo in user_repos:
self.ks.handler.repo.repoList.append(repo_class(baseurl=user_repo, name='koji-override-%i' % index))
self.ks.handler.repo.repoList.append(repo_class(
baseurl=user_repo, name='koji-override-%i' % index))
index += 1
else:
path_info = koji.PathInfo(topdir=self.options.topurl)
@ -2961,7 +3027,9 @@ class ImageTask(BaseTaskHandler):
target_info['build_tag_name'])
baseurl = '%s/%s' % (repopath, arch)
self.logger.debug('BASEURL: %s' % baseurl)
self.ks.handler.repo.repoList.append(repo_class(baseurl=baseurl, name='koji-%s-%i' % (target_info['build_tag_name'], repo_info['id'])))
self.ks.handler.repo.repoList.append(repo_class(
baseurl=baseurl, name='koji-%s-%i' % (target_info['build_tag_name'],
repo_info['id'])))
# inject url if provided
if opts.get('install_tree_url'):
self.ks.handler.url(url=opts['install_tree_url'])
@ -3065,7 +3133,8 @@ class ApplianceTask(ImageTask):
return part.disk
raise koji.ApplianceError('kickstart lacks a "/" mountpoint')
def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None):
def handler(self, name, version, release, arch, target_info,
build_tag, repo_info, ksfile, opts=None):
if opts is None:
opts = {}
@ -3099,7 +3168,8 @@ class ApplianceTask(ImageTask):
self.uploadFile(os.path.join(broot.rootdir(), app_log[1:]))
if rv:
raise koji.ApplianceError(
"Could not create appliance: %s" % parseStatus(rv, 'appliance-creator') + "; see root.log or appliance.log for more information")
"Could not create appliance: %s" % parseStatus(rv, 'appliance-creator') +
"; see root.log or appliance.log for more information")
# Find the results
results = []
@ -3221,7 +3291,8 @@ class LiveCDTask(ImageTask):
return manifest
def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None):
def handler(self, name, version, release, arch, target_info,
build_tag, repo_info, ksfile, opts=None):
if opts is None:
opts = {}
@ -3251,7 +3322,8 @@ class LiveCDTask(ImageTask):
self.uploadFile(os.path.join(broot.rootdir(), livecd_log[1:]))
if rv:
raise koji.LiveCDError(
'Could not create LiveCD: %s' % parseStatus(rv, 'livecd-creator') + '; see root.log or livecd.log for more information')
'Could not create LiveCD: %s' % parseStatus(rv, 'livecd-creator') +
'; see root.log or livecd.log for more information')
# Find the resultant iso
# The cwd of the livecd-creator process is tmpdir() in the chroot, so
@ -3263,7 +3335,8 @@ class LiveCDTask(ImageTask):
if not isofile:
isofile = afile
else:
raise koji.LiveCDError('multiple .iso files found: %s and %s' % (isofile, afile))
raise koji.LiveCDError(
'multiple .iso files found: %s and %s' % (isofile, afile))
if not isofile:
raise koji.LiveCDError('could not find iso file in chroot')
isosrc = os.path.join(broot.tmpdir(), isofile)
@ -3407,7 +3480,8 @@ class LiveMediaTask(ImageTask):
return manifest
def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None):
def handler(self, name, version, release, arch, target_info,
build_tag, repo_info, ksfile, opts=None):
if opts is None:
opts = {}
@ -3485,7 +3559,8 @@ class LiveMediaTask(ImageTask):
if rv:
raise koji.LiveMediaError(
'Could not create LiveMedia: %s' % parseStatus(rv, 'livemedia-creator') + '; see root.log or livemedia-out.log for more information')
'Could not create LiveMedia: %s' % parseStatus(rv, 'livemedia-creator') +
'; see root.log or livemedia-out.log for more information')
# Find the resultant iso
# The cwd of the livemedia-creator process is broot.tmpdir() in the chroot, so
@ -3498,7 +3573,8 @@ class LiveMediaTask(ImageTask):
if not isofile:
isofile = afile
else:
raise koji.LiveMediaError('multiple .iso files found: %s and %s' % (isofile, afile))
raise koji.LiveMediaError(
'multiple .iso files found: %s and %s' % (isofile, afile))
if not isofile:
raise koji.LiveMediaError('could not find iso file in chroot')
isosrc = os.path.join(rootresultsdir, isofile)
@ -3561,7 +3637,8 @@ class OzImageTask(BaseTaskHandler):
scm = SCM(self.opts['ksurl'])
scm.assert_allowed(self.options.allowed_scms)
logfile = os.path.join(self.workdir, 'checkout-%s.log' % self.arch)
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch'))
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
build_tag=build_tag, scratch=self.opts.get('scratch'))
scmsrcdir = scm.checkout(self.workdir, self.session,
self.getUploadDir(), logfile)
self.run_callbacks("postSCMCheckout",
@ -3732,10 +3809,12 @@ class OzImageTask(BaseTaskHandler):
<url>%s</url>
</install>
""" % (name, distname, distver, arch, inst_tree)
template += """<icicle>
<extra_command>rpm -qa --qf '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}\n'</extra_command>
</icicle>
"""
template += ("<icicle>\n"
" <extra_command>rpm -qa --qf"
" '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},"
"%{BUILDTIME}\\n'</extra_command>\n"
" </icicle>\n"
" ")
# TODO: intelligently guess the size based on the kickstart file
template += """</os>
<description>%s OS</description>
@ -3743,7 +3822,7 @@ class OzImageTask(BaseTaskHandler):
<size>%sG</size>
</disk>
</template>
""" % (name, self.opts.get('disk_size'))
""" % (name, self.opts.get('disk_size')) # noqa: E501
return template
def parseDistro(self, distro):
@ -3831,7 +3910,9 @@ class BaseImageTask(OzImageTask):
Some image formats require others to be processed first, which is why
we have to do this. raw files in particular may not be kept.
"""
supported = ('raw', 'raw-xz', 'liveimg-squashfs', 'vmdk', 'qcow', 'qcow2', 'vdi', 'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt', 'vagrant-vmware-fusion', 'vagrant-hyperv', 'vpc', "tar-gz")
supported = ('raw', 'raw-xz', 'liveimg-squashfs', 'vmdk', 'qcow', 'qcow2', 'vdi',
'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt',
'vagrant-vmware-fusion', 'vagrant-hyperv', 'vpc', "tar-gz")
for f in formats:
if f not in supported:
raise koji.ApplianceError('Invalid format: %s' % f)
@ -3945,7 +4026,8 @@ class BaseImageTask(OzImageTask):
self.tlog.removeHandler(self.fhandler)
self.uploadFile(self.ozlog)
if 'No disk activity' in details:
details = 'Automated install failed or prompted for input. See the screenshot in the task results for more information.'
details = 'Automated install failed or prompted for input. ' \
'See the screenshot in the task results for more information'
raise koji.ApplianceError('Image status is %s: %s' %
(status, details))
@ -4108,8 +4190,8 @@ class BaseImageTask(OzImageTask):
if format == 'vagrant-vmware-fusion':
format = 'vsphere-ova'
img_opts['vsphere_ova_format'] = 'vagrant-vmware-fusion'
# The initial disk image transform for VMWare Fusion/Workstation requires a "standard" VMDK
# not the stream oriented format used for VirtualBox or regular VMWare OVAs
# The initial disk image transform for VMWare Fusion/Workstation requires a "standard"
# VMDK, not the stream oriented format used for VirtualBox or regular VMWare OVAs
img_opts['vsphere_vmdk_format'] = 'standard'
fixed_params = ['vsphere_ova_format', 'vsphere_vmdk_format']
if format == 'vagrant-hyperv':
@ -4117,7 +4199,8 @@ class BaseImageTask(OzImageTask):
img_opts['hyperv_ova_format'] = 'hyperv-vagrant'
fixed_params = ['hyperv_ova_format']
targ = self._do_target_image(self.base_img.base_image.identifier,
format.replace('-ova', ''), img_opts=img_opts, fixed_params=fixed_params)
format.replace('-ova', ''), img_opts=img_opts,
fixed_params=fixed_params)
targ2 = self._do_target_image(targ.target_image.identifier, 'OVA',
img_opts=img_opts, fixed_params=fixed_params)
return {'image': targ2.target_image.data}
@ -4166,7 +4249,9 @@ class BaseImageTask(OzImageTask):
self._mergeFactoryParams(img_opts, fixed_params)
self.logger.debug('img_opts_post_merge: %s' % img_opts)
target = self.bd.builder_for_target_image(image_type,
image_id=base_id, template=None, parameters=img_opts)
image_id=base_id,
template=None,
parameters=img_opts)
target.target_thread.join()
self._checkImageState(target)
return target
@ -4206,9 +4291,12 @@ class BaseImageTask(OzImageTask):
self.getUploadDir(), logerror=1)
return {'image': newimg}
def handler(self, name, version, release, arch, target_info, build_tag, repo_info, inst_tree, opts=None):
def handler(self, name, version, release, arch, target_info,
build_tag, repo_info, inst_tree, opts=None):
if not ozif_enabled:
self.logger.error("ImageFactory features require the following dependencies: pykickstart, imagefactory, oz and possibly python-hashlib")
self.logger.error(
"ImageFactory features require the following dependencies: "
"pykickstart, imagefactory, oz and possibly python-hashlib")
raise koji.ApplianceError('ImageFactory functions not available')
if opts is None:
@ -4339,11 +4427,14 @@ class BuildIndirectionImageTask(OzImageTask):
if not opts.get('skip_tag') and not opts.get('scratch'):
# Make sure package is on the list for this tag
if pkg_cfg is None:
raise koji.BuildError("package (image) %s not in list for tag %s" % (name, target_info['dest_tag_name']))
raise koji.BuildError("package (image) %s not in list for tag %s" %
(name, target_info['dest_tag_name']))
elif pkg_cfg['blocked']:
raise koji.BuildError("package (image) %s is blocked for tag %s" % (name, target_info['dest_tag_name']))
raise koji.BuildError("package (image) %s is blocked for tag %s" %
(name, target_info['dest_tag_name']))
return self.session.host.initImageBuild(self.id,
dict(name=name, version=version, release=release, epoch=0))
dict(name=name, version=version, release=release,
epoch=0))
def getRelease(self, name, ver):
"""return the next available release number for an N-V"""
@ -4371,7 +4462,8 @@ class BuildIndirectionImageTask(OzImageTask):
if fileurl:
scm = SCM(fileurl)
scm.assert_allowed(self.options.allowed_scms)
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch'))
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
build_tag=build_tag, scratch=self.opts.get('scratch'))
logfile = os.path.join(self.workdir, 'checkout.log')
scmsrcdir = scm.checkout(self.workdir, self.session,
self.getUploadDir(), logfile)
@ -4402,11 +4494,13 @@ class BuildIndirectionImageTask(OzImageTask):
taskinfo = self.session.getTaskInfo(task_id)
taskstate = koji.TASK_STATES[taskinfo['state']].lower()
if taskstate != 'closed':
raise koji.BuildError("Input task (%d) must be in closed state - current state is (%s)" %
raise koji.BuildError("Input task (%d) must be in closed state"
" - current state is (%s)" %
(task_id, taskstate))
taskmethod = taskinfo['method']
if taskmethod != "createImage":
raise koji.BuildError("Input task method must be 'createImage' - actual method (%s)" %
raise koji.BuildError("Input task method must be 'createImage'"
" - actual method (%s)" %
(taskmethod))
result = self.session.getTaskResult(task_id)
@ -4424,7 +4518,9 @@ class BuildIndirectionImageTask(OzImageTask):
tdl_full = os.path.join(task_dir, task_tdl)
if not (os.path.isfile(diskimage_full) and os.path.isfile(tdl_full)):
raise koji.BuildError("Missing TDL or qcow2 image for task (%d) - possible expired scratch build" % (task_id))
raise koji.BuildError(
"Missing TDL or qcow2 image for task (%d) - possible expired scratch build" %
(task_id))
# The sequence to recreate a valid persistent image is as follows
# Create a new BaseImage object
@ -4445,7 +4541,10 @@ class BuildIndirectionImageTask(OzImageTask):
return factory_base_image
def _nvr_to_image(nvr, arch):
""" Take a build ID or NVR plus arch and turn it into an Image Factory Base Image object """
"""
Take a build ID or NVR plus arch and turn it into
an Image Factory Base Image object
"""
pim = PersistentImageManager.default_manager()
build = self.session.getBuild(nvr)
if not build:
@ -4471,7 +4570,8 @@ class BuildIndirectionImageTask(OzImageTask):
tdl_full = os.path.join(builddir, build_tdl)
if not (os.path.isfile(diskimage_full) and os.path.isfile(tdl_full)):
raise koji.BuildError("Missing TDL (%s) or qcow2 (%s) image for image (%s) - this should never happen" %
raise koji.BuildError("Missing TDL (%s) or qcow2 (%s) image for image (%s)"
" - this should never happen" %
(build_tdl, build_diskimage, nvr))
# The sequence to recreate a valid persistent image is as follows
@ -4617,7 +4717,8 @@ class BuildIndirectionImageTask(OzImageTask):
tlog.removeHandler(fhandler)
self.uploadFile(ozlog)
raise koji.ApplianceError('Image status is %s: %s' %
(target.target_image.status, target.target_image.status_detail))
(target.target_image.status,
target.target_image.status_detail))
self.uploadFile(target.target_image.data, remoteName=os.path.basename(results_loc))
@ -4644,7 +4745,8 @@ class BuildIndirectionImageTask(OzImageTask):
# tag it
if not opts.get('scratch') and not opts.get('skip_tag'):
tag_task_id = self.session.host.subtask(method='tagBuild',
arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
arglist=[target_info['dest_tag'],
bld_info['id'], False, None, True],
label='tag', parent=self.id, arch='noarch')
self.wait(tag_task_id)
@ -4652,7 +4754,8 @@ class BuildIndirectionImageTask(OzImageTask):
report = ''
if opts.get('scratch'):
respath = ', '.join(
[os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in [self.id]])
[os.path.join(koji.pathinfo.work(),
koji.pathinfo.taskrelpath(tid)) for tid in [self.id]])
report += 'Scratch '
else:
respath = koji.pathinfo.imagebuild(bld_info)
@ -4681,8 +4784,10 @@ class RebuildSRPM(BaseBuildTask):
build_tag = self.session.getTag(build_tag, strict=True, event=event_id)
rootopts = {'install_group': 'srpm-build', 'repo_id': repo_id}
br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
broot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, **rootopts)
br_arch = self.find_arch('noarch', self.session.host.getHost(
), self.session.getBuildConfig(build_tag['id'], event=event_id))
broot = BuildRoot(self.session, self.options,
build_tag['id'], br_arch, self.id, **rootopts)
broot.workdir = self.workdir
self.logger.debug("Initializing buildroot")
@ -4720,7 +4825,8 @@ class RebuildSRPM(BaseBuildTask):
release = koji.get_header_field(h, 'release')
srpm_name = "%(name)s-%(version)s-%(release)s.src.rpm" % locals()
if srpm_name != os.path.basename(srpm):
raise koji.BuildError('srpm name mismatch: %s != %s' % (srpm_name, os.path.basename(srpm)))
raise koji.BuildError('srpm name mismatch: %s != %s' %
(srpm_name, os.path.basename(srpm)))
# upload srpm and return
self.uploadFile(srpm)
@ -4784,12 +4890,15 @@ class BuildSRPMFromSCMTask(BaseBuildTask):
rootopts = {'install_group': 'srpm-build',
'setup_dns': True,
'repo_id': repo_id}
if self.options.scm_credentials_dir is not None and os.path.isdir(self.options.scm_credentials_dir):
if self.options.scm_credentials_dir is not None and os.path.isdir(
self.options.scm_credentials_dir):
rootopts['bind_opts'] = {'dirs': {self.options.scm_credentials_dir: '/credentials', }}
# Force internal_dev_setup back to true because bind_opts is used to turn it off
rootopts['internal_dev_setup'] = True
br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
broot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, **rootopts)
br_arch = self.find_arch('noarch', self.session.host.getHost(
), self.session.getBuildConfig(build_tag['id'], event=event_id))
broot = BuildRoot(self.session, self.options,
build_tag['id'], br_arch, self.id, **rootopts)
broot.workdir = self.workdir
self.logger.debug("Initializing buildroot")
@ -4803,7 +4912,8 @@ class BuildSRPMFromSCMTask(BaseBuildTask):
logfile = self.workdir + '/checkout.log'
uploadpath = self.getUploadDir()
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
build_tag=build_tag, scratch=opts.get('scratch'))
# Check out spec file, etc. from SCM
sourcedir = scm.checkout(scmdir, self.session, uploadpath, logfile)
self.run_callbacks("postSCMCheckout",
@ -4855,7 +4965,8 @@ class BuildSRPMFromSCMTask(BaseBuildTask):
release = koji.get_header_field(h, 'release')
srpm_name = "%(name)s-%(version)s-%(release)s.src.rpm" % locals()
if srpm_name != os.path.basename(srpm):
raise koji.BuildError('srpm name mismatch: %s != %s' % (srpm_name, os.path.basename(srpm)))
raise koji.BuildError('srpm name mismatch: %s != %s' %
(srpm_name, os.path.basename(srpm)))
# upload srpm and return
self.uploadFile(srpm)
@ -4898,13 +5009,16 @@ Status: %(status)s\r
%(failure_info)s\r
"""
def handler(self, recipients, is_successful, tag_info, from_info, build_info, user_info, ignore_success=None, failure_msg=''):
def handler(self, recipients, is_successful, tag_info, from_info,
build_info, user_info, ignore_success=None, failure_msg=''):
if len(recipients) == 0:
self.logger.debug('task %i: no recipients, not sending notifications', self.id)
return
if ignore_success and is_successful:
self.logger.debug('task %i: tag operation successful and ignore success is true, not sending notifications', self.id)
self.logger.debug(
'task %i: tag operation successful and ignore success is true, '
'not sending notifications', self.id)
return
build = self.session.getBuild(build_info)
@ -4972,7 +5086,8 @@ class BuildNotificationTask(BaseTaskHandler):
_taskWeight = 0.1
# XXX externalize these templates somewhere
subject_templ = """Package: %(build_nvr)s Tag: %(dest_tag)s Status: %(status)s Built by: %(build_owner)s"""
subject_templ = "Package: %(build_nvr)s Tag: %(dest_tag)s Status: %(status)s " \
"Built by: %(build_owner)s"
message_templ = \
"""From: %(from_addr)s\r
Subject: %(subject)s\r
@ -5073,7 +5188,8 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
return
build_pkg_name = build['package_name']
build_pkg_evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) + ':' or ''), build['version'], build['release'])
build_pkg_evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) +
':' or ''), build['version'], build['release'])
build_nvr = koji.buildLabel(build)
build_id = build['id']
build_owner = build['owner_name']
@ -5099,7 +5215,9 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
cancel_info = "\r\nCanceled by: %s" % canceler['name']
elif build['state'] == koji.BUILD_STATES['FAILED']:
failure_data = task_data[task_id]['result']
failed_hosts = ['%s (%s)' % (task['host'], task['arch']) for task in task_data.values() if task['host'] and task['state'] == 'failed']
failed_hosts = ['%s (%s)' % (task['host'], task['arch'])
for task in task_data.values()
if task['host'] and task['state'] == 'failed']
failure_info = "\r\n%s (%d) failed on %s:\r\n %s" % (build_nvr, build_id,
', '.join(failed_hosts),
failure_data)
@ -5142,9 +5260,11 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
output += "logs:\r\n"
for (file_, volume) in task['logs']:
if tasks[task_state] != 'closed':
output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file_, volume)
output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (
weburl, task['id'], file_, volume)
else:
output += " %s\r\n" % '/'.join([buildurl, 'data', 'logs', task['build_arch'], file_])
output += " %s\r\n" % '/'.join([buildurl, 'data', 'logs',
task['build_arch'], file_])
if task['rpms']:
output += "rpms:\r\n"
for file_ in task['rpms']:
@ -5152,11 +5272,13 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
if task['misc']:
output += "misc:\r\n"
for (file_, volume) in task['misc']:
output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file_, volume)
output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (
weburl, task['id'], file_, volume)
output += "\r\n"
output += "\r\n"
changelog = koji.util.formatChangelog(self.session.getChangelogEntries(build_id, queryOpts={'limit': 3})).replace("\n", "\r\n")
changelog = koji.util.formatChangelog(self.session.getChangelogEntries(
build_id, queryOpts={'limit': 3})).replace("\n", "\r\n")
if changelog:
changelog = "Changelog:\r\n%s" % changelog
@ -5464,7 +5586,8 @@ class createDistRepoTask(BaseTaskHandler):
"sparc": ("sparcv9v", "sparcv9", "sparcv8", "sparc", "noarch"),
"sparc64": ("sparc64v", "sparc64", "noarch"),
"alpha": ("alphaev6", "alphaev56", "alphaev5", "alpha", "noarch"),
"arm": ("arm", "armv4l", "armv4tl", "armv5tel", "armv5tejl", "armv6l", "armv7l", "noarch"),
"arm": ("arm", "armv4l", "armv4tl", "armv5tel", "armv5tejl", "armv6l", "armv7l",
"noarch"),
"armhfp": ("armv7hl", "armv7hnl", "noarch"),
"aarch64": ("aarch64", "noarch"),
"riscv64": ("riscv64", "noarch"),
@ -5926,7 +6049,8 @@ enabled=1
for a in self.compat[arch]:
# note: self.compat includes noarch for non-src already
rpm_iter, builds = self.session.listTaggedRPMS(tag_id,
event=opts['event'], arch=a, latest=opts['latest'],
event=opts['event'], arch=a,
latest=opts['latest'],
inherit=opts['inherit'], rpmsigs=True)
for build in builds:
builddirs[build['id']] = koji.pathinfo.build(build)
@ -6105,9 +6229,12 @@ class WaitrepoTask(BaseTaskHandler):
repo = self.session.getRepo(taginfo['id'])
if repo and repo != last_repo:
if builds:
if koji.util.checkForBuilds(self.session, taginfo['id'], builds, repo['create_event']):
self.logger.debug("Successfully waited %s for %s to appear in the %s repo" %
(koji.util.duration(start), koji.util.printList(nvrs), taginfo['name']))
if koji.util.checkForBuilds(
self.session, taginfo['id'], builds, repo['create_event']):
self.logger.debug("Successfully waited %s for %s to appear "
"in the %s repo" %
(koji.util.duration(start), koji.util.printList(nvrs),
taginfo['name']))
return repo
elif newer_than:
if repo['create_ts'] > newer_than:
@ -6120,8 +6247,10 @@ class WaitrepoTask(BaseTaskHandler):
if (time.time() - start) > (self.TIMEOUT * 60.0):
if builds:
raise koji.GenericError("Unsuccessfully waited %s for %s to appear in the %s repo" %
(koji.util.duration(start), koji.util.printList(nvrs), taginfo['name']))
raise koji.GenericError("Unsuccessfully waited %s for %s to appear "
"in the %s repo" %
(koji.util.duration(start), koji.util.printList(nvrs),
taginfo['name']))
else:
raise koji.GenericError("Unsuccessfully waited %s for a new %s repo" %
(koji.util.duration(start), taginfo['name']))

View file

@ -61,7 +61,8 @@ MULTILIB_ARCHES = {
def parse_args(args):
"""Parse our opts/args"""
usage = """
mergerepos: take 2 or more repositories and merge their metadata into a new repo using Koji semantics
mergerepos: take 2 or more repositories and merge their metadata into a new
repo using Koji semantics
mergerepos --repo=url --repo=url --outputdir=/some/path"""
@ -74,7 +75,8 @@ def parse_args(args):
parser.add_option("-a", "--arch", dest="arches", default=[], action="append",
help="List of arches to include in the repo")
parser.add_option("-b", "--blocked", default=None,
help="A file containing a list of srpm names to exclude from the merged repo")
help="A file containing a list of srpm names to exclude "
"from the merged repo")
parser.add_option("--mode", default='koji', help="Select the merge mode")
parser.add_option("-o", "--outputdir", default=None,
help="Location to create the repository")
@ -175,18 +177,18 @@ class RepoMerge(object):
For each package object, check if the srpm name has ever been seen before.
If is has not, keep the package. If it has, check if the srpm name was first seen
in the same repo as the current package. If so, keep the package from the srpm with the
highest NVR. If not, keep the packages from the first srpm we found, and delete packages from
all other srpms.
highest NVR. If not, keep the packages from the first srpm we found, and delete packages
from all other srpms.
Packages with matching NVRs in multiple repos will be taken from the first repo.
If the srpm name appears in the blocked package list, any packages generated from the srpm
will be deleted from the package sack as well.
This method will also generate a file called "pkgorigins" and add it to the repo metadata. This
is a tab-separated map of package E:N-V-R.A to repo URL (as specified on the command-line). This
allows a package to be tracked back to its origin, even if the location field in the repodata does
not match the original repo location.
This method will also generate a file called "pkgorigins" and add it to the repo metadata.
This is a tab-separated map of package E:N-V-R.A to repo URL (as specified on the
command-line). This allows a package to be tracked back to its origin, even if the location
field in the repodata does not match the original repo location.
"""
if self.mode == 'simple':
@ -208,7 +210,8 @@ class RepoMerge(object):
# to be using relative urls
# XXX - kind of a hack, but yum leaves us little choice
# force the pkg object to report a relative location
loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, attrib=True)
loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path,
attrib=True)
pkg._return_remote_location = make_const_func(loc)
if pkg.sourcerpm in seen_srpms:
# we're just looking at sourcerpms this pass and we've
@ -299,7 +302,8 @@ class RepoMerge(object):
# to be using relative urls
# XXX - kind of a hack, but yum leaves us little choice
# force the pkg object to report a relative location
loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, attrib=True)
loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path,
attrib=True)
pkg._return_remote_location = make_const_func(loc)
pkgorigins = os.path.join(self.yumbase.conf.cachedir, 'pkgorigins')

View file

@ -129,7 +129,8 @@ def get_options():
help=_("do not authenticate"))
parser.add_option("--force-auth", action="store_true", default=False,
help=_("authenticate even for read-only operations"))
parser.add_option("--authtype", help=_("force use of a type of authentication, options: noauth, ssl, password, or kerberos"))
parser.add_option("--authtype", help=_("force use of a type of authentication, options: "
"noauth, ssl, password, or kerberos"))
parser.add_option("-d", "--debug", action="store_true",
help=_("show debug output"))
parser.add_option("--debug-xmlrpc", action="store_true",
@ -145,7 +146,8 @@ def get_options():
parser.add_option("--pkgurl", help=SUPPRESS_HELP)
parser.add_option("--plugin-paths", metavar='PATHS',
help=_("specify additional plugin paths (colon separated)"))
parser.add_option("--help-commands", action="store_true", default=False, help=_("list commands"))
parser.add_option("--help-commands", action="store_true", default=False,
help=_("list commands"))
(options, args) = parser.parse_args()
# load local config

File diff suppressed because it is too large Load diff

View file

@ -112,7 +112,8 @@ def ensure_connection(session):
except requests.exceptions.ConnectionError:
error(_("Error: Unable to connect to server"))
if ret != koji.API_VERSION:
warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
warn(_("WARNING: The server is at API version %d and "
"the client is at %d" % (ret, koji.API_VERSION)))
def print_task_headers():
@ -194,7 +195,8 @@ class TaskWatcher(object):
laststate = last['state']
if laststate != state:
if not self.quiet:
print("%s: %s -> %s" % (self.str(), self.display_state(last), self.display_state(self.info)))
print("%s: %s -> %s" % (self.str(), self.display_state(last),
self.display_state(self.info)))
return True
return False
else:
@ -277,9 +279,9 @@ def watch_tasks(session, tasklist, quiet=False, poll_interval=60, ki_handler=Non
tlist = ['%s: %s' % (t.str(), t.display_state(t.info))
for t in tasks.values() if not t.is_done()]
print(
"""Tasks still running. You can continue to watch with the '%s watch-task' command.
Running Tasks:
%s""" % (progname, '\n'.join(tlist)))
"Tasks still running. You can continue to watch with the"
" '%s watch-task' command.\n"
"Running Tasks:\n%s" % (progname, '\n'.join(tlist)))
sys.stdout.flush()
rv = 0
try:
@ -302,7 +304,8 @@ Running Tasks:
for child in session.getTaskChildren(task_id):
child_id = child['id']
if child_id not in tasks.keys():
tasks[child_id] = TaskWatcher(child_id, session, task.level + 1, quiet=quiet)
tasks[child_id] = TaskWatcher(child_id, session, task.level + 1,
quiet=quiet)
tasks[child_id].update()
# If we found new children, go through the list again,
# in case they have children also
@ -370,7 +373,8 @@ def watch_logs(session, tasklist, opts, poll_interval):
if (log, volume) not in taskoffsets:
taskoffsets[(log, volume)] = 0
contents = session.downloadTaskOutput(task_id, log, taskoffsets[(log, volume)], 16384, volume=volume)
contents = session.downloadTaskOutput(task_id, log, taskoffsets[(log, volume)],
16384, volume=volume)
taskoffsets[(log, volume)] += len(contents)
if contents:
currlog = "%d:%s:%s:" % (task_id, volume, log)
@ -452,7 +456,9 @@ def _progress_callback(uploaded, total, piece, time, total_time):
speed = _format_size(float(total) / float(total_time)) + "/sec"
# write formated string and flush
sys.stdout.write("[% -36s] % 4s % 8s % 10s % 14s\r" % ('=' * (int(percent_done * 36)), percent_done_str, elapsed, data_done, speed))
sys.stdout.write("[% -36s] % 4s % 8s % 10s % 14s\r" % ('=' * (int(percent_done * 36)),
percent_done_str, elapsed, data_done,
speed))
sys.stdout.flush()
@ -520,7 +526,8 @@ def _download_progress(download_t, download_d):
percent_done_str = "%3d%%" % (percent_done * 100)
data_done = _format_size(download_d)
sys.stdout.write("[% -36s] % 4s % 10s\r" % ('=' * (int(percent_done * 36)), percent_done_str, data_done))
sys.stdout.write("[% -36s] % 4s % 10s\r" % ('=' * (int(percent_done * 36)), percent_done_str,
data_done))
sys.stdout.flush()
@ -560,13 +567,16 @@ def activate_session(session, options):
elif options.authtype == "ssl" or os.path.isfile(options.cert) and options.authtype is None:
# authenticate using SSL client cert
session.ssl_login(options.cert, None, options.serverca, proxyuser=runas)
elif options.authtype == "password" or getattr(options, 'user', None) and options.authtype is None:
elif options.authtype == "password" \
or getattr(options, 'user', None) \
and options.authtype is None:
# authenticate using user/password
session.login()
elif options.authtype == "kerberos" or has_krb_creds() and options.authtype is None:
try:
if getattr(options, 'keytab', None) and getattr(options, 'principal', None):
session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=runas)
session.krb_login(principal=options.principal, keytab=options.keytab,
proxyuser=runas)
else:
session.krb_login(proxyuser=runas)
except socket.error as e:

File diff suppressed because it is too large Load diff

View file

@ -155,7 +155,9 @@ class HandlerRegistry(object):
if x == 0 and func.__code__.co_varnames[x] == "self":
continue
if func.__defaults__ and func.__code__.co_argcount - x <= len(func.__defaults__):
args.append((func.__code__.co_varnames[x], func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)]))
args.append(
(func.__code__.co_varnames[x],
func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)]))
else:
args.append(func.__code__.co_varnames[x])
return args
@ -317,7 +319,8 @@ class ModXMLRPCRequestHandler(object):
if self.logger.isEnabledFor(logging.INFO):
rusage = resource.getrusage(resource.RUSAGE_SELF)
self.logger.info("Completed method %s for session %s (#%s): %f seconds, rss %s, stime %f",
self.logger.info(
"Completed method %s for session %s (#%s): %f seconds, rss %s, stime %f",
method, context.session.id, context.session.callnum,
time.time() - start,
rusage.ru_maxrss, rusage.ru_stime)
@ -344,8 +347,11 @@ class ModXMLRPCRequestHandler(object):
faultCode = getattr(exc_type, 'faultCode', 1)
faultString = ', '.join(exc_value.args)
trace = traceback.format_exception(*sys.exc_info())
# traceback is not part of the multicall spec, but we include it for debugging purposes
results.append({'faultCode': faultCode, 'faultString': faultString, 'traceback': trace})
# traceback is not part of the multicall spec,
# but we include it for debugging purposes
results.append({'faultCode': faultCode,
'faultString': faultString,
'traceback': trace})
else:
results.append([result])
@ -438,7 +444,9 @@ def load_config(environ):
['VerbosePolicy', 'boolean', False],
['LogLevel', 'string', 'WARNING'],
['LogFormat', 'string', '%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'],
['LogFormat', 'string',
'%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s '
'%(name)s: %(message)s'],
['MissingPolicyOk', 'boolean', True],
['EnableMaven', 'boolean', False],
@ -660,7 +668,8 @@ def load_scripts(environ):
def get_memory_usage():
pagesize = resource.getpagesize()
statm = [pagesize * int(y) // 1024 for y in "".join(open("/proc/self/statm").readlines()).strip().split()]
statm = [pagesize * int(y) // 1024
for y in "".join(open("/proc/self/statm").readlines()).strip().split()]
size, res, shr, text, lib, data, dirty = statm
return res - shr
@ -713,7 +722,8 @@ def application(environ, start_response):
('Allow', 'POST'),
]
start_response('405 Method Not Allowed', headers)
response = "Method Not Allowed\nThis is an XML-RPC server. Only POST requests are accepted."
response = "Method Not Allowed\n" \
"This is an XML-RPC server. Only POST requests are accepted."
if six.PY3:
response = response.encode()
headers = [
@ -767,7 +777,11 @@ def application(environ, start_response):
paramstr = repr(getattr(context, 'params', 'UNKNOWN'))
if len(paramstr) > 120:
paramstr = paramstr[:117] + "..."
h.logger.warning("Memory usage of process %d grew from %d KiB to %d KiB (+%d KiB) processing request %s with args %s" % (os.getpid(), memory_usage_at_start, memory_usage_at_end, memory_usage_at_end - memory_usage_at_start, context.method, paramstr))
h.logger.warning(
"Memory usage of process %d grew from %d KiB to %d KiB (+%d KiB) processing "
"request %s with args %s" %
(os.getpid(), memory_usage_at_start, memory_usage_at_end,
memory_usage_at_end - memory_usage_at_start, context.method, paramstr))
h.logger.debug("Returning %d bytes after %f seconds", len(response),
time.time() - start)
finally:

View file

@ -75,7 +75,7 @@ try:
from OpenSSL.SSL import Error as SSL_Error
except Exception: # pragma: no cover
# the hub imports koji, and sometimes this import fails there
# see: https://cryptography.io/en/latest/faq/#starting-cryptography-using-mod-wsgi-produces-an-internalerror-during-a-call-in-register-osrandom-engine
# see: https://cryptography.io/en/latest/faq/#starting-cryptography-using-mod-wsgi-produces-an-internalerror-during-a-call-in-register-osrandom-engine # noqa: E501
# unfortunately the workaround at the above link does not always work, so
# we ignore it here
pass
@ -1270,7 +1270,8 @@ def parse_pom(path=None, contents=None):
fd.close()
if not contents:
raise GenericError('either a path to a pom file or the contents of a pom file must be specified')
raise GenericError(
'either a path to a pom file or the contents of a pom file must be specified')
# A common problem is non-UTF8 characters in XML files, so we'll convert the string first
@ -1287,7 +1288,8 @@ def parse_pom(path=None, contents=None):
for field in fields:
if field not in util.to_list(values.keys()):
raise GenericError('could not extract %s from POM: %s' % (field, (path or '<contents>')))
raise GenericError('could not extract %s from POM: %s' %
(field, (path or '<contents>')))
return values
@ -1649,7 +1651,8 @@ name=build
# The following macro values cannot be overridden by tag options
macros['%_topdir'] = '%s/build' % config_opts['chroothome']
macros['%_host_cpu'] = opts.get('target_arch', arch)
macros['%_host'] = '%s-%s' % (opts.get('target_arch', arch), opts.get('mockhost', 'koji-linux-gnu'))
macros['%_host'] = '%s-%s' % (opts.get('target_arch', arch),
opts.get('mockhost', 'koji-linux-gnu'))
parts = ["""# Auto-generated by the Koji build system
"""]
@ -1681,7 +1684,9 @@ name=build
if bind_opts:
for key in bind_opts.keys():
for mnt_src, mnt_dest in six.iteritems(bind_opts.get(key)):
parts.append("config_opts['plugin_conf']['bind_mount_opts'][%r].append((%r, %r))\n" % (key, mnt_src, mnt_dest))
parts.append(
"config_opts['plugin_conf']['bind_mount_opts'][%r].append((%r, %r))\n" %
(key, mnt_src, mnt_dest))
parts.append("\n")
for key in sorted(macros):
@ -1886,7 +1891,8 @@ def read_config(profile_name, user_config=None):
try:
result[name] = int(value)
except ValueError:
raise ConfigurationError("value for %s config option must be a valid integer" % name)
raise ConfigurationError(
"value for %s config option must be a valid integer" % name)
else:
result[name] = value
@ -2030,7 +2036,8 @@ def read_config_files(config_files, raw=False):
class PathInfo(object):
# ASCII numbers and upper- and lower-case letter for use in tmpdir()
ASCII_CHARS = [chr(i) for i in list(range(48, 58)) + list(range(65, 91)) + list(range(97, 123))]
ASCII_CHARS = [chr(i)
for i in list(range(48, 58)) + list(range(65, 91)) + list(range(97, 123))]
def __init__(self, topdir=None):
self._topdir = topdir
@ -2053,10 +2060,12 @@ class PathInfo(object):
def build(self, build):
"""Return the directory where a build belongs"""
return self.volumedir(build.get('volume_name')) + ("/packages/%(name)s/%(version)s/%(release)s" % build)
return self.volumedir(build.get('volume_name')) + \
("/packages/%(name)s/%(version)s/%(release)s" % build)
def mavenbuild(self, build):
"""Return the directory where the Maven build exists in the global store (/mnt/koji/packages)"""
"""Return the directory where the Maven build exists in the global store
(/mnt/koji/packages)"""
return self.build(build) + '/maven'
def mavenrepo(self, maveninfo):
@ -2137,7 +2146,8 @@ class PathInfo(object):
"""Return a path to a unique directory under work()/tmp/"""
tmp = None
while tmp is None or os.path.exists(tmp):
tmp = self.work(volume) + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS) for dummy in '123456'])
tmp = self.work(volume) + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS)
for dummy in '123456'])
return tmp
def scratch(self):
@ -2781,9 +2791,9 @@ class ClientSession(object):
# basically, we want to retry on most errors, with a few exceptions
# - faults (this means the call completed and failed)
# - SystemExit, KeyboardInterrupt
# note that, for logged-in sessions the server should tell us (via a RetryError fault)
# if the call cannot be retried. For non-logged-in sessions, all calls should be read-only
# and hence retryable.
# note that, for logged-in sessions the server should tell us (via a RetryError
# fault) if the call cannot be retried. For non-logged-in sessions, all calls
# should be read-only and hence retryable.
except Fault as fault:
# try to convert the fault to a known exception
err = convertFault(fault)
@ -2792,13 +2802,14 @@ class ClientSession(object):
secs = self.opts.get('offline_retry_interval', interval)
self.logger.debug("Server offline. Retrying in %i seconds", secs)
time.sleep(secs)
# reset try count - this isn't a typical error, this is a running server
# correctly reporting an outage
# reset try count - this isn't a typical error, this is a running
# server correctly reporting an outage
tries = 0
continue
raise err
except (SystemExit, KeyboardInterrupt):
# (depending on the python version, these may or may not be subclasses of Exception)
# (depending on the python version, these may or may not be subclasses of
# Exception)
raise
except Exception as e:
tb_str = ''.join(traceback.format_exception(*sys.exc_info()))
@ -2809,8 +2820,9 @@ class ClientSession(object):
raise
if not self.logged_in:
# in the past, non-logged-in sessions did not retry. For compatibility purposes
# this behavior is governed by the anon_retry opt.
# in the past, non-logged-in sessions did not retry.
# For compatibility purposes this behavior is governed by the anon_retry
# opt.
if not self.opts.get('anon_retry', False):
raise
@ -2822,7 +2834,8 @@ class ClientSession(object):
# otherwise keep retrying
if self.logger.isEnabledFor(logging.DEBUG):
self.logger.debug(tb_str)
self.logger.info("Try #%s for call %s (%s) failed: %s", tries, self.callnum, name, e)
self.logger.info("Try #%s for call %s (%s) failed: %s",
tries, self.callnum, name, e)
if tries > 1:
# first retry is immediate, after that we honor retry_interval
time.sleep(interval)
@ -2864,7 +2877,8 @@ class ClientSession(object):
transaction.
"""
if not self.multicall:
raise GenericError('ClientSession.multicall must be set to True before calling multiCall()')
raise GenericError(
'ClientSession.multicall must be set to True before calling multiCall()')
self.multicall = False
if len(self._calls) == 0:
return []
@ -2896,7 +2910,8 @@ class ClientSession(object):
return self.__dict__['_apidoc']
return VirtualMethod(self._callMethod, name, self)
def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=False, volume=None):
def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None,
overwrite=False, volume=None):
if blocksize is None:
blocksize = self.opts.get('upload_blocksize', 1048576)
@ -2930,7 +2945,8 @@ class ClientSession(object):
hexdigest = util.adler32_constructor(chunk).hexdigest()
full_chksum.update(chunk)
if result['size'] != len(chunk):
raise GenericError("server returned wrong chunk size: %s != %s" % (result['size'], len(chunk)))
raise GenericError("server returned wrong chunk size: %s != %s" %
(result['size'], len(chunk)))
if result['hexdigest'] != hexdigest:
raise GenericError('upload checksum failed: %s != %s'
% (result['hexdigest'], hexdigest))
@ -2957,9 +2973,11 @@ class ClientSession(object):
if problems and result['hexdigest'] != full_chksum.hexdigest():
raise GenericError("Uploaded file has wrong checksum: %s/%s, %s != %s"
% (path, name, result['hexdigest'], full_chksum.hexdigest()))
self.logger.debug("Fast upload: %s complete. %i bytes in %.1f seconds", localfile, size, t2)
self.logger.debug("Fast upload: %s complete. %i bytes in %.1f seconds",
localfile, size, t2)
def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False, volume=None):
def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False,
volume=None):
"""prep a rawUpload call"""
if not self.logged_in:
raise ActionNotAllowed("you must be logged in to upload")
@ -2989,7 +3007,8 @@ class ClientSession(object):
request = chunk
return handler, headers, request
def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=True, volume=None):
def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None,
overwrite=True, volume=None):
"""upload a file in chunks using the uploadFile call"""
if blocksize is None:
blocksize = self.opts.get('upload_blocksize', 1048576)
@ -3044,7 +3063,8 @@ class ClientSession(object):
tries = 0
while True:
if debug:
self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" % (path, name, sz, digest, offset))
self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" %
(path, name, sz, digest, offset))
if self.callMethod('uploadFile', path, name, sz, digest, offset, data, **volopts):
break
if tries <= retries:
@ -3063,9 +3083,11 @@ class ClientSession(object):
if t2 <= 0:
t2 = 1
if debug:
self.logger.debug("Uploaded %d bytes in %f seconds (%f kbytes/sec)" % (size, t1, size / t1 / 1024.0))
self.logger.debug("Uploaded %d bytes in %f seconds (%f kbytes/sec)" %
(size, t1, size / t1 / 1024.0))
if debug:
self.logger.debug("Total: %d bytes in %f seconds (%f kbytes/sec)" % (ofs, t2, ofs / t2 / 1024.0))
self.logger.debug("Total: %d bytes in %f seconds (%f kbytes/sec)" %
(ofs, t2, ofs / t2 / 1024.0))
if callback:
callback(ofs, totalsize, size, t1, t2)
fo.close()
@ -3281,8 +3303,8 @@ class DBHandler(logging.Handler):
cursor.execute(command, data)
cursor.close()
# self.cnx.commit()
# XXX - committing here is most likely wrong, but we need to set commit_pending or something
# ...and this is really the wrong place for that
# XXX - committing here is most likely wrong, but we need to set commit_pending or
# something...and this is really the wrong place for that
except BaseException:
self.handleError(record)
@ -3583,7 +3605,9 @@ def add_file_logger(logger, fn):
def add_stderr_logger(logger):
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] {%(process)d} %(name)s:%(lineno)d %(message)s'))
handler.setFormatter(
logging.Formatter(
'%(asctime)s [%(levelname)s] {%(process)d} %(name)s:%(lineno)d %(message)s'))
handler.setLevel(logging.DEBUG)
logging.getLogger(logger).addHandler(handler)
@ -3612,7 +3636,8 @@ def add_mail_logger(logger, addr):
return
addresses = addr.split(',')
handler = logging.handlers.SMTPHandler("localhost",
"%s@%s" % (pwd.getpwuid(os.getuid())[0], socket.getfqdn()),
"%s@%s" % (pwd.getpwuid(os.getuid())[0],
socket.getfqdn()),
addresses,
"%s: error notice" % socket.getfqdn())
handler.setFormatter(logging.Formatter('%(pathname)s:%(lineno)d [%(levelname)s] %(message)s'))

View file

@ -334,7 +334,8 @@ class Session(object):
# Successfully authenticated via Kerberos, now log in
if proxyuser:
proxyprincs = [princ.strip() for princ in context.opts.get('ProxyPrincipals', '').split(',')]
proxyprincs = [princ.strip()
for princ in context.opts.get('ProxyPrincipals', '').split(',')]
if cprinc.name in proxyprincs:
login_principal = proxyuser
else:
@ -408,12 +409,15 @@ class Session(object):
authtype = koji.AUTHTYPE_GSSAPI
else:
if context.environ.get('SSL_CLIENT_VERIFY') != 'SUCCESS':
raise koji.AuthError('could not verify client: %s' % context.environ.get('SSL_CLIENT_VERIFY'))
raise koji.AuthError('could not verify client: %s' %
context.environ.get('SSL_CLIENT_VERIFY'))
name_dn_component = context.opts.get('DNUsernameComponent', 'CN')
username = context.environ.get('SSL_CLIENT_S_DN_%s' % name_dn_component)
if not username:
raise koji.AuthError('unable to get user information (%s) from client certificate' % name_dn_component)
raise koji.AuthError(
'unable to get user information (%s) from client certificate' %
name_dn_component)
client_dn = context.environ.get('SSL_CLIENT_S_DN')
authtype = koji.AUTHTYPE_SSL

View file

@ -110,7 +110,8 @@ def fast_incremental_upload(session, fname, fd, path, retries, logger):
break
def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, append=0, chroot=None, env=None):
def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, append=0,
chroot=None, env=None):
"""Run command with output redirected. If chroot is not None, chroot to the directory specified
before running the command."""
pid = os.fork()
@ -287,11 +288,13 @@ class SCM(object):
elif len(userhost) > 2:
raise koji.GenericError('Invalid username@hostname specified: %s' % netloc)
if not netloc:
raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the netloc element.' % self.url)
raise koji.GenericError(
'Unable to parse SCM URL: %s . Could not find the netloc element.' % self.url)
# check for empty path before we apply normpath
if not path:
raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the path element.' % self.url)
raise koji.GenericError(
'Unable to parse SCM URL: %s . Could not find the path element.' % self.url)
path = os.path.normpath(path)
@ -306,14 +309,19 @@ class SCM(object):
# any such url should have already been caught by is_scm_url
raise koji.GenericError('Invalid SCM URL. Path should begin with /: %s) ')
# check for validity: params should be empty, query may be empty, everything else should be populated
# check for validity: params should be empty, query may be empty, everything else should be
# populated
if params:
raise koji.GenericError('Unable to parse SCM URL: %s . Params element %s should be empty.' % (self.url, params))
raise koji.GenericError(
'Unable to parse SCM URL: %s . Params element %s should be empty.' %
(self.url, params))
if not scheme: # pragma: no cover
# should not happen because of is_scm_url check earlier
raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the scheme element.' % self.url)
raise koji.GenericError(
'Unable to parse SCM URL: %s . Could not find the scheme element.' % self.url)
if not fragment:
raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the fragment element.' % self.url)
raise koji.GenericError(
'Unable to parse SCM URL: %s . Could not find the fragment element.' % self.url)
# return parsed values
return (scheme, user, netloc, path, query, fragment)
@ -356,7 +364,8 @@ class SCM(object):
for allowed_scm in allowed.split():
scm_tuple = allowed_scm.split(':')
if len(scm_tuple) < 2:
self.logger.warn('Ignoring incorrectly formatted SCM host:repository: %s' % allowed_scm)
self.logger.warn('Ignoring incorrectly formatted SCM host:repository: %s' %
allowed_scm)
continue
host_pat = scm_tuple[0]
repo_pat = scm_tuple[1]
@ -378,11 +387,13 @@ class SCM(object):
if scm_tuple[3]:
self.source_cmd = scm_tuple[3].split(',')
else:
# there was nothing after the trailing :, so they don't want to run a source_cmd at all
# there was nothing after the trailing :,
# so they don't want to run a source_cmd at all
self.source_cmd = None
break
if not is_allowed:
raise koji.BuildError('%s:%s is not in the list of allowed SCMs' % (self.host, self.repository))
raise koji.BuildError(
'%s:%s is not in the list of allowed SCMs' % (self.host, self.repository))
def checkout(self, scmdir, session=None, uploadpath=None, logfile=None):
"""
@ -416,16 +427,20 @@ class SCM(object):
(self.scmtype, ' '.join(cmd), os.path.basename(logfile)))
if self.scmtype == 'CVS':
pserver = ':pserver:%s@%s:%s' % ((self.user or 'anonymous'), self.host, self.repository)
module_checkout_cmd = ['cvs', '-d', pserver, 'checkout', '-r', self.revision, self.module]
pserver = ':pserver:%s@%s:%s' % ((self.user or 'anonymous'), self.host,
self.repository)
module_checkout_cmd = ['cvs', '-d', pserver, 'checkout', '-r', self.revision,
self.module]
common_checkout_cmd = ['cvs', '-d', pserver, 'checkout', 'common']
elif self.scmtype == 'CVS+SSH':
if not self.user:
raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
raise koji.BuildError(
'No user specified for repository access scheme: %s' % self.scheme)
cvsserver = ':ext:%s@%s:%s' % (self.user, self.host, self.repository)
module_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', '-r', self.revision, self.module]
module_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', '-r', self.revision,
self.module]
common_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', 'common']
env = {'CVS_RSH': 'ssh'}
@ -453,14 +468,16 @@ class SCM(object):
update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
update_checkout_dir = sourcedir
# self.module may be empty, in which case the specfile should be in the top-level directory
# self.module may be empty, in which case the specfile should be in the top-level
# directory
if self.module:
# Treat the module as a directory inside the git repository
sourcedir = '%s/%s' % (sourcedir, self.module)
elif self.scmtype == 'GIT+SSH':
if not self.user:
raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
raise koji.BuildError(
'No user specified for repository access scheme: %s' % self.scheme)
gitrepo = 'git+ssh://%s@%s%s' % (self.user, self.host, self.repository)
commonrepo = os.path.dirname(gitrepo) + '/common'
checkout_path = os.path.basename(self.repository)
@ -481,7 +498,8 @@ class SCM(object):
update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
update_checkout_dir = sourcedir
# self.module may be empty, in which case the specfile should be in the top-level directory
# self.module may be empty, in which case the specfile should be in the top-level
# directory
if self.module:
# Treat the module as a directory inside the git repository
sourcedir = '%s/%s' % (sourcedir, self.module)
@ -492,15 +510,18 @@ class SCM(object):
scheme = scheme.split('+')[1]
svnserver = '%s%s%s' % (scheme, self.host, self.repository)
module_checkout_cmd = ['svn', 'checkout', '-r', self.revision, '%s/%s' % (svnserver, self.module), self.module]
module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
'%s/%s' % (svnserver, self.module), self.module]
common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]
elif self.scmtype == 'SVN+SSH':
if not self.user:
raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
raise koji.BuildError(
'No user specified for repository access scheme: %s' % self.scheme)
svnserver = 'svn+ssh://%s@%s%s' % (self.user, self.host, self.repository)
module_checkout_cmd = ['svn', 'checkout', '-r', self.revision, '%s/%s' % (svnserver, self.module), self.module]
module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
'%s/%s' % (svnserver, self.module), self.module]
common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]
else:
@ -513,8 +534,10 @@ class SCM(object):
# Currently only required for GIT checkouts
# Run the command in the directory the source was checked out into
if self.scmtype.startswith('GIT') and globals().get('KOJIKAMID'):
_run(['git', 'config', 'core.autocrlf', 'true'], chdir=update_checkout_dir, fatal=True)
_run(['git', 'config', 'core.safecrlf', 'true'], chdir=update_checkout_dir, fatal=True)
_run(['git', 'config', 'core.autocrlf', 'true'],
chdir=update_checkout_dir, fatal=True)
_run(['git', 'config', 'core.safecrlf', 'true'],
chdir=update_checkout_dir, fatal=True)
_run(update_checkout_cmd, chdir=update_checkout_dir, fatal=True)
if self.use_common and not globals().get('KOJIKAMID'):
@ -583,7 +606,8 @@ class TaskManager(object):
def registerHandler(self, entry):
"""register and index task handler"""
if isinstance(entry, type(koji.tasks.BaseTaskHandler)) and issubclass(entry, koji.tasks.BaseTaskHandler):
if isinstance(entry, type(koji.tasks.BaseTaskHandler)) and \
issubclass(entry, koji.tasks.BaseTaskHandler):
for method in entry.Methods:
self.handlers[method] = entry
@ -638,7 +662,9 @@ class TaskManager(object):
# task not running - expire the buildroot
# TODO - consider recycling hooks here (with strong sanity checks)
self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br)
self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, to_list(self.tasks.keys())))
self.logger.debug(
"Buildroot task: %r, Current tasks: %r" %
(task_id, to_list(self.tasks.keys())))
self.session.host.setBuildRootState(id, st_expired)
continue
if nolocal:
@ -678,7 +704,8 @@ class TaskManager(object):
if not task:
self.logger.warn("%s: invalid task %s" % (desc, br['task_id']))
continue
if (task['state'] == koji.TASK_STATES['FAILED'] and age < self.options.failed_buildroot_lifetime):
if task['state'] == koji.TASK_STATES['FAILED'] and \
age < self.options.failed_buildroot_lifetime:
# XXX - this could be smarter
# keep buildroots for failed tasks around for a little while
self.logger.debug("Keeping failed buildroot: %s" % desc)
@ -1004,7 +1031,9 @@ class TaskManager(object):
self.logger.info('%s (pid %i, taskID %i) is running' % (execname, pid, task_id))
else:
if signaled:
self.logger.info('%s (pid %i, taskID %i) was killed by signal %i' % (execname, pid, task_id, sig))
self.logger.info(
'%s (pid %i, taskID %i) was killed by signal %i' %
(execname, pid, task_id, sig))
else:
self.logger.info('%s (pid %i, taskID %i) exited' % (execname, pid, task_id))
return True
@ -1041,7 +1070,8 @@ class TaskManager(object):
if not os.path.isfile(proc_path):
return None
proc_file = open(proc_path)
procstats = [not field.isdigit() and field or int(field) for field in proc_file.read().split()]
procstats = [not field.isdigit() and field or int(field)
for field in proc_file.read().split()]
proc_file.close()
cmd_path = '/proc/%i/cmdline' % pid
@ -1084,9 +1114,9 @@ class TaskManager(object):
while parents:
for ppid in parents[:]:
for procstats in statsByPPID.get(ppid, []):
# get the /proc entries with ppid as their parent, and append their pid to the list,
# then recheck for their children
# pid is the 0th field, ppid is the 3rd field
# get the /proc entries with ppid as their parent, and append their pid to the
# list, then recheck for their children pid is the 0th field, ppid is the 3rd
# field
pids.append((procstats[0], procstats[1]))
parents.append(procstats[0])
parents.remove(ppid)
@ -1154,7 +1184,8 @@ class TaskManager(object):
availableMB = available // 1024 // 1024
self.logger.debug("disk space available in '%s': %i MB", br_path, availableMB)
if availableMB < self.options.minspace:
self.status = "Insufficient disk space at %s: %i MB, %i MB required" % (br_path, availableMB, self.options.minspace)
self.status = "Insufficient disk space at %s: %i MB, %i MB required" % \
(br_path, availableMB, self.options.minspace)
self.logger.warn(self.status)
return False
return True
@ -1189,7 +1220,9 @@ class TaskManager(object):
return False
if self.task_load > self.hostdata['capacity']:
self.status = "Over capacity"
self.logger.info("Task load (%.2f) exceeds capacity (%.2f)" % (self.task_load, self.hostdata['capacity']))
self.logger.info(
"Task load (%.2f) exceeds capacity (%.2f)" %
(self.task_load, self.hostdata['capacity']))
return False
if len(self.tasks) >= self.options.maxjobs:
# This serves as a backup to the capacity check and prevents
@ -1238,7 +1271,8 @@ class TaskManager(object):
self.logger.warn('Error during host check')
self.logger.warn(''.join(traceback.format_exception(*sys.exc_info())))
if not valid_host:
self.logger.info('Skipping task %s (%s) due to host check', task['id'], task['method'])
self.logger.info(
'Skipping task %s (%s) due to host check', task['id'], task['method'])
return False
data = self.session.host.openTask(task['id'])
if data is None:

View file

@ -110,7 +110,8 @@ class CursorWrapper:
try:
return quote(operation, parameters)
except Exception:
self.logger.exception('Unable to quote query:\n%s\nParameters: %s', operation, parameters)
self.logger.exception(
'Unable to quote query:\n%s\nParameters: %s', operation, parameters)
return "INVALID QUERY"
def preformat(self, sql, params):

View file

@ -154,10 +154,14 @@ LEGACY_SIGNATURES = {
[['tag', 'newer_than', 'nvrs'], None, None, (None, None)],
],
'createLiveMedia': [
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
'opts'],
None, None, (None,)],
],
'createAppliance': [
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
'opts'],
None, None, (None,)],
],
'livecd': [
[['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],
@ -190,7 +194,9 @@ LEGACY_SIGNATURES = {
[['spec_url', 'build_target', 'build', 'task', 'opts'], None, None, (None,)],
],
'createLiveCD': [
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
'opts'],
None, None, (None,)],
],
'appliance': [
[['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],
@ -199,19 +205,25 @@ LEGACY_SIGNATURES = {
[['name', 'version', 'arches', 'target', 'inst_tree', 'opts'], None, None, (None,)],
],
'tagBuild': [
[['tag_id', 'build_id', 'force', 'fromtag', 'ignore_success'], None, None, (False, None, False)],
[['tag_id', 'build_id', 'force', 'fromtag', 'ignore_success'],
None, None, (False, None, False)],
],
'chainmaven': [
[['builds', 'target', 'opts'], None, None, (None,)],
],
'newRepo': [
[['tag', 'event', 'src', 'debuginfo', 'separate_src'], None, None, (None, False, False, False)],
[['tag', 'event', 'src', 'debuginfo', 'separate_src'],
None, None, (None, False, False, False)],
],
'createImage': [
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'inst_tree', 'opts'], None, None, (None,)],
[['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info',
'inst_tree', 'opts'],
None, None, (None,)],
],
'tagNotification': [
[['recipients', 'is_successful', 'tag_info', 'from_info', 'build_info', 'user_info', 'ignore_success', 'failure_msg'], None, None, (None, '')],
[['recipients', 'is_successful', 'tag_info', 'from_info', 'build_info', 'user_info',
'ignore_success', 'failure_msg'],
None, None, (None, '')],
],
'buildArch': [
[['pkg', 'root', 'arch', 'keep_srpm', 'opts'], None, None, (None,)],
@ -253,7 +265,9 @@ LEGACY_SIGNATURES = {
[['options'], None, None, (None,)],
],
'runroot': [
[['root', 'arch', 'command', 'keep', 'packages', 'mounts', 'repo_id', 'skip_setarch', 'weight', 'upload_logs', 'new_chroot'], None, None, (False, [], [], None, False, None, None, False)],
[['root', 'arch', 'command', 'keep', 'packages', 'mounts', 'repo_id', 'skip_setarch',
'weight', 'upload_logs', 'new_chroot'],
None, None, (False, [], [], None, False, None, None, False)],
],
'distRepo': [
[['tag', 'repo_id', 'keys', 'task_opts'], None, None, None],
@ -400,7 +414,9 @@ class BaseTaskHandler(object):
self.session.getTaskResult(task)
checked.add(task)
except (koji.GenericError, six.moves.xmlrpc_client.Fault):
self.logger.info("task %s failed or was canceled, cancelling unfinished tasks" % task)
self.logger.info(
"task %s failed or was canceled, cancelling unfinished tasks" %
task)
self.session.cancelTaskChildren(self.id)
# reraise the original error now, rather than waiting for
# an error in taskWaitResults()
@ -743,8 +759,10 @@ class RestartHostsTask(BaseTaskHandler):
my_tasks = None
for host in hosts:
# note: currently task assignments bypass channel restrictions
task1 = self.subtask('restart', [host], assign=host['id'], label="restart %i" % host['id'])
task2 = self.subtask('restartVerify', [task1, host], assign=host['id'], label="sleep %i" % host['id'])
task1 = self.subtask('restart', [host],
assign=host['id'], label="restart %i" % host['id'])
task2 = self.subtask('restartVerify', [task1, host],
assign=host['id'], label="sleep %i" % host['id'])
subtasks.append(task1)
subtasks.append(task2)
if host['id'] == this_host:
@ -790,8 +808,10 @@ class DependantTask(BaseTaskHandler):
subtasks = []
for task in task_list:
# **((len(task)>2 and task[2]) or {}) expands task[2] into opts if it exists, allows for things like 'priority=15'
task_id = self.session.host.subtask(method=task[0], arglist=task[1], parent=self.id, **((len(task) > 2 and task[2]) or {}))
# **((len(task)>2 and task[2]) or {}) expands task[2] into opts if it exists, allows
# for things like 'priority=15'
task_id = self.session.host.subtask(method=task[0], arglist=task[1], parent=self.id,
**((len(task) > 2 and task[2]) or {}))
if task_id:
subtasks.append(task_id)
if subtasks:

View file

@ -54,7 +54,8 @@ def deprecated(message):
def _changelogDate(cldate):
return time.strftime('%a %b %d %Y', time.strptime(koji.formatTime(cldate), '%Y-%m-%d %H:%M:%S'))
return time.strftime('%a %b %d %Y',
time.strptime(koji.formatTime(cldate), '%Y-%m-%d %H:%M:%S'))
def formatChangelog(entries):
@ -813,7 +814,8 @@ def parse_maven_param(confs, chain=False, scratch=False, section=None):
else:
raise ValueError("Section %s does not exist in: %s" % (section, ', '.join(confs)))
elif len(builds) > 1:
raise ValueError("Multiple sections in: %s, you must specify the section" % ', '.join(confs))
raise ValueError(
"Multiple sections in: %s, you must specify the section" % ', '.join(confs))
return builds

View file

@ -47,7 +47,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
options.append(o)
rel_path = path[len(mount_data['mountpoint']):]
rel_path = rel_path[1:] if rel_path.startswith('/') else rel_path
res = (os.path.join(mount_data['path'], rel_path), path, mount_data['fstype'], ','.join(options))
res = (os.path.join(mount_data['path'], rel_path), path, mount_data['fstype'],
','.join(options))
return res
def _read_config(self):
@ -94,11 +95,15 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
except six.moves.configparser.NoOptionError:
raise koji.GenericError("bad config: missing options in %s section" % section_name)
for path in self.config['default_mounts'] + self.config['safe_roots'] + [x[0] for x in self.config['path_subs']]:
for path in self.config['default_mounts'] + self.config['safe_roots'] + \
[x[0] for x in self.config['path_subs']]:
if not path.startswith('/'):
raise koji.GenericError("bad config: all paths (default_mounts, safe_roots, path_subs) needs to be absolute: %s" % path)
raise koji.GenericError(
"bad config: all paths (default_mounts, safe_roots, path_subs) needs to be "
"absolute: %s" % path)
def handler(self, root, arch, command, keep=False, packages=[], mounts=[], repo_id=None, skip_setarch=False, weight=None, upload_logs=None, new_chroot=None):
def handler(self, root, arch, command, keep=False, packages=[], mounts=[], repo_id=None,
skip_setarch=False, weight=None, upload_logs=None, new_chroot=None):
"""Create a buildroot and run a command (as root) inside of it
Command may be a string or a list.
@ -141,15 +146,19 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
break
else:
# no overlap
raise koji.BuildError("host does not match tag arches: %s (%s)" % (root, tag_arches))
raise koji.BuildError(
"host does not match tag arches: %s (%s)" % (root, tag_arches))
else:
br_arch = arch
if repo_id:
repo_info = self.session.repoInfo(repo_id, strict=True)
if repo_info['tag_name'] != root:
raise koji.BuildError("build tag (%s) does not match repo tag (%s)" % (root, repo_info['tag_name']))
raise koji.BuildError(
"build tag (%s) does not match repo tag (%s)" % (root, repo_info['tag_name']))
if repo_info['state'] not in (koji.REPO_STATES['READY'], koji.REPO_STATES['EXPIRED']):
raise koji.BuildError("repos in the %s state may not be used by runroot" % koji.REPO_STATES[repo_info['state']])
raise koji.BuildError(
"repos in the %s state may not be used by runroot" %
koji.REPO_STATES[repo_info['state']])
else:
repo_info = self.session.getRepo(root)
if not repo_info:
@ -186,12 +195,15 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
cmdstr = ' '.join(["'%s'" % arg.replace("'", r"'\''") for arg in command])
# A nasty hack to put command output into its own file until mock can be
# patched to do something more reasonable than stuff everything into build.log
cmdargs = ['/bin/sh', '-c', "{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; exit ${PIPESTATUS[0]}" % cmdstr]
cmdargs = ['/bin/sh', '-c',
"{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; exit "
"${PIPESTATUS[0]}" % cmdstr]
# always mount /mnt/redhat (read-only)
# always mount /mnt/iso (read-only)
# also need /dev bind mount
self.do_mounts(rootdir, [self._get_path_params(x) for x in self.config['default_mounts']])
self.do_mounts(rootdir,
[self._get_path_params(x) for x in self.config['default_mounts']])
self.do_extra_mounts(rootdir, mounts)
mock_cmd = ['chroot']
if new_chroot:
@ -199,7 +211,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
elif new_chroot is False: # None -> no option added
mock_cmd.append('--old-chroot')
if skip_setarch:
# we can't really skip it, but we can set it to the current one instead of of the chroot one
# we can't really skip it, but we can set it to the current one instead of of the
# chroot one
myarch = platform.uname()[5]
mock_cmd.extend(['--arch', myarch])
mock_cmd.append('--')
@ -279,7 +292,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
cmd = ['mount', '-t', type, '-o', opts, dev, mpoint]
self.logger.info("Mount command: %r" % cmd)
koji.ensuredir(mpoint)
status = log_output(self.session, cmd[0], cmd, logfile, uploadpath, logerror=True, append=True)
status = log_output(self.session, cmd[0], cmd, logfile, uploadpath,
logerror=True, append=True)
if not isSuccess(status):
error = koji.GenericError("Unable to mount %s: %s"
% (mpoint, parseStatus(status, cmd)))
@ -306,7 +320,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
failed = []
self.logger.info("Unmounting (runroot): %s" % mounts)
for dir in mounts:
proc = subprocess.Popen(["umount", "-l", dir], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
proc = subprocess.Popen(["umount", "-l", dir],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if proc.wait() != 0:
output = proc.stdout.read()
output += proc.stderr.read()

View file

@ -22,8 +22,10 @@ def handle_runroot(options, session, args):
usage += _("\n(Specify the --help global option for a list of other help options)")
parser = OptionParser(usage=usage)
parser.disable_interspersed_args()
parser.add_option("-p", "--package", action="append", default=[], help=_("make sure this package is in the chroot"))
parser.add_option("-m", "--mount", action="append", default=[], help=_("mount this directory read-write in the chroot"))
parser.add_option("-p", "--package", action="append", default=[],
help=_("make sure this package is in the chroot"))
parser.add_option("-m", "--mount", action="append", default=[],
help=_("mount this directory read-write in the chroot"))
parser.add_option("--skip-setarch", action="store_true", default=False,
help=_("bypass normal setarch in the chroot"))
parser.add_option("-w", "--weight", type='int', help=_("set task weight"))
@ -39,7 +41,8 @@ def handle_runroot(options, session, args):
parser.add_option("--repo-id", type="int", help=_("ID of the repo to use"))
parser.add_option("--nowait", action="store_false", dest="wait",
default=True, help=_("Do not wait on task"))
parser.add_option("--watch", action="store_true", help=_("Watch task instead of printing runroot.log"))
parser.add_option("--watch", action="store_true",
help=_("Watch task instead of printing runroot.log"))
parser.add_option("--quiet", action="store_true", default=options.quiet,
help=_("Do not print the task information"))

View file

@ -14,7 +14,8 @@ def handle_save_failed_tree(options, session, args):
usage += _("\n(Specify the --help global option for a list of other help options)")
parser = OptionParser(usage=usage)
parser.add_option("-f", "--full", action="store_true", default=False,
help=_("Download whole tree, if not specified, only builddir will be downloaded"))
help=_("Download whole tree, if not specified, "
"only builddir will be downloaded"))
parser.add_option("-t", "--task", action="store_const", dest="mode",
const="task", default="task",
help=_("Treat ID as a task ID (the default)"))
@ -69,4 +70,5 @@ def handle_save_failed_tree(options, session, args):
return
else:
session.logout()
return watch_tasks(session, [task_id], quiet=opts.quiet, poll_interval=options.poll_interval)
return watch_tasks(session, [task_id],
quiet=opts.quiet, poll_interval=options.poll_interval)

View file

@ -40,9 +40,11 @@ def saveFailedTree(buildrootID, full=False, **opts):
taskID = brinfo['task_id']
task_info = kojihub.Task(taskID).getInfo()
if task_info['state'] != koji.TASK_STATES['FAILED']:
raise koji.PreBuildError("Task %s has not failed. Only failed tasks can upload their buildroots." % taskID)
raise koji.PreBuildError(
"Task %s has not failed. Only failed tasks can upload their buildroots." % taskID)
elif allowed_methods != '*' and task_info['method'] not in allowed_methods:
raise koji.PreBuildError("Only %s tasks can upload their buildroots (Task %s is %s)." %
raise koji.PreBuildError(
"Only %s tasks can upload their buildroots (Task %s is %s)." %
(', '.join(allowed_methods), task_info['id'], task_info['method']))
elif task_info["owner"] != context.session.user_id and not context.session.hasPerm('admin'):
raise koji.ActionNotAllowed("Only owner of failed task or 'admin' can run this task.")

View file

@ -364,7 +364,8 @@ def ensure_connection(session):
except requests.exceptions.ConnectionError:
error(_("Error: Unable to connect to server"))
if ret != koji.API_VERSION:
warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
warn(_("WARNING: The server is at API version %d and the client is at %d" %
(ret, koji.API_VERSION)))
def has_krb_creds():
@ -394,7 +395,8 @@ def activate_session(session):
elif has_krb_creds() or (options.keytab and options.principal):
try:
if options.keytab and options.principal:
session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
session.krb_login(principal=options.principal, keytab=options.keytab,
proxyuser=options.runas)
else:
session.krb_login(proxyuser=options.runas)
except krbV.Krb5Error as e:
@ -503,7 +505,8 @@ def handle_trash():
continue
if refs.get('archives'):
if options.debug:
print("[%i/%i] Build has %i archive references: %s" % (i, N, len(refs['archives']), nvr))
print("[%i/%i] Build has %i archive references: %s" %
(i, N, len(refs['archives']), nvr))
# pprint.pprint(refs['archives'])
continue
if refs.get('component_of'):
@ -941,7 +944,8 @@ def handle_prune():
else:
print("Untagging build %s from %s" % (nvr, tagname))
try:
session.untagBuildBypass(taginfo['id'], entry['build_id'], force=bypass)
session.untagBuildBypass(taginfo['id'], entry['build_id'],
force=bypass)
untagged.setdefault(nvr, {})[tagname] = 1
except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
print("Warning: untag operation failed: %s" % e)

View file

@ -145,13 +145,15 @@ def get_options():
parser.add_option("--rules-ignorelist",
help=_("Rules: list of packages to ignore"))
parser.add_option("--rules-excludelist",
help=_("Rules: list of packages to are excluded using ExcludeArch or ExclusiveArch"))
help=_("Rules: list of packages that are excluded using ExcludeArch or "
"ExclusiveArch"))
parser.add_option("--rules-includelist",
help=_("Rules: list of packages to always include"))
parser.add_option("--rules-protectlist",
help=_("Rules: list of package names to never replace"))
parser.add_option("--tag-build", action="store_true", default=False,
help=_("tag successful builds into the tag we are building, default is to not tag"))
help=_("tag successful builds into the tag we are building, default is to "
"not tag"))
parser.add_option("--logfile",
help=_("file where everything gets logged"))
parser.add_option("--arches",
@ -298,14 +300,16 @@ def activate_session(session):
if os.path.isfile(options.auth_cert):
# authenticate using SSL client cert
session.ssl_login(cert=options.auth_cert, serverca=options.serverca, proxyuser=options.runas)
session.ssl_login(cert=options.auth_cert, serverca=options.serverca,
proxyuser=options.runas)
elif options.user:
# authenticate using user/password
session.login()
elif krbV:
try:
if options.keytab and options.principal:
session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
session.krb_login(principal=options.principal, keytab=options.keytab,
proxyuser=options.runas)
else:
session.krb_login(proxyuser=options.runas)
except krbV.Krb5Error as e:
@ -537,12 +541,14 @@ class TrackedBuild(object):
# each buildroot had this as a base package
base.append(name)
if len(tags) > 1:
log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, to_list(tags.keys())))
log("Warning: found multiple buildroot tags for %s: %s" %
(self.nvr, to_list(tags.keys())))
counts = sorted([(n, tag) for tag, n in six.iteritems(tags)])
tag = counts[-1][1]
else:
tag = to_list(tags.keys())[0]
# due bugs in used tools mainline koji instance could store empty buildroot infos for builds
# due to bugs in used tools mainline koji instance could store empty buildroot infos for
# builds
if len(builds) == 0:
self.setState("noroot")
self.deps = builds
@ -655,7 +661,8 @@ class BuildTracker(object):
return -1
def newerBuild(self, build, tag):
# XXX: secondary arches need a policy to say if we have newer build localy it will be the substitute
# XXX: secondary arches need a policy to say if we have newer build locally it will be the
# substitute
localBuilds = session.listTagged(tag, inherit=True, package=str(build.name))
newer = None
parentevr = (str(build.epoch), build.version, build.release)
@ -664,14 +671,16 @@ class BuildTracker(object):
latestevr = (str(b['epoch']), b['version'], b['release'])
newestRPM = self.rpmvercmp(parentevr, latestevr)
if options.debug:
log("remote evr: %s \nlocal evr: %s \nResult: %s" % (parentevr, latestevr, newestRPM))
log("remote evr: %s \nlocal evr: %s \nResult: %s" %
(parentevr, latestevr, newestRPM))
if newestRPM == -1:
newer = b
else:
break
# the local is newer
if newer is not None:
info = session.getBuild("%s-%s-%s" % (str(newer['name']), newer['version'], newer['release']))
info = session.getBuild("%s-%s-%s" %
(str(newer['name']), newer['version'], newer['release']))
if info:
build = LocalBuild(info)
self.substitute_idx[parentnvr] = build
@ -751,7 +760,8 @@ class BuildTracker(object):
if depth > 0:
log("%sDep replaced: %s->%s" % (head, build.nvr, replace))
return build
if options.prefer_new and (depth > 0) and (tag is not None) and not (build.state == "common"):
if options.prefer_new and (depth > 0) and (tag is not None) and \
not (build.state == "common"):
latestBuild = self.newerBuild(build, tag)
if latestBuild is not None:
build.substitute = latestBuild.nvr
@ -875,7 +885,8 @@ class BuildTracker(object):
finally:
os.umask(old_umask)
else:
# TODO - would be possible, using uploadFile directly, to upload without writing locally.
# TODO - would be possible, using uploadFile directly,
# to upload without writing locally.
# for now, though, just use uploadWrapper
koji.ensuredir(options.workpath)
dst = "%s/%s" % (options.workpath, fn)
@ -1053,7 +1064,8 @@ class BuildTracker(object):
session.groupListAdd(taginfo['id'], 'build', force=True)
# using force in case group is blocked. This shouldn't be the case, but...
for pkg_name in drop_pkgs:
# in principal, our tag should not have inheritance, so the remove call is the right thing
# in principle, our tag should not have inheritance,
# so the remove call is the right thing
session.groupPackageListRemove(taginfo['id'], 'build', pkg_name)
for pkg_name in add_pkgs:
session.groupPackageListAdd(taginfo['id'], 'build', pkg_name)
@ -1278,7 +1290,8 @@ def main(args):
logfile = None
if logfile is not None:
log("logging to %s" % filename)
os.write(logfile, "\n\n========================================================================\n")
os.write(logfile,
"\n\n========================================================================\n")
if options.build:
binfo = remote.getBuild(options.build, strict=True)

View file

@ -36,7 +36,8 @@ def clean_reservations(cursor, vacuum, test, age):
def clean_notification_tasks(cursor, vacuum, test, age):
q = " FROM task WHERE method = 'build' AND completion_time < NOW() - '%s days'::interval" % int(age)
q = " FROM task WHERE method = 'build' AND completion_time < NOW() - '%s days'::interval" % \
int(age)
if options.verbose:
cursor.execute("SELECT COUNT(*) " + q)
rows = cursor.fetchall()[0][0]
@ -95,7 +96,8 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
return
# delete standard buildroots
cursor.execute("DELETE FROM standard_buildroot WHERE task_id IN (SELECT task_id FROM temp_scratch_tasks)")
cursor.execute(
"DELETE FROM standard_buildroot WHERE task_id IN (SELECT task_id FROM temp_scratch_tasks)")
# delete tasks finally
cursor.execute("DELETE FROM task WHERE id IN (SELECT task_id FROM temp_scratch_tasks)")
@ -106,7 +108,8 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
def clean_buildroots(cursor, vacuum, test):
q = " FROM buildroot WHERE cg_id IS NULL AND id NOT IN (SELECT buildroot_id FROM standard_buildroot)"
q = " FROM buildroot " \
"WHERE cg_id IS NULL AND id NOT IN (SELECT buildroot_id FROM standard_buildroot)"
if options.verbose:
cursor.execute("SELECT COUNT(*) " + q)
@ -206,7 +209,8 @@ if __name__ == "__main__":
clean_sessions(cursor, options.vacuum, options.test, options.sessions_age)
clean_reservations(cursor, options.vacuum, options.test, options.reservations_age)
if options.tag_notifications:
clean_notification_tasks(cursor, options.vacuum, options.test, age=options.tag_notifications_age)
clean_notification_tasks(cursor, options.vacuum, options.test,
age=options.tag_notifications_age)
if options.scratch:
clean_scratch_tasks(cursor, options.vacuum, options.test, age=options.scratch_age)
if options.buildroots:

View file

@ -269,7 +269,8 @@ class RepoManager(object):
self._local.session = value
def printState(self):
self.logger.debug('Tracking %i repos, %i child processes', len(self.repos), len(self.delete_pids))
self.logger.debug('Tracking %i repos, %i child processes',
len(self.repos), len(self.delete_pids))
for tag_id, task_id in six.iteritems(self.tasks):
self.logger.debug("Tracking task %s for tag %s", task_id, tag_id)
for pid, desc in six.iteritems(self.delete_pids):
@ -348,8 +349,9 @@ class RepoManager(object):
if repo:
# we're already tracking it
if repo.state != data['state']:
self.logger.info('State changed for repo %s: %s -> %s'
% (repo_id, koji.REPO_STATES[repo.state], koji.REPO_STATES[data['state']]))
self.logger.info(
'State changed for repo %s: %s -> %s',
repo_id, koji.REPO_STATES[repo.state], koji.REPO_STATES[data['state']])
repo.state = data['state']
else:
self.logger.info('Found repo %s, state=%s'
@ -357,7 +359,7 @@ class RepoManager(object):
repo = ManagedRepo(self, data)
self.repos[repo_id] = repo
if not getTag(self.session, repo.tag_id) and not repo.expired():
self.logger.info('Tag %d for repo %d disappeared, expiring.' % (repo.tag_id, repo_id))
self.logger.info('Tag %d for repo %d disappeared, expiring.', repo.tag_id, repo_id)
repo.expire()
if len(self.repos) > len(repodata):
# This shouldn't normally happen, but might if someone else calls
@ -491,20 +493,23 @@ class RepoManager(object):
self.logger.debug("did not expect %s; age: %s",
repodir, age)
if age > max_age:
self.logger.info("Removing unexpected directory (no such repo): %s", repodir)
self.logger.info(
"Removing unexpected directory (no such repo): %s", repodir)
if symlink:
os.unlink(repodir)
else:
self.rmtree(repodir)
continue
if rinfo['tag_name'] != tag:
self.logger.warn("Tag name mismatch (rename?): %s vs %s", tag, rinfo['tag_name'])
self.logger.warn(
"Tag name mismatch (rename?): %s vs %s", tag, rinfo['tag_name'])
continue
if rinfo['state'] in (koji.REPO_DELETED, koji.REPO_PROBLEM):
age = time.time() - max(rinfo['create_ts'], dir_ts)
self.logger.debug("potential removal candidate: %s; age: %s" % (repodir, age))
if age > max_age:
logger.info("Removing stray repo (state=%s): %s" % (koji.REPO_STATES[rinfo['state']], repodir))
logger.info("Removing stray repo (state=%s): %s",
koji.REPO_STATES[rinfo['state']], repodir)
if symlink:
os.unlink(repodir)
else:
@ -622,11 +627,12 @@ class RepoManager(object):
tstate = koji.TASK_STATES[tinfo['state']]
tag_id = self.tasks[task_id]['tag_id']
if tstate == 'CLOSED':
self.logger.info("Finished: newRepo task %s for tag %s" % (task_id, tag_id))
self.logger.info("Finished: newRepo task %s for tag %s", task_id, tag_id)
self.recent_tasks[task_id] = time.time()
del self.tasks[task_id]
elif tstate in ('CANCELED', 'FAILED'):
self.logger.info("Problem: newRepo task %s for tag %s is %s" % (task_id, tag_id, tstate))
self.logger.info(
"Problem: newRepo task %s for tag %s is %s", task_id, tag_id, tstate)
self.recent_tasks[task_id] = time.time()
del self.tasks[task_id]
else:
@ -635,7 +641,8 @@ class RepoManager(object):
# also check other newRepo tasks
repo_tasks = self.session.listTasks(opts={'method': 'newRepo',
'state': ([koji.TASK_STATES[s] for s in ('FREE', 'OPEN')])})
'state': ([koji.TASK_STATES[s]
for s in ('FREE', 'OPEN')])})
others = [t for t in repo_tasks if t['id'] not in self.tasks]
for tinfo in others:
if tinfo['id'] not in self.other_tasks:
@ -947,8 +954,8 @@ def get_options():
'max_delete_processes', 'max_repo_tasks_maven',
'delete_batch_size', 'dist_repo_lifetime', 'sleeptime',
'recent_tasks_lifetime')
str_opts = ('topdir', 'server', 'user', 'password', 'logfile', 'principal', 'keytab', 'krbservice',
'cert', 'ca', 'serverca', 'debuginfo_tags',
str_opts = ('topdir', 'server', 'user', 'password', 'logfile', 'principal', 'keytab',
'krbservice', 'cert', 'ca', 'serverca', 'debuginfo_tags',
'source_tags', 'separate_source_tags', 'ignore_tags') # FIXME: remove ca here
bool_opts = ('verbose', 'debug', 'ignore_stray_repos', 'offline_retry',
'krb_rdns', 'krb_canon_host', 'no_ssl_verify')

View file

@ -183,17 +183,20 @@ class WindowsBuild(object):
def checkout(self):
"""Checkout sources, winspec, and patches, and apply patches"""
src_scm = SCM(self.source_url) # noqa: F821
self.source_dir = src_scm.checkout(ensuredir(os.path.join(self.workdir, 'source'))) # noqa: F821
self.source_dir = src_scm.checkout(
ensuredir(os.path.join(self.workdir, 'source'))) # noqa: F821
self.zipDir(self.source_dir, os.path.join(self.workdir, 'sources.zip'))
if 'winspec' in self.task_opts:
spec_scm = SCM(self.task_opts['winspec']) # noqa: F821
self.spec_dir = spec_scm.checkout(ensuredir(os.path.join(self.workdir, 'spec'))) # noqa: F821
self.spec_dir = spec_scm.checkout(
ensuredir(os.path.join(self.workdir, 'spec'))) # noqa: F821
self.zipDir(self.spec_dir, os.path.join(self.workdir, 'spec.zip'))
else:
self.spec_dir = self.source_dir
if 'patches' in self.task_opts:
patch_scm = SCM(self.task_opts['patches']) # noqa: F821
self.patches_dir = patch_scm.checkout(ensuredir(os.path.join(self.workdir, 'patches'))) # noqa: F821
self.patches_dir = patch_scm.checkout(
ensuredir(os.path.join(self.workdir, 'patches'))) # noqa: F821
self.zipDir(self.patches_dir, os.path.join(self.workdir, 'patches.zip'))
self.applyPatches(self.source_dir, self.patches_dir)
self.virusCheck(self.workdir)
@ -207,7 +210,8 @@ class WindowsBuild(object):
raise BuildError('no patches found at %s' % patchdir) # noqa: F821
patches.sort()
for patch in patches:
cmd = ['/bin/patch', '--verbose', '-d', sourcedir, '-p1', '-i', os.path.join(patchdir, patch)]
cmd = ['/bin/patch', '--verbose', '-d', sourcedir, '-p1', '-i',
os.path.join(patchdir, patch)]
run(cmd, fatal=True)
def loadConfig(self):
@ -241,7 +245,8 @@ class WindowsBuild(object):
# absolute paths, or without a path in which case it is searched for
# on the PATH.
if conf.has_option('building', 'preinstalled'):
self.preinstalled.extend([e.strip() for e in conf.get('building', 'preinstalled').split('\n') if e])
self.preinstalled.extend(
[e.strip() for e in conf.get('building', 'preinstalled').split('\n') if e])
# buildrequires and provides are multi-valued (space-separated)
for br in conf.get('building', 'buildrequires').split():
@ -336,7 +341,8 @@ class WindowsBuild(object):
with open(destpath, 'w') as destfile:
offset = 0
while True:
encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576, brtype)
encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576,
brtype)
if not encoded:
break
data = base64.b64decode(encoded)
@ -349,9 +355,11 @@ class WindowsBuild(object):
if 'checksum_type' in fileinfo:
digest = checksum.hexdigest()
if fileinfo['checksum'] != digest:
raise BuildError('checksum validation failed for %s, %s (computed) != %s (provided)' % # noqa: F821
raise BuildError( # noqa: F821
'checksum validation failed for %s, %s (computed) != %s (provided)' %
(destpath, digest, fileinfo['checksum']))
self.logger.info('Retrieved %s (%s bytes, %s: %s)', destpath, offset, checksum_type, digest)
self.logger.info(
'Retrieved %s (%s bytes, %s: %s)', destpath, offset, checksum_type, digest)
else:
self.logger.info('Retrieved %s (%s bytes)', destpath, offset)
@ -409,7 +417,8 @@ class WindowsBuild(object):
def cmdBuild(self):
"""Do the build: run the execute line(s) with cmd.exe"""
tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat', dir='/cygdrive/c/Windows/Temp')
tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat',
dir='/cygdrive/c/Windows/Temp')
script = os.fdopen(tmpfd, 'w')
for attr in ['source_dir', 'spec_dir', 'patches_dir']:
val = getattr(self, attr)
@ -630,7 +639,8 @@ def get_mgmt_server():
# supported by python/cygwin/Windows
task_port = server.getPort(macaddr)
logger.debug('found task-specific port %s', task_port)
return six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (gateway, task_port), allow_none=True)
return six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (gateway, task_port),
allow_none=True)
def get_options():
@ -641,8 +651,10 @@ def get_options():
"""
parser = OptionParser(usage=usage)
parser.add_option('-d', '--debug', action='store_true', help='Log debug statements')
parser.add_option('-i', '--install', action='store_true', help='Install this daemon as a service', default=False)
parser.add_option('-u', '--uninstall', action='store_true', help='Uninstall this daemon if it was installed previously as a service', default=False)
parser.add_option('-i', '--install', action='store_true', default=False,
help='Install this daemon as a service')
parser.add_option('-u', '--uninstall', action='store_true', default=False,
help='Uninstall this daemon if it was installed previously as a service')
(options, args) = parser.parse_args()
return options

View file

@ -269,9 +269,11 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):
def __init__(self, addr, port):
if sys.version_info[:2] <= (2, 4):
six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port), logRequests=False)
six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port),
logRequests=False)
else:
six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port), logRequests=False,
six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port),
logRequests=False,
allow_none=True)
self.logger = logging.getLogger('koji.vm.DaemonXMLRPCServer')
self.socket.settimeout(5)
@ -307,7 +309,8 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):
else:
response = self._dispatch(method, params)
response = (response,)
response = six.moves.xmlrpc_client.dumps(response, methodresponse=1, allow_none=True)
response = six.moves.xmlrpc_client.dumps(response,
methodresponse=1, allow_none=True)
except six.moves.xmlrpc_client.Fault as fault:
response = six.moves.xmlrpc_client.dumps(fault)
except BaseException:
@ -369,7 +372,9 @@ class WinBuildTask(MultiPlatformTask):
task_opts = koji.util.dslice(opts, ['timeout', 'cpus', 'mem', 'static_mac'], strict=False)
task_id = self.session.host.subtask(method='vmExec',
arglist=[name, [source_url, build_tag['name'], subopts], task_opts],
arglist=[name,
[source_url, build_tag['name'], subopts],
task_opts],
label=name[:255],
parent=self.id)
results = self.wait(task_id)[task_id]
@ -379,7 +384,8 @@ class WinBuildTask(MultiPlatformTask):
if not opts.get('scratch'):
build_info = koji.util.dslice(results, ['name', 'version', 'release', 'epoch'])
build_info['package_name'] = build_info['name']
pkg_cfg = self.session.getPackageConfig(dest_tag['id'], build_info['name'], event=event_id)
pkg_cfg = self.session.getPackageConfig(dest_tag['id'], build_info['name'],
event=event_id)
if not opts.get('skip_tag'):
# Make sure package is on the list for this tag
if pkg_cfg is None:
@ -397,8 +403,8 @@ class WinBuildTask(MultiPlatformTask):
rpm_results = None
spec_url = opts.get('specfile')
if spec_url:
rpm_results = self.buildWrapperRPM(spec_url, task_id, target_info, build_info, repo_id,
channel='default')
rpm_results = self.buildWrapperRPM(spec_url, task_id, target_info, build_info,
repo_id, channel='default')
if opts.get('scratch'):
self.session.host.moveWinBuildToScratch(self.id, results, rpm_results)
@ -436,8 +442,8 @@ class VMExecTask(BaseTaskHandler):
def __init__(self, *args, **kw):
super(VMExecTask, self).__init__(*args, **kw)
self.task_manager = six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (self.options.privaddr, self.options.portbase),
allow_none=True)
self.task_manager = six.moves.xmlrpc_client.ServerProxy(
'http://%s:%s/' % (self.options.privaddr, self.options.portbase), allow_none=True)
self.port = None
self.server = None
self.task_info = None
@ -451,12 +457,15 @@ class VMExecTask(BaseTaskHandler):
def mkqcow2(self, clone_name, source_disk, disk_num):
new_name = clone_name + '-disk-' + str(disk_num) + self.QCOW2_EXT
new_path = os.path.join(self.options.imagedir, new_name)
cmd = ['/usr/bin/qemu-img', 'create', '-f', 'qcow2', '-o', 'backing_file=%s' % source_disk, new_path]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
cmd = ['/usr/bin/qemu-img', 'create', '-f', 'qcow2', '-o', 'backing_file=%s' % source_disk,
new_path]
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
output, dummy = proc.communicate()
ret = proc.wait()
if ret:
raise koji.BuildError('unable to create qcow2 image, "%s" returned %s; output was: %s' %
raise koji.BuildError(
'unable to create qcow2 image, "%s" returned %s; output was: %s' %
(' '.join(cmd), ret, output))
vm_user = pwd.getpwnam(self.options.vmuser)
os.chown(new_path, vm_user.pw_uid, vm_user.pw_gid)
@ -708,14 +717,17 @@ class VMExecTask(BaseTaskHandler):
hdr = koji.get_rpm_header(localpath)
payloadhash = koji.hex_string(koji.get_header_field(hdr, 'sigmd5'))
if fileinfo['payloadhash'] != payloadhash:
raise koji.BuildError("Downloaded rpm %s doesn't match checksum (expected: %s, got %s)" % (
os.path.basename(fileinfo['localpath']),
fileinfo['payloadhash'], payloadhash))
raise koji.BuildError(
"Downloaded rpm %s doesn't match checksum (expected: %s, got %s)" %
(os.path.basename(fileinfo['localpath']),
fileinfo['payloadhash'],
payloadhash))
if not koji.util.check_sigmd5(localpath):
raise koji.BuildError("Downloaded rpm %s doesn't match sigmd5" %
os.path.basename(fileinfo['localpath']))
else:
self.verifyChecksum(localpath, fileinfo['checksum'], koji.CHECKSUM_TYPES[fileinfo['checksum_type']])
self.verifyChecksum(localpath, fileinfo['checksum'],
koji.CHECKSUM_TYPES[fileinfo['checksum_type']])
return open(localpath, 'r')
@ -796,7 +808,8 @@ class VMExecTask(BaseTaskHandler):
if sum.hexdigest() == checksum:
return True
else:
raise koji.BuildError('%s checksum validation failed for %s, %s (computed) != %s (provided)' %
raise koji.BuildError(
'%s checksum validation failed for %s, %s (computed) != %s (provided)' %
(algo, local_path, sum.hexdigest(), checksum))
def closeTask(self, output):
@ -879,7 +892,8 @@ class VMExecTask(BaseTaskHandler):
if mins > timeout:
vm.destroy()
self.server.server_close()
raise koji.BuildError('Task did not complete after %.2f minutes, VM %s has been destroyed' %
raise koji.BuildError(
'Task did not complete after %.2f minutes, VM %s has been destroyed' %
(mins, clone_name))
else:
vm.destroy()
@ -913,7 +927,9 @@ class VMTaskManager(TaskManager):
if macaddr in self.macaddrs:
raise koji.PreBuildError('duplicate MAC address: %s' % macaddr)
self.macaddrs[macaddr] = (vm_name, task_id, port)
self.logger.info('registered MAC address %s for VM %s (task ID %s, port %s)', macaddr, vm_name, task_id, port)
self.logger.info(
'registered MAC address %s for VM %s (task ID %s, port %s)',
macaddr, vm_name, task_id, port)
return True
finally:
self.macaddr_lock.release()
@ -964,7 +980,8 @@ class VMTaskManager(TaskManager):
availableMB = available // 1024 // 1024
self.logger.debug('disk space available in %s: %i MB', self.options.imagedir, availableMB)
if availableMB < self.options.minspace:
self.status = 'Insufficient disk space: %i MB, %i MB required' % (availableMB, self.options.minspace)
self.status = 'Insufficient disk space: %i MB, %i MB required' % \
(availableMB, self.options.minspace)
self.logger.warn(self.status)
return False
return True

View file

@ -154,9 +154,12 @@ def _assertLogin(environ):
raise koji.AuthError('could not login %s via SSL' % environ['koji.currentLogin'])
elif options['WebPrincipal']:
if not _krbLogin(environ, environ['koji.session'], environ['koji.currentLogin']):
raise koji.AuthError('could not login using principal: %s' % environ['koji.currentLogin'])
raise koji.AuthError(
'could not login using principal: %s' % environ['koji.currentLogin'])
else:
raise koji.AuthError('KojiWeb is incorrectly configured for authentication, contact the system administrator')
raise koji.AuthError(
'KojiWeb is incorrectly configured for authentication, '
'contact the system administrator')
# verify a valid authToken was passed in to avoid CSRF
authToken = environ['koji.form'].getfirst('a', '')
@ -168,7 +171,8 @@ def _assertLogin(environ):
# their authToken is likely expired
# send them back to the page that brought them here so they
# can re-click the link with a valid authToken
_redirectBack(environ, page=None, forceSSL=(_getBaseURL(environ).startswith('https://')))
_redirectBack(environ, page=None,
forceSSL=(_getBaseURL(environ).startswith('https://')))
assert False # pragma: no cover
else:
_redirect(environ, 'login')
@ -188,7 +192,8 @@ def _getServer(environ):
if environ['koji.currentLogin']:
environ['koji.currentUser'] = session.getUser(environ['koji.currentLogin'])
if not environ['koji.currentUser']:
raise koji.AuthError('could not get user for principal: %s' % environ['koji.currentLogin'])
raise koji.AuthError(
'could not get user for principal: %s' % environ['koji.currentLogin'])
_setUserCookie(environ, environ['koji.currentLogin'])
else:
environ['koji.currentUser'] = None
@ -271,7 +276,9 @@ def login(environ, page=None):
elif options['WebPrincipal']:
principal = environ.get('REMOTE_USER')
if not principal:
raise koji.AuthError('configuration error: mod_auth_gssapi should have performed authentication before presenting this page')
raise koji.AuthError(
'configuration error: mod_auth_gssapi should have performed authentication before '
'presenting this page')
if not _krbLogin(environ, session, principal):
raise koji.AuthError('could not login using principal: %s' % principal)
@ -279,7 +286,9 @@ def login(environ, page=None):
username = principal
authlogger.info('Successful Kerberos authentication by %s', username)
else:
raise koji.AuthError('KojiWeb is incorrectly configured for authentication, contact the system administrator')
raise koji.AuthError(
'KojiWeb is incorrectly configured for authentication, contact the system '
'administrator')
_setUserCookie(environ, username)
# To protect the session cookie, we must forceSSL
@ -322,8 +331,10 @@ def index(environ, packageOrder='package_name', packageStart=None):
values['order'] = '-id'
if user:
kojiweb.util.paginateResults(server, values, 'listPackages', kw={'userID': user['id'], 'with_dups': True},
start=packageStart, dataName='packages', prefix='package', order=packageOrder, pageSize=10)
kojiweb.util.paginateResults(server, values, 'listPackages',
kw={'userID': user['id'], 'with_dups': True},
start=packageStart, dataName='packages', prefix='package',
order=packageOrder, pageSize=10)
notifs = server.getBuildNotifications(user['id'])
notifs.sort(key=lambda x: x['id'])
@ -480,12 +491,16 @@ _TASKS = ['build',
'livemedia',
'createLiveMedia']
# Tasks that can exist without a parent
_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'chainmaven', 'wrapperRPM', 'winbuild', 'newRepo', 'distRepo', 'tagBuild', 'tagNotification', 'waitrepo', 'livecd', 'appliance', 'image', 'livemedia']
_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'chainmaven', 'wrapperRPM',
'winbuild', 'newRepo', 'distRepo', 'tagBuild', 'tagNotification', 'waitrepo',
'livecd', 'appliance', 'image', 'livemedia']
# Tasks that can have children
_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'chainmaven', 'winbuild', 'newRepo', 'distRepo', 'wrapperRPM', 'livecd', 'appliance', 'image', 'livemedia']
_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'chainmaven', 'winbuild', 'newRepo', 'distRepo',
'wrapperRPM', 'livecd', 'appliance', 'image', 'livemedia']
def tasks(environ, owner=None, state='active', view='tree', method='all', hostID=None, channelID=None, start=None, order='-id'):
def tasks(environ, owner=None, state='active', view='tree', method='all', hostID=None,
channelID=None, start=None, order='-id'):
values = _initValues(environ, 'Tasks', 'tasks')
server = _getServer(environ)
@ -539,7 +554,9 @@ def tasks(environ, owner=None, state='active', view='tree', method='all', hostID
opts['parent'] = None
if state == 'active':
opts['state'] = [koji.TASK_STATES['FREE'], koji.TASK_STATES['OPEN'], koji.TASK_STATES['ASSIGNED']]
opts['state'] = [koji.TASK_STATES['FREE'],
koji.TASK_STATES['OPEN'],
koji.TASK_STATES['ASSIGNED']]
elif state == 'all':
pass
else:
@ -830,7 +847,8 @@ def _chunk_file(server, environ, taskID, name, offset, size, volume):
chunk_size = 1048576
if remaining < chunk_size:
chunk_size = remaining
content = server.downloadTaskOutput(taskID, name, offset=offset, size=chunk_size, volume=volume)
content = server.downloadTaskOutput(taskID, name,
offset=offset, size=chunk_size, volume=volume)
if not content:
break
yield content
@ -863,7 +881,8 @@ def tags(environ, start=None, order=None, childID=None):
_PREFIX_CHARS = [chr(char) for char in list(range(48, 58)) + list(range(97, 123))]
def packages(environ, tagID=None, userID=None, order='package_name', start=None, prefix=None, inherited='1'):
def packages(environ, tagID=None, userID=None, order='package_name', start=None, prefix=None,
inherited='1'):
values = _initValues(environ, 'Packages', 'packages')
server = _getServer(environ)
tag = None
@ -890,7 +909,10 @@ def packages(environ, tagID=None, userID=None, order='package_name', start=None,
values['inherited'] = inherited
kojiweb.util.paginateMethod(server, values, 'listPackages',
kw={'tagID': tagID, 'userID': userID, 'prefix': prefix, 'inherited': bool(inherited)},
kw={'tagID': tagID,
'userID': userID,
'prefix': prefix,
'inherited': bool(inherited)},
start=start, dataName='packages', prefix='package', order=order)
values['chars'] = _PREFIX_CHARS
@ -898,7 +920,8 @@ def packages(environ, tagID=None, userID=None, order='package_name', start=None,
return _genHTML(environ, 'packages.chtml')
def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='-completion_time', buildStart=None):
def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='-completion_time',
buildStart=None):
values = _initValues(environ, 'Package Info', 'packages')
server = _getServer(environ)
@ -916,12 +939,14 @@ def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='
kojiweb.util.paginateMethod(server, values, 'listTags', kw={'package': package['id']},
start=tagStart, dataName='tags', prefix='tag', order=tagOrder)
kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'packageID': package['id']},
start=buildStart, dataName='builds', prefix='build', order=buildOrder)
start=buildStart, dataName='builds', prefix='build',
order=buildOrder)
return _genHTML(environ, 'packageinfo.chtml')
def taginfo(environ, tagID, all='0', packageOrder='package_name', packageStart=None, buildOrder='-completion_time', buildStart=None, childID=None):
def taginfo(environ, tagID, all='0', packageOrder='package_name', packageStart=None,
buildOrder='-completion_time', buildStart=None, childID=None):
values = _initValues(environ, 'Tag Info', 'tags')
server = _getServer(environ)
@ -1115,7 +1140,9 @@ def tagparent(environ, tagID, parentID, action):
elif len(inheritanceData) == 1:
values['inheritanceData'] = inheritanceData[0]
else:
raise koji.GenericError('tag %i has tag %i listed as a parent more than once' % (tag['id'], parent['id']))
raise koji.GenericError(
'tag %i has tag %i listed as a parent more than once' %
(tag['id'], parent['id']))
return _genHTML(environ, 'tagparent.chtml')
elif action == 'remove':
@ -1174,7 +1201,8 @@ def buildinfo(environ, buildID):
for archive in archives:
if btype == 'maven':
archive['display'] = archive['filename']
archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build), pathinfo.mavenfile(archive)])
archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build),
pathinfo.mavenfile(archive)])
elif btype == 'win':
archive['display'] = pathinfo.winfile(archive)
archive['dl_url'] = '/'.join([pathinfo.winbuild(build), pathinfo.winfile(archive)])
@ -1210,7 +1238,8 @@ def buildinfo(environ, buildID):
# get the summary, description, and changelogs from the built srpm
# if the build is not yet complete
if build['state'] != koji.BUILD_STATES['COMPLETE']:
srpm_tasks = server.listTasks(opts={'parent': task['id'], 'method': 'buildSRPMFromSCM'})
srpm_tasks = server.listTasks(opts={'parent': task['id'],
'method': 'buildSRPMFromSCM'})
if srpm_tasks:
srpm_task = srpm_tasks[0]
if srpm_task['state'] == koji.TASK_STATES['CLOSED']:
@ -1220,12 +1249,14 @@ def buildinfo(environ, buildID):
srpm_path = output
break
if srpm_path:
srpm_headers = server.getRPMHeaders(taskID=srpm_task['id'], filepath=srpm_path,
srpm_headers = server.getRPMHeaders(taskID=srpm_task['id'],
filepath=srpm_path,
headers=['summary', 'description'])
if srpm_headers:
values['summary'] = koji.fixEncoding(srpm_headers['summary'])
values['description'] = koji.fixEncoding(srpm_headers['description'])
changelog = server.getChangelogEntries(taskID=srpm_task['id'], filepath=srpm_path)
changelog = server.getChangelogEntries(taskID=srpm_task['id'],
filepath=srpm_path)
if changelog:
values['changelog'] = changelog
else:
@ -1276,7 +1307,8 @@ def buildinfo(environ, buildID):
return _genHTML(environ, 'buildinfo.chtml')
def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='-build_id', start=None, prefix=None, inherited='1', latest='1', type=None):
def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='-build_id',
start=None, prefix=None, inherited='1', latest='1', type=None):
values = _initValues(environ, 'Builds', 'builds')
server = _getServer(environ)
@ -1344,13 +1376,18 @@ def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='
if tag:
# don't need to consider 'state' here, since only completed builds would be tagged
kojiweb.util.paginateResults(server, values, 'listTagged', kw={'tag': tag['id'], 'package': (package and package['name'] or None),
kojiweb.util.paginateResults(server, values, 'listTagged',
kw={'tag': tag['id'],
'package': (package and package['name'] or None),
'owner': (user and user['name'] or None),
'type': type,
'inherit': bool(inherited), 'latest': bool(latest), 'prefix': prefix},
'inherit': bool(inherited), 'latest': bool(latest),
'prefix': prefix},
start=start, dataName='builds', prefix='build', order=order)
else:
kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'userID': (user and user['id'] or None), 'packageID': (package and package['id'] or None),
kojiweb.util.paginateMethod(server, values, 'listBuilds',
kw={'userID': (user and user['id'] or None),
'packageID': (package and package['id'] or None),
'type': type,
'state': state, 'prefix': prefix},
start=start, dataName='builds', prefix='build', order=order)
@ -1380,7 +1417,8 @@ def users(environ, order='name', start=None, prefix=None):
return _genHTML(environ, 'users.chtml')
def userinfo(environ, userID, packageOrder='package_name', packageStart=None, buildOrder='-completion_time', buildStart=None):
def userinfo(environ, userID, packageOrder='package_name', packageStart=None,
buildOrder='-completion_time', buildStart=None):
values = _initValues(environ, 'User Info', 'users')
server = _getServer(environ)
@ -1392,18 +1430,23 @@ def userinfo(environ, userID, packageOrder='package_name', packageStart=None, bu
values['user'] = user
values['userID'] = userID
values['taskCount'] = server.listTasks(opts={'owner': user['id'], 'parent': None}, queryOpts={'countOnly': True})
values['taskCount'] = server.listTasks(opts={'owner': user['id'], 'parent': None},
queryOpts={'countOnly': True})
kojiweb.util.paginateResults(server, values, 'listPackages', kw={'userID': user['id'], 'with_dups': True},
start=packageStart, dataName='packages', prefix='package', order=packageOrder, pageSize=10)
kojiweb.util.paginateResults(server, values, 'listPackages',
kw={'userID': user['id'], 'with_dups': True},
start=packageStart, dataName='packages', prefix='package',
order=packageOrder, pageSize=10)
kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'userID': user['id']},
start=buildStart, dataName='builds', prefix='build', order=buildOrder, pageSize=10)
start=buildStart, dataName='builds', prefix='build',
order=buildOrder, pageSize=10)
return _genHTML(environ, 'userinfo.chtml')
def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-id', buildrootStart=None):
def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-id',
buildrootStart=None):
values = _initValues(environ, 'RPM Info', 'builds')
server = _getServer(environ)
@ -1441,8 +1484,11 @@ def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-i
values['summary'] = koji.fixEncoding(headers.get('summary'))
values['description'] = koji.fixEncoding(headers.get('description'))
values['license'] = koji.fixEncoding(headers.get('license'))
buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots', kw={'rpmID': rpm['id']},
start=buildrootStart, dataName='buildroots', prefix='buildroot',
buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots',
kw={'rpmID': rpm['id']},
start=buildrootStart,
dataName='buildroots',
prefix='buildroot',
order=buildrootOrder)
values['rpmID'] = rpmID
@ -1457,7 +1503,8 @@ def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-i
return _genHTML(environ, 'rpminfo.chtml')
def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootOrder='-id', buildrootStart=None):
def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootOrder='-id',
buildrootStart=None):
values = _initValues(environ, 'Archive Info', 'builds')
server = _getServer(environ)
@ -1476,8 +1523,11 @@ def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootO
builtInRoot = server.getBuildroot(archive['buildroot_id'])
kojiweb.util.paginateMethod(server, values, 'listArchiveFiles', args=[archive['id']],
start=fileStart, dataName='files', prefix='file', order=fileOrder)
buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots', kw={'archiveID': archive['id']},
start=buildrootStart, dataName='buildroots', prefix='buildroot',
buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots',
kw={'archiveID': archive['id']},
start=buildrootStart,
dataName='buildroots',
prefix='buildroot',
order=buildrootOrder)
values['title'] = archive['filename'] + ' | Archive Info'
@ -1491,7 +1541,8 @@ def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootO
values['builtInRoot'] = builtInRoot
values['buildroots'] = buildroots
values['show_rpm_components'] = server.listRPMs(imageID=archive['id'], queryOpts={'limit': 1})
values['show_archive_components'] = server.listArchives(imageID=archive['id'], queryOpts={'limit': 1})
values['show_archive_components'] = server.listArchives(imageID=archive['id'],
queryOpts={'limit': 1})
return _genHTML(environ, 'archiveinfo.chtml')
@ -1604,7 +1655,8 @@ def hostinfo(environ, hostID=None, userID=None):
channels = server.listChannels(host['id'])
channels.sort(key=_sortbyname)
buildroots = server.listBuildroots(hostID=host['id'],
state=[state[1] for state in koji.BR_STATES.items() if state[0] != 'EXPIRED'])
state=[state[1] for state in koji.BR_STATES.items()
if state[0] != 'EXPIRED'])
buildroots.sort(key=lambda x: x['create_event_time'], reverse=True)
values['host'] = host
@ -1718,7 +1770,8 @@ def channelinfo(environ, channelID):
return _genHTML(environ, 'channelinfo.chtml')
def buildrootinfo(environ, buildrootID, builtStart=None, builtOrder=None, componentStart=None, componentOrder=None):
def buildrootinfo(environ, buildrootID, builtStart=None, builtOrder=None, componentStart=None,
componentOrder=None):
values = _initValues(environ, 'Buildroot Info', 'hosts')
server = _getServer(environ)
@ -1807,11 +1860,15 @@ def archivelist(environ, type, buildrootID=None, imageID=None, start=None, order
raise koji.GenericError('unknown buildroot ID: %i' % buildrootID)
if type == 'component':
kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'componentBuildrootID': buildroot['id']},
start=start, dataName='archives', prefix='archive', order=order)
kojiweb.util.paginateMethod(server, values, 'listArchives',
kw={'componentBuildrootID': buildroot['id']},
start=start, dataName='archives', prefix='archive',
order=order)
elif type == 'built':
kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'buildrootID': buildroot['id']},
start=start, dataName='archives', prefix='archive', order=order)
kojiweb.util.paginateMethod(server, values, 'listArchives',
kw={'buildrootID': buildroot['id']},
start=start, dataName='archives', prefix='archive',
order=order)
else:
raise koji.GenericError('unrecognized type of archivelist')
elif imageID is not None:
@ -1820,7 +1877,8 @@ def archivelist(environ, type, buildrootID=None, imageID=None, start=None, order
# If/When future image types are supported, add elifs here if needed.
if type == 'image':
kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'imageID': imageID},
start=start, dataName='archives', prefix='archive', order=order)
start=start, dataName='archives', prefix='archive',
order=order)
else:
raise koji.GenericError('unrecognized type of archivelist')
else:
@ -2155,9 +2213,12 @@ def buildsbystatus(environ, days='7'):
server.multicall = True
# use taskID=-1 to filter out builds with a null task_id (imported rather than built in koji)
server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['COMPLETE'], taskID=-1, queryOpts={'countOnly': True})
server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['FAILED'], taskID=-1, queryOpts={'countOnly': True})
server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['CANCELED'], taskID=-1, queryOpts={'countOnly': True})
server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['COMPLETE'], taskID=-1,
queryOpts={'countOnly': True})
server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['FAILED'], taskID=-1,
queryOpts={'countOnly': True})
server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['CANCELED'], taskID=-1,
queryOpts={'countOnly': True})
[[numSucceeded], [numFailed], [numCanceled]] = server.multiCall()
values['numSucceeded'] = numSucceeded
@ -2298,7 +2359,8 @@ def recentbuilds(environ, user=None, tag=None, package=None):
packageObj = server.getPackage(package)
if tagObj is not None:
builds = server.listTagged(tagObj['id'], inherit=True, package=(packageObj and packageObj['name'] or None),
builds = server.listTagged(tagObj['id'], inherit=True,
package=(packageObj and packageObj['name'] or None),
owner=(userObj and userObj['name'] or None))
builds.sort(key=kojiweb.util.sortByKeyFuncNoneGreatest('completion_time'), reverse=True)
builds = builds[:20]
@ -2408,7 +2470,8 @@ def search(environ, start=None, order=None):
values['order'] = order
results = kojiweb.util.paginateMethod(server, values, 'search', args=(terms, type, match),
start=start, dataName='results', prefix='result', order=order)
start=start, dataName='results', prefix='result',
order=order)
if not start and len(results) == 1:
# if we found exactly one result, skip the result list and redirect to the info page
# (you're feeling lucky)

View file

@ -96,7 +96,9 @@ class Dispatcher(object):
['LibPath', 'string', '/usr/share/koji-web/lib'],
['LogLevel', 'string', 'WARNING'],
['LogFormat', 'string', '%(msecs)d [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'],
['LogFormat', 'string',
'%(msecs)d [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s '
'%(name)s: %(message)s'],
['Tasks', 'list', []],
['ToplevelTasks', 'list', []],
@ -227,7 +229,9 @@ class Dispatcher(object):
raise URLNotFound
# parse form args
data = {}
fs = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ.copy(), keep_blank_values=True)
fs = cgi.FieldStorage(fp=environ['wsgi.input'],
environ=environ.copy(),
keep_blank_values=True)
for field in fs.list:
if field.filename:
val = field

View file

@ -65,7 +65,8 @@ def _initValues(environ, title='Build System Info', pageID='summary'):
themeCache.clear()
themeInfo.clear()
themeInfo['name'] = environ['koji.options'].get('KojiTheme', None)
themeInfo['staticdir'] = environ['koji.options'].get('KojiStaticDir', '/usr/share/koji-web/static')
themeInfo['staticdir'] = environ['koji.options'].get('KojiStaticDir',
'/usr/share/koji-web/static')
environ['koji.values'] = values
@ -227,9 +228,11 @@ def sortImage(template, sortKey, orderVar='order'):
"""
orderVal = template.getVar(orderVar)
if orderVal == sortKey:
return '<img src="%s" class="sort" alt="ascending sort"/>' % themePath("images/gray-triangle-up.gif")
return '<img src="%s" class="sort" alt="ascending sort"/>' % \
themePath("images/gray-triangle-up.gif")
elif orderVal == '-' + sortKey:
return '<img src="%s" class="sort" alt="descending sort"/>' % themePath("images/gray-triangle-down.gif")
return '<img src="%s" class="sort" alt="descending sort"/>' % \
themePath("images/gray-triangle-down.gif")
else:
return ''
@ -283,7 +286,8 @@ def sortByKeyFuncNoneGreatest(key):
return internal_key
def paginateList(values, data, start, dataName, prefix=None, order=None, noneGreatest=False, pageSize=50):
def paginateList(values, data, start, dataName, prefix=None, order=None, noneGreatest=False,
pageSize=50):
"""
Slice the 'data' list into one page worth. Start at offset
'start' and limit the total number of pages to pageSize
@ -317,8 +321,9 @@ def paginateList(values, data, start, dataName, prefix=None, order=None, noneGre
def paginateMethod(server, values, methodName, args=None, kw=None,
start=None, dataName=None, prefix=None, order=None, pageSize=50):
"""Paginate the results of the method with the given name when called with the given args and kws.
The method must support the queryOpts keyword parameter, and pagination is done in the database."""
"""Paginate the results of the method with the given name when called with the given args and
kws. The method must support the queryOpts keyword parameter, and pagination is done in the
database."""
if args is None:
args = []
if kw is None:
@ -346,10 +351,10 @@ def paginateMethod(server, values, methodName, args=None, kw=None,
def paginateResults(server, values, methodName, args=None, kw=None,
start=None, dataName=None, prefix=None, order=None, pageSize=50):
"""Paginate the results of the method with the given name when called with the given args and kws.
This method should only be used when then method does not support the queryOpts command (because
the logic used to generate the result list prevents filtering/ordering from being done in the database).
The method must return a list of maps."""
"""Paginate the results of the method with the given name when called with the given args and
kws. This method should only be used when then method does not support the queryOpts command
(because the logic used to generate the result list prevents filtering/ordering from being done
in the database). The method must return a list of maps."""
if args is None:
args = []
if kw is None:
@ -390,7 +395,8 @@ def _populateValues(values, dataName, prefix, data, totalRows, start, count, pag
totalPages = int(totalRows // pageSize)
if totalRows % pageSize > 0:
totalPages += 1
pages = [page for page in range(0, totalPages) if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]
pages = [page for page in range(0, totalPages)
if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]
values[(prefix and prefix + 'Pages') or 'pages'] = pages