flake8: apply E501 with max-line-length=99
parent f1ba6cefd7
commit c5db34a8e1
28 changed files with 1574 additions and 811 deletions
.flake8 (3 changed lines)

@@ -1,6 +1,7 @@
 [flake8]
-select = I,C,F,E1,E2,E3,E4,E502,E7
+select = I,C,F,E
 ignore = E266,E731
+max_line_length = 99
 exclude =
     .git,
     __pycache__,
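
Note: flake8 picks this file up automatically when invoked from the repository root. As a rough sketch (not part of this commit; assumes flake8 3.x and its legacy Python API), the same limits can be applied programmatically:

    # sketch only: mirror the .flake8 settings above via flake8's legacy API
    from flake8.api import legacy as flake8

    style_guide = flake8.get_style_guide(
        select=['I', 'C', 'F', 'E'],
        ignore=['E266', 'E731'],
        max_line_length=99,
    )
    report = style_guide.check_files(['builder/kojid'])
    print('remaining E501 hits:', report.get_statistics('E501'))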
builder/kojid (387 changed lines)
@@ -273,7 +273,8 @@ class BuildRoot(object):
         for k in ('repoid', 'tag_name'):
             if hasattr(self, k):
                 opts[k] = getattr(self, k)
-        for k in ('mockdir', 'topdir', 'topurl', 'topurls', 'packager', 'vendor', 'distribution', 'mockhost', 'yum_proxy', 'rpmbuild_timeout'):
+        for k in ('mockdir', 'topdir', 'topurl', 'topurls', 'packager', 'vendor',
+                  'distribution', 'mockhost', 'yum_proxy', 'rpmbuild_timeout'):
             if hasattr(self.options, k):
                 opts[k] = getattr(self.options, k)
         opts['buildroot_id'] = self.id

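Most hunks in this file use the same fix: Python implicitly joins lines inside (), [] and {}, so an overlong call or literal can wrap at a comma with no backslash continuation. A minimal illustration (not from the commit itself):

    # equivalent to the one-line form, just wrapped inside the parentheses
    keys = ('mockdir', 'topdir', 'topurl', 'topurls', 'packager', 'vendor',
            'distribution', 'mockhost', 'yum_proxy', 'rpmbuild_timeout')
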
@@ -476,12 +477,14 @@ class BuildRoot(object):
             try:
                 stat_info = os.stat(fpath)
                 if not fd or stat_info.st_ino != inode or stat_info.st_size < size:
-                    # either a file we haven't opened before, or mock replaced a file we had open with
-                    # a new file and is writing to it, or truncated the file we're reading,
-                    # but our fd is pointing to the previous location in the old file
+                    # either a file we haven't opened before, or mock replaced a file we
+                    # had open with a new file and is writing to it, or truncated the file
+                    # we're reading, but our fd is pointing to the previous location in the
+                    # old file
                     if fd:
                         self.logger.info('Rereading %s, inode: %s -> %s, size: %s -> %s' %
-                                         (fpath, inode, stat_info.st_ino, size, stat_info.st_size))
+                                         (fpath, inode, stat_info.st_ino, size,
+                                          stat_info.st_size))
                         fd.close()
                     fd = open(fpath, 'rb')
                 logs[fname] = (fd, stat_info.st_ino, stat_info.st_size or size, fpath)

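The comment rewrapped above describes why the log follower re-opens files: mock may replace or truncate a log mid-read. Isolated as a sketch (illustrative only; the real logic lives in BuildRoot and tracks several logs at once):

    import os

    def reopen_if_replaced(fd, fpath, inode, size):
        """Re-open fpath if it was replaced (new inode) or truncated (smaller size)."""
        stat_info = os.stat(fpath)
        if not fd or stat_info.st_ino != inode or stat_info.st_size < size:
            if fd:
                fd.close()  # old fd points into the old or truncated file
            fd = open(fpath, 'rb')
        return fd, stat_info.st_ino, stat_info.st_size or size
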
@@ -596,8 +599,8 @@ class BuildRoot(object):
     def build_srpm(self, specfile, sourcedir, source_cmd):
         self.session.host.setBuildRootState(self.id, 'BUILDING')
         if source_cmd:
-            # call the command defined by source_cmd in the chroot so any required files not stored in
-            # the SCM can be retrieved
+            # call the command defined by source_cmd in the chroot so any required files not stored
+            # in the SCM can be retrieved
             chroot_sourcedir = sourcedir[len(self.rootdir()):]
             args = ['--no-clean', '--unpriv', '--cwd', chroot_sourcedir, '--chroot']
             args.extend(source_cmd)

@@ -633,7 +636,8 @@ class BuildRoot(object):
         self.session.host.updateBuildRootList(self.id, self.getPackageList())
         if rv:
             self.expire()
-            raise koji.BuildError("error building package (arch %s), %s" % (arch, self._mockResult(rv)))
+            raise koji.BuildError("error building package (arch %s), %s" %
+                                  (arch, self._mockResult(rv)))

     def getPackageList(self):
         """Return a list of packages from the buildroot

@@ -688,7 +692,8 @@ class BuildRoot(object):
         maven_files = []
         for repofile in files:
             if koji.util.multi_fnmatch(repofile, self.options.maven_repo_ignore) or \
-               koji.util.multi_fnmatch(os.path.join(relpath, repofile), self.options.maven_repo_ignore):
+               koji.util.multi_fnmatch(os.path.join(relpath, repofile),
+                                       self.options.maven_repo_ignore):
                 continue
             if relpath == '' and repofile in ['scm-sources.zip', 'patches.zip']:
                 # special-case the archives of the sources and patches, since we drop them in

@@ -699,8 +704,10 @@ class BuildRoot(object):
         if maven_files:
             path_comps = relpath.split('/')
             if len(path_comps) < 3:
-                raise koji.BuildrootError('files found in unexpected path in local Maven repo, directory: %s, files: %s' %
-                                          (relpath, ', '.join([f['filename'] for f in maven_files])))
+                raise koji.BuildrootError('files found in unexpected path in local Maven repo,'
+                                          ' directory: %s, files: %s' %
+                                          (relpath,
+                                           ', '.join([f['filename'] for f in maven_files])))
             # extract the Maven info from the path within the local repo
             maven_info = {'version': path_comps[-1],
                           'artifact_id': path_comps[-2],

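When the overlong token is one long string literal, as above, the commit splits it into adjacent literals instead; Python concatenates those at compile time, so the resulting message is unchanged. Illustrative example:

    msg = ('files found in unexpected path in local Maven repo,'
           ' directory: %s, files: %s')
    assert msg == ('files found in unexpected path in local Maven repo, '
                   'directory: %s, files: %s')
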
@@ -712,8 +719,8 @@ class BuildRoot(object):
     def mavenBuild(self, sourcedir, outputdir, repodir,
                    props=None, profiles=None, options=None, goals=None):
         self.session.host.setBuildRootState(self.id, 'BUILDING')
-        cmd = ['--no-clean', '--chroot', '--unpriv', '--cwd', sourcedir[len(self.rootdir()):], '--',
-               '/usr/bin/mvn', '-C']
+        cmd = ['--no-clean', '--chroot', '--unpriv', '--cwd', sourcedir[len(self.rootdir()):],
+               '--', '/usr/bin/mvn', '-C']
         if options:
             cmd.extend(options)
         if profiles:

@@ -734,13 +741,15 @@ class BuildRoot(object):
         ignore_unknown = False
         if rv:
             ignore_unknown = True
-        self.session.host.updateMavenBuildRootList(self.id, self.task_id, self.getMavenPackageList(repodir),
+        self.session.host.updateMavenBuildRootList(self.id, self.task_id,
+                                                   self.getMavenPackageList(repodir),
                                                    ignore=self.getMavenPackageList(outputdir),
                                                    project=True, ignore_unknown=ignore_unknown,
                                                    extra_deps=self.deps)
         if rv:
             self.expire()
-            raise koji.BuildrootError('error building Maven package, %s' % self._mockResult(rv, logfile='root.log'))
+            raise koji.BuildrootError('error building Maven package, %s' %
+                                      self._mockResult(rv, logfile='root.log'))

     def markExternalRPMs(self, rpmlist):
         """Check rpms against pkgorigins and add external repo data to the external ones

@@ -793,7 +802,8 @@ class BuildRoot(object):
             try:
                 repodata = repoMDObject.RepoMD('ourrepo', fo)
             except BaseException:
-                raise koji.BuildError("Unable to parse repomd.xml file for %s" % os.path.join(repodir, self.br_arch))
+                raise koji.BuildError("Unable to parse repomd.xml file for %s" %
+                                      os.path.join(repodir, self.br_arch))
             data = repodata.getData('origin')
             pkgorigins = data.location[1]
         else:

@@ -905,7 +915,8 @@ class ChainBuildTask(BaseTaskHandler):
             # if there are any nvrs to wait on, do so
             if nvrs:
                 task_id = self.session.host.subtask(method='waitrepo',
-                                                    arglist=[target_info['build_tag_name'], None, nvrs],
+                                                    arglist=[
+                                                        target_info['build_tag_name'], None, nvrs],
                                                     label="wait %i" % n_level,
                                                     parent=self.id)
                 self.wait(task_id, all=True, failany=True)

@@ -1078,7 +1089,9 @@ class BuildTask(BaseTaskHandler):
     def getSRPMFromSRPM(self, src, build_tag, repo_id):
         # rebuild srpm in mock, so it gets correct disttag, rpm version, etc.
         task_id = self.session.host.subtask(method='rebuildSRPM',
-                                            arglist=[src, build_tag, {'repo_id': repo_id, 'scratch': self.opts.get('scratch')}],
+                                            arglist=[src, build_tag, {
+                                                'repo_id': repo_id,
+                                                'scratch': self.opts.get('scratch')}],
                                             label='srpm',
                                             parent=self.id)
         # wait for subtask to finish

@@ -1093,7 +1106,9 @@ class BuildTask(BaseTaskHandler):
     def getSRPMFromSCM(self, url, build_tag, repo_id):
         # TODO - allow different ways to get the srpm
         task_id = self.session.host.subtask(method='buildSRPMFromSCM',
-                                            arglist=[url, build_tag, {'repo_id': repo_id, 'scratch': self.opts.get('scratch')}],
+                                            arglist=[url, build_tag, {
+                                                'repo_id': repo_id,
+                                                'scratch': self.opts.get('scratch')}],
                                             label='srpm',
                                             parent=self.id)
         # wait for subtask to finish

@@ -1211,7 +1226,8 @@ class BuildTask(BaseTaskHandler):
         for arch in archlist:
             taskarch = self.choose_taskarch(arch, srpm, build_tag)
             subtasks[arch] = self.session.host.subtask(method='buildArch',
-                                                       arglist=[srpm, build_tag, arch, keep_srpm, {'repo_id': repo_id}],
+                                                       arglist=[srpm, build_tag, arch,
+                                                                keep_srpm, {'repo_id': repo_id}],
                                                        label=arch,
                                                        parent=self.id,
                                                        arch=taskarch)

@@ -1430,7 +1446,8 @@ class BuildArchTask(BaseBuildTask):
         if len(srpm_files) == 0:
             raise koji.BuildError("no srpm files found for task %i" % self.id)
         if len(srpm_files) > 1:
-            raise koji.BuildError("multiple srpm files found for task %i: %s" % (self.id, srpm_files))
+            raise koji.BuildError("multiple srpm files found for task %i: %s" %
+                                  (self.id, srpm_files))

         # Run sanity checks. Any failures will throw a BuildError
         self.srpm_sanity_checks("%s/%s" % (resultdir, srpm_files[0]))

@@ -1519,12 +1536,14 @@ class MavenTask(MultiPlatformTask):
             rpm_results = None
             spec_url = self.opts.get('specfile')
             if spec_url:
-                rpm_results = self.buildWrapperRPM(spec_url, self.build_task_id, target_info, build_info, repo_id)
+                rpm_results = self.buildWrapperRPM(
+                    spec_url, self.build_task_id, target_info, build_info, repo_id)

             if self.opts.get('scratch'):
                 self.session.host.moveMavenBuildToScratch(self.id, maven_results, rpm_results)
             else:
-                self.session.host.completeMavenBuild(self.id, self.build_id, maven_results, rpm_results)
+                self.session.host.completeMavenBuild(
+                    self.id, self.build_id, maven_results, rpm_results)
         except (SystemExit, ServerExit, KeyboardInterrupt):
             # we do not trap these
             raise

@@ -1537,7 +1556,8 @@ class MavenTask(MultiPlatformTask):

         if not self.opts.get('scratch') and not self.opts.get('skip_tag'):
             tag_task_id = self.session.host.subtask(method='tagBuild',
-                                                    arglist=[dest_tag['id'], self.build_id, False, None, True],
+                                                    arglist=[dest_tag['id'],
+                                                             self.build_id, False, None, True],
                                                     label='tag',
                                                     parent=self.id,
                                                     arch='noarch')

@@ -1590,7 +1610,8 @@ class BuildMavenTask(BaseBuildTask):
         repo_info = self.session.repoInfo(repo_id, strict=True)
         event_id = repo_info['create_event']

-        br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
+        br_arch = self.find_arch('noarch', self.session.host.getHost(
+        ), self.session.getBuildConfig(build_tag['id'], event=event_id))
         maven_opts = opts.get('jvm_options')
         if not maven_opts:
             maven_opts = []

@@ -1598,7 +1619,8 @@ class BuildMavenTask(BaseBuildTask):
             if opt.startswith('-Xmx'):
                 break
         else:
-            # Give the JVM 2G to work with by default, if the build isn't specifying its own max. memory
+            # Give the JVM 2G to work with by default, if the build isn't specifying
+            # its own max. memory
             maven_opts.append('-Xmx2048m')
         buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id,
                               install_group='maven-build', setup_dns=True, repo_id=repo_id,

@@ -1615,7 +1637,8 @@ class BuildMavenTask(BaseBuildTask):
         self.session.host.updateBuildRootList(buildroot.id, buildroot.getPackageList())
         if rv:
             buildroot.expire()
-            raise koji.BuildrootError('error installing packages, %s' % buildroot._mockResult(rv, logfile='mock_output.log'))
+            raise koji.BuildrootError('error installing packages, %s' %
+                                      buildroot._mockResult(rv, logfile='mock_output.log'))

         # existence of symlink should be sufficient
         if not os.path.lexists('%s/usr/bin/mvn' % buildroot.rootdir()):

@@ -1635,7 +1658,8 @@ class BuildMavenTask(BaseBuildTask):
         logfile = self.workdir + '/checkout.log'
         uploadpath = self.getUploadDir()

-        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
+        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
+                           build_tag=build_tag, scratch=opts.get('scratch'))
         # Check out sources from the SCM
         sourcedir = scm.checkout(scmdir, self.session, uploadpath, logfile)
         self.run_callbacks("postSCMCheckout",

@@ -1652,7 +1676,8 @@ class BuildMavenTask(BaseBuildTask):
             patchlog = self.workdir + '/patches.log'
             patch_scm = SCM(self.opts.get('patches'))
             patch_scm.assert_allowed(self.options.allowed_scms)
-            self.run_callbacks('preSCMCheckout', scminfo=patch_scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
+            self.run_callbacks('preSCMCheckout', scminfo=patch_scm.get_info(),
+                               build_tag=build_tag, scratch=opts.get('scratch'))
             # never try to check out a common/ dir when checking out patches
             patch_scm.use_common = False
             patchcheckoutdir = patch_scm.checkout(patchdir, self.session, uploadpath, patchlog)

@@ -1673,10 +1698,14 @@ class BuildMavenTask(BaseBuildTask):
                 raise koji.BuildError('no patches found at %s' % self.opts.get('patches'))
             patches.sort()
             for patch in patches:
-                cmd = ['/usr/bin/patch', '--verbose', '--no-backup-if-mismatch', '-d', sourcedir, '-p1', '-i', os.path.join(patchcheckoutdir, patch)]
-                ret = log_output(self.session, cmd[0], cmd, patchlog, uploadpath, logerror=1, append=1)
+                cmd = ['/usr/bin/patch', '--verbose', '--no-backup-if-mismatch', '-d',
+                       sourcedir, '-p1', '-i', os.path.join(patchcheckoutdir, patch)]
+                ret = log_output(self.session, cmd[0], cmd,
+                                 patchlog, uploadpath, logerror=1, append=1)
                 if ret:
-                    raise koji.BuildError('error applying patches from %s, see patches.log for details' % self.opts.get('patches'))
+                    raise koji.BuildError(
+                        'error applying patches from %s, see patches.log for details' %
+                        self.opts.get('patches'))

         # Set ownership of the entire source tree to the mock user
         uid = pwd.getpwnam(self.options.mockuser)[2]

@@ -1834,7 +1863,8 @@ class WrapperRPMTask(BaseBuildTask):
                 artifact_name = os.path.basename(artifact_path)
                 base, ext = os.path.splitext(artifact_name)
                 if ext == '.log':
-                    # Exclude log files for consistency with the output of listArchives() used below
+                    # Exclude log files for consistency with the output of listArchives() used
+                    # below
                     continue
                 relpath = os.path.join(self.pathinfo.task(task['id']), artifact_path)[1:]
                 for volume in artifact_data[artifact_path]:

@@ -1847,10 +1877,11 @@ class WrapperRPMTask(BaseBuildTask):
             # called as a top-level task to create wrapper rpms for an existing build
             # verify that the build is complete
             if not build['state'] == koji.BUILD_STATES['COMPLETE']:
-                raise koji.BuildError('cannot call wrapperRPM on a build that did not complete successfully')
+                raise koji.BuildError(
+                    'cannot call wrapperRPM on a build that did not complete successfully')

-            # get the list of files from the build instead of the task, because the task output directory may
-            # have already been cleaned up
+            # get the list of files from the build instead of the task,
+            # because the task output directory may have already been cleaned up
             if maven_info:
                 build_artifacts = self.session.listArchives(buildID=build['id'], type='maven')
             elif win_info:

@@ -1888,7 +1919,8 @@ class WrapperRPMTask(BaseBuildTask):
             assert False  # pragma: no cover

         if not artifacts:
-            raise koji.BuildError('no output found for %s' % (task and koji.taskLabel(task) or koji.buildLabel(build)))
+            raise koji.BuildError('no output found for %s' % (
+                task and koji.taskLabel(task) or koji.buildLabel(build)))

         values['artifacts'] = artifacts
         values['all_artifacts'] = all_artifacts

@@ -1932,9 +1964,11 @@ class WrapperRPMTask(BaseBuildTask):
         repo_info = self.session.repoInfo(repo_id, strict=True)
         event_id = repo_info['create_event']
         build_tag = self.session.getTag(build_target['build_tag'], strict=True)
-        br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
+        br_arch = self.find_arch('noarch', self.session.host.getHost(
+        ), self.session.getBuildConfig(build_tag['id'], event=event_id))

-        buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, install_group='wrapper-rpm-build', repo_id=repo_id)
+        buildroot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id,
+                              install_group='wrapper-rpm-build', repo_id=repo_id)
         buildroot.workdir = self.workdir
         self.logger.debug("Initializing buildroot")
         buildroot.init()

@@ -1942,7 +1976,8 @@ class WrapperRPMTask(BaseBuildTask):
             logfile = os.path.join(self.workdir, 'checkout.log')
             scmdir = buildroot.tmpdir() + '/scmroot'
             koji.ensuredir(scmdir)
-            self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
+            self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
+                               build_tag=build_tag, scratch=opts.get('scratch'))
             specdir = scm.checkout(scmdir, self.session, self.getUploadDir(), logfile)
             self.run_callbacks("postSCMCheckout",
                                scminfo=scm.get_info(),

@@ -2002,7 +2037,8 @@ class WrapperRPMTask(BaseBuildTask):
         if len(srpms) == 0:
             raise koji.BuildError('no srpms found in %s' % buildroot.resultdir())
         elif len(srpms) > 1:
-            raise koji.BuildError('multiple srpms found in %s: %s' % (buildroot.resultdir(), ', '.join(srpms)))
+            raise koji.BuildError('multiple srpms found in %s: %s' %
+                                  (buildroot.resultdir(), ', '.join(srpms)))
         else:
             srpm = srpms[0]

@@ -2092,13 +2128,15 @@ class WrapperRPMTask(BaseBuildTask):
         relrpms = [uploaddir + '/' + r for r in rpms]
         rellogs = [uploaddir + '/' + l for l in logs]
         if opts.get('scratch'):
-            self.session.host.moveBuildToScratch(self.id, relsrpm, relrpms, {'noarch': rellogs})
+            self.session.host.moveBuildToScratch(
+                self.id, relsrpm, relrpms, {'noarch': rellogs})
         else:
             if opts.get('create_build'):
                 brmap = dict.fromkeys([relsrpm] + relrpms, buildroot.id)
                 try:
                     self.session.host.completeBuild(self.id, self.new_build_id,
-                                                    relsrpm, relrpms, brmap, {'noarch': rellogs})
+                                                    relsrpm, relrpms, brmap,
+                                                    {'noarch': rellogs})
                 except (SystemExit, ServerExit, KeyboardInterrupt):
                     raise
                 except BaseException:

@@ -2107,8 +2145,10 @@ class WrapperRPMTask(BaseBuildTask):
                 if not opts.get('skip_tag'):
                     tag_task_id = self.session.host.subtask(method='tagBuild',
                                                             arglist=[build_target['dest_tag'],
-                                                                     self.new_build_id, False, None, True],
-                                                            label='tag', parent=self.id, arch='noarch')
+                                                                     self.new_build_id, False,
+                                                                     None, True],
+                                                            label='tag', parent=self.id,
+                                                            arch='noarch')
                     self.wait(tag_task_id)
             else:
                 self.session.host.importWrapperRPMs(self.id, build['id'], results)

@@ -2173,14 +2213,16 @@ class ChainMavenTask(MultiPlatformTask):
                 if not opts.get('force'):
                     # check for a duplicate build (a build performed with the
                     # same scmurl and options)
-                    dup_build = self.get_duplicate_build(dest_tag['name'], package, params, task_opts)
+                    dup_build = self.get_duplicate_build(
+                        dest_tag['name'], package, params, task_opts)
                     # if we find one, mark the package as built and remove it from todo
                     if dup_build:
                         self.done[package] = dup_build['nvr']
                         for deps in todo.values():
                             deps.discard(package)
                         del todo[package]
-                        self.results.append('%s previously built from %s' % (dup_build['nvr'], task_url))
+                        self.results.append('%s previously built from %s' %
+                                            (dup_build['nvr'], task_url))
                         continue
                 task_opts.update(dslice(opts, ['skip_tag', 'scratch'], strict=False))

@@ -2235,7 +2277,8 @@ class ChainMavenTask(MultiPlatformTask):
                         self.done[package] = child['id']
                         break
                 else:
-                    raise koji.BuildError('could not find buildMaven subtask of %s' % task_id)
+                    raise koji.BuildError(
+                        'could not find buildMaven subtask of %s' % task_id)
                 self.results.append('%s built from %s by task %s' %
                                     (package, task_url, task_id))
             else:

@@ -2350,10 +2393,13 @@ class TagBuildTask(BaseTaskHandler):

             # XXX - add more post tests
             self.session.host.tagBuild(self.id, tag_id, build_id, force=force, fromtag=fromtag)
-            self.session.host.tagNotification(True, tag_id, fromtag, build_id, user_id, ignore_success)
+            self.session.host.tagNotification(
+                True, tag_id, fromtag, build_id, user_id, ignore_success)
         except Exception as e:
             exctype, value = sys.exc_info()[:2]
-            self.session.host.tagNotification(False, tag_id, fromtag, build_id, user_id, ignore_success, "%s: %s" % (exctype, value))
+            self.session.host.tagNotification(
+                False, tag_id, fromtag, build_id, user_id, ignore_success, "%s: %s" %
+                (exctype, value))
             raise e

@@ -2367,11 +2413,14 @@ class BuildImageTask(MultiPlatformTask):
         if not opts.get('skip_tag') and not opts.get('scratch'):
             # Make sure package is on the list for this tag
             if pkg_cfg is None:
-                raise koji.BuildError("package (image) %s not in list for tag %s" % (name, target_info['dest_tag_name']))
+                raise koji.BuildError("package (image) %s not in list for tag %s" %
+                                      (name, target_info['dest_tag_name']))
             elif pkg_cfg['blocked']:
-                raise koji.BuildError("package (image) %s is blocked for tag %s" % (name, target_info['dest_tag_name']))
+                raise koji.BuildError("package (image) %s is blocked for tag %s" %
+                                      (name, target_info['dest_tag_name']))
         return self.session.host.initImageBuild(self.id,
-                                                dict(name=name, version=version, release=release, epoch=0))
+                                                dict(name=name, version=version, release=release,
+                                                     epoch=0))

     def getRelease(self, name, ver):
         """return the next available release number for an N-V"""

@@ -2399,7 +2448,9 @@ class BuildBaseImageTask(BuildImageTask):
             opts = {}

         if not ozif_enabled:
-            self.logger.error("ImageFactory features require the following dependencies: pykickstart, imagefactory, oz and possibly python-hashlib")
+            self.logger.error(
+                "ImageFactory features require the following dependencies: pykickstart, "
+                "imagefactory, oz and possibly python-hashlib")
             raise koji.ApplianceError('ImageFactory functions not available')

         # build image(s)

@@ -2430,7 +2481,8 @@ class BuildBaseImageTask(BuildImageTask):
                 canfail.append(subtasks[arch])
         self.logger.debug("Got image subtasks: %r" % (subtasks))
         self.logger.debug("Waiting on image subtasks (%s can fail)..." % canfail)
-        results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail)
+        results = self.wait(to_list(subtasks.values()), all=True,
+                            failany=True, canfail=canfail)

         # if everything failed, fail even if all subtasks are in canfail
         self.logger.debug('subtask results: %r', results)

@@ -2498,7 +2550,8 @@ class BuildBaseImageTask(BuildImageTask):
             # tag it
             if not opts.get('scratch') and not opts.get('skip_tag'):
                 tag_task_id = self.session.host.subtask(method='tagBuild',
-                                                        arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
+                                                        arglist=[target_info['dest_tag'],
+                                                                 bld_info['id'], False, None, True],
                                                         label='tag', parent=self.id, arch='noarch')
                 self.wait(tag_task_id)

@@ -2506,7 +2559,8 @@ class BuildBaseImageTask(BuildImageTask):
         report = ''
         if opts.get('scratch'):
             respath = ', '.join(
-                [os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
+                [os.path.join(koji.pathinfo.work(),
+                              koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
             report += 'Scratch '
         else:
             respath = koji.pathinfo.imagebuild(bld_info)

@@ -2534,7 +2588,9 @@ class BuildApplianceTask(BuildImageTask):
             opts = {}

         if not image_enabled:
-            self.logger.error("Appliance features require the following dependencies: pykickstart, and possibly python-hashlib")
+            self.logger.error(
+                "Appliance features require the following dependencies: "
+                "pykickstart, and possibly python-hashlib")
             raise koji.ApplianceError('Appliance functions not available')

         # build image

@@ -2547,9 +2603,11 @@ class BuildApplianceTask(BuildImageTask):
         bld_info = self.initImageBuild(name, version, release,
                                        target_info, opts)
         create_task_id = self.session.host.subtask(method='createAppliance',
-                                                   arglist=[name, version, release, arch, target_info, build_tag,
+                                                   arglist=[name, version, release, arch,
+                                                            target_info, build_tag,
                                                             repo_info, ksfile, opts],
-                                                   label='appliance', parent=self.id, arch=arch)
+                                                   label='appliance', parent=self.id,
+                                                   arch=arch)
         results = self.wait(create_task_id)
         self.logger.info('image build task (%s) completed' % create_task_id)
         self.logger.info('results: %s' % results)

@@ -2583,7 +2641,8 @@ class BuildApplianceTask(BuildImageTask):
         # tag it
         if not opts.get('scratch') and not opts.get('skip_tag'):
             tag_task_id = self.session.host.subtask(method='tagBuild',
-                                                    arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
+                                                    arglist=[target_info['dest_tag'],
+                                                             bld_info['id'], False, None, True],
                                                     label='tag', parent=self.id, arch='noarch')
             self.wait(tag_task_id)

@@ -2632,7 +2691,8 @@ class BuildLiveCDTask(BuildImageTask):
         bld_info = self.initImageBuild(name, version, release,
                                        target_info, opts)
         create_task_id = self.session.host.subtask(method='createLiveCD',
-                                                   arglist=[name, version, release, arch, target_info, build_tag,
+                                                   arglist=[name, version, release, arch,
+                                                            target_info, build_tag,
                                                             repo_info, ksfile, opts],
                                                    label='livecd', parent=self.id, arch=arch)
         results = self.wait(create_task_id)

@@ -2668,7 +2728,8 @@ class BuildLiveCDTask(BuildImageTask):
         # tag it if necessary
         if not opts.get('scratch') and not opts.get('skip_tag'):
             tag_task_id = self.session.host.subtask(method='tagBuild',
-                                                    arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
+                                                    arglist=[target_info['dest_tag'],
+                                                             bld_info['id'], False, None, True],
                                                     label='tag', parent=self.id, arch='noarch')
             self.wait(tag_task_id)

@@ -2726,8 +2787,8 @@ class BuildLiveMediaTask(BuildImageTask):
         canfail = []
         for arch in arches:
             subtasks[arch] = self.subtask('createLiveMedia',
-                                          [name, version, release, arch, target_info, build_tag,
-                                           repo_info, ksfile, opts],
+                                          [name, version, release, arch, target_info,
+                                           build_tag, repo_info, ksfile, opts],
                                           label='livemedia %s' % arch, arch=arch)
             if arch in opts.get('optional_arches', []):
                 canfail.append(subtasks[arch])

@@ -2735,7 +2796,8 @@ class BuildLiveMediaTask(BuildImageTask):

         self.logger.debug("Got image subtasks: %r", subtasks)
         self.logger.debug("Waiting on livemedia subtasks...")
-        results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail)
+        results = self.wait(to_list(subtasks.values()), all=True,
+                            failany=True, canfail=canfail)

         # if everything failed, fail even if all subtasks are in canfail
         self.logger.debug('subtask results: %r', results)

@@ -2805,14 +2867,16 @@ class BuildLiveMediaTask(BuildImageTask):
         # tag it if necessary
         if not opts.get('scratch') and not opts.get('skip_tag'):
             tag_task_id = self.session.host.subtask(method='tagBuild',
-                                                    arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
+                                                    arglist=[target_info['dest_tag'],
+                                                             bld_info['id'], False, None, True],
                                                     label='tag', parent=self.id, arch='noarch')
             self.wait(tag_task_id)

         # report the results
         if opts.get('scratch'):
             respath = ', '.join(
-                [os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
+                [os.path.join(koji.pathinfo.work(),
+                              koji.pathinfo.taskrelpath(tid)) for tid in subtasks.values()])
             report = 'Scratch '
         else:
             respath = koji.pathinfo.imagebuild(bld_info)

@@ -2881,7 +2945,8 @@ class ImageTask(BaseTaskHandler):
         scm = SCM(self.opts['ksurl'])
         scm.assert_allowed(self.options.allowed_scms)
         logfile = os.path.join(self.workdir, 'checkout.log')
-        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch'))
+        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
+                           build_tag=build_tag, scratch=self.opts.get('scratch'))
         scmsrcdir = scm.checkout(scmdir, self.session, self.getUploadDir(), logfile)
         self.run_callbacks("postSCMCheckout",
                            scminfo=scm.get_info(),

@@ -2953,7 +3018,8 @@ class ImageTask(BaseTaskHandler):
                 user_repos = user_repos.split(',')
                 index = 0
                 for user_repo in user_repos:
-                    self.ks.handler.repo.repoList.append(repo_class(baseurl=user_repo, name='koji-override-%i' % index))
+                    self.ks.handler.repo.repoList.append(repo_class(
+                        baseurl=user_repo, name='koji-override-%i' % index))
                     index += 1
         else:
             path_info = koji.PathInfo(topdir=self.options.topurl)

@@ -2961,7 +3027,9 @@ class ImageTask(BaseTaskHandler):
                                            target_info['build_tag_name'])
             baseurl = '%s/%s' % (repopath, arch)
             self.logger.debug('BASEURL: %s' % baseurl)
-            self.ks.handler.repo.repoList.append(repo_class(baseurl=baseurl, name='koji-%s-%i' % (target_info['build_tag_name'], repo_info['id'])))
+            self.ks.handler.repo.repoList.append(repo_class(
+                baseurl=baseurl, name='koji-%s-%i' % (target_info['build_tag_name'],
+                                                      repo_info['id'])))
         # inject url if provided
         if opts.get('install_tree_url'):
             self.ks.handler.url(url=opts['install_tree_url'])

@@ -3065,7 +3133,8 @@ class ApplianceTask(ImageTask):
                 return part.disk
         raise koji.ApplianceError('kickstart lacks a "/" mountpoint')

-    def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None):
+    def handler(self, name, version, release, arch, target_info,
+                build_tag, repo_info, ksfile, opts=None):

         if opts is None:
             opts = {}

@@ -3099,7 +3168,8 @@ class ApplianceTask(ImageTask):
         self.uploadFile(os.path.join(broot.rootdir(), app_log[1:]))
         if rv:
             raise koji.ApplianceError(
-                "Could not create appliance: %s" % parseStatus(rv, 'appliance-creator') + "; see root.log or appliance.log for more information")
+                "Could not create appliance: %s" % parseStatus(rv, 'appliance-creator') +
+                "; see root.log or appliance.log for more information")

         # Find the results
         results = []

@@ -3221,7 +3291,8 @@ class LiveCDTask(ImageTask):

         return manifest

-    def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None):
+    def handler(self, name, version, release, arch, target_info,
+                build_tag, repo_info, ksfile, opts=None):

         if opts is None:
             opts = {}

@@ -3251,7 +3322,8 @@ class LiveCDTask(ImageTask):
         self.uploadFile(os.path.join(broot.rootdir(), livecd_log[1:]))
         if rv:
             raise koji.LiveCDError(
-                'Could not create LiveCD: %s' % parseStatus(rv, 'livecd-creator') + '; see root.log or livecd.log for more information')
+                'Could not create LiveCD: %s' % parseStatus(rv, 'livecd-creator') +
+                '; see root.log or livecd.log for more information')

         # Find the resultant iso
         # The cwd of the livecd-creator process is tmpdir() in the chroot, so

@@ -3263,7 +3335,8 @@ class LiveCDTask(ImageTask):
                 if not isofile:
                     isofile = afile
                 else:
-                    raise koji.LiveCDError('multiple .iso files found: %s and %s' % (isofile, afile))
+                    raise koji.LiveCDError(
+                        'multiple .iso files found: %s and %s' % (isofile, afile))
         if not isofile:
             raise koji.LiveCDError('could not find iso file in chroot')
         isosrc = os.path.join(broot.tmpdir(), isofile)

@@ -3407,7 +3480,8 @@ class LiveMediaTask(ImageTask):

         return manifest

-    def handler(self, name, version, release, arch, target_info, build_tag, repo_info, ksfile, opts=None):
+    def handler(self, name, version, release, arch, target_info,
+                build_tag, repo_info, ksfile, opts=None):

         if opts is None:
             opts = {}

@@ -3485,7 +3559,8 @@ class LiveMediaTask(ImageTask):

         if rv:
             raise koji.LiveMediaError(
-                'Could not create LiveMedia: %s' % parseStatus(rv, 'livemedia-creator') + '; see root.log or livemedia-out.log for more information')
+                'Could not create LiveMedia: %s' % parseStatus(rv, 'livemedia-creator') +
+                '; see root.log or livemedia-out.log for more information')

         # Find the resultant iso
         # The cwd of the livemedia-creator process is broot.tmpdir() in the chroot, so

@@ -3498,7 +3573,8 @@ class LiveMediaTask(ImageTask):
             if not isofile:
                 isofile = afile
             else:
-                raise koji.LiveMediaError('multiple .iso files found: %s and %s' % (isofile, afile))
+                raise koji.LiveMediaError(
+                    'multiple .iso files found: %s and %s' % (isofile, afile))
         if not isofile:
             raise koji.LiveMediaError('could not find iso file in chroot')
         isosrc = os.path.join(rootresultsdir, isofile)

@@ -3561,7 +3637,8 @@ class OzImageTask(BaseTaskHandler):
         scm = SCM(self.opts['ksurl'])
         scm.assert_allowed(self.options.allowed_scms)
         logfile = os.path.join(self.workdir, 'checkout-%s.log' % self.arch)
-        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch'))
+        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
+                           build_tag=build_tag, scratch=self.opts.get('scratch'))
         scmsrcdir = scm.checkout(self.workdir, self.session,
                                  self.getUploadDir(), logfile)
         self.run_callbacks("postSCMCheckout",

@@ -3732,10 +3809,12 @@ class OzImageTask(BaseTaskHandler):
             <url>%s</url>
         </install>
         """ % (name, distname, distver, arch, inst_tree)
-        template += """<icicle>
-            <extra_command>rpm -qa --qf '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},%{BUILDTIME}\n'</extra_command>
-        </icicle>
-        """
+        template += ("<icicle>\n"
+                     "  <extra_command>rpm -qa --qf"
+                     " '%{NAME},%{VERSION},%{RELEASE},%{ARCH},%{EPOCH},%{SIZE},%{SIGMD5},"
+                     "%{BUILDTIME}\\n'</extra_command>\n"
+                     "  </icicle>\n"
+                     "  ")
         # TODO: intelligently guess the size based on the kickstart file
         template += """</os>
         <description>%s OS</description>

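Note the escape change that comes with leaving the triple-quoted string: in the old """...""" literal, \n was a Python escape and embedded a real newline inside the rpm --qf format, while the new adjacent-literals form writes \\n, so the generated template carries a literal backslash-n for rpm to expand. A sketch of the difference (illustrative only):

    old = "rpm -qa --qf '%{NAME}\n'"    # a real newline sits inside the quotes
    new = "rpm -qa --qf '%{NAME}\\n'"   # backslash-n survives for rpm to interpret
    print(repr(old), repr(new))
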
@@ -3743,7 +3822,7 @@ class OzImageTask(BaseTaskHandler):
         <size>%sG</size>
         </disk>
         </template>
-        """ % (name, self.opts.get('disk_size'))
+        """ % (name, self.opts.get('disk_size'))  # noqa: E501
         return template

     def parseDistro(self, distro):

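Here the line is exempted rather than wrapped: a trailing "# noqa: E501" comment tells flake8 to skip that one check on that one line, which suits literals (like this XML template) that would be harder to read wrapped. Illustrative only:

    banner = '=' * 80  # short enough, no marker needed
    xml = "<template><name>some-very-long-image-name</name><os><name>fedora</name></os></template>"  # noqa: E501
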
@@ -3831,7 +3910,9 @@ class BaseImageTask(OzImageTask):
         Some image formats require others to be processed first, which is why
         we have to do this. raw files in particular may not be kept.
         """
-        supported = ('raw', 'raw-xz', 'liveimg-squashfs', 'vmdk', 'qcow', 'qcow2', 'vdi', 'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt', 'vagrant-vmware-fusion', 'vagrant-hyperv', 'vpc', "tar-gz")
+        supported = ('raw', 'raw-xz', 'liveimg-squashfs', 'vmdk', 'qcow', 'qcow2', 'vdi',
+                     'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt',
+                     'vagrant-vmware-fusion', 'vagrant-hyperv', 'vpc', "tar-gz")
         for f in formats:
             if f not in supported:
                 raise koji.ApplianceError('Invalid format: %s' % f)

@@ -3945,7 +4026,8 @@ class BaseImageTask(OzImageTask):
             self.tlog.removeHandler(self.fhandler)
             self.uploadFile(self.ozlog)
             if 'No disk activity' in details:
-                details = 'Automated install failed or prompted for input. See the screenshot in the task results for more information.'
+                details = 'Automated install failed or prompted for input. ' \
+                          'See the screenshot in the task results for more information'
             raise koji.ApplianceError('Image status is %s: %s' %
                                       (status, details))

@@ -4108,8 +4190,8 @@ class BaseImageTask(OzImageTask):
         if format == 'vagrant-vmware-fusion':
             format = 'vsphere-ova'
             img_opts['vsphere_ova_format'] = 'vagrant-vmware-fusion'
-            # The initial disk image transform for VMWare Fusion/Workstation requires a "standard" VMDK
-            # not the stream oriented format used for VirtualBox or regular VMWare OVAs
+            # The initial disk image transform for VMWare Fusion/Workstation requires a "standard"
+            # VMDK, not the stream oriented format used for VirtualBox or regular VMWare OVAs
             img_opts['vsphere_vmdk_format'] = 'standard'
             fixed_params = ['vsphere_ova_format', 'vsphere_vmdk_format']
         if format == 'vagrant-hyperv':

@@ -4117,7 +4199,8 @@ class BaseImageTask(OzImageTask):
             img_opts['hyperv_ova_format'] = 'hyperv-vagrant'
             fixed_params = ['hyperv_ova_format']
         targ = self._do_target_image(self.base_img.base_image.identifier,
-                                     format.replace('-ova', ''), img_opts=img_opts, fixed_params=fixed_params)
+                                     format.replace('-ova', ''), img_opts=img_opts,
+                                     fixed_params=fixed_params)
         targ2 = self._do_target_image(targ.target_image.identifier, 'OVA',
                                       img_opts=img_opts, fixed_params=fixed_params)
         return {'image': targ2.target_image.data}

@@ -4166,7 +4249,9 @@ class BaseImageTask(OzImageTask):
         self._mergeFactoryParams(img_opts, fixed_params)
         self.logger.debug('img_opts_post_merge: %s' % img_opts)
         target = self.bd.builder_for_target_image(image_type,
-                                                  image_id=base_id, template=None, parameters=img_opts)
+                                                  image_id=base_id,
+                                                  template=None,
+                                                  parameters=img_opts)
         target.target_thread.join()
         self._checkImageState(target)
         return target

@@ -4206,9 +4291,12 @@ class BaseImageTask(OzImageTask):
                    self.getUploadDir(), logerror=1)
         return {'image': newimg}

-    def handler(self, name, version, release, arch, target_info, build_tag, repo_info, inst_tree, opts=None):
+    def handler(self, name, version, release, arch, target_info,
+                build_tag, repo_info, inst_tree, opts=None):
         if not ozif_enabled:
-            self.logger.error("ImageFactory features require the following dependencies: pykickstart, imagefactory, oz and possibly python-hashlib")
+            self.logger.error(
+                "ImageFactory features require the following dependencies: "
+                "pykickstart, imagefactory, oz and possibly python-hashlib")
             raise koji.ApplianceError('ImageFactory functions not available')

         if opts is None:

@@ -4339,11 +4427,14 @@ class BuildIndirectionImageTask(OzImageTask):
         if not opts.get('skip_tag') and not opts.get('scratch'):
             # Make sure package is on the list for this tag
             if pkg_cfg is None:
-                raise koji.BuildError("package (image) %s not in list for tag %s" % (name, target_info['dest_tag_name']))
+                raise koji.BuildError("package (image) %s not in list for tag %s" %
+                                      (name, target_info['dest_tag_name']))
             elif pkg_cfg['blocked']:
-                raise koji.BuildError("package (image) %s is blocked for tag %s" % (name, target_info['dest_tag_name']))
+                raise koji.BuildError("package (image) %s is blocked for tag %s" %
+                                      (name, target_info['dest_tag_name']))
         return self.session.host.initImageBuild(self.id,
-                                                dict(name=name, version=version, release=release, epoch=0))
+                                                dict(name=name, version=version, release=release,
+                                                     epoch=0))

     def getRelease(self, name, ver):
         """return the next available release number for an N-V"""

@ -4371,7 +4462,8 @@ class BuildIndirectionImageTask(OzImageTask):
|
||||||
if fileurl:
|
if fileurl:
|
||||||
scm = SCM(fileurl)
|
scm = SCM(fileurl)
|
||||||
scm.assert_allowed(self.options.allowed_scms)
|
scm.assert_allowed(self.options.allowed_scms)
|
||||||
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=self.opts.get('scratch'))
|
self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
|
||||||
|
build_tag=build_tag, scratch=self.opts.get('scratch'))
|
||||||
logfile = os.path.join(self.workdir, 'checkout.log')
|
logfile = os.path.join(self.workdir, 'checkout.log')
|
||||||
scmsrcdir = scm.checkout(self.workdir, self.session,
|
scmsrcdir = scm.checkout(self.workdir, self.session,
|
||||||
self.getUploadDir(), logfile)
|
self.getUploadDir(), logfile)
|
||||||
|
|
@ -4402,11 +4494,13 @@ class BuildIndirectionImageTask(OzImageTask):
|
||||||
taskinfo = self.session.getTaskInfo(task_id)
|
taskinfo = self.session.getTaskInfo(task_id)
|
||||||
taskstate = koji.TASK_STATES[taskinfo['state']].lower()
|
taskstate = koji.TASK_STATES[taskinfo['state']].lower()
|
||||||
if taskstate != 'closed':
|
if taskstate != 'closed':
|
||||||
raise koji.BuildError("Input task (%d) must be in closed state - current state is (%s)" %
|
raise koji.BuildError("Input task (%d) must be in closed state"
|
||||||
|
" - current state is (%s)" %
|
||||||
(task_id, taskstate))
|
(task_id, taskstate))
|
||||||
taskmethod = taskinfo['method']
|
taskmethod = taskinfo['method']
|
||||||
if taskmethod != "createImage":
|
if taskmethod != "createImage":
|
||||||
raise koji.BuildError("Input task method must be 'createImage' - actual method (%s)" %
|
raise koji.BuildError("Input task method must be 'createImage'"
|
||||||
|
" - actual method (%s)" %
|
||||||
(taskmethod))
|
(taskmethod))
|
||||||
result = self.session.getTaskResult(task_id)
|
result = self.session.getTaskResult(task_id)
|
||||||
|
|
||||||
|
|
@ -4424,7 +4518,9 @@ class BuildIndirectionImageTask(OzImageTask):
|
||||||
tdl_full = os.path.join(task_dir, task_tdl)
|
tdl_full = os.path.join(task_dir, task_tdl)
|
||||||
|
|
||||||
if not (os.path.isfile(diskimage_full) and os.path.isfile(tdl_full)):
|
if not (os.path.isfile(diskimage_full) and os.path.isfile(tdl_full)):
|
||||||
raise koji.BuildError("Missing TDL or qcow2 image for task (%d) - possible expired scratch build" % (task_id))
|
raise koji.BuildError(
|
||||||
|
"Missing TDL or qcow2 image for task (%d) - possible expired scratch build" %
|
||||||
|
(task_id))
|
||||||
|
|
||||||
# The sequence to recreate a valid persistent image is as follows
|
# The sequence to recreate a valid persistent image is as follows
|
||||||
# Create a new BaseImage object
|
# Create a new BaseImage object
|
||||||
|
|
@@ -4445,7 +4541,10 @@ class BuildIndirectionImageTask(OzImageTask):
             return factory_base_image

         def _nvr_to_image(nvr, arch):
-            """ Take a build ID or NVR plus arch and turn it into an Image Factory Base Image object """
+            """
+            Take a build ID or NVR plus arch and turn it into
+            an Image Factory Base Image object
+            """
             pim = PersistentImageManager.default_manager()
             build = self.session.getBuild(nvr)
             if not build:

@@ -4471,7 +4570,8 @@ class BuildIndirectionImageTask(OzImageTask):
             tdl_full = os.path.join(builddir, build_tdl)

             if not (os.path.isfile(diskimage_full) and os.path.isfile(tdl_full)):
-                raise koji.BuildError("Missing TDL (%s) or qcow2 (%s) image for image (%s) - this should never happen" %
+                raise koji.BuildError("Missing TDL (%s) or qcow2 (%s) image for image (%s)"
+                                      " - this should never happen" %
                                       (build_tdl, build_diskimage, nvr))

             # The sequence to recreate a valid persistent image is as follows

@@ -4617,7 +4717,8 @@ class BuildIndirectionImageTask(OzImageTask):
                 tlog.removeHandler(fhandler)
                 self.uploadFile(ozlog)
                 raise koji.ApplianceError('Image status is %s: %s' %
-                                          (target.target_image.status, target.target_image.status_detail))
+                                          (target.target_image.status,
+                                           target.target_image.status_detail))

             self.uploadFile(target.target_image.data, remoteName=os.path.basename(results_loc))

@@ -4644,7 +4745,8 @@ class BuildIndirectionImageTask(OzImageTask):
         # tag it
         if not opts.get('scratch') and not opts.get('skip_tag'):
             tag_task_id = self.session.host.subtask(method='tagBuild',
-                                                    arglist=[target_info['dest_tag'], bld_info['id'], False, None, True],
+                                                    arglist=[target_info['dest_tag'],
+                                                             bld_info['id'], False, None, True],
                                                     label='tag', parent=self.id, arch='noarch')
             self.wait(tag_task_id)

@@ -4652,7 +4754,8 @@ class BuildIndirectionImageTask(OzImageTask):
         report = ''
         if opts.get('scratch'):
             respath = ', '.join(
-                [os.path.join(koji.pathinfo.work(), koji.pathinfo.taskrelpath(tid)) for tid in [self.id]])
+                [os.path.join(koji.pathinfo.work(),
+                              koji.pathinfo.taskrelpath(tid)) for tid in [self.id]])
             report += 'Scratch '
         else:
             respath = koji.pathinfo.imagebuild(bld_info)

@@ -4681,8 +4784,10 @@ class RebuildSRPM(BaseBuildTask):
         build_tag = self.session.getTag(build_tag, strict=True, event=event_id)

         rootopts = {'install_group': 'srpm-build', 'repo_id': repo_id}
-        br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
-        broot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, **rootopts)
+        br_arch = self.find_arch('noarch', self.session.host.getHost(
+        ), self.session.getBuildConfig(build_tag['id'], event=event_id))
+        broot = BuildRoot(self.session, self.options,
+                          build_tag['id'], br_arch, self.id, **rootopts)
         broot.workdir = self.workdir

         self.logger.debug("Initializing buildroot")

@@ -4720,7 +4825,8 @@ class RebuildSRPM(BaseBuildTask):
         release = koji.get_header_field(h, 'release')
         srpm_name = "%(name)s-%(version)s-%(release)s.src.rpm" % locals()
         if srpm_name != os.path.basename(srpm):
-            raise koji.BuildError('srpm name mismatch: %s != %s' % (srpm_name, os.path.basename(srpm)))
+            raise koji.BuildError('srpm name mismatch: %s != %s' %
+                                  (srpm_name, os.path.basename(srpm)))

         # upload srpm and return
         self.uploadFile(srpm)

@@ -4784,12 +4890,15 @@ class BuildSRPMFromSCMTask(BaseBuildTask):
         rootopts = {'install_group': 'srpm-build',
                     'setup_dns': True,
                     'repo_id': repo_id}
-        if self.options.scm_credentials_dir is not None and os.path.isdir(self.options.scm_credentials_dir):
+        if self.options.scm_credentials_dir is not None and os.path.isdir(
+                self.options.scm_credentials_dir):
             rootopts['bind_opts'] = {'dirs': {self.options.scm_credentials_dir: '/credentials', }}
             # Force internal_dev_setup back to true because bind_opts is used to turn it off
             rootopts['internal_dev_setup'] = True
-        br_arch = self.find_arch('noarch', self.session.host.getHost(), self.session.getBuildConfig(build_tag['id'], event=event_id))
-        broot = BuildRoot(self.session, self.options, build_tag['id'], br_arch, self.id, **rootopts)
+        br_arch = self.find_arch('noarch', self.session.host.getHost(
+        ), self.session.getBuildConfig(build_tag['id'], event=event_id))
+        broot = BuildRoot(self.session, self.options,
+                          build_tag['id'], br_arch, self.id, **rootopts)
         broot.workdir = self.workdir

         self.logger.debug("Initializing buildroot")

@@ -4803,7 +4912,8 @@ class BuildSRPMFromSCMTask(BaseBuildTask):
         logfile = self.workdir + '/checkout.log'
         uploadpath = self.getUploadDir()

-        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(), build_tag=build_tag, scratch=opts.get('scratch'))
+        self.run_callbacks('preSCMCheckout', scminfo=scm.get_info(),
+                           build_tag=build_tag, scratch=opts.get('scratch'))
         # Check out spec file, etc. from SCM
         sourcedir = scm.checkout(scmdir, self.session, uploadpath, logfile)
         self.run_callbacks("postSCMCheckout",

@@ -4855,7 +4965,8 @@ class BuildSRPMFromSCMTask(BaseBuildTask):
         release = koji.get_header_field(h, 'release')
         srpm_name = "%(name)s-%(version)s-%(release)s.src.rpm" % locals()
         if srpm_name != os.path.basename(srpm):
-            raise koji.BuildError('srpm name mismatch: %s != %s' % (srpm_name, os.path.basename(srpm)))
+            raise koji.BuildError('srpm name mismatch: %s != %s' %
+                                  (srpm_name, os.path.basename(srpm)))

         # upload srpm and return
         self.uploadFile(srpm)

@@ -4898,13 +5009,16 @@ Status: %(status)s\r
 %(failure_info)s\r
 """

-    def handler(self, recipients, is_successful, tag_info, from_info, build_info, user_info, ignore_success=None, failure_msg=''):
+    def handler(self, recipients, is_successful, tag_info, from_info,
+                build_info, user_info, ignore_success=None, failure_msg=''):
         if len(recipients) == 0:
             self.logger.debug('task %i: no recipients, not sending notifications', self.id)
             return

         if ignore_success and is_successful:
-            self.logger.debug('task %i: tag operation successful and ignore success is true, not sending notifications', self.id)
+            self.logger.debug(
+                'task %i: tag operation successful and ignore success is true, '
+                'not sending notifications', self.id)
             return

         build = self.session.getBuild(build_info)

@@ -4972,7 +5086,8 @@ class BuildNotificationTask(BaseTaskHandler):
     _taskWeight = 0.1

     # XXX externalize these templates somewhere
-    subject_templ = """Package: %(build_nvr)s Tag: %(dest_tag)s Status: %(status)s Built by: %(build_owner)s"""
+    subject_templ = "Package: %(build_nvr)s Tag: %(dest_tag)s Status: %(status)s " \
+                    "Built by: %(build_owner)s"
     message_templ = \
 """From: %(from_addr)s\r
 Subject: %(subject)s\r

@@ -5073,7 +5188,8 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
             return

         build_pkg_name = build['package_name']
-        build_pkg_evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) + ':' or ''), build['version'], build['release'])
+        build_pkg_evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) +
+                                      ':' or ''), build['version'], build['release'])
         build_nvr = koji.buildLabel(build)
         build_id = build['id']
         build_owner = build['owner_name']

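Note: the `build_pkg_evr` expression kept above relies on the old `and ... or` conditional idiom to prepend the epoch only when it is set. A small illustrative check (the values are made up, not taken from the commit):

    # epoch is rendered as a leading "E:" only when truthy
    build = {'epoch': 2, 'version': '1.0', 'release': '3'}
    evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) + ':' or ''),
                       build['version'], build['release'])
    assert evr == '2:1.0-3'
    build['epoch'] = None
    evr = '%s%s-%s' % ((build['epoch'] and str(build['epoch']) + ':' or ''),
                       build['version'], build['release'])
    assert evr == '1.0-3'
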
@@ -5099,7 +5215,9 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
             cancel_info = "\r\nCanceled by: %s" % canceler['name']
         elif build['state'] == koji.BUILD_STATES['FAILED']:
             failure_data = task_data[task_id]['result']
-            failed_hosts = ['%s (%s)' % (task['host'], task['arch']) for task in task_data.values() if task['host'] and task['state'] == 'failed']
+            failed_hosts = ['%s (%s)' % (task['host'], task['arch'])
+                            for task in task_data.values()
+                            if task['host'] and task['state'] == 'failed']
             failure_info = "\r\n%s (%d) failed on %s:\r\n  %s" % (build_nvr, build_id,
                                                                   ', '.join(failed_hosts),
                                                                   failure_data)

@@ -5142,9 +5260,11 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
                 output += "logs:\r\n"
                 for (file_, volume) in task['logs']:
                     if tasks[task_state] != 'closed':
-                        output += "  %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file_, volume)
+                        output += "  %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (
+                            weburl, task['id'], file_, volume)
                     else:
-                        output += "  %s\r\n" % '/'.join([buildurl, 'data', 'logs', task['build_arch'], file_])
+                        output += "  %s\r\n" % '/'.join([buildurl, 'data', 'logs',
+                                                         task['build_arch'], file_])
             if task['rpms']:
                 output += "rpms:\r\n"
                 for file_ in task['rpms']:

@@ -5152,11 +5272,13 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
             if task['misc']:
                 output += "misc:\r\n"
                 for (file_, volume) in task['misc']:
-                    output += "  %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file_, volume)
+                    output += "  %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (
+                        weburl, task['id'], file_, volume)
             output += "\r\n"
         output += "\r\n"

-        changelog = koji.util.formatChangelog(self.session.getChangelogEntries(build_id, queryOpts={'limit': 3})).replace("\n", "\r\n")
+        changelog = koji.util.formatChangelog(self.session.getChangelogEntries(
+            build_id, queryOpts={'limit': 3})).replace("\n", "\r\n")
         if changelog:
             changelog = "Changelog:\r\n%s" % changelog

@@ -5464,7 +5586,8 @@ class createDistRepoTask(BaseTaskHandler):
         "sparc": ("sparcv9v", "sparcv9", "sparcv8", "sparc", "noarch"),
         "sparc64": ("sparc64v", "sparc64", "noarch"),
         "alpha": ("alphaev6", "alphaev56", "alphaev5", "alpha", "noarch"),
-        "arm": ("arm", "armv4l", "armv4tl", "armv5tel", "armv5tejl", "armv6l", "armv7l", "noarch"),
+        "arm": ("arm", "armv4l", "armv4tl", "armv5tel", "armv5tejl", "armv6l", "armv7l",
+                "noarch"),
         "armhfp": ("armv7hl", "armv7hnl", "noarch"),
         "aarch64": ("aarch64", "noarch"),
         "riscv64": ("riscv64", "noarch"),

@@ -5926,7 +6049,8 @@ enabled=1
         for a in self.compat[arch]:
             # note: self.compat includes noarch for non-src already
             rpm_iter, builds = self.session.listTaggedRPMS(tag_id,
-                                                           event=opts['event'], arch=a, latest=opts['latest'],
+                                                           event=opts['event'], arch=a,
+                                                           latest=opts['latest'],
                                                            inherit=opts['inherit'], rpmsigs=True)
             for build in builds:
                 builddirs[build['id']] = koji.pathinfo.build(build)

@@ -6105,9 +6229,12 @@ class WaitrepoTask(BaseTaskHandler):
             repo = self.session.getRepo(taginfo['id'])
             if repo and repo != last_repo:
                 if builds:
-                    if koji.util.checkForBuilds(self.session, taginfo['id'], builds, repo['create_event']):
-                        self.logger.debug("Successfully waited %s for %s to appear in the %s repo" %
-                                          (koji.util.duration(start), koji.util.printList(nvrs), taginfo['name']))
+                    if koji.util.checkForBuilds(
+                            self.session, taginfo['id'], builds, repo['create_event']):
+                        self.logger.debug("Successfully waited %s for %s to appear "
+                                          "in the %s repo" %
+                                          (koji.util.duration(start), koji.util.printList(nvrs),
+                                           taginfo['name']))
                         return repo
                 elif newer_than:
                     if repo['create_ts'] > newer_than:

@@ -6120,8 +6247,10 @@ class WaitrepoTask(BaseTaskHandler):

             if (time.time() - start) > (self.TIMEOUT * 60.0):
                 if builds:
-                    raise koji.GenericError("Unsuccessfully waited %s for %s to appear in the %s repo" %
-                                            (koji.util.duration(start), koji.util.printList(nvrs), taginfo['name']))
+                    raise koji.GenericError("Unsuccessfully waited %s for %s to appear "
+                                            "in the %s repo" %
+                                            (koji.util.duration(start), koji.util.printList(nvrs),
+                                             taginfo['name']))
                 else:
                     raise koji.GenericError("Unsuccessfully waited %s for a new %s repo" %
                                             (koji.util.duration(start), taginfo['name']))

@@ -61,7 +61,8 @@ MULTILIB_ARCHES = {
 def parse_args(args):
     """Parse our opts/args"""
     usage = """
-mergerepos: take 2 or more repositories and merge their metadata into a new repo using Koji semantics
+mergerepos: take 2 or more repositories and merge their metadata into a new
+repo using Koji semantics

 mergerepos --repo=url --repo=url --outputdir=/some/path"""

@@ -74,7 +75,8 @@ def parse_args(args):
     parser.add_option("-a", "--arch", dest="arches", default=[], action="append",
                       help="List of arches to include in the repo")
     parser.add_option("-b", "--blocked", default=None,
-                      help="A file containing a list of srpm names to exclude from the merged repo")
+                      help="A file containing a list of srpm names to exclude "
+                           "from the merged repo")
     parser.add_option("--mode", default='koji', help="Select the merge mode")
     parser.add_option("-o", "--outputdir", default=None,
                       help="Location to create the repository")

@@ -175,18 +177,18 @@ class RepoMerge(object):
         For each package object, check if the srpm name has ever been seen before.
         If is has not, keep the package. If it has, check if the srpm name was first seen
         in the same repo as the current package. If so, keep the package from the srpm with the
-        highest NVR. If not, keep the packages from the first srpm we found, and delete packages from
-        all other srpms.
+        highest NVR. If not, keep the packages from the first srpm we found, and delete packages
+        from all other srpms.

         Packages with matching NVRs in multiple repos will be taken from the first repo.

         If the srpm name appears in the blocked package list, any packages generated from the srpm
         will be deleted from the package sack as well.

-        This method will also generate a file called "pkgorigins" and add it to the repo metadata. This
-        is a tab-separated map of package E:N-V-R.A to repo URL (as specified on the command-line). This
-        allows a package to be tracked back to its origin, even if the location field in the repodata does
-        not match the original repo location.
+        This method will also generate a file called "pkgorigins" and add it to the repo metadata.
+        This is a tab-separated map of package E:N-V-R.A to repo URL (as specified on the
+        command-line). This allows a package to be tracked back to its origin, even if the location
+        field in the repodata does not match the original repo location.
         """

         if self.mode == 'simple':

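Note: the docstring above describes the merge rule in prose. A minimal sketch of the bookkeeping it implies, with hypothetical inputs (the real method operates on yum package sacks and also compares NVRs within the owning repo, which this sketch omits):

    def merge(packages, blocked):
        # packages: (repo_url, srpm_name, pkg) tuples in repo priority order
        origin = {}   # srpm name -> repo that first provided it
        kept = []
        for repo_url, srpm, pkg in packages:
            if srpm in blocked:
                continue          # blocked srpms drop all of their packages
            if origin.setdefault(srpm, repo_url) == repo_url:
                kept.append((pkg, repo_url))   # (pkg, origin) feeds pkgorigins
        return kept
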
@@ -208,7 +210,8 @@ class RepoMerge(object):
             # to be using relative urls
             # XXX - kind of a hack, but yum leaves us little choice
             # force the pkg object to report a relative location
-            loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, attrib=True)
+            loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path,
+                                                                  attrib=True)
             pkg._return_remote_location = make_const_func(loc)
             if pkg.sourcerpm in seen_srpms:
                 # we're just looking at sourcerpms this pass and we've

@@ -299,7 +302,8 @@ class RepoMerge(object):
             # to be using relative urls
             # XXX - kind of a hack, but yum leaves us little choice
             # force the pkg object to report a relative location
-            loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path, attrib=True)
+            loc = """<location href="%s"/>\n""" % yum.misc.to_xml(pkg.remote_path,
+                                                                  attrib=True)
             pkg._return_remote_location = make_const_func(loc)

         pkgorigins = os.path.join(self.yumbase.conf.cachedir, 'pkgorigins')

6 cli/koji
@@ -129,7 +129,8 @@ def get_options():
                       help=_("do not authenticate"))
     parser.add_option("--force-auth", action="store_true", default=False,
                       help=_("authenticate even for read-only operations"))
-    parser.add_option("--authtype", help=_("force use of a type of authentication, options: noauth, ssl, password, or kerberos"))
+    parser.add_option("--authtype", help=_("force use of a type of authentication, options: "
+                                           "noauth, ssl, password, or kerberos"))
     parser.add_option("-d", "--debug", action="store_true",
                       help=_("show debug output"))
     parser.add_option("--debug-xmlrpc", action="store_true",

@@ -145,7 +146,8 @@ def get_options():
     parser.add_option("--pkgurl", help=SUPPRESS_HELP)
     parser.add_option("--plugin-paths", metavar='PATHS',
                       help=_("specify additional plugin paths (colon separated)"))
-    parser.add_option("--help-commands", action="store_true", default=False, help=_("list commands"))
+    parser.add_option("--help-commands", action="store_true", default=False,
+                      help=_("list commands"))
     (options, args) = parser.parse_args()

     # load local config

File diff suppressed because it is too large
@@ -112,7 +112,8 @@ def ensure_connection(session):
     except requests.exceptions.ConnectionError:
         error(_("Error: Unable to connect to server"))
     if ret != koji.API_VERSION:
-        warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
+        warn(_("WARNING: The server is at API version %d and "
+               "the client is at %d" % (ret, koji.API_VERSION)))


 def print_task_headers():

@@ -194,7 +195,8 @@ class TaskWatcher(object):
             laststate = last['state']
             if laststate != state:
                 if not self.quiet:
-                    print("%s: %s -> %s" % (self.str(), self.display_state(last), self.display_state(self.info)))
+                    print("%s: %s -> %s" % (self.str(), self.display_state(last),
+                                            self.display_state(self.info)))
                 return True
             return False
         else:

@@ -277,9 +279,9 @@ def watch_tasks(session, tasklist, quiet=False, poll_interval=60, ki_handler=Non
         tlist = ['%s: %s' % (t.str(), t.display_state(t.info))
                  for t in tasks.values() if not t.is_done()]
         print(
-            """Tasks still running. You can continue to watch with the '%s watch-task' command.
-Running Tasks:
-%s""" % (progname, '\n'.join(tlist)))
+            "Tasks still running. You can continue to watch with the"
+            " '%s watch-task' command.\n"
+            "Running Tasks:\n%s" % (progname, '\n'.join(tlist)))
     sys.stdout.flush()
     rv = 0
     try:

@@ -302,7 +304,8 @@ Running Tasks:
         for child in session.getTaskChildren(task_id):
             child_id = child['id']
             if child_id not in tasks.keys():
-                tasks[child_id] = TaskWatcher(child_id, session, task.level + 1, quiet=quiet)
+                tasks[child_id] = TaskWatcher(child_id, session, task.level + 1,
+                                              quiet=quiet)
                 tasks[child_id].update()
                 # If we found new children, go through the list again,
                 # in case they have children also

@@ -370,7 +373,8 @@ def watch_logs(session, tasklist, opts, poll_interval):
             if (log, volume) not in taskoffsets:
                 taskoffsets[(log, volume)] = 0

-            contents = session.downloadTaskOutput(task_id, log, taskoffsets[(log, volume)], 16384, volume=volume)
+            contents = session.downloadTaskOutput(task_id, log, taskoffsets[(log, volume)],
+                                                  16384, volume=volume)
             taskoffsets[(log, volume)] += len(contents)
             if contents:
                 currlog = "%d:%s:%s:" % (task_id, volume, log)

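Note: the watch_logs hunk above downloads log output incrementally, keyed by a per-log offset. A hedged standalone sketch of the same polling pattern (`session` is assumed to be an authenticated koji ClientSession; the loop runs until interrupted):

    import time

    def follow_log(session, task_id, log='build.log', volume='DEFAULT'):
        offset = 0
        while True:
            contents = session.downloadTaskOutput(task_id, log, offset, 16384, volume=volume)
            if contents:
                offset += len(contents)   # resume from where we stopped
                print(contents.decode('utf-8', 'replace'), end='')
            else:
                time.sleep(5)             # nothing new yet, poll again
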
@@ -452,7 +456,9 @@ def _progress_callback(uploaded, total, piece, time, total_time):
     speed = _format_size(float(total) / float(total_time)) + "/sec"

     # write formated string and flush
-    sys.stdout.write("[% -36s] % 4s % 8s % 10s % 14s\r" % ('=' * (int(percent_done * 36)), percent_done_str, elapsed, data_done, speed))
+    sys.stdout.write("[% -36s] % 4s % 8s % 10s % 14s\r" % ('=' * (int(percent_done * 36)),
+                                                           percent_done_str, elapsed, data_done,
+                                                           speed))
     sys.stdout.flush()


@@ -520,7 +526,8 @@ def _download_progress(download_t, download_d):
     percent_done_str = "%3d%%" % (percent_done * 100)
     data_done = _format_size(download_d)

-    sys.stdout.write("[% -36s] % 4s % 10s\r" % ('=' * (int(percent_done * 36)), percent_done_str, data_done))
+    sys.stdout.write("[% -36s] % 4s % 10s\r" % ('=' * (int(percent_done * 36)), percent_done_str,
+                                                data_done))
     sys.stdout.flush()


@@ -560,13 +567,16 @@ def activate_session(session, options):
     elif options.authtype == "ssl" or os.path.isfile(options.cert) and options.authtype is None:
         # authenticate using SSL client cert
         session.ssl_login(options.cert, None, options.serverca, proxyuser=runas)
-    elif options.authtype == "password" or getattr(options, 'user', None) and options.authtype is None:
+    elif options.authtype == "password" \
+            or getattr(options, 'user', None) \
+            and options.authtype is None:
         # authenticate using user/password
         session.login()
     elif options.authtype == "kerberos" or has_krb_creds() and options.authtype is None:
         try:
             if getattr(options, 'keytab', None) and getattr(options, 'principal', None):
-                session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=runas)
+                session.krb_login(principal=options.principal, keytab=options.keytab,
+                                  proxyuser=runas)
             else:
                 session.krb_login(proxyuser=runas)
         except socket.error as e:

609 hub/kojihub.py
File diff suppressed because it is too large
@@ -155,7 +155,9 @@ class HandlerRegistry(object):
             if x == 0 and func.__code__.co_varnames[x] == "self":
                 continue
             if func.__defaults__ and func.__code__.co_argcount - x <= len(func.__defaults__):
-                args.append((func.__code__.co_varnames[x], func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)]))
+                args.append(
+                    (func.__code__.co_varnames[x],
+                     func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)]))
             else:
                 args.append(func.__code__.co_varnames[x])
         return args

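Note: the index arithmetic in the hunk above maps a positional parameter to its default, since __defaults__ aligns with the last positional parameters. A hedged illustration (`describe` is a hypothetical helper, but the __code__/__defaults__ arithmetic matches the hunk):

    def describe(func):
        args = []
        code = func.__code__
        defaults = func.__defaults__ or ()
        for x in range(code.co_argcount):
            if code.co_argcount - x <= len(defaults):
                # defaults align with the *last* positional parameters
                args.append((code.co_varnames[x],
                             defaults[x - code.co_argcount + len(defaults)]))
            else:
                args.append(code.co_varnames[x])
        return args

    def example(a, b, c=3, d=4):
        pass

    print(describe(example))  # ['a', 'b', ('c', 3), ('d', 4)]
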
@@ -317,10 +319,11 @@ class ModXMLRPCRequestHandler(object):

         if self.logger.isEnabledFor(logging.INFO):
             rusage = resource.getrusage(resource.RUSAGE_SELF)
-            self.logger.info("Completed method %s for session %s (#%s): %f seconds, rss %s, stime %f",
-                             method, context.session.id, context.session.callnum,
-                             time.time() - start,
-                             rusage.ru_maxrss, rusage.ru_stime)
+            self.logger.info(
+                "Completed method %s for session %s (#%s): %f seconds, rss %s, stime %f",
+                method, context.session.id, context.session.callnum,
+                time.time() - start,
+                rusage.ru_maxrss, rusage.ru_stime)

         return ret


@@ -344,8 +347,11 @@ class ModXMLRPCRequestHandler(object):
                 faultCode = getattr(exc_type, 'faultCode', 1)
                 faultString = ', '.join(exc_value.args)
                 trace = traceback.format_exception(*sys.exc_info())
-                # traceback is not part of the multicall spec, but we include it for debugging purposes
-                results.append({'faultCode': faultCode, 'faultString': faultString, 'traceback': trace})
+                # traceback is not part of the multicall spec,
+                # but we include it for debugging purposes
+                results.append({'faultCode': faultCode,
+                                'faultString': faultString,
+                                'traceback': trace})
             else:
                 results.append([result])


@@ -438,7 +444,9 @@ def load_config(environ):
         ['VerbosePolicy', 'boolean', False],

         ['LogLevel', 'string', 'WARNING'],
-        ['LogFormat', 'string', '%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'],
+        ['LogFormat', 'string',
+         '%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s '
+         '%(name)s: %(message)s'],

         ['MissingPolicyOk', 'boolean', True],
         ['EnableMaven', 'boolean', False],

@@ -660,7 +668,8 @@ def load_scripts(environ):

 def get_memory_usage():
     pagesize = resource.getpagesize()
-    statm = [pagesize * int(y) // 1024 for y in "".join(open("/proc/self/statm").readlines()).strip().split()]
+    statm = [pagesize * int(y) // 1024
+             for y in "".join(open("/proc/self/statm").readlines()).strip().split()]
     size, res, shr, text, lib, data, dirty = statm
     return res - shr

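Note: get_memory_usage above reads /proc/self/statm, which reports seven counters in pages; scaling by the page size and subtracting shared from resident gives a rough private-RSS figure in KiB. A hedged standalone version (Linux-only):

    import resource

    def memory_usage_kib():
        pagesize = resource.getpagesize()
        with open("/proc/self/statm") as f:
            size, res, shr, text, lib, data, dirty = (
                pagesize * int(y) // 1024 for y in f.read().split())
        return res - shr

    print(memory_usage_kib())
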
@@ -713,7 +722,8 @@ def application(environ, start_response):
             ('Allow', 'POST'),
         ]
         start_response('405 Method Not Allowed', headers)
-        response = "Method Not Allowed\nThis is an XML-RPC server. Only POST requests are accepted."
+        response = "Method Not Allowed\n" \
+                   "This is an XML-RPC server. Only POST requests are accepted."
         if six.PY3:
             response = response.encode()
         headers = [

@@ -767,7 +777,11 @@ def application(environ, start_response):
             paramstr = repr(getattr(context, 'params', 'UNKNOWN'))
             if len(paramstr) > 120:
                 paramstr = paramstr[:117] + "..."
-            h.logger.warning("Memory usage of process %d grew from %d KiB to %d KiB (+%d KiB) processing request %s with args %s" % (os.getpid(), memory_usage_at_start, memory_usage_at_end, memory_usage_at_end - memory_usage_at_start, context.method, paramstr))
+            h.logger.warning(
+                "Memory usage of process %d grew from %d KiB to %d KiB (+%d KiB) processing "
+                "request %s with args %s" %
+                (os.getpid(), memory_usage_at_start, memory_usage_at_end,
+                 memory_usage_at_end - memory_usage_at_start, context.method, paramstr))
         h.logger.debug("Returning %d bytes after %f seconds", len(response),
                        time.time() - start)
     finally:

@@ -75,7 +75,7 @@ try:
     from OpenSSL.SSL import Error as SSL_Error
 except Exception:  # pragma: no cover
     # the hub imports koji, and sometimes this import fails there
-    # see: https://cryptography.io/en/latest/faq/#starting-cryptography-using-mod-wsgi-produces-an-internalerror-during-a-call-in-register-osrandom-engine
+    # see: https://cryptography.io/en/latest/faq/#starting-cryptography-using-mod-wsgi-produces-an-internalerror-during-a-call-in-register-osrandom-engine  # noqa: E501
     # unfortunately the workaround at the above link does not always work, so
     # we ignore it here
     pass

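Note: the hunk above shows the standard flake8 escape hatch for lines that cannot be wrapped: a trailing noqa comment naming the check, which suppresses only that check on that line. A hypothetical example (the URL is made up):

    # see: https://example.com/a-very-long-reference-url-with-no-sensible-break-point  # noqa: E501
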
@@ -1270,7 +1270,8 @@ def parse_pom(path=None, contents=None):
         fd.close()

     if not contents:
-        raise GenericError('either a path to a pom file or the contents of a pom file must be specified')
+        raise GenericError(
+            'either a path to a pom file or the contents of a pom file must be specified')

     # A common problem is non-UTF8 characters in XML files, so we'll convert the string first


@@ -1287,7 +1288,8 @@ def parse_pom(path=None, contents=None):

     for field in fields:
         if field not in util.to_list(values.keys()):
-            raise GenericError('could not extract %s from POM: %s' % (field, (path or '<contents>')))
+            raise GenericError('could not extract %s from POM: %s' %
+                               (field, (path or '<contents>')))
     return values


@@ -1649,7 +1651,8 @@ name=build
     # The following macro values cannot be overridden by tag options
     macros['%_topdir'] = '%s/build' % config_opts['chroothome']
     macros['%_host_cpu'] = opts.get('target_arch', arch)
-    macros['%_host'] = '%s-%s' % (opts.get('target_arch', arch), opts.get('mockhost', 'koji-linux-gnu'))
+    macros['%_host'] = '%s-%s' % (opts.get('target_arch', arch),
+                                  opts.get('mockhost', 'koji-linux-gnu'))

     parts = ["""# Auto-generated by the Koji build system
 """]

@@ -1681,7 +1684,9 @@ name=build
     if bind_opts:
         for key in bind_opts.keys():
             for mnt_src, mnt_dest in six.iteritems(bind_opts.get(key)):
-                parts.append("config_opts['plugin_conf']['bind_mount_opts'][%r].append((%r, %r))\n" % (key, mnt_src, mnt_dest))
+                parts.append(
+                    "config_opts['plugin_conf']['bind_mount_opts'][%r].append((%r, %r))\n" %
+                    (key, mnt_src, mnt_dest))
         parts.append("\n")

     for key in sorted(macros):

@@ -1886,7 +1891,8 @@ def read_config(profile_name, user_config=None):
                 try:
                     result[name] = int(value)
                 except ValueError:
-                    raise ConfigurationError("value for %s config option must be a valid integer" % name)
+                    raise ConfigurationError(
+                        "value for %s config option must be a valid integer" % name)
             else:
                 result[name] = value


@@ -2030,7 +2036,8 @@ def read_config_files(config_files, raw=False):

 class PathInfo(object):
     # ASCII numbers and upper- and lower-case letter for use in tmpdir()
-    ASCII_CHARS = [chr(i) for i in list(range(48, 58)) + list(range(65, 91)) + list(range(97, 123))]
+    ASCII_CHARS = [chr(i)
+                   for i in list(range(48, 58)) + list(range(65, 91)) + list(range(97, 123))]

     def __init__(self, topdir=None):
         self._topdir = topdir

@@ -2053,10 +2060,12 @@ class PathInfo(object):

     def build(self, build):
         """Return the directory where a build belongs"""
-        return self.volumedir(build.get('volume_name')) + ("/packages/%(name)s/%(version)s/%(release)s" % build)
+        return self.volumedir(build.get('volume_name')) + \
+            ("/packages/%(name)s/%(version)s/%(release)s" % build)

     def mavenbuild(self, build):
-        """Return the directory where the Maven build exists in the global store (/mnt/koji/packages)"""
+        """Return the directory where the Maven build exists in the global store
+        (/mnt/koji/packages)"""
         return self.build(build) + '/maven'

     def mavenrepo(self, maveninfo):

@@ -2137,7 +2146,8 @@ class PathInfo(object):
         """Return a path to a unique directory under work()/tmp/"""
         tmp = None
         while tmp is None or os.path.exists(tmp):
-            tmp = self.work(volume) + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS) for dummy in '123456'])
+            tmp = self.work(volume) + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS)
+                                                         for dummy in '123456'])
         return tmp

     def scratch(self):

@@ -2781,9 +2791,9 @@ class ClientSession(object):
                 # basically, we want to retry on most errors, with a few exceptions
                 # - faults (this means the call completed and failed)
                 # - SystemExit, KeyboardInterrupt
-                # note that, for logged-in sessions the server should tell us (via a RetryError fault)
-                # if the call cannot be retried. For non-logged-in sessions, all calls should be read-only
-                # and hence retryable.
+                # note that, for logged-in sessions the server should tell us (via a RetryError
+                # fault) if the call cannot be retried. For non-logged-in sessions, all calls
+                # should be read-only and hence retryable.
             except Fault as fault:
                 # try to convert the fault to a known exception
                 err = convertFault(fault)

@@ -2792,13 +2802,14 @@ class ClientSession(object):
                         secs = self.opts.get('offline_retry_interval', interval)
                         self.logger.debug("Server offline. Retrying in %i seconds", secs)
                         time.sleep(secs)
-                        # reset try count - this isn't a typical error, this is a running server
-                        # correctly reporting an outage
+                        # reset try count - this isn't a typical error, this is a running
+                        # server correctly reporting an outage
                         tries = 0
                         continue
                 raise err
             except (SystemExit, KeyboardInterrupt):
-                # (depending on the python version, these may or may not be subclasses of Exception)
+                # (depending on the python version, these may or may not be subclasses of
+                # Exception)
                 raise
             except Exception as e:
                 tb_str = ''.join(traceback.format_exception(*sys.exc_info()))

@@ -2809,8 +2820,9 @@ class ClientSession(object):
                     raise

                 if not self.logged_in:
-                    # in the past, non-logged-in sessions did not retry. For compatibility purposes
-                    # this behavior is governed by the anon_retry opt.
+                    # in the past, non-logged-in sessions did not retry.
+                    # For compatibility purposes this behavior is governed by the anon_retry
+                    # opt.
                     if not self.opts.get('anon_retry', False):
                         raise

@@ -2822,7 +2834,8 @@ class ClientSession(object):
                 # otherwise keep retrying
                 if self.logger.isEnabledFor(logging.DEBUG):
                     self.logger.debug(tb_str)
-                self.logger.info("Try #%s for call %s (%s) failed: %s", tries, self.callnum, name, e)
+                self.logger.info("Try #%s for call %s (%s) failed: %s",
+                                 tries, self.callnum, name, e)
             if tries > 1:
                 # first retry is immediate, after that we honor retry_interval
                 time.sleep(interval)

@@ -2864,7 +2877,8 @@ class ClientSession(object):
         transaction.
         """
         if not self.multicall:
-            raise GenericError('ClientSession.multicall must be set to True before calling multiCall()')
+            raise GenericError(
+                'ClientSession.multicall must be set to True before calling multiCall()')
         self.multicall = False
         if len(self._calls) == 0:
             return []

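Note: the guard above enforces koji's multicall protocol: queue calls first, then flush them in a single request. Typical usage, sketched with made-up NVRs (`session` is an existing ClientSession):

    session.multicall = True            # queue calls instead of sending them
    session.getBuild('foo-1.0-1')
    session.getBuild('bar-2.3-4')
    results = session.multiCall()       # one round trip, per-call results/faults
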
@@ -2896,7 +2910,8 @@ class ClientSession(object):
             return self.__dict__['_apidoc']
         return VirtualMethod(self._callMethod, name, self)

-    def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=False, volume=None):
+    def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None,
+                   overwrite=False, volume=None):
         if blocksize is None:
             blocksize = self.opts.get('upload_blocksize', 1048576)


@@ -2930,7 +2945,8 @@ class ClientSession(object):
             hexdigest = util.adler32_constructor(chunk).hexdigest()
             full_chksum.update(chunk)
             if result['size'] != len(chunk):
-                raise GenericError("server returned wrong chunk size: %s != %s" % (result['size'], len(chunk)))
+                raise GenericError("server returned wrong chunk size: %s != %s" %
+                                   (result['size'], len(chunk)))
             if result['hexdigest'] != hexdigest:
                 raise GenericError('upload checksum failed: %s != %s'
                                    % (result['hexdigest'], hexdigest))

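Note: the fastUpload hunk above verifies each uploaded chunk against a server-reported adler32 digest. A hedged sketch of the same check using zlib.adler32 directly instead of koji's util.adler32_constructor wrapper:

    import zlib

    def chunk_hexdigest(chunk):
        # adler32 returns an unsigned 32-bit int; render it like a hashlib hexdigest
        return '%08x' % (zlib.adler32(chunk) & 0xffffffff)

    chunk = b'some upload data'
    assert chunk_hexdigest(chunk) == chunk_hexdigest(bytearray(chunk))
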
@ -2957,9 +2973,11 @@ class ClientSession(object):
|
||||||
if problems and result['hexdigest'] != full_chksum.hexdigest():
|
if problems and result['hexdigest'] != full_chksum.hexdigest():
|
||||||
raise GenericError("Uploaded file has wrong checksum: %s/%s, %s != %s"
|
raise GenericError("Uploaded file has wrong checksum: %s/%s, %s != %s"
|
||||||
% (path, name, result['hexdigest'], full_chksum.hexdigest()))
|
% (path, name, result['hexdigest'], full_chksum.hexdigest()))
|
||||||
self.logger.debug("Fast upload: %s complete. %i bytes in %.1f seconds", localfile, size, t2)
|
self.logger.debug("Fast upload: %s complete. %i bytes in %.1f seconds",
|
||||||
|
localfile, size, t2)
|
||||||
|
|
||||||
def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False, volume=None):
|
def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False,
|
||||||
|
volume=None):
|
||||||
"""prep a rawUpload call"""
|
"""prep a rawUpload call"""
|
||||||
if not self.logged_in:
|
if not self.logged_in:
|
||||||
raise ActionNotAllowed("you must be logged in to upload")
|
raise ActionNotAllowed("you must be logged in to upload")
|
||||||
|
|
@ -2989,7 +3007,8 @@ class ClientSession(object):
|
||||||
request = chunk
|
request = chunk
|
||||||
return handler, headers, request
|
return handler, headers, request
|
||||||
|
|
||||||
def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=True, volume=None):
|
def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None,
|
||||||
|
overwrite=True, volume=None):
|
||||||
"""upload a file in chunks using the uploadFile call"""
|
"""upload a file in chunks using the uploadFile call"""
|
||||||
if blocksize is None:
|
if blocksize is None:
|
||||||
        blocksize = self.opts.get('upload_blocksize', 1048576)

@ -3044,7 +3063,8 @@ class ClientSession(object):
        tries = 0
        while True:
            if debug:
-               self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" % (path, name, sz, digest, offset))
+               self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" %
+                                 (path, name, sz, digest, offset))
            if self.callMethod('uploadFile', path, name, sz, digest, offset, data, **volopts):
                break
            if tries <= retries:

@ -3063,9 +3083,11 @@ class ClientSession(object):
            if t2 <= 0:
                t2 = 1
            if debug:
-               self.logger.debug("Uploaded %d bytes in %f seconds (%f kbytes/sec)" % (size, t1, size / t1 / 1024.0))
+               self.logger.debug("Uploaded %d bytes in %f seconds (%f kbytes/sec)" %
+                                 (size, t1, size / t1 / 1024.0))
        if debug:
-           self.logger.debug("Total: %d bytes in %f seconds (%f kbytes/sec)" % (ofs, t2, ofs / t2 / 1024.0))
+           self.logger.debug("Total: %d bytes in %f seconds (%f kbytes/sec)" %
+                             (ofs, t2, ofs / t2 / 1024.0))
        if callback:
            callback(ofs, totalsize, size, t1, t2)
        fo.close()

@ -3281,8 +3303,8 @@ class DBHandler(logging.Handler):
            cursor.execute(command, data)
            cursor.close()
            # self.cnx.commit()
-           # XXX - committing here is most likely wrong, but we need to set commit_pending or something
-           # ...and this is really the wrong place for that
+           # XXX - committing here is most likely wrong, but we need to set commit_pending or
+           # something...and this is really the wrong place for that
        except BaseException:
            self.handleError(record)

@ -3583,7 +3605,9 @@ def add_file_logger(logger, fn):

def add_stderr_logger(logger):
    handler = logging.StreamHandler()
-   handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] {%(process)d} %(name)s:%(lineno)d %(message)s'))
+   handler.setFormatter(
+       logging.Formatter(
+           '%(asctime)s [%(levelname)s] {%(process)d} %(name)s:%(lineno)d %(message)s'))
    handler.setLevel(logging.DEBUG)
    logging.getLogger(logger).addHandler(handler)

@ -3612,7 +3636,8 @@ def add_mail_logger(logger, addr):
        return
    addresses = addr.split(',')
    handler = logging.handlers.SMTPHandler("localhost",
-                                          "%s@%s" % (pwd.getpwuid(os.getuid())[0], socket.getfqdn()),
+                                          "%s@%s" % (pwd.getpwuid(os.getuid())[0],
+                                                     socket.getfqdn()),
                                           addresses,
                                           "%s: error notice" % socket.getfqdn())
    handler.setFormatter(logging.Formatter('%(pathname)s:%(lineno)d [%(levelname)s] %(message)s'))
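The add_stderr_logger() rewrap above splits only the Formatter call; the handler it builds is unchanged. A minimal standalone sketch of the same setup (the logger name is invented for the example):

    import logging

    # the format string used by add_stderr_logger() above
    fmt = '%(asctime)s [%(levelname)s] {%(process)d} %(name)s:%(lineno)d %(message)s'
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(fmt))
    handler.setLevel(logging.DEBUG)
    logging.getLogger('koji.demo').addHandler(handler)   # 'koji.demo' is a made-up name
    logging.getLogger('koji.demo').warning("stderr logging is now configured")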
10  koji/auth.py
@ -334,7 +334,8 @@ class Session(object):

                # Successfully authenticated via Kerberos, now log in
                if proxyuser:
-                   proxyprincs = [princ.strip() for princ in context.opts.get('ProxyPrincipals', '').split(',')]
+                   proxyprincs = [princ.strip()
+                                  for princ in context.opts.get('ProxyPrincipals', '').split(',')]
                    if cprinc.name in proxyprincs:
                        login_principal = proxyuser
                    else:

@ -408,12 +409,15 @@ class Session(object):
            authtype = koji.AUTHTYPE_GSSAPI
        else:
            if context.environ.get('SSL_CLIENT_VERIFY') != 'SUCCESS':
-               raise koji.AuthError('could not verify client: %s' % context.environ.get('SSL_CLIENT_VERIFY'))
+               raise koji.AuthError('could not verify client: %s' %
+                                    context.environ.get('SSL_CLIENT_VERIFY'))

            name_dn_component = context.opts.get('DNUsernameComponent', 'CN')
            username = context.environ.get('SSL_CLIENT_S_DN_%s' % name_dn_component)
            if not username:
-               raise koji.AuthError('unable to get user information (%s) from client certificate' % name_dn_component)
+               raise koji.AuthError(
+                   'unable to get user information (%s) from client certificate' %
+                   name_dn_component)
            client_dn = context.environ.get('SSL_CLIENT_S_DN')
            authtype = koji.AUTHTYPE_SSL
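The proxyprincs rewrap in the @ -334 hunk is a pure re-indentation of a list comprehension over a comma-separated config option. A minimal sketch of the same parsing, with a made-up option value:

    # made-up ProxyPrincipals value; the real one comes from the hub configuration
    opts = {'ProxyPrincipals': 'compile/b1.example.com@EXAMPLE.COM, compile/b2.example.com@EXAMPLE.COM'}
    proxyprincs = [princ.strip()
                   for princ in opts.get('ProxyPrincipals', '').split(',')]
    assert proxyprincs == ['compile/b1.example.com@EXAMPLE.COM',
                           'compile/b2.example.com@EXAMPLE.COM']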
102  koji/daemon.py
@ -110,8 +110,9 @@ def fast_incremental_upload(session, fname, fd, path, retries, logger):
            break


-def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, append=0, chroot=None, env=None):
+def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, append=0,
+               chroot=None, env=None):
    """Run command with output redirected. If chroot is not None, chroot to the directory specified
    before running the command."""
    pid = os.fork()
    fd = None

@ -287,11 +288,13 @@ class SCM(object):
        elif len(userhost) > 2:
            raise koji.GenericError('Invalid username@hostname specified: %s' % netloc)
        if not netloc:
-           raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the netloc element.' % self.url)
+           raise koji.GenericError(
+               'Unable to parse SCM URL: %s . Could not find the netloc element.' % self.url)

        # check for empty path before we apply normpath
        if not path:
-           raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the path element.' % self.url)
+           raise koji.GenericError(
+               'Unable to parse SCM URL: %s . Could not find the path element.' % self.url)

        path = os.path.normpath(path)

@ -306,14 +309,19 @@ class SCM(object):
            # any such url should have already been caught by is_scm_url
            raise koji.GenericError('Invalid SCM URL. Path should begin with /: %s) ')

-       # check for validity: params should be empty, query may be empty, everything else should be populated
+       # check for validity: params should be empty, query may be empty, everything else should be
+       # populated
        if params:
-           raise koji.GenericError('Unable to parse SCM URL: %s . Params element %s should be empty.' % (self.url, params))
+           raise koji.GenericError(
+               'Unable to parse SCM URL: %s . Params element %s should be empty.' %
+               (self.url, params))
        if not scheme:  # pragma: no cover
            # should not happen because of is_scm_url check earlier
-           raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the scheme element.' % self.url)
+           raise koji.GenericError(
+               'Unable to parse SCM URL: %s . Could not find the scheme element.' % self.url)
        if not fragment:
-           raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the fragment element.' % self.url)
+           raise koji.GenericError(
+               'Unable to parse SCM URL: %s . Could not find the fragment element.' % self.url)

        # return parsed values
        return (scheme, user, netloc, path, query, fragment)

@ -356,7 +364,8 @@ class SCM(object):
        for allowed_scm in allowed.split():
            scm_tuple = allowed_scm.split(':')
            if len(scm_tuple) < 2:
-               self.logger.warn('Ignoring incorrectly formatted SCM host:repository: %s' % allowed_scm)
+               self.logger.warn('Ignoring incorrectly formatted SCM host:repository: %s' %
+                                allowed_scm)
                continue
            host_pat = scm_tuple[0]
            repo_pat = scm_tuple[1]

@ -378,11 +387,13 @@ class SCM(object):
                    if scm_tuple[3]:
                        self.source_cmd = scm_tuple[3].split(',')
                    else:
-                       # there was nothing after the trailing :, so they don't want to run a source_cmd at all
+                       # there was nothing after the trailing :,
+                       # so they don't want to run a source_cmd at all
                        self.source_cmd = None
                break
        if not is_allowed:
-           raise koji.BuildError('%s:%s is not in the list of allowed SCMs' % (self.host, self.repository))
+           raise koji.BuildError(
+               '%s:%s is not in the list of allowed SCMs' % (self.host, self.repository))

    def checkout(self, scmdir, session=None, uploadpath=None, logfile=None):
        """

@ -416,16 +427,20 @@ class SCM(object):
                         (self.scmtype, ' '.join(cmd), os.path.basename(logfile)))

        if self.scmtype == 'CVS':
-           pserver = ':pserver:%s@%s:%s' % ((self.user or 'anonymous'), self.host, self.repository)
-           module_checkout_cmd = ['cvs', '-d', pserver, 'checkout', '-r', self.revision, self.module]
+           pserver = ':pserver:%s@%s:%s' % ((self.user or 'anonymous'), self.host,
+                                            self.repository)
+           module_checkout_cmd = ['cvs', '-d', pserver, 'checkout', '-r', self.revision,
+                                  self.module]
            common_checkout_cmd = ['cvs', '-d', pserver, 'checkout', 'common']

        elif self.scmtype == 'CVS+SSH':
            if not self.user:
-               raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
+               raise koji.BuildError(
+                   'No user specified for repository access scheme: %s' % self.scheme)

            cvsserver = ':ext:%s@%s:%s' % (self.user, self.host, self.repository)
-           module_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', '-r', self.revision, self.module]
+           module_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', '-r', self.revision,
+                                  self.module]
            common_checkout_cmd = ['cvs', '-d', cvsserver, 'checkout', 'common']
            env = {'CVS_RSH': 'ssh'}

@ -453,14 +468,16 @@ class SCM(object):
            update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
            update_checkout_dir = sourcedir

-           # self.module may be empty, in which case the specfile should be in the top-level directory
+           # self.module may be empty, in which case the specfile should be in the top-level
+           # directory
            if self.module:
                # Treat the module as a directory inside the git repository
                sourcedir = '%s/%s' % (sourcedir, self.module)

        elif self.scmtype == 'GIT+SSH':
            if not self.user:
-               raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
+               raise koji.BuildError(
+                   'No user specified for repository access scheme: %s' % self.scheme)
            gitrepo = 'git+ssh://%s@%s%s' % (self.user, self.host, self.repository)
            commonrepo = os.path.dirname(gitrepo) + '/common'
            checkout_path = os.path.basename(self.repository)

@ -481,7 +498,8 @@ class SCM(object):
            update_checkout_cmd = ['git', 'reset', '--hard', self.revision]
            update_checkout_dir = sourcedir

-           # self.module may be empty, in which case the specfile should be in the top-level directory
+           # self.module may be empty, in which case the specfile should be in the top-level
+           # directory
            if self.module:
                # Treat the module as a directory inside the git repository
                sourcedir = '%s/%s' % (sourcedir, self.module)

@ -492,15 +510,18 @@ class SCM(object):
                scheme = scheme.split('+')[1]

            svnserver = '%s%s%s' % (scheme, self.host, self.repository)
-           module_checkout_cmd = ['svn', 'checkout', '-r', self.revision, '%s/%s' % (svnserver, self.module), self.module]
+           module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
+                                  '%s/%s' % (svnserver, self.module), self.module]
            common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]

        elif self.scmtype == 'SVN+SSH':
            if not self.user:
-               raise koji.BuildError('No user specified for repository access scheme: %s' % self.scheme)
+               raise koji.BuildError(
+                   'No user specified for repository access scheme: %s' % self.scheme)

            svnserver = 'svn+ssh://%s@%s%s' % (self.user, self.host, self.repository)
-           module_checkout_cmd = ['svn', 'checkout', '-r', self.revision, '%s/%s' % (svnserver, self.module), self.module]
+           module_checkout_cmd = ['svn', 'checkout', '-r', self.revision,
+                                  '%s/%s' % (svnserver, self.module), self.module]
            common_checkout_cmd = ['svn', 'checkout', '%s/common' % svnserver]

        else:

@ -513,8 +534,10 @@ class SCM(object):
        # Currently only required for GIT checkouts
        # Run the command in the directory the source was checked out into
        if self.scmtype.startswith('GIT') and globals().get('KOJIKAMID'):
-           _run(['git', 'config', 'core.autocrlf', 'true'], chdir=update_checkout_dir, fatal=True)
-           _run(['git', 'config', 'core.safecrlf', 'true'], chdir=update_checkout_dir, fatal=True)
+           _run(['git', 'config', 'core.autocrlf', 'true'],
+                chdir=update_checkout_dir, fatal=True)
+           _run(['git', 'config', 'core.safecrlf', 'true'],
+                chdir=update_checkout_dir, fatal=True)
        _run(update_checkout_cmd, chdir=update_checkout_dir, fatal=True)

        if self.use_common and not globals().get('KOJIKAMID'):

@ -583,7 +606,8 @@ class TaskManager(object):

    def registerHandler(self, entry):
        """register and index task handler"""
-       if isinstance(entry, type(koji.tasks.BaseTaskHandler)) and issubclass(entry, koji.tasks.BaseTaskHandler):
+       if isinstance(entry, type(koji.tasks.BaseTaskHandler)) and \
+               issubclass(entry, koji.tasks.BaseTaskHandler):
            for method in entry.Methods:
                self.handlers[method] = entry

@ -638,7 +662,9 @@ class TaskManager(object):
                # task not running - expire the buildroot
                # TODO - consider recycling hooks here (with strong sanity checks)
                self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br)
-               self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, to_list(self.tasks.keys())))
+               self.logger.debug(
+                   "Buildroot task: %r, Current tasks: %r" %
+                   (task_id, to_list(self.tasks.keys())))
                self.session.host.setBuildRootState(id, st_expired)
                continue
            if nolocal:

@ -678,7 +704,8 @@ class TaskManager(object):
            if not task:
                self.logger.warn("%s: invalid task %s" % (desc, br['task_id']))
                continue
-           if (task['state'] == koji.TASK_STATES['FAILED'] and age < self.options.failed_buildroot_lifetime):
+           if task['state'] == koji.TASK_STATES['FAILED'] and \
+                   age < self.options.failed_buildroot_lifetime:
                # XXX - this could be smarter
                # keep buildroots for failed tasks around for a little while
                self.logger.debug("Keeping failed buildroot: %s" % desc)

@ -1004,7 +1031,9 @@ class TaskManager(object):
            self.logger.info('%s (pid %i, taskID %i) is running' % (execname, pid, task_id))
        else:
            if signaled:
-               self.logger.info('%s (pid %i, taskID %i) was killed by signal %i' % (execname, pid, task_id, sig))
+               self.logger.info(
+                   '%s (pid %i, taskID %i) was killed by signal %i' %
+                   (execname, pid, task_id, sig))
            else:
                self.logger.info('%s (pid %i, taskID %i) exited' % (execname, pid, task_id))
            return True

@ -1041,7 +1070,8 @@ class TaskManager(object):
        if not os.path.isfile(proc_path):
            return None
        proc_file = open(proc_path)
-       procstats = [not field.isdigit() and field or int(field) for field in proc_file.read().split()]
+       procstats = [not field.isdigit() and field or int(field)
+                    for field in proc_file.read().split()]
        proc_file.close()

        cmd_path = '/proc/%i/cmdline' % pid

@ -1084,9 +1114,9 @@ class TaskManager(object):
        while parents:
            for ppid in parents[:]:
                for procstats in statsByPPID.get(ppid, []):
-                   # get the /proc entries with ppid as their parent, and append their pid to the list,
-                   # then recheck for their children
-                   # pid is the 0th field, ppid is the 3rd field
+                   # get the /proc entries with ppid as their parent, and append their pid to the
+                   # list, then recheck for their children pid is the 0th field, ppid is the 3rd
+                   # field
                    pids.append((procstats[0], procstats[1]))
                    parents.append(procstats[0])
                parents.remove(ppid)

@ -1154,7 +1184,8 @@ class TaskManager(object):
        availableMB = available // 1024 // 1024
        self.logger.debug("disk space available in '%s': %i MB", br_path, availableMB)
        if availableMB < self.options.minspace:
-           self.status = "Insufficient disk space at %s: %i MB, %i MB required" % (br_path, availableMB, self.options.minspace)
+           self.status = "Insufficient disk space at %s: %i MB, %i MB required" % \
+                         (br_path, availableMB, self.options.minspace)
            self.logger.warn(self.status)
            return False
        return True

@ -1189,7 +1220,9 @@ class TaskManager(object):
            return False
        if self.task_load > self.hostdata['capacity']:
            self.status = "Over capacity"
-           self.logger.info("Task load (%.2f) exceeds capacity (%.2f)" % (self.task_load, self.hostdata['capacity']))
+           self.logger.info(
+               "Task load (%.2f) exceeds capacity (%.2f)" %
+               (self.task_load, self.hostdata['capacity']))
            return False
        if len(self.tasks) >= self.options.maxjobs:
            # This serves as a backup to the capacity check and prevents

@ -1238,7 +1271,8 @@ class TaskManager(object):
                self.logger.warn('Error during host check')
                self.logger.warn(''.join(traceback.format_exception(*sys.exc_info())))
        if not valid_host:
-           self.logger.info(
-               'Skipping task %s (%s) due to host check', task['id'], task['method'])
            return False
        data = self.session.host.openTask(task['id'])
        if data is None:
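The procstats comprehension wrapped in the @ -1041 hunk relies on the pre-ternary `x and a or b` idiom to convert numeric /proc fields to int while leaving the rest as strings. A self-contained sketch with sample fields:

    # fields in the style of /proc/<pid>/stat (sample values, not a real process)
    fields = '1234 (kojid) S 1 1234 1234'.split()
    # non-digit fields stay strings, digit fields become ints
    procstats = [not field.isdigit() and field or int(field) for field in fields]
    assert procstats == [1234, '(kojid)', 'S', 1, 1234, 1234]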
@ -110,7 +110,8 @@ class CursorWrapper:
        try:
            return quote(operation, parameters)
        except Exception:
-           self.logger.exception('Unable to quote query:\n%s\nParameters: %s', operation, parameters)
+           self.logger.exception(
+               'Unable to quote query:\n%s\nParameters: %s', operation, parameters)
            return "INVALID QUERY"

    def preformat(self, sql, params):
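The exception() call above keeps the query and its parameters as separate logging arguments rather than pre-formatting them, so interpolation only happens when the record is actually emitted. A small sketch (logger name invented):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger('db.demo')
    try:
        raise ValueError('boom')
    except Exception:
        # %-style args are interpolated by the logging machinery, not by us
        logger.exception('Unable to quote query:\n%s\nParameters: %s',
                         'SELECT 1', {'x': 1})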
@ -154,10 +154,14 @@ LEGACY_SIGNATURES = {
        [['tag', 'newer_than', 'nvrs'], None, None, (None, None)],
    ],
    'createLiveMedia': [
-       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
+       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
+         'opts'],
+        None, None, (None,)],
    ],
    'createAppliance': [
-       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
+       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
+         'opts'],
+        None, None, (None,)],
    ],
    'livecd': [
        [['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],

@ -190,7 +194,9 @@ LEGACY_SIGNATURES = {
        [['spec_url', 'build_target', 'build', 'task', 'opts'], None, None, (None,)],
    ],
    'createLiveCD': [
-       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
+       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile',
+         'opts'],
+        None, None, (None,)],
    ],
    'appliance': [
        [['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],

@ -199,19 +205,25 @@ LEGACY_SIGNATURES = {
        [['name', 'version', 'arches', 'target', 'inst_tree', 'opts'], None, None, (None,)],
    ],
    'tagBuild': [
-       [['tag_id', 'build_id', 'force', 'fromtag', 'ignore_success'], None, None, (False, None, False)],
+       [['tag_id', 'build_id', 'force', 'fromtag', 'ignore_success'],
+        None, None, (False, None, False)],
    ],
    'chainmaven': [
        [['builds', 'target', 'opts'], None, None, (None,)],
    ],
    'newRepo': [
-       [['tag', 'event', 'src', 'debuginfo', 'separate_src'], None, None, (None, False, False, False)],
+       [['tag', 'event', 'src', 'debuginfo', 'separate_src'],
+        None, None, (None, False, False, False)],
    ],
    'createImage': [
-       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'inst_tree', 'opts'], None, None, (None,)],
+       [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info',
+         'inst_tree', 'opts'],
+        None, None, (None,)],
    ],
    'tagNotification': [
-       [['recipients', 'is_successful', 'tag_info', 'from_info', 'build_info', 'user_info', 'ignore_success', 'failure_msg'], None, None, (None, '')],
+       [['recipients', 'is_successful', 'tag_info', 'from_info', 'build_info', 'user_info',
+         'ignore_success', 'failure_msg'],
+        None, None, (None, '')],
    ],
    'buildArch': [
        [['pkg', 'root', 'arch', 'keep_srpm', 'opts'], None, None, (None,)],

@ -253,7 +265,9 @@ LEGACY_SIGNATURES = {
        [['options'], None, None, (None,)],
    ],
    'runroot': [
-       [['root', 'arch', 'command', 'keep', 'packages', 'mounts', 'repo_id', 'skip_setarch', 'weight', 'upload_logs', 'new_chroot'], None, None, (False, [], [], None, False, None, None, False)],
+       [['root', 'arch', 'command', 'keep', 'packages', 'mounts', 'repo_id', 'skip_setarch',
+         'weight', 'upload_logs', 'new_chroot'],
+        None, None, (False, [], [], None, False, None, None, False)],
    ],
    'distRepo': [
        [['tag', 'repo_id', 'keys', 'task_opts'], None, None, None],

@ -400,7 +414,9 @@ class BaseTaskHandler(object):
                    self.session.getTaskResult(task)
                    checked.add(task)
                except (koji.GenericError, six.moves.xmlrpc_client.Fault):
-                   self.logger.info("task %s failed or was canceled, cancelling unfinished tasks" % task)
+                   self.logger.info(
+                       "task %s failed or was canceled, cancelling unfinished tasks" %
+                       task)
                    self.session.cancelTaskChildren(self.id)
                    # reraise the original error now, rather than waiting for
                    # an error in taskWaitResults()

@ -743,8 +759,10 @@ class RestartHostsTask(BaseTaskHandler):
        my_tasks = None
        for host in hosts:
            # note: currently task assignments bypass channel restrictions
-           task1 = self.subtask('restart', [host], assign=host['id'], label="restart %i" % host['id'])
-           task2 = self.subtask('restartVerify', [task1, host], assign=host['id'], label="sleep %i" % host['id'])
+           task1 = self.subtask('restart', [host],
+                                assign=host['id'], label="restart %i" % host['id'])
+           task2 = self.subtask('restartVerify', [task1, host],
+                                assign=host['id'], label="sleep %i" % host['id'])
            subtasks.append(task1)
            subtasks.append(task2)
            if host['id'] == this_host:

@ -790,8 +808,10 @@ class DependantTask(BaseTaskHandler):

        subtasks = []
        for task in task_list:
-           # **((len(task)>2 and task[2]) or {}) expands task[2] into opts if it exists, allows for things like 'priority=15'
-           task_id = self.session.host.subtask(method=task[0], arglist=task[1], parent=self.id, **((len(task) > 2 and task[2]) or {}))
+           # **((len(task)>2 and task[2]) or {}) expands task[2] into opts if it exists, allows
+           # for things like 'priority=15'
+           task_id = self.session.host.subtask(method=task[0], arglist=task[1], parent=self.id,
+                                               **((len(task) > 2 and task[2]) or {}))
            if task_id:
                subtasks.append(task_id)
        if subtasks:
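The DependantTask hunk re-wraps the optional-kwargs idiom its comment describes: `**((len(task) > 2 and task[2]) or {})` expands a trailing opts dict when present and nothing otherwise. A minimal sketch with a stand-in for session.host.subtask() (the signature is invented for the demo):

    def subtask(method, arglist, parent, priority=10):
        # stand-in for session.host.subtask()
        return (method, arglist, parent, priority)

    for task in [['build', ['foo-1.0-1.src.rpm']],                   # no opts element
                 ['build', ['bar-1.0-1.src.rpm'], {'priority': 15}]]:
        # expands task[2] into keyword args when present, else expands nothing
        result = subtask(method=task[0], arglist=task[1], parent=1,
                         **((len(task) > 2 and task[2]) or {}))
        print(result)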
@ -54,7 +54,8 @@ def deprecated(message):


def _changelogDate(cldate):
-   return time.strftime('%a %b %d %Y', time.strptime(koji.formatTime(cldate), '%Y-%m-%d %H:%M:%S'))
+   return time.strftime('%a %b %d %Y',
+                        time.strptime(koji.formatTime(cldate), '%Y-%m-%d %H:%M:%S'))


def formatChangelog(entries):

@ -813,7 +814,8 @@ def parse_maven_param(confs, chain=False, scratch=False, section=None):
        else:
            raise ValueError("Section %s does not exist in: %s" % (section, ', '.join(confs)))
    elif len(builds) > 1:
-       raise ValueError("Multiple sections in: %s, you must specify the section" % ', '.join(confs))
+       raise ValueError(
+           "Multiple sections in: %s, you must specify the section" % ', '.join(confs))
    return builds
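_changelogDate() above is a strftime/strptime round trip split across two lines; for a concrete timestamp (the value is invented, but koji.formatTime() produces strings of this shape) it behaves like:

    import time

    cldate = '2007-08-01 12:00:00'   # made-up example of koji.formatTime() output
    out = time.strftime('%a %b %d %Y',
                        time.strptime(cldate, '%Y-%m-%d %H:%M:%S'))
    assert out == 'Wed Aug 01 2007'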
@ -47,7 +47,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
                options.append(o)
        rel_path = path[len(mount_data['mountpoint']):]
        rel_path = rel_path[1:] if rel_path.startswith('/') else rel_path
-       res = (os.path.join(mount_data['path'], rel_path), path, mount_data['fstype'], ','.join(options))
+       res = (os.path.join(mount_data['path'], rel_path), path, mount_data['fstype'],
+              ','.join(options))
        return res

    def _read_config(self):

@ -94,11 +95,15 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
        except six.moves.configparser.NoOptionError:
            raise koji.GenericError("bad config: missing options in %s section" % section_name)

-       for path in self.config['default_mounts'] + self.config['safe_roots'] + [x[0] for x in self.config['path_subs']]:
+       for path in self.config['default_mounts'] + self.config['safe_roots'] + \
+               [x[0] for x in self.config['path_subs']]:
            if not path.startswith('/'):
-               raise koji.GenericError("bad config: all paths (default_mounts, safe_roots, path_subs) needs to be absolute: %s" % path)
+               raise koji.GenericError(
+                   "bad config: all paths (default_mounts, safe_roots, path_subs) needs to be "
+                   "absolute: %s" % path)

-   def handler(self, root, arch, command, keep=False, packages=[], mounts=[], repo_id=None, skip_setarch=False, weight=None, upload_logs=None, new_chroot=None):
+   def handler(self, root, arch, command, keep=False, packages=[], mounts=[], repo_id=None,
+               skip_setarch=False, weight=None, upload_logs=None, new_chroot=None):
        """Create a buildroot and run a command (as root) inside of it

        Command may be a string or a list.

@ -141,15 +146,19 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
                    break
            else:
                # no overlap
-               raise koji.BuildError("host does not match tag arches: %s (%s)" % (root, tag_arches))
+               raise koji.BuildError(
+                   "host does not match tag arches: %s (%s)" % (root, tag_arches))
        else:
            br_arch = arch
        if repo_id:
            repo_info = self.session.repoInfo(repo_id, strict=True)
            if repo_info['tag_name'] != root:
-               raise koji.BuildError("build tag (%s) does not match repo tag (%s)" % (root, repo_info['tag_name']))
+               raise koji.BuildError(
+                   "build tag (%s) does not match repo tag (%s)" % (root, repo_info['tag_name']))
            if repo_info['state'] not in (koji.REPO_STATES['READY'], koji.REPO_STATES['EXPIRED']):
-               raise koji.BuildError("repos in the %s state may not be used by runroot" % koji.REPO_STATES[repo_info['state']])
+               raise koji.BuildError(
+                   "repos in the %s state may not be used by runroot" %
+                   koji.REPO_STATES[repo_info['state']])
        else:
            repo_info = self.session.getRepo(root)
            if not repo_info:

@ -186,12 +195,15 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
        cmdstr = ' '.join(["'%s'" % arg.replace("'", r"'\''") for arg in command])
        # A nasty hack to put command output into its own file until mock can be
        # patched to do something more reasonable than stuff everything into build.log
-       cmdargs = ['/bin/sh', '-c', "{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; exit ${PIPESTATUS[0]}" % cmdstr]
+       cmdargs = ['/bin/sh', '-c',
+                  "{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; exit "
+                  "${PIPESTATUS[0]}" % cmdstr]

        # always mount /mnt/redhat (read-only)
        # always mount /mnt/iso (read-only)
        # also need /dev bind mount
-       self.do_mounts(rootdir, [self._get_path_params(x) for x in self.config['default_mounts']])
+       self.do_mounts(rootdir,
+                      [self._get_path_params(x) for x in self.config['default_mounts']])
        self.do_extra_mounts(rootdir, mounts)
        mock_cmd = ['chroot']
        if new_chroot:

@ -199,7 +211,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
        elif new_chroot is False:  # None -> no option added
            mock_cmd.append('--old-chroot')
        if skip_setarch:
-           # we can't really skip it, but we can set it to the current one instead of of the chroot one
+           # we can't really skip it, but we can set it to the current one instead of of the
+           # chroot one
            myarch = platform.uname()[5]
            mock_cmd.extend(['--arch', myarch])
        mock_cmd.append('--')

@ -279,7 +292,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
            cmd = ['mount', '-t', type, '-o', opts, dev, mpoint]
            self.logger.info("Mount command: %r" % cmd)
            koji.ensuredir(mpoint)
-           status = log_output(self.session, cmd[0], cmd, logfile, uploadpath, logerror=True, append=True)
+           status = log_output(self.session, cmd[0], cmd, logfile, uploadpath,
+                               logerror=True, append=True)
            if not isSuccess(status):
                error = koji.GenericError("Unable to mount %s: %s"
                                          % (mpoint, parseStatus(status, cmd)))

@ -306,7 +320,8 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
        failed = []
        self.logger.info("Unmounting (runroot): %s" % mounts)
        for dir in mounts:
-           proc = subprocess.Popen(["umount", "-l", dir], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+           proc = subprocess.Popen(["umount", "-l", dir],
+                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if proc.wait() != 0:
                output = proc.stdout.read()
                output += proc.stderr.read()
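Two idioms meet in the @ -186 hunk: each argument is single-quoted for the shell with embedded quotes escaped as '\'' , and the long one-liner is split across adjacent string literals, which Python concatenates back into one string. A sketch with an invented command:

    command = ['echo', "it's alive"]
    # quote each argument for the shell, escaping embedded single quotes
    cmdstr = ' '.join(["'%s'" % arg.replace("'", r"'\''") for arg in command])
    # adjacent literals concatenate, so the wrapped form builds the same string
    cmdargs = ['/bin/sh', '-c',
               "{ %s; } < /dev/null 2>&1 | /usr/bin/tee /builddir/runroot.log; exit "
               "${PIPESTATUS[0]}" % cmdstr]
    print(cmdargs[2])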
@ -22,8 +22,10 @@ def handle_runroot(options, session, args):
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.disable_interspersed_args()
-   parser.add_option("-p", "--package", action="append", default=[], help=_("make sure this package is in the chroot"))
-   parser.add_option("-m", "--mount", action="append", default=[], help=_("mount this directory read-write in the chroot"))
+   parser.add_option("-p", "--package", action="append", default=[],
+                     help=_("make sure this package is in the chroot"))
+   parser.add_option("-m", "--mount", action="append", default=[],
+                     help=_("mount this directory read-write in the chroot"))
    parser.add_option("--skip-setarch", action="store_true", default=False,
                      help=_("bypass normal setarch in the chroot"))
    parser.add_option("-w", "--weight", type='int', help=_("set task weight"))

@ -39,7 +41,8 @@ def handle_runroot(options, session, args):
    parser.add_option("--repo-id", type="int", help=_("ID of the repo to use"))
    parser.add_option("--nowait", action="store_false", dest="wait",
                      default=True, help=_("Do not wait on task"))
-   parser.add_option("--watch", action="store_true", help=_("Watch task instead of printing runroot.log"))
+   parser.add_option("--watch", action="store_true",
+                     help=_("Watch task instead of printing runroot.log"))
    parser.add_option("--quiet", action="store_true", default=options.quiet,
                      help=_("Do not print the task information"))
@ -14,7 +14,8 @@ def handle_save_failed_tree(options, session, args):
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("-f", "--full", action="store_true", default=False,
-                     help=_("Download whole tree, if not specified, only builddir will be downloaded"))
+                     help=_("Download whole tree, if not specified, "
+                            "only builddir will be downloaded"))
    parser.add_option("-t", "--task", action="store_const", dest="mode",
                      const="task", default="task",
                      help=_("Treat ID as a task ID (the default)"))

@ -69,4 +70,5 @@ def handle_save_failed_tree(options, session, args):
        return
    else:
        session.logout()
-       return watch_tasks(session, [task_id], quiet=opts.quiet, poll_interval=options.poll_interval)
+       return watch_tasks(session, [task_id],
+                          quiet=opts.quiet, poll_interval=options.poll_interval)
@ -40,10 +40,12 @@ def saveFailedTree(buildrootID, full=False, **opts):
    taskID = brinfo['task_id']
    task_info = kojihub.Task(taskID).getInfo()
    if task_info['state'] != koji.TASK_STATES['FAILED']:
-       raise koji.PreBuildError("Task %s has not failed. Only failed tasks can upload their buildroots." % taskID)
+       raise koji.PreBuildError(
+           "Task %s has not failed. Only failed tasks can upload their buildroots." % taskID)
    elif allowed_methods != '*' and task_info['method'] not in allowed_methods:
-       raise koji.PreBuildError("Only %s tasks can upload their buildroots (Task %s is %s)." %
-                                (', '.join(allowed_methods), task_info['id'], task_info['method']))
+       raise koji.PreBuildError(
+           "Only %s tasks can upload their buildroots (Task %s is %s)." %
+           (', '.join(allowed_methods), task_info['id'], task_info['method']))
    elif task_info["owner"] != context.session.user_id and not context.session.hasPerm('admin'):
        raise koji.ActionNotAllowed("Only owner of failed task or 'admin' can run this task.")
    elif not kojihub.get_host(task_info['host_id'])['enabled']:
@ -251,7 +251,7 @@ Options:
      --quiet               Do not print the header information
      --paths               Show the file paths
      --sigs                Show signatures
      --type=TYPE           Show builds of the given type only. Currently supported
                            types: maven, win, image
      --event=EVENT#        query at event
      --ts=TIMESTAMP        query at last event before timestamp
12  util/koji-gc
@ -364,7 +364,8 @@ def ensure_connection(session):
    except requests.exceptions.ConnectionError:
        error(_("Error: Unable to connect to server"))
    if ret != koji.API_VERSION:
-       warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
+       warn(_("WARNING: The server is at API version %d and the client is at %d" %
+              (ret, koji.API_VERSION)))


def has_krb_creds():

@ -394,7 +395,8 @@ def activate_session(session):
    elif has_krb_creds() or (options.keytab and options.principal):
        try:
            if options.keytab and options.principal:
-               session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
+               session.krb_login(principal=options.principal, keytab=options.keytab,
+                                 proxyuser=options.runas)
            else:
                session.krb_login(proxyuser=options.runas)
        except krbV.Krb5Error as e:

@ -503,7 +505,8 @@ def handle_trash():
            continue
        if refs.get('archives'):
            if options.debug:
-               print("[%i/%i] Build has %i archive references: %s" % (i, N, len(refs['archives']), nvr))
+               print("[%i/%i] Build has %i archive references: %s" %
+                     (i, N, len(refs['archives']), nvr))
                # pprint.pprint(refs['archives'])
            continue
        if refs.get('component_of'):

@ -941,7 +944,8 @@ def handle_prune():
                    else:
                        print("Untagging build %s from %s" % (nvr, tagname))
                        try:
-                           session.untagBuildBypass(taginfo['id'], entry['build_id'], force=bypass)
+                           session.untagBuildBypass(taginfo['id'], entry['build_id'],
+                                                    force=bypass)
                            untagged.setdefault(nvr, {})[tagname] = 1
                        except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
                            print("Warning: untag operation failed: %s" % e)
@ -145,13 +145,15 @@ def get_options():
    parser.add_option("--rules-ignorelist",
                      help=_("Rules: list of packages to ignore"))
    parser.add_option("--rules-excludelist",
-                     help=_("Rules: list of packages to are excluded using ExcludeArch or ExclusiveArch"))
+                     help=_("Rules: list of packages to are excluded using ExcludeArch or "
+                            "ExclusiveArch"))
    parser.add_option("--rules-includelist",
                      help=_("Rules: list of packages to always include"))
    parser.add_option("--rules-protectlist",
                      help=_("Rules: list of package names to never replace"))
    parser.add_option("--tag-build", action="store_true", default=False,
-                     help=_("tag successful builds into the tag we are building, default is to not tag"))
+                     help=_("tag successful builds into the tag we are building, default is to "
+                            "not tag"))
    parser.add_option("--logfile",
                      help=_("file where everything gets logged"))
    parser.add_option("--arches",

@ -298,14 +300,16 @@ def activate_session(session):

    if os.path.isfile(options.auth_cert):
        # authenticate using SSL client cert
-       session.ssl_login(cert=options.auth_cert, serverca=options.serverca, proxyuser=options.runas)
+       session.ssl_login(cert=options.auth_cert, serverca=options.serverca,
+                         proxyuser=options.runas)
    elif options.user:
        # authenticate using user/password
        session.login()
    elif krbV:
        try:
            if options.keytab and options.principal:
-               session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
+               session.krb_login(principal=options.principal, keytab=options.keytab,
+                                 proxyuser=options.runas)
            else:
                session.krb_login(proxyuser=options.runas)
        except krbV.Krb5Error as e:

@ -537,12 +541,14 @@ class TrackedBuild(object):
                # each buildroot had this as a base package
                base.append(name)
        if len(tags) > 1:
-           log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, to_list(tags.keys())))
+           log("Warning: found multiple buildroot tags for %s: %s" %
+               (self.nvr, to_list(tags.keys())))
            counts = sorted([(n, tag) for tag, n in six.iteritems(tags)])
            tag = counts[-1][1]
        else:
            tag = to_list(tags.keys())[0]
-       # due bugs in used tools mainline koji instance could store empty buildroot infos for builds
+       # due bugs in used tools mainline koji instance could store empty buildroot infos for
+       # builds
        if len(builds) == 0:
            self.setState("noroot")
        self.deps = builds

@ -655,7 +661,8 @@ class BuildTracker(object):
            return -1

    def newerBuild(self, build, tag):
-       # XXX: secondary arches need a policy to say if we have newer build localy it will be the substitute
+       # XXX: secondary arches need a policy to say if we have newer build localy it will be the
+       # substitute
        localBuilds = session.listTagged(tag, inherit=True, package=str(build.name))
        newer = None
        parentevr = (str(build.epoch), build.version, build.release)

@ -664,14 +671,16 @@ class BuildTracker(object):
            latestevr = (str(b['epoch']), b['version'], b['release'])
            newestRPM = self.rpmvercmp(parentevr, latestevr)
            if options.debug:
-               log("remote evr: %s \nlocal evr: %s \nResult: %s" % (parentevr, latestevr, newestRPM))
+               log("remote evr: %s \nlocal evr: %s \nResult: %s" %
+                   (parentevr, latestevr, newestRPM))
            if newestRPM == -1:
                newer = b
            else:
                break
        # the local is newer
        if newer is not None:
-           info = session.getBuild("%s-%s-%s" % (str(newer['name']), newer['version'], newer['release']))
+           info = session.getBuild("%s-%s-%s" %
+                                   (str(newer['name']), newer['version'], newer['release']))
            if info:
                build = LocalBuild(info)
                self.substitute_idx[parentnvr] = build

@ -751,7 +760,8 @@ class BuildTracker(object):
            if depth > 0:
                log("%sDep replaced: %s->%s" % (head, build.nvr, replace))
            return build
-       if options.prefer_new and (depth > 0) and (tag is not None) and not (build.state == "common"):
+       if options.prefer_new and (depth > 0) and (tag is not None) and \
+               not (build.state == "common"):
            latestBuild = self.newerBuild(build, tag)
            if latestBuild is not None:
                build.substitute = latestBuild.nvr

@ -875,7 +885,8 @@ class BuildTracker(object):
            finally:
                os.umask(old_umask)
        else:
-           # TODO - would be possible, using uploadFile directly, to upload without writing locally.
+           # TODO - would be possible, using uploadFile directly,
+           # to upload without writing locally.
            # for now, though, just use uploadWrapper
            koji.ensuredir(options.workpath)
            dst = "%s/%s" % (options.workpath, fn)

@ -1053,7 +1064,8 @@ class BuildTracker(object):
        session.groupListAdd(taginfo['id'], 'build', force=True)
        # using force in case group is blocked. This shouldn't be the case, but...
        for pkg_name in drop_pkgs:
-           # in principal, our tag should not have inheritance, so the remove call is the right thing
+           # in principal, our tag should not have inheritance,
+           # so the remove call is the right thing
            session.groupPackageListRemove(taginfo['id'], 'build', pkg_name)
        for pkg_name in add_pkgs:
            session.groupPackageListAdd(taginfo['id'], 'build', pkg_name)

@ -1278,7 +1290,8 @@ def main(args):
        logfile = None
    if logfile is not None:
        log("logging to %s" % filename)
-       os.write(logfile, "\n\n========================================================================\n")
+       os.write(logfile,
+                "\n\n========================================================================\n")

    if options.build:
        binfo = remote.getBuild(options.build, strict=True)
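newerBuild() above compares (epoch, version, release) tuples through self.rpmvercmp() and treats -1 as "the local build is newer". A stub sketch of that decision (rpmvercmp here is a simplified stand-in; the real method follows RPM's EVR comparison rules, and the values are made up):

    def rpmvercmp(evr1, evr2):
        # simplified stand-in: real comparison uses RPM's version ordering
        return (evr1 > evr2) - (evr1 < evr2)

    parentevr = ('0', '1.0', '1')   # remote build EVR
    latestevr = ('0', '1.0', '2')   # local candidate EVR
    if rpmvercmp(parentevr, latestevr) == -1:
        print('local build is newer; use it as the substitute')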
@@ -36,7 +36,8 @@ def clean_reservations(cursor, vacuum, test, age):
 
 
 def clean_notification_tasks(cursor, vacuum, test, age):
-    q = " FROM task WHERE method = 'build' AND completion_time < NOW() - '%s days'::interval" % int(age)
+    q = " FROM task WHERE method = 'build' AND completion_time < NOW() - '%s days'::interval" % \
+        int(age)
     if options.verbose:
         cursor.execute("SELECT COUNT(*) " + q)
         rows = cursor.fetchall()[0][0]
@@ -95,7 +96,8 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
         return
 
     # delete standard buildroots
-    cursor.execute("DELETE FROM standard_buildroot WHERE task_id IN (SELECT task_id FROM temp_scratch_tasks)")
+    cursor.execute(
+        "DELETE FROM standard_buildroot WHERE task_id IN (SELECT task_id FROM temp_scratch_tasks)")
 
     # delete tasks finally
     cursor.execute("DELETE FROM task WHERE id IN (SELECT task_id FROM temp_scratch_tasks)")
@@ -106,7 +108,8 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
 
 
 def clean_buildroots(cursor, vacuum, test):
-    q = " FROM buildroot WHERE cg_id IS NULL AND id NOT IN (SELECT buildroot_id FROM standard_buildroot)"
+    q = " FROM buildroot " \
+        "WHERE cg_id IS NULL AND id NOT IN (SELECT buildroot_id FROM standard_buildroot)"
 
     if options.verbose:
         cursor.execute("SELECT COUNT(*) " + q)
@@ -206,7 +209,8 @@ if __name__ == "__main__":
     clean_sessions(cursor, options.vacuum, options.test, options.sessions_age)
     clean_reservations(cursor, options.vacuum, options.test, options.reservations_age)
     if options.tag_notifications:
-        clean_notification_tasks(cursor, options.vacuum, options.test, age=options.tag_notifications_age)
+        clean_notification_tasks(cursor, options.vacuum, options.test,
+                                 age=options.tag_notifications_age)
     if options.scratch:
         clean_scratch_tasks(cursor, options.vacuum, options.test, age=options.scratch_age)
     if options.buildroots:
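Two different string-splitting styles appear in these hunks: a backslash continuation after the % operator, and splitting the literal itself into adjacent pieces that the compiler joins back together. Either way the executed SQL is unchanged. A sketch, assuming any DB-API cursor:

    def count_old_build_tasks(cursor, age):
        # Adjacent string literals are concatenated at compile time, so this
        # query is byte-for-byte the same as the one-line original.
        q = " FROM task WHERE method = 'build'" \
            " AND completion_time < NOW() - '%s days'::interval" % int(age)
        cursor.execute("SELECT COUNT(*) " + q)
        return cursor.fetchall()[0][0]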
31
util/kojira
@@ -269,7 +269,8 @@ class RepoManager(object):
         self._local.session = value
 
     def printState(self):
-        self.logger.debug('Tracking %i repos, %i child processes', len(self.repos), len(self.delete_pids))
+        self.logger.debug('Tracking %i repos, %i child processes',
+                          len(self.repos), len(self.delete_pids))
         for tag_id, task_id in six.iteritems(self.tasks):
             self.logger.debug("Tracking task %s for tag %s", task_id, tag_id)
         for pid, desc in six.iteritems(self.delete_pids):
@@ -348,8 +349,9 @@ class RepoManager(object):
         if repo:
             # we're already tracking it
             if repo.state != data['state']:
-                self.logger.info('State changed for repo %s: %s -> %s'
-                                 % (repo_id, koji.REPO_STATES[repo.state], koji.REPO_STATES[data['state']]))
+                self.logger.info(
+                    'State changed for repo %s: %s -> %s',
+                    repo_id, koji.REPO_STATES[repo.state], koji.REPO_STATES[data['state']])
                 repo.state = data['state']
         else:
             self.logger.info('Found repo %s, state=%s'
@@ -357,7 +359,7 @@ class RepoManager(object):
             repo = ManagedRepo(self, data)
             self.repos[repo_id] = repo
             if not getTag(self.session, repo.tag_id) and not repo.expired():
-                self.logger.info('Tag %d for repo %d disappeared, expiring.' % (repo.tag_id, repo_id))
+                self.logger.info('Tag %d for repo %d disappeared, expiring.', repo.tag_id, repo_id)
                 repo.expire()
         if len(self.repos) > len(repodata):
             # This shouldn't normally happen, but might if someone else calls
@@ -491,20 +493,23 @@ class RepoManager(object):
                 self.logger.debug("did not expect %s; age: %s",
                                   repodir, age)
                 if age > max_age:
-                    self.logger.info("Removing unexpected directory (no such repo): %s", repodir)
+                    self.logger.info(
+                        "Removing unexpected directory (no such repo): %s", repodir)
                     if symlink:
                         os.unlink(repodir)
                     else:
                         self.rmtree(repodir)
                 continue
             if rinfo['tag_name'] != tag:
-                self.logger.warn("Tag name mismatch (rename?): %s vs %s", tag, rinfo['tag_name'])
+                self.logger.warn(
+                    "Tag name mismatch (rename?): %s vs %s", tag, rinfo['tag_name'])
                 continue
             if rinfo['state'] in (koji.REPO_DELETED, koji.REPO_PROBLEM):
                 age = time.time() - max(rinfo['create_ts'], dir_ts)
                 self.logger.debug("potential removal candidate: %s; age: %s" % (repodir, age))
                 if age > max_age:
-                    logger.info("Removing stray repo (state=%s): %s" % (koji.REPO_STATES[rinfo['state']], repodir))
+                    logger.info("Removing stray repo (state=%s): %s",
+                                koji.REPO_STATES[rinfo['state']], repodir)
                     if symlink:
                         os.unlink(repodir)
                     else:
@@ -622,11 +627,12 @@ class RepoManager(object):
             tstate = koji.TASK_STATES[tinfo['state']]
             tag_id = self.tasks[task_id]['tag_id']
             if tstate == 'CLOSED':
-                self.logger.info("Finished: newRepo task %s for tag %s" % (task_id, tag_id))
+                self.logger.info("Finished: newRepo task %s for tag %s", task_id, tag_id)
                 self.recent_tasks[task_id] = time.time()
                 del self.tasks[task_id]
             elif tstate in ('CANCELED', 'FAILED'):
-                self.logger.info("Problem: newRepo task %s for tag %s is %s" % (task_id, tag_id, tstate))
+                self.logger.info(
+                    "Problem: newRepo task %s for tag %s is %s", task_id, tag_id, tstate)
                 self.recent_tasks[task_id] = time.time()
                 del self.tasks[task_id]
             else:
@@ -635,7 +641,8 @@ class RepoManager(object):
 
         # also check other newRepo tasks
         repo_tasks = self.session.listTasks(opts={'method': 'newRepo',
-                                                  'state': ([koji.TASK_STATES[s] for s in ('FREE', 'OPEN')])})
+                                                  'state': ([koji.TASK_STATES[s]
+                                                             for s in ('FREE', 'OPEN')])})
         others = [t for t in repo_tasks if t['id'] not in self.tasks]
         for tinfo in others:
             if tinfo['id'] not in self.other_tasks:
@@ -947,8 +954,8 @@ def get_options():
                 'max_delete_processes', 'max_repo_tasks_maven',
                 'delete_batch_size', 'dist_repo_lifetime', 'sleeptime',
                 'recent_tasks_lifetime')
-    str_opts = ('topdir', 'server', 'user', 'password', 'logfile', 'principal', 'keytab', 'krbservice',
-                'cert', 'ca', 'serverca', 'debuginfo_tags',
+    str_opts = ('topdir', 'server', 'user', 'password', 'logfile', 'principal', 'keytab',
+                'krbservice', 'cert', 'ca', 'serverca', 'debuginfo_tags',
                 'source_tags', 'separate_source_tags', 'ignore_tags')  # FIXME: remove ca here
     bool_opts = ('verbose', 'debug', 'ignore_stray_repos', 'offline_retry',
                  'krb_rdns', 'krb_canon_host', 'no_ssl_verify')
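Several of the kojira hunks above go a step beyond wrapping: logger.info('... %s' % x) becomes logger.info('... %s', x). With the arguments passed separately, the logging module defers %-interpolation until it knows the record will actually be emitted, and log aggregators see a stable format string. A sketch of the two forms:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('koji.repo')

    def report_state_change(repo_id, old_state, new_state):
        # Eager: the message is built even if INFO is filtered out.
        # logger.info('State changed for repo %s: %s -> %s' % (repo_id, old_state, new_state))
        # Deferred: logging interpolates only when the record is emitted.
        logger.info('State changed for repo %s: %s -> %s', repo_id, old_state, new_state)

    report_state_change(42, 'INIT', 'READY')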
@@ -183,17 +183,20 @@ class WindowsBuild(object):
     def checkout(self):
         """Checkout sources, winspec, and patches, and apply patches"""
         src_scm = SCM(self.source_url)  # noqa: F821
-        self.source_dir = src_scm.checkout(ensuredir(os.path.join(self.workdir, 'source')))  # noqa: F821
+        self.source_dir = src_scm.checkout(
+            ensuredir(os.path.join(self.workdir, 'source')))  # noqa: F821
         self.zipDir(self.source_dir, os.path.join(self.workdir, 'sources.zip'))
         if 'winspec' in self.task_opts:
             spec_scm = SCM(self.task_opts['winspec'])  # noqa: F821
-            self.spec_dir = spec_scm.checkout(ensuredir(os.path.join(self.workdir, 'spec')))  # noqa: F821
+            self.spec_dir = spec_scm.checkout(
+                ensuredir(os.path.join(self.workdir, 'spec')))  # noqa: F821
             self.zipDir(self.spec_dir, os.path.join(self.workdir, 'spec.zip'))
         else:
             self.spec_dir = self.source_dir
         if 'patches' in self.task_opts:
             patch_scm = SCM(self.task_opts['patches'])  # noqa: F821
-            self.patches_dir = patch_scm.checkout(ensuredir(os.path.join(self.workdir, 'patches')))  # noqa: F821
+            self.patches_dir = patch_scm.checkout(
+                ensuredir(os.path.join(self.workdir, 'patches')))  # noqa: F821
             self.zipDir(self.patches_dir, os.path.join(self.workdir, 'patches.zip'))
             self.applyPatches(self.source_dir, self.patches_dir)
         self.virusCheck(self.workdir)
@@ -207,7 +210,8 @@ class WindowsBuild(object):
             raise BuildError('no patches found at %s' % patchdir)  # noqa: F821
         patches.sort()
         for patch in patches:
-            cmd = ['/bin/patch', '--verbose', '-d', sourcedir, '-p1', '-i', os.path.join(patchdir, patch)]
+            cmd = ['/bin/patch', '--verbose', '-d', sourcedir, '-p1', '-i',
+                   os.path.join(patchdir, patch)]
             run(cmd, fatal=True)
 
     def loadConfig(self):
@@ -241,7 +245,8 @@ class WindowsBuild(object):
         # absolute paths, or without a path in which case it is searched for
         # on the PATH.
         if conf.has_option('building', 'preinstalled'):
-            self.preinstalled.extend([e.strip() for e in conf.get('building', 'preinstalled').split('\n') if e])
+            self.preinstalled.extend(
+                [e.strip() for e in conf.get('building', 'preinstalled').split('\n') if e])
 
         # buildrequires and provides are multi-valued (space-separated)
         for br in conf.get('building', 'buildrequires').split():
@@ -336,7 +341,8 @@ class WindowsBuild(object):
         with open(destpath, 'w') as destfile:
             offset = 0
             while True:
-                encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576, brtype)
+                encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576,
+                                              brtype)
                 if not encoded:
                     break
                 data = base64.b64decode(encoded)
@@ -349,9 +355,11 @@ class WindowsBuild(object):
         if 'checksum_type' in fileinfo:
             digest = checksum.hexdigest()
             if fileinfo['checksum'] != digest:
-                raise BuildError('checksum validation failed for %s, %s (computed) != %s (provided)' %  # noqa: F821
-                                 (destpath, digest, fileinfo['checksum']))
-            self.logger.info('Retrieved %s (%s bytes, %s: %s)', destpath, offset, checksum_type, digest)
+                raise BuildError(  # noqa: F821
+                    'checksum validation failed for %s, %s (computed) != %s (provided)' %
+                    (destpath, digest, fileinfo['checksum']))
+            self.logger.info(
+                'Retrieved %s (%s bytes, %s: %s)', destpath, offset, checksum_type, digest)
         else:
             self.logger.info('Retrieved %s (%s bytes)', destpath, offset)
 
@@ -409,7 +417,8 @@ class WindowsBuild(object):
 
     def cmdBuild(self):
         """Do the build: run the execute line(s) with cmd.exe"""
-        tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat', dir='/cygdrive/c/Windows/Temp')
+        tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat',
+                                          dir='/cygdrive/c/Windows/Temp')
         script = os.fdopen(tmpfd, 'w')
         for attr in ['source_dir', 'spec_dir', 'patches_dir']:
             val = getattr(self, attr)
@@ -630,7 +639,8 @@ def get_mgmt_server():
     # supported by python/cygwin/Windows
     task_port = server.getPort(macaddr)
     logger.debug('found task-specific port %s', task_port)
-    return six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (gateway, task_port), allow_none=True)
+    return six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (gateway, task_port),
+                                               allow_none=True)
 
 
 def get_options():
@@ -641,8 +651,10 @@ def get_options():
     """
     parser = OptionParser(usage=usage)
     parser.add_option('-d', '--debug', action='store_true', help='Log debug statements')
-    parser.add_option('-i', '--install', action='store_true', help='Install this daemon as a service', default=False)
-    parser.add_option('-u', '--uninstall', action='store_true', help='Uninstall this daemon if it was installed previously as a service', default=False)
+    parser.add_option('-i', '--install', action='store_true', default=False,
+                      help='Install this daemon as a service')
+    parser.add_option('-u', '--uninstall', action='store_true', default=False,
+                      help='Uninstall this daemon if it was installed previously as a service')
     (options, args) = parser.parse_args()
     return options
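Two details in the hunks above are easy to miss. First, the # noqa: F821 markers travel with the wrapped code: flake8 honours noqa per physical line, so after splitting a call the marker must sit on the line that still contains the undefined name. Second, in get_options() the keyword arguments are reordered so each long help string fits on its own continuation line. A runnable sketch of the latter, using optparse as the original does:

    from optparse import OptionParser

    def get_options():
        parser = OptionParser(usage='%prog [options]')
        # Putting default= before help= lets the long help text occupy a
        # continuation line of its own, under the 99-character limit.
        parser.add_option('-i', '--install', action='store_true', default=False,
                          help='Install this daemon as a service')
        parser.add_option('-u', '--uninstall', action='store_true', default=False,
                          help='Uninstall this daemon if it was installed previously as a service')
        return parser.parse_args()[0]

    options = get_options()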
63
vm/kojivmd
@@ -269,9 +269,11 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):
 
     def __init__(self, addr, port):
         if sys.version_info[:2] <= (2, 4):
-            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port), logRequests=False)
+            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port),
+                                                                logRequests=False)
         else:
-            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port), logRequests=False,
+            six.moves.xmlrpc_server.SimpleXMLRPCServer.__init__(self, (addr, port),
+                                                                logRequests=False,
                                                                 allow_none=True)
         self.logger = logging.getLogger('koji.vm.DaemonXMLRPCServer')
         self.socket.settimeout(5)
@@ -307,7 +309,8 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):
             else:
                 response = self._dispatch(method, params)
                 response = (response,)
-            response = six.moves.xmlrpc_client.dumps(response, methodresponse=1, allow_none=True)
+            response = six.moves.xmlrpc_client.dumps(response,
+                                                     methodresponse=1, allow_none=True)
         except six.moves.xmlrpc_client.Fault as fault:
             response = six.moves.xmlrpc_client.dumps(fault)
         except BaseException:
@@ -369,7 +372,9 @@ class WinBuildTask(MultiPlatformTask):
 
         task_opts = koji.util.dslice(opts, ['timeout', 'cpus', 'mem', 'static_mac'], strict=False)
         task_id = self.session.host.subtask(method='vmExec',
-                                            arglist=[name, [source_url, build_tag['name'], subopts], task_opts],
+                                            arglist=[name,
+                                                     [source_url, build_tag['name'], subopts],
+                                                     task_opts],
                                             label=name[:255],
                                             parent=self.id)
         results = self.wait(task_id)[task_id]
@@ -379,7 +384,8 @@ class WinBuildTask(MultiPlatformTask):
         if not opts.get('scratch'):
             build_info = koji.util.dslice(results, ['name', 'version', 'release', 'epoch'])
             build_info['package_name'] = build_info['name']
-            pkg_cfg = self.session.getPackageConfig(dest_tag['id'], build_info['name'], event=event_id)
+            pkg_cfg = self.session.getPackageConfig(dest_tag['id'], build_info['name'],
+                                                    event=event_id)
             if not opts.get('skip_tag'):
                 # Make sure package is on the list for this tag
                 if pkg_cfg is None:
@@ -397,8 +403,8 @@ class WinBuildTask(MultiPlatformTask):
         rpm_results = None
         spec_url = opts.get('specfile')
         if spec_url:
-            rpm_results = self.buildWrapperRPM(spec_url, task_id, target_info, build_info, repo_id,
-                                               channel='default')
+            rpm_results = self.buildWrapperRPM(spec_url, task_id, target_info, build_info,
+                                               repo_id, channel='default')
 
         if opts.get('scratch'):
             self.session.host.moveWinBuildToScratch(self.id, results, rpm_results)
@@ -436,8 +442,8 @@ class VMExecTask(BaseTaskHandler):
 
     def __init__(self, *args, **kw):
         super(VMExecTask, self).__init__(*args, **kw)
-        self.task_manager = six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (self.options.privaddr, self.options.portbase),
-                                                                allow_none=True)
+        self.task_manager = six.moves.xmlrpc_client.ServerProxy(
+            'http://%s:%s/' % (self.options.privaddr, self.options.portbase), allow_none=True)
         self.port = None
         self.server = None
         self.task_info = None
@@ -451,13 +457,16 @@ class VMExecTask(BaseTaskHandler):
     def mkqcow2(self, clone_name, source_disk, disk_num):
         new_name = clone_name + '-disk-' + str(disk_num) + self.QCOW2_EXT
         new_path = os.path.join(self.options.imagedir, new_name)
-        cmd = ['/usr/bin/qemu-img', 'create', '-f', 'qcow2', '-o', 'backing_file=%s' % source_disk, new_path]
-        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
+        cmd = ['/usr/bin/qemu-img', 'create', '-f', 'qcow2', '-o', 'backing_file=%s' % source_disk,
+               new_path]
+        proc = subprocess.Popen(cmd,
+                                stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
         output, dummy = proc.communicate()
         ret = proc.wait()
         if ret:
-            raise koji.BuildError('unable to create qcow2 image, "%s" returned %s; output was: %s' %
-                                  (' '.join(cmd), ret, output))
+            raise koji.BuildError(
+                'unable to create qcow2 image, "%s" returned %s; output was: %s' %
+                (' '.join(cmd), ret, output))
         vm_user = pwd.getpwnam(self.options.vmuser)
         os.chown(new_path, vm_user.pw_uid, vm_user.pw_gid)
         return new_path
@@ -708,14 +717,17 @@ class VMExecTask(BaseTaskHandler):
             hdr = koji.get_rpm_header(localpath)
             payloadhash = koji.hex_string(koji.get_header_field(hdr, 'sigmd5'))
             if fileinfo['payloadhash'] != payloadhash:
-                raise koji.BuildError("Downloaded rpm %s doesn't match checksum (expected: %s, got %s)" % (
-                    os.path.basename(fileinfo['localpath']),
-                    fileinfo['payloadhash'], payloadhash))
+                raise koji.BuildError(
+                    "Downloaded rpm %s doesn't match checksum (expected: %s, got %s)" %
+                    (os.path.basename(fileinfo['localpath']),
+                     fileinfo['payloadhash'],
+                     payloadhash))
             if not koji.util.check_sigmd5(localpath):
                 raise koji.BuildError("Downloaded rpm %s doesn't match sigmd5" %
                                       os.path.basename(fileinfo['localpath']))
         else:
-            self.verifyChecksum(localpath, fileinfo['checksum'], koji.CHECKSUM_TYPES[fileinfo['checksum_type']])
+            self.verifyChecksum(localpath, fileinfo['checksum'],
+                                koji.CHECKSUM_TYPES[fileinfo['checksum_type']])
 
         return open(localpath, 'r')
 
@@ -796,8 +808,9 @@ class VMExecTask(BaseTaskHandler):
         if sum.hexdigest() == checksum:
             return True
         else:
-            raise koji.BuildError('%s checksum validation failed for %s, %s (computed) != %s (provided)' %
-                                  (algo, local_path, sum.hexdigest(), checksum))
+            raise koji.BuildError(
+                '%s checksum validation failed for %s, %s (computed) != %s (provided)' %
+                (algo, local_path, sum.hexdigest(), checksum))
 
     def closeTask(self, output):
         self.output = output
@@ -879,8 +892,9 @@ class VMExecTask(BaseTaskHandler):
                 if mins > timeout:
                     vm.destroy()
                     self.server.server_close()
-                    raise koji.BuildError('Task did not complete after %.2f minutes, VM %s has been destroyed' %
-                                          (mins, clone_name))
+                    raise koji.BuildError(
+                        'Task did not complete after %.2f minutes, VM %s has been destroyed' %
+                        (mins, clone_name))
             else:
                 vm.destroy()
                 self.server.server_close()
@@ -913,7 +927,9 @@ class VMTaskManager(TaskManager):
             if macaddr in self.macaddrs:
                 raise koji.PreBuildError('duplicate MAC address: %s' % macaddr)
             self.macaddrs[macaddr] = (vm_name, task_id, port)
-            self.logger.info('registered MAC address %s for VM %s (task ID %s, port %s)', macaddr, vm_name, task_id, port)
+            self.logger.info(
+                'registered MAC address %s for VM %s (task ID %s, port %s)',
+                macaddr, vm_name, task_id, port)
             return True
         finally:
             self.macaddr_lock.release()
@@ -964,7 +980,8 @@ class VMTaskManager(TaskManager):
         availableMB = available // 1024 // 1024
         self.logger.debug('disk space available in %s: %i MB', self.options.imagedir, availableMB)
         if availableMB < self.options.minspace:
-            self.status = 'Insufficient disk space: %i MB, %i MB required' % (availableMB, self.options.minspace)
+            self.status = 'Insufficient disk space: %i MB, %i MB required' % \
+                          (availableMB, self.options.minspace)
             self.logger.warn(self.status)
             return False
         return True
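The mkqcow2 hunk is representative of the kojivmd changes: a long external command and its error report are folded across lines while the run-capture-raise pattern stays intact. A condensed sketch of that pattern (qemu-img path and flags as in the hunk; a plain RuntimeError stands in for koji.BuildError):

    import subprocess

    def make_qcow2(source_disk, new_path):
        cmd = ['/usr/bin/qemu-img', 'create', '-f', 'qcow2', '-o', 'backing_file=%s' % source_disk,
               new_path]
        # stderr is merged into stdout so a failure report carries everything.
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
        output, _ = proc.communicate()
        if proc.returncode:
            raise RuntimeError(
                'unable to create qcow2 image, "%s" returned %s; output was: %s' %
                (' '.join(cmd), proc.returncode, output))
        return new_path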
@@ -154,9 +154,12 @@ def _assertLogin(environ):
             raise koji.AuthError('could not login %s via SSL' % environ['koji.currentLogin'])
         elif options['WebPrincipal']:
             if not _krbLogin(environ, environ['koji.session'], environ['koji.currentLogin']):
-                raise koji.AuthError('could not login using principal: %s' % environ['koji.currentLogin'])
+                raise koji.AuthError(
+                    'could not login using principal: %s' % environ['koji.currentLogin'])
         else:
-            raise koji.AuthError('KojiWeb is incorrectly configured for authentication, contact the system administrator')
+            raise koji.AuthError(
+                'KojiWeb is incorrectly configured for authentication, '
+                'contact the system administrator')
 
         # verify a valid authToken was passed in to avoid CSRF
         authToken = environ['koji.form'].getfirst('a', '')
@@ -168,7 +171,8 @@ def _assertLogin(environ):
             # their authToken is likely expired
             # send them back to the page that brought them here so they
             # can re-click the link with a valid authToken
-            _redirectBack(environ, page=None, forceSSL=(_getBaseURL(environ).startswith('https://')))
+            _redirectBack(environ, page=None,
+                          forceSSL=(_getBaseURL(environ).startswith('https://')))
             assert False  # pragma: no cover
     else:
         _redirect(environ, 'login')
@@ -188,7 +192,8 @@ def _getServer(environ):
     if environ['koji.currentLogin']:
         environ['koji.currentUser'] = session.getUser(environ['koji.currentLogin'])
         if not environ['koji.currentUser']:
-            raise koji.AuthError('could not get user for principal: %s' % environ['koji.currentLogin'])
+            raise koji.AuthError(
+                'could not get user for principal: %s' % environ['koji.currentLogin'])
         _setUserCookie(environ, environ['koji.currentLogin'])
     else:
         environ['koji.currentUser'] = None
@@ -271,7 +276,9 @@ def login(environ, page=None):
     elif options['WebPrincipal']:
         principal = environ.get('REMOTE_USER')
         if not principal:
-            raise koji.AuthError('configuration error: mod_auth_gssapi should have performed authentication before presenting this page')
+            raise koji.AuthError(
+                'configuration error: mod_auth_gssapi should have performed authentication before '
+                'presenting this page')
 
         if not _krbLogin(environ, session, principal):
             raise koji.AuthError('could not login using principal: %s' % principal)
@@ -279,7 +286,9 @@ def login(environ, page=None):
         username = principal
         authlogger.info('Successful Kerberos authentication by %s', username)
     else:
-        raise koji.AuthError('KojiWeb is incorrectly configured for authentication, contact the system administrator')
+        raise koji.AuthError(
+            'KojiWeb is incorrectly configured for authentication, contact the system '
+            'administrator')
 
     _setUserCookie(environ, username)
     # To protect the session cookie, we must forceSSL
@@ -322,8 +331,10 @@ def index(environ, packageOrder='package_name', packageStart=None):
     values['order'] = '-id'
 
     if user:
-        kojiweb.util.paginateResults(server, values, 'listPackages', kw={'userID': user['id'], 'with_dups': True},
-                                     start=packageStart, dataName='packages', prefix='package', order=packageOrder, pageSize=10)
+        kojiweb.util.paginateResults(server, values, 'listPackages',
                                      kw={'userID': user['id'], 'with_dups': True},
+                                     start=packageStart, dataName='packages', prefix='package',
+                                     order=packageOrder, pageSize=10)
 
         notifs = server.getBuildNotifications(user['id'])
         notifs.sort(key=lambda x: x['id'])
@@ -480,12 +491,16 @@ _TASKS = ['build',
           'livemedia',
          'createLiveMedia']
 # Tasks that can exist without a parent
-_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'chainmaven', 'wrapperRPM', 'winbuild', 'newRepo', 'distRepo', 'tagBuild', 'tagNotification', 'waitrepo', 'livecd', 'appliance', 'image', 'livemedia']
+_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'chainmaven', 'wrapperRPM',
+                   'winbuild', 'newRepo', 'distRepo', 'tagBuild', 'tagNotification', 'waitrepo',
+                   'livecd', 'appliance', 'image', 'livemedia']
 # Tasks that can have children
-_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'chainmaven', 'winbuild', 'newRepo', 'distRepo', 'wrapperRPM', 'livecd', 'appliance', 'image', 'livemedia']
+_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'chainmaven', 'winbuild', 'newRepo', 'distRepo',
+                 'wrapperRPM', 'livecd', 'appliance', 'image', 'livemedia']
 
 
-def tasks(environ, owner=None, state='active', view='tree', method='all', hostID=None, channelID=None, start=None, order='-id'):
+def tasks(environ, owner=None, state='active', view='tree', method='all', hostID=None,
+          channelID=None, start=None, order='-id'):
     values = _initValues(environ, 'Tasks', 'tasks')
     server = _getServer(environ)
 
@@ -539,7 +554,9 @@ def tasks(environ, owner=None, state='active', view='tree', method='all', hostID
         opts['parent'] = None
 
     if state == 'active':
-        opts['state'] = [koji.TASK_STATES['FREE'], koji.TASK_STATES['OPEN'], koji.TASK_STATES['ASSIGNED']]
+        opts['state'] = [koji.TASK_STATES['FREE'],
+                         koji.TASK_STATES['OPEN'],
+                         koji.TASK_STATES['ASSIGNED']]
     elif state == 'all':
         pass
     else:
@@ -830,7 +847,8 @@ def _chunk_file(server, environ, taskID, name, offset, size, volume):
         chunk_size = 1048576
         if remaining < chunk_size:
             chunk_size = remaining
-        content = server.downloadTaskOutput(taskID, name, offset=offset, size=chunk_size, volume=volume)
+        content = server.downloadTaskOutput(taskID, name,
+                                            offset=offset, size=chunk_size, volume=volume)
         if not content:
             break
         yield content
@@ -863,7 +881,8 @@ def tags(environ, start=None, order=None, childID=None):
 _PREFIX_CHARS = [chr(char) for char in list(range(48, 58)) + list(range(97, 123))]
 
 
-def packages(environ, tagID=None, userID=None, order='package_name', start=None, prefix=None, inherited='1'):
+def packages(environ, tagID=None, userID=None, order='package_name', start=None, prefix=None,
+             inherited='1'):
     values = _initValues(environ, 'Packages', 'packages')
     server = _getServer(environ)
     tag = None
@@ -890,7 +909,10 @@ def packages(environ, tagID=None, userID=None, order='package_name', start=None,
     values['inherited'] = inherited
 
     kojiweb.util.paginateMethod(server, values, 'listPackages',
-                                kw={'tagID': tagID, 'userID': userID, 'prefix': prefix, 'inherited': bool(inherited)},
+                                kw={'tagID': tagID,
+                                    'userID': userID,
+                                    'prefix': prefix,
+                                    'inherited': bool(inherited)},
                                 start=start, dataName='packages', prefix='package', order=order)
 
     values['chars'] = _PREFIX_CHARS
@@ -898,7 +920,8 @@ def packages(environ, tagID=None, userID=None, order='package_name', start=None,
     return _genHTML(environ, 'packages.chtml')
 
 
-def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='-completion_time', buildStart=None):
+def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='-completion_time',
+                buildStart=None):
     values = _initValues(environ, 'Package Info', 'packages')
     server = _getServer(environ)
 
@@ -916,12 +939,14 @@ def packageinfo(environ, packageID, tagOrder='name', tagStart=None, buildOrder='
     kojiweb.util.paginateMethod(server, values, 'listTags', kw={'package': package['id']},
                                 start=tagStart, dataName='tags', prefix='tag', order=tagOrder)
     kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'packageID': package['id']},
-                                start=buildStart, dataName='builds', prefix='build', order=buildOrder)
+                                start=buildStart, dataName='builds', prefix='build',
+                                order=buildOrder)
 
     return _genHTML(environ, 'packageinfo.chtml')
 
 
-def taginfo(environ, tagID, all='0', packageOrder='package_name', packageStart=None, buildOrder='-completion_time', buildStart=None, childID=None):
+def taginfo(environ, tagID, all='0', packageOrder='package_name', packageStart=None,
+            buildOrder='-completion_time', buildStart=None, childID=None):
     values = _initValues(environ, 'Tag Info', 'tags')
     server = _getServer(environ)
 
@@ -1115,7 +1140,9 @@ def tagparent(environ, tagID, parentID, action):
         elif len(inheritanceData) == 1:
             values['inheritanceData'] = inheritanceData[0]
         else:
-            raise koji.GenericError('tag %i has tag %i listed as a parent more than once' % (tag['id'], parent['id']))
+            raise koji.GenericError(
+                'tag %i has tag %i listed as a parent more than once' %
+                (tag['id'], parent['id']))
 
         return _genHTML(environ, 'tagparent.chtml')
     elif action == 'remove':
@@ -1174,7 +1201,8 @@ def buildinfo(environ, buildID):
         for archive in archives:
             if btype == 'maven':
                 archive['display'] = archive['filename']
-                archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build), pathinfo.mavenfile(archive)])
+                archive['dl_url'] = '/'.join([pathinfo.mavenbuild(build),
+                                              pathinfo.mavenfile(archive)])
             elif btype == 'win':
                 archive['display'] = pathinfo.winfile(archive)
                 archive['dl_url'] = '/'.join([pathinfo.winbuild(build), pathinfo.winfile(archive)])
@@ -1210,7 +1238,8 @@ def buildinfo(environ, buildID):
     # get the summary, description, and changelogs from the built srpm
     # if the build is not yet complete
     if build['state'] != koji.BUILD_STATES['COMPLETE']:
-        srpm_tasks = server.listTasks(opts={'parent': task['id'], 'method': 'buildSRPMFromSCM'})
+        srpm_tasks = server.listTasks(opts={'parent': task['id'],
+                                            'method': 'buildSRPMFromSCM'})
         if srpm_tasks:
             srpm_task = srpm_tasks[0]
             if srpm_task['state'] == koji.TASK_STATES['CLOSED']:
@@ -1220,12 +1249,14 @@ def buildinfo(environ, buildID):
                         srpm_path = output
                         break
                 if srpm_path:
-                    srpm_headers = server.getRPMHeaders(taskID=srpm_task['id'], filepath=srpm_path,
+                    srpm_headers = server.getRPMHeaders(taskID=srpm_task['id'],
+                                                        filepath=srpm_path,
                                                         headers=['summary', 'description'])
                     if srpm_headers:
                         values['summary'] = koji.fixEncoding(srpm_headers['summary'])
                         values['description'] = koji.fixEncoding(srpm_headers['description'])
-                    changelog = server.getChangelogEntries(taskID=srpm_task['id'], filepath=srpm_path)
+                    changelog = server.getChangelogEntries(taskID=srpm_task['id'],
+                                                           filepath=srpm_path)
                     if changelog:
                         values['changelog'] = changelog
         else:
@@ -1276,7 +1307,8 @@ def buildinfo(environ, buildID):
     return _genHTML(environ, 'buildinfo.chtml')
 
 
-def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='-build_id', start=None, prefix=None, inherited='1', latest='1', type=None):
+def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='-build_id',
+           start=None, prefix=None, inherited='1', latest='1', type=None):
     values = _initValues(environ, 'Builds', 'builds')
     server = _getServer(environ)
 
@@ -1344,15 +1376,20 @@ def builds(environ, userID=None, tagID=None, packageID=None, state=None, order='
 
     if tag:
         # don't need to consider 'state' here, since only completed builds would be tagged
-        kojiweb.util.paginateResults(server, values, 'listTagged', kw={'tag': tag['id'], 'package': (package and package['name'] or None),
-                                                                       'owner': (user and user['name'] or None),
-                                                                       'type': type,
-                                                                       'inherit': bool(inherited), 'latest': bool(latest), 'prefix': prefix},
+        kojiweb.util.paginateResults(server, values, 'listTagged',
+                                     kw={'tag': tag['id'],
+                                         'package': (package and package['name'] or None),
+                                         'owner': (user and user['name'] or None),
+                                         'type': type,
+                                         'inherit': bool(inherited), 'latest': bool(latest),
+                                         'prefix': prefix},
                                      start=start, dataName='builds', prefix='build', order=order)
     else:
-        kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'userID': (user and user['id'] or None), 'packageID': (package and package['id'] or None),
-                                                                      'type': type,
-                                                                      'state': state, 'prefix': prefix},
+        kojiweb.util.paginateMethod(server, values, 'listBuilds',
                                    kw={'userID': (user and user['id'] or None),
+                                        'packageID': (package and package['id'] or None),
+                                        'type': type,
+                                        'state': state, 'prefix': prefix},
                                     start=start, dataName='builds', prefix='build', order=order)
 
     values['chars'] = _PREFIX_CHARS
@@ -1380,7 +1417,8 @@ def users(environ, order='name', start=None, prefix=None):
     return _genHTML(environ, 'users.chtml')
 
 
-def userinfo(environ, userID, packageOrder='package_name', packageStart=None, buildOrder='-completion_time', buildStart=None):
+def userinfo(environ, userID, packageOrder='package_name', packageStart=None,
+             buildOrder='-completion_time', buildStart=None):
     values = _initValues(environ, 'User Info', 'users')
     server = _getServer(environ)
 
@@ -1392,18 +1430,23 @@ def userinfo(environ, userID, packageOrder='package_name', packageStart=None, bu
 
     values['user'] = user
     values['userID'] = userID
-    values['taskCount'] = server.listTasks(opts={'owner': user['id'], 'parent': None}, queryOpts={'countOnly': True})
+    values['taskCount'] = server.listTasks(opts={'owner': user['id'], 'parent': None},
+                                           queryOpts={'countOnly': True})
 
-    kojiweb.util.paginateResults(server, values, 'listPackages', kw={'userID': user['id'], 'with_dups': True},
-                                 start=packageStart, dataName='packages', prefix='package', order=packageOrder, pageSize=10)
+    kojiweb.util.paginateResults(server, values, 'listPackages',
                                  kw={'userID': user['id'], 'with_dups': True},
+                                 start=packageStart, dataName='packages', prefix='package',
+                                 order=packageOrder, pageSize=10)
 
     kojiweb.util.paginateMethod(server, values, 'listBuilds', kw={'userID': user['id']},
-                                start=buildStart, dataName='builds', prefix='build', order=buildOrder, pageSize=10)
+                                start=buildStart, dataName='builds', prefix='build',
+                                order=buildOrder, pageSize=10)
 
     return _genHTML(environ, 'userinfo.chtml')
 
 
-def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-id', buildrootStart=None):
+def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-id',
+            buildrootStart=None):
     values = _initValues(environ, 'RPM Info', 'builds')
     server = _getServer(environ)
 
@@ -1441,8 +1484,11 @@ def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-i
     values['summary'] = koji.fixEncoding(headers.get('summary'))
     values['description'] = koji.fixEncoding(headers.get('description'))
     values['license'] = koji.fixEncoding(headers.get('license'))
-    buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots', kw={'rpmID': rpm['id']},
-                                             start=buildrootStart, dataName='buildroots', prefix='buildroot',
+    buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots',
                                              kw={'rpmID': rpm['id']},
+                                             start=buildrootStart,
+                                             dataName='buildroots',
+                                             prefix='buildroot',
                                              order=buildrootOrder)
 
     values['rpmID'] = rpmID
@@ -1457,7 +1503,8 @@ def rpminfo(environ, rpmID, fileOrder='name', fileStart=None, buildrootOrder='-i
     return _genHTML(environ, 'rpminfo.chtml')
 
 
-def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootOrder='-id', buildrootStart=None):
+def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootOrder='-id',
+                buildrootStart=None):
     values = _initValues(environ, 'Archive Info', 'builds')
     server = _getServer(environ)
 
@@ -1476,8 +1523,11 @@ def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootO
     builtInRoot = server.getBuildroot(archive['buildroot_id'])
     kojiweb.util.paginateMethod(server, values, 'listArchiveFiles', args=[archive['id']],
                                 start=fileStart, dataName='files', prefix='file', order=fileOrder)
-    buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots', kw={'archiveID': archive['id']},
-                                             start=buildrootStart, dataName='buildroots', prefix='buildroot',
+    buildroots = kojiweb.util.paginateMethod(server, values, 'listBuildroots',
                                              kw={'archiveID': archive['id']},
+                                             start=buildrootStart,
+                                             dataName='buildroots',
+                                             prefix='buildroot',
                                              order=buildrootOrder)
 
     values['title'] = archive['filename'] + ' | Archive Info'
@@ -1491,7 +1541,8 @@ def archiveinfo(environ, archiveID, fileOrder='name', fileStart=None, buildrootO
     values['builtInRoot'] = builtInRoot
     values['buildroots'] = buildroots
     values['show_rpm_components'] = server.listRPMs(imageID=archive['id'], queryOpts={'limit': 1})
-    values['show_archive_components'] = server.listArchives(imageID=archive['id'], queryOpts={'limit': 1})
+    values['show_archive_components'] = server.listArchives(imageID=archive['id'],
                                                             queryOpts={'limit': 1})
 
     return _genHTML(environ, 'archiveinfo.chtml')
 
@@ -1604,7 +1655,8 @@ def hostinfo(environ, hostID=None, userID=None):
     channels = server.listChannels(host['id'])
     channels.sort(key=_sortbyname)
     buildroots = server.listBuildroots(hostID=host['id'],
-                                       state=[state[1] for state in koji.BR_STATES.items() if state[0] != 'EXPIRED'])
+                                       state=[state[1] for state in koji.BR_STATES.items()
                                              if state[0] != 'EXPIRED'])
     buildroots.sort(key=lambda x: x['create_event_time'], reverse=True)
 
     values['host'] = host
@@ -1718,7 +1770,8 @@ def channelinfo(environ, channelID):
     return _genHTML(environ, 'channelinfo.chtml')
 
 
-def buildrootinfo(environ, buildrootID, builtStart=None, builtOrder=None, componentStart=None, componentOrder=None):
+def buildrootinfo(environ, buildrootID, builtStart=None, builtOrder=None, componentStart=None,
+                  componentOrder=None):
     values = _initValues(environ, 'Buildroot Info', 'hosts')
     server = _getServer(environ)
 
@@ -1807,11 +1860,15 @@ def archivelist(environ, type, buildrootID=None, imageID=None, start=None, order
         raise koji.GenericError('unknown buildroot ID: %i' % buildrootID)
 
         if type == 'component':
-            kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'componentBuildrootID': buildroot['id']},
-                                        start=start, dataName='archives', prefix='archive', order=order)
+            kojiweb.util.paginateMethod(server, values, 'listArchives',
                                        kw={'componentBuildrootID': buildroot['id']},
+                                        start=start, dataName='archives', prefix='archive',
+                                        order=order)
         elif type == 'built':
-            kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'buildrootID': buildroot['id']},
-                                        start=start, dataName='archives', prefix='archive', order=order)
+            kojiweb.util.paginateMethod(server, values, 'listArchives',
                                        kw={'buildrootID': buildroot['id']},
+                                        start=start, dataName='archives', prefix='archive',
+                                        order=order)
         else:
             raise koji.GenericError('unrecognized type of archivelist')
     elif imageID is not None:
@@ -1820,7 +1877,8 @@ def archivelist(environ, type, buildrootID=None, imageID=None, start=None, order
         # If/When future image types are supported, add elifs here if needed.
         if type == 'image':
            kojiweb.util.paginateMethod(server, values, 'listArchives', kw={'imageID': imageID},
-                                        start=start, dataName='archives', prefix='archive', order=order)
+                                        start=start, dataName='archives', prefix='archive',
+                                        order=order)
         else:
             raise koji.GenericError('unrecognized type of archivelist')
     else:
@@ -2155,9 +2213,12 @@ def buildsbystatus(environ, days='7'):
 
     server.multicall = True
     # use taskID=-1 to filter out builds with a null task_id (imported rather than built in koji)
-    server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['COMPLETE'], taskID=-1, queryOpts={'countOnly': True})
-    server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['FAILED'], taskID=-1, queryOpts={'countOnly': True})
-    server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['CANCELED'], taskID=-1, queryOpts={'countOnly': True})
+    server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['COMPLETE'], taskID=-1,
+                      queryOpts={'countOnly': True})
+    server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['FAILED'], taskID=-1,
+                      queryOpts={'countOnly': True})
+    server.listBuilds(completeAfter=dateAfter, state=koji.BUILD_STATES['CANCELED'], taskID=-1,
+                      queryOpts={'countOnly': True})
     [[numSucceeded], [numFailed], [numCanceled]] = server.multiCall()
 
     values['numSucceeded'] = numSucceeded
@@ -2298,7 +2359,8 @@ def recentbuilds(environ, user=None, tag=None, package=None):
        packageObj = server.getPackage(package)
 
     if tagObj is not None:
-        builds = server.listTagged(tagObj['id'], inherit=True, package=(packageObj and packageObj['name'] or None),
+        builds = server.listTagged(tagObj['id'], inherit=True,
+                                   package=(packageObj and packageObj['name'] or None),
                                    owner=(userObj and userObj['name'] or None))
         builds.sort(key=kojiweb.util.sortByKeyFuncNoneGreatest('completion_time'), reverse=True)
         builds = builds[:20]
@@ -2408,7 +2470,8 @@ def search(environ, start=None, order=None):
     values['order'] = order
 
     results = kojiweb.util.paginateMethod(server, values, 'search', args=(terms, type, match),
-                                          start=start, dataName='results', prefix='result', order=order)
+                                          start=start, dataName='results', prefix='result',
+                                          order=order)
     if not start and len(results) == 1:
         # if we found exactly one result, skip the result list and redirect to the info page
         # (you're feeling lucky)
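The paginate* rewrites above are the bulkiest in the commit: a kw dict that used to run past the margin becomes one key per line, aligned inside the braces. The call is identical; only the layout changes, and later diffs touch single keys instead of one very long line. A toy sketch of the shape (paginate here is a stand-in for kojiweb.util.paginateMethod, not the real helper):

    def paginate(method_name, kw, start=None, data_name=None, prefix=None, order=None):
        # Stand-in: only the call layout matters for this example.
        return (method_name, sorted(kw), start, data_name, prefix, order)

    result = paginate('listPackages',
                      kw={'tagID': 1,
                          'userID': None,
                          'prefix': 'a',
                          'inherited': True},
                      start=0, data_name='packages', prefix='package', order='package_name')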
@ -96,7 +96,9 @@ class Dispatcher(object):
|
||||||
['LibPath', 'string', '/usr/share/koji-web/lib'],
|
['LibPath', 'string', '/usr/share/koji-web/lib'],
|
||||||
|
|
||||||
['LogLevel', 'string', 'WARNING'],
|
['LogLevel', 'string', 'WARNING'],
|
||||||
['LogFormat', 'string', '%(msecs)d [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'],
|
['LogFormat', 'string',
|
||||||
|
'%(msecs)d [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s '
|
||||||
|
'%(name)s: %(message)s'],
|
||||||
|
|
||||||
['Tasks', 'list', []],
|
['Tasks', 'list', []],
|
||||||
['ToplevelTasks', 'list', []],
|
['ToplevelTasks', 'list', []],
|
||||||
|
|
@@ -227,7 +229,9 @@ class Dispatcher(object):
             raise URLNotFound
         # parse form args
         data = {}
-        fs = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ.copy(), keep_blank_values=True)
+        fs = cgi.FieldStorage(fp=environ['wsgi.input'],
+                              environ=environ.copy(),
+                              keep_blank_values=True)
         for field in fs.list:
             if field.filename:
                 val = field
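For reference, a minimal, self-contained sketch of what the wrapped cgi.FieldStorage call parses; the environ values are invented, and note that the cgi module was deprecated in Python 3.11 and removed in 3.13:

import io
import cgi

environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'terms=bash&type=package'}
fs = cgi.FieldStorage(fp=io.BytesIO(b''),      # empty body; GET data comes from QUERY_STRING
                      environ=environ.copy(),
                      keep_blank_values=True)  # keep fields like 'match=' present but empty
for field in fs.list:
    print(field.name, field.value)             # -> terms bash / type package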
@@ -65,7 +65,8 @@ def _initValues(environ, title='Build System Info', pageID='summary'):
         themeCache.clear()
         themeInfo.clear()
         themeInfo['name'] = environ['koji.options'].get('KojiTheme', None)
-        themeInfo['staticdir'] = environ['koji.options'].get('KojiStaticDir', '/usr/share/koji-web/static')
+        themeInfo['staticdir'] = environ['koji.options'].get('KojiStaticDir',
+                                                             '/usr/share/koji-web/static')

     environ['koji.values'] = values

@@ -227,9 +228,11 @@ def sortImage(template, sortKey, orderVar='order'):
     """
     orderVal = template.getVar(orderVar)
     if orderVal == sortKey:
-        return '<img src="%s" class="sort" alt="ascending sort"/>' % themePath("images/gray-triangle-up.gif")
+        return '<img src="%s" class="sort" alt="ascending sort"/>' % \
+            themePath("images/gray-triangle-up.gif")
     elif orderVal == '-' + sortKey:
-        return '<img src="%s" class="sort" alt="descending sort"/>' % themePath("images/gray-triangle-down.gif")
+        return '<img src="%s" class="sort" alt="descending sort"/>' % \
+            themePath("images/gray-triangle-down.gif")
     else:
         return ''

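The backslash continuations above are one way to satisfy E501; a parenthesized form of the same wrap, with themePath stubbed out purely for the example, would be:

def themePath(path):  # stand-in for kojiweb.util.themePath
    return '/koji-static/' + path

img = ('<img src="%s" class="sort" alt="ascending sort"/>'
       % themePath("images/gray-triangle-up.gif"))
assert img.endswith('gray-triangle-up.gif" class="sort" alt="ascending sort"/>')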
@@ -283,7 +286,8 @@ def sortByKeyFuncNoneGreatest(key):
     return internal_key


-def paginateList(values, data, start, dataName, prefix=None, order=None, noneGreatest=False, pageSize=50):
+def paginateList(values, data, start, dataName, prefix=None, order=None, noneGreatest=False,
+                 pageSize=50):
     """
     Slice the 'data' list into one page worth. Start at offset
     'start' and limit the total number of pages to pageSize
@@ -317,8 +321,9 @@ def paginateList(values, data, start, dataName, prefix=None, order=None, noneGre

 def paginateMethod(server, values, methodName, args=None, kw=None,
                    start=None, dataName=None, prefix=None, order=None, pageSize=50):
-    """Paginate the results of the method with the given name when called with the given args and kws.
-    The method must support the queryOpts keyword parameter, and pagination is done in the database."""
+    """Paginate the results of the method with the given name when called with the given args and
+    kws. The method must support the queryOpts keyword parameter, and pagination is done in the
+    database."""
     if args is None:
         args = []
     if kw is None:
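A hedged sketch of the queryOpts contract this docstring refers to: the hub applies countOnly, order, offset and limit in SQL, so only one page of rows crosses the wire. The function below is illustrative, not the actual kojiweb implementation:

def paginate_in_db(server, methodName, args, kw, start=0, pageSize=50, order=None):
    # illustrative only: count first, then fetch one page, both server-side
    kw = dict(kw)
    kw['queryOpts'] = {'countOnly': True}
    totalRows = getattr(server, methodName)(*args, **kw)
    kw['queryOpts'] = {'order': order, 'offset': start, 'limit': pageSize}
    return totalRows, getattr(server, methodName)(*args, **kw)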
@@ -346,10 +351,10 @@ def paginateMethod(server, values, methodName, args=None, kw=None,

 def paginateResults(server, values, methodName, args=None, kw=None,
                     start=None, dataName=None, prefix=None, order=None, pageSize=50):
-    """Paginate the results of the method with the given name when called with the given args and kws.
-    This method should only be used when then method does not support the queryOpts command (because
-    the logic used to generate the result list prevents filtering/ordering from being done in the database).
-    The method must return a list of maps."""
+    """Paginate the results of the method with the given name when called with the given args and
+    kws. This method should only be used when then method does not support the queryOpts command
+    (because the logic used to generate the result list prevents filtering/ordering from being done
+    in the database). The method must return a list of maps."""
     if args is None:
         args = []
     if kw is None:
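By contrast, paginateResults has to do the same work in Python because the wrapped method cannot take queryOpts. A sketch of that client-side fallback, with illustrative names rather than the real implementation:

def paginate_in_python(rows, start=0, pageSize=50, order=None):
    # sort on the requested key ('-key' means descending), then slice one page
    if order:
        key = order.lstrip('-')
        rows = sorted(rows, key=lambda r: r[key], reverse=order.startswith('-'))
    return len(rows), rows[start:start + pageSize]

total, page = paginate_in_python([{'id': i} for i in range(120)], start=100)
assert total == 120 and len(page) == 20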
@@ -390,7 +395,8 @@ def _populateValues(values, dataName, prefix, data, totalRows, start, count, pag
     totalPages = int(totalRows // pageSize)
     if totalRows % pageSize > 0:
         totalPages += 1
-    pages = [page for page in range(0, totalPages) if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]
+    pages = [page for page in range(0, totalPages)
+             if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]
     values[(prefix and prefix + 'Pages') or 'pages'] = pages


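A worked example of the rewrapped comprehension: every page within 100 of the current one is listed, and beyond that only every 100th page (by 1-indexed number) survives, which keeps huge result sets navigable. The values below are invented:

currentPage, totalPages = 3, 450
pages = [page for page in range(0, totalPages)
         if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]
assert pages[:3] == [0, 1, 2]   # everything near page 3 is kept
assert 199 in pages             # the 200th page survives the cut
assert 150 not in pages         # mid-range pages far from current are dropped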