remove changes not relevant to cli

This commit is contained in:
Tomas Kopecek 2017-05-12 11:23:10 +02:00
parent d2ab86584a
commit d625c0aa4d
33 changed files with 359 additions and 420 deletions

View file

@ -21,9 +21,6 @@
# Mike McLean <mikem@redhat.com>
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
from six.moves import zip
import six
try:
import krbV
except ImportError: # pragma: no cover
@ -58,11 +55,11 @@ import sys
import time
import traceback
import xml.dom.minidom
import six.moves.xmlrpc_client
import xmlrpclib
import zipfile
import copy
import Cheetah.Template
from six.moves.configparser import ConfigParser
from ConfigParser import ConfigParser
from fnmatch import fnmatch
from gzip import GzipFile
from optparse import OptionParser, SUPPRESS_HELP
@ -256,7 +253,7 @@ class BuildRoot(object):
output = koji.genMockConfig(self.name, self.br_arch, managed=True, **opts)
#write config
fo = open(configfile,'w')
fo = file(configfile,'w')
fo.write(output)
fo.close()
@ -269,7 +266,7 @@ class BuildRoot(object):
id_suffix = 'repo'
name_prefix = 'Repository for Koji'
for dep in self.deps:
if isinstance(dep, six.integer_types):
if isinstance(dep, (int, long)):
# dep is a task ID, the url points to the task output directory
repo_type = 'task'
dep_url = pi.task(dep)
@ -356,7 +353,7 @@ class BuildRoot(object):
</settings>
"""
settings = settings % locals()
fo = open(self.rootdir() + destfile, 'w')
fo = file(self.rootdir() + destfile, 'w')
fo.write(settings)
fo.close()
@ -410,7 +407,7 @@ class BuildRoot(object):
self.logger.info('Rereading %s, inode: %s -> %s, size: %s -> %s' %
(fpath, inode, stat_info.st_ino, size, stat_info.st_size))
fd.close()
fd = open(fpath, 'r')
fd = file(fpath, 'r')
logs[fname] = (fd, stat_info.st_ino, stat_info.st_size, fpath)
except:
self.logger.error("Error reading mock log: %s", fpath)
@ -431,7 +428,7 @@ class BuildRoot(object):
if workdir:
outfile = os.path.join(workdir, mocklog)
flags = os.O_CREAT | os.O_WRONLY | os.O_APPEND
fd = os.open(outfile, flags, 0o666)
fd = os.open(outfile, flags, 0666)
os.dup2(fd, 1)
os.dup2(fd, 2)
if os.getuid() == 0 and hasattr(self.options,"mockuser"):
@ -659,7 +656,7 @@ class BuildRoot(object):
repodir = pathinfo.repo(self.repo_info['id'], self.repo_info['tag_name'])
repomdpath = os.path.join(repodir, self.br_arch, 'repodata', 'repomd.xml')
opts = dict([(k, getattr(self.options, k)) for k in ('topurl','topdir')])
opts = dict([(k, getattr(self.options, k)) for k in 'topurl','topdir'])
opts['tempdir'] = self.options.workdir
fo = koji.openRemoteFile(repomdpath, **opts)
try:
@ -914,7 +911,7 @@ class BuildTask(BaseTaskHandler):
#srpm arg should be a path relative to <BASEDIR>/work
self.logger.debug("Reading SRPM")
relpath = "work/%s" % srpm
opts = dict([(k, getattr(self.options, k)) for k in ('topurl','topdir')])
opts = dict([(k, getattr(self.options, k)) for k in 'topurl','topdir'])
opts['tempdir'] = self.workdir
fo = koji.openRemoteFile(relpath, **opts)
h = koji.get_rpm_header(fo)
@ -967,7 +964,7 @@ class BuildTask(BaseTaskHandler):
archdict[a] = 1
if not archdict:
raise koji.BuildError("No matching arches were found")
return list(archdict.keys())
return archdict.keys()
def choose_taskarch(self, arch, srpm, build_tag):
@ -1029,7 +1026,7 @@ class BuildTask(BaseTaskHandler):
# wait for subtasks to finish
failany = not getattr(self.options, 'build_arch_can_fail', False)
results = self.wait(list(subtasks.values()), all=True, failany=failany)
results = self.wait(subtasks.values(), all=True, failany=failany)
# finalize import
# merge data into needed args for completeBuild call
@ -1037,7 +1034,7 @@ class BuildTask(BaseTaskHandler):
brmap = {}
logs = {}
built_srpm = None
for (arch, task_id) in six.iteritems(subtasks):
for (arch, task_id) in subtasks.iteritems():
result = results[task_id]
self.logger.debug("DEBUG: %r : %r " % (arch,result,))
brootid = result['brootid']
@ -1348,7 +1345,7 @@ class BuildMavenTask(BaseBuildTask):
st = os.lstat(filepath)
mtime = time.localtime(st.st_mtime)
info = zipfile.ZipInfo(filepath[roottrim:])
info.external_attr |= 0o120000 << 16 # symlink file type
info.external_attr |= 0120000 << 16L # symlink file type
info.compress_type = zipfile.ZIP_STORED
info.date_time = mtime[:6]
zfo.writestr(info, content)
@ -1516,7 +1513,7 @@ class BuildMavenTask(BaseBuildTask):
for filepath in logs:
self.uploadFile(os.path.join(outputdir, filepath),
relPath=os.path.dirname(filepath))
for relpath, files in six.iteritems(output_files):
for relpath, files in output_files.iteritems():
for filename in files:
self.uploadFile(os.path.join(outputdir, relpath, filename),
relPath=relpath)
@ -1746,7 +1743,7 @@ class WrapperRPMTask(BaseBuildTask):
contents = contents.encode('utf-8')
specfile = spec_template[:-5]
specfd = open(specfile, 'w')
specfd = file(specfile, 'w')
specfd.write(contents)
specfd.close()
@ -1957,7 +1954,7 @@ class ChainMavenTask(MultiPlatformTask):
pkg_to_wrap = params['buildrequires'][0]
to_wrap = self.done[pkg_to_wrap]
if isinstance(to_wrap, six.integer_types):
if isinstance(to_wrap, (int, long)):
task_to_wrap = self.session.getTaskInfo(to_wrap, request=True)
build_to_wrap = None
else:
@ -1976,8 +1973,8 @@ class ChainMavenTask(MultiPlatformTask):
running[task_id] = package
del todo[package]
try:
results = self.wait(list(running.keys()))
except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
results = self.wait(running.keys())
except (xmlrpclib.Fault, koji.GenericError), e:
# One task has failed, wait for the rest to complete before the
# chainmaven task fails. self.wait(all=True) should throw an exception.
self.wait(all=True)
@ -2029,8 +2026,8 @@ class ChainMavenTask(MultiPlatformTask):
have the same keys and those keys have the same values. If a value is
list, it will be considered equal to a list with the same values in
a different order."""
akeys = list(a.keys())
bkeys = list(b.keys())
akeys = a.keys()
bkeys = b.keys()
if sorted(akeys) != sorted(bkeys):
return False
for key in akeys:
@ -2114,7 +2111,7 @@ class TagBuildTask(BaseTaskHandler):
#XXX - add more post tests
self.session.host.tagBuild(self.id,tag_id,build_id,force=force,fromtag=fromtag)
self.session.host.tagNotification(True, tag_id, fromtag, build_id, user_id, ignore_success)
except Exception as e:
except Exception, e:
exctype, value = sys.exc_info()[:2]
self.session.host.tagNotification(False, tag_id, fromtag, build_id, user_id, ignore_success, "%s: %s" % (exctype, value))
raise e
@ -2191,7 +2188,7 @@ class BuildBaseImageTask(BuildImageTask):
canfail.append(subtasks[arch])
self.logger.debug("Got image subtasks: %r" % (subtasks))
self.logger.debug("Waiting on image subtasks (%s can fail)..." % canfail)
results = self.wait(list(subtasks.values()), all=True, failany=True, canfail=canfail)
results = self.wait(subtasks.values(), all=True, failany=True, canfail=canfail)
# if everything failed, fail even if all subtasks are in canfail
self.logger.debug('subtask results: %r', results)
@ -2500,7 +2497,7 @@ class BuildLiveMediaTask(BuildImageTask):
self.logger.debug("Got image subtasks: %r", subtasks)
self.logger.debug("Waiting on livemedia subtasks...")
results = self.wait(list(subtasks.values()), all=True, failany=True, canfail=canfail)
results = self.wait(subtasks.values(), all=True, failany=True, canfail=canfail)
# if everything failed, fail even if all subtasks are in canfail
self.logger.debug('subtask results: %r', results)
@ -2536,7 +2533,7 @@ class BuildLiveMediaTask(BuildImageTask):
wrapper_tasks[arch] = self.subtask('wrapperRPM', arglist,
label='wrapper %s' % arch, arch='noarch')
results2 = self.wait(list(wrapper_tasks.values()), all=True, failany=True)
results2 = self.wait(wrapper_tasks.values(), all=True, failany=True)
self.logger.debug('wrapper results: %r', results2)
# add wrapper rpm results into main results
@ -2677,10 +2674,10 @@ class ImageTask(BaseTaskHandler):
self.ks = ksparser.KickstartParser(version)
try:
self.ks.readKickstart(kspath)
except IOError as e:
except IOError, e:
raise koji.LiveCDError("Failed to read kickstart file "
"'%s' : %s" % (kspath, e))
except kserrors.KickstartError as e:
except kserrors.KickstartError, e:
raise koji.LiveCDError("Failed to parse kickstart file "
"'%s' : %s" % (kspath, e))
@ -2707,7 +2704,7 @@ class ImageTask(BaseTaskHandler):
self.ks.handler.repo.repoList = [] # delete whatever the ks file told us
if opts.get('repo'):
user_repos = opts['repo']
if isinstance(user_repos, six.string_types):
if isinstance(user_repos, basestring):
user_repos = user_repos.split(',')
index = 0
for user_repo in user_repos:
@ -2781,7 +2778,7 @@ class ImageTask(BaseTaskHandler):
'TC': 'T',
}
for k, v in six.iteritems(substitutions):
for k, v in substitutions.iteritems():
if k in name:
name = name.replace(k, v)
if k in version:
@ -3261,7 +3258,7 @@ class OzImageTask(BaseTaskHandler):
self.getUploadDir(), logfile)
kspath = os.path.join(scmsrcdir, os.path.basename(ksfile))
else:
tops = dict([(k, getattr(self.options, k)) for k in ('topurl','topdir')])
tops = dict([(k, getattr(self.options, k)) for k in 'topurl','topdir'])
tops['tempdir'] = self.workdir
ks_src = koji.openRemoteFile(ksfile, **tops)
kspath = os.path.join(self.workdir, os.path.basename(ksfile))
@ -3292,10 +3289,10 @@ class OzImageTask(BaseTaskHandler):
self.logger.debug('attempting to read kickstart: %s' % kspath)
try:
ks.readKickstart(kspath)
except IOError as e:
except IOError, e:
raise koji.BuildError("Failed to read kickstart file "
"'%s' : %s" % (kspath, e))
except kserrors.KickstartError as e:
except kserrors.KickstartError, e:
raise koji.BuildError("Failed to parse kickstart file "
"'%s' : %s" % (kspath, e))
return ks
@ -3538,7 +3535,7 @@ class BaseImageTask(OzImageTask):
if len(formats) == 0:
# we only want a raw disk image (no format option given)
f_dict['raw'] = True
elif 'raw' not in list(f_dict.keys()):
elif 'raw' not in f_dict.keys():
f_dict['raw'] = False
self.logger.debug('Image delivery plan: %s' % f_dict)
return f_dict
@ -4068,7 +4065,7 @@ class BuildIndirectionImageTask(OzImageTask):
self.getUploadDir(), logfile)
final_path = os.path.join(scmsrcdir, os.path.basename(filepath))
else:
tops = dict([(k, getattr(self.options, k)) for k in ('topurl','topdir')])
tops = dict([(k, getattr(self.options, k)) for k in 'topurl','topdir'])
tops['tempdir'] = self.workdir
remote_fileobj = koji.openRemoteFile(filepath, **tops)
final_path = os.path.join(self.workdir, os.path.basename(filepath))
@ -4229,7 +4226,7 @@ class BuildIndirectionImageTask(OzImageTask):
base_factory_image = _nvr_to_image(opts['base_image_build'], opts['arch'])
else:
base_factory_image = _task_to_image(int(opts['base_image_task']))
except Exception as e:
except Exception, e:
self.logger.exception(e)
raise
@ -4291,7 +4288,7 @@ class BuildIndirectionImageTask(OzImageTask):
image_id=base_factory_image.identifier,
parameters=params)
target.target_thread.join()
except Exception as e:
except Exception, e:
self.logger.debug("Exception encountered during target build")
self.logger.exception(e)
finally:
@ -4761,8 +4758,8 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
def uniq(self, items):
"""Remove duplicates from the list of items, and sort the list."""
m = dict(list(zip(items, [1] * len(items))))
l = list(m.keys())
m = dict(zip(items, [1] * len(items)))
l = m.keys()
l.sort()
return l
@ -4807,8 +4804,8 @@ class NewRepoTask(BaseTaskHandler):
# gather subtask results
data = {}
if subtasks:
results = self.wait(list(subtasks.values()), all=True, failany=True)
for (arch, task_id) in six.iteritems(subtasks):
results = self.wait(subtasks.values(), all=True, failany=True)
for (arch, task_id) in subtasks.iteritems():
data[arch] = results[task_id]
self.logger.debug("DEBUG: %r : %r " % (arch,data[arch],))
@ -4849,7 +4846,7 @@ class CreaterepoTask(BaseTaskHandler):
if external_repos:
self.merge_repos(external_repos, arch, groupdata)
elif pkglist is None:
fo = open(os.path.join(self.datadir, "EMPTY_REPO"), 'w')
fo = file(os.path.join(self.datadir, "EMPTY_REPO"), 'w')
fo.write("This repo is empty because its tag has no content for this arch\n")
fo.close()
@ -4967,7 +4964,7 @@ class NewDistRepoTask(BaseTaskHandler):
method='createdistrepo', arglist=arglist, label=arch,
parent=self.id, arch='noarch')
if len(subtasks) > 0 and task_opts['multilib']:
results = self.wait(list(subtasks.values()), all=True, failany=True)
results = self.wait(subtasks.values(), all=True, failany=True)
for arch in arch32s:
# move the 32-bit task output to the final resting place
# so the 64-bit arches can use it for multilib
@ -4983,8 +4980,8 @@ class NewDistRepoTask(BaseTaskHandler):
parent=self.id, arch='noarch')
# wait for 64-bit subtasks to finish
data = {}
results = self.wait(list(subtasks.values()), all=True, failany=True)
for (arch, task_id) in six.iteritems(subtasks):
results = self.wait(subtasks.values(), all=True, failany=True)
for (arch, task_id) in subtasks.iteritems():
data[arch] = results[task_id]
self.logger.debug("DEBUG: %r : %r " % (arch, data[arch]))
if task_opts['multilib'] and arch in arch32s:
@ -5064,7 +5061,7 @@ class createDistRepoTask(CreaterepoTask):
self.pkglist = None
self.create_local_repo(self.rinfo, arch, self.pkglist, groupdata, None, oldpkgs=oldpkgs)
if self.pkglist is None:
fo = open(os.path.join(self.datadir, "EMPTY_REPO"), 'w')
fo = file(os.path.join(self.datadir, "EMPTY_REPO"), 'w')
fo.write("This repo is empty because its tag has no content for this arch\n")
fo.close()
files = ['pkglist', 'kojipkgs']
@ -5078,7 +5075,7 @@ class createDistRepoTask(CreaterepoTask):
files.append(f)
self.session.uploadWrapper('%s/%s' % (ddir, f),
self.uploadpath, f)
return [self.uploadpath, files, list(self.sigmap.items())]
return [self.uploadpath, files, self.sigmap.items()]
def do_multilib(self, arch, ml_arch, conf):
self.repo_id = self.rinfo['id']
@ -5258,7 +5255,7 @@ enabled=1
# select our rpms
selected = {}
for rpm_id in rpm_idx:
avail_keys = list(rpm_idx[rpm_id].keys())
avail_keys = rpm_idx[rpm_id].keys()
best_key = self.pick_key(keys, avail_keys)
if best_key is None:
# we lack a matching key for this rpm
@ -5271,7 +5268,7 @@ enabled=1
#generate pkglist files
pkgfile = os.path.join(self.repodir, 'pkglist')
pkglist = open(pkgfile, 'w')
pkglist = file(pkgfile, 'w')
fs_missing = []
sig_missing = []
kojipkgs = {}
@ -5331,7 +5328,7 @@ enabled=1
fmt = '%(name)s-%(version)s-%(release)s.%(arch)s'
filenames = [[fmt % selected[r], r] for r in sig_missing]
for fname, rpm_id in sorted(filenames):
avail = list(rpm_idx.get(rpm_id, {}).keys())
avail = rpm_idx.get(rpm_id, {}).keys()
outfile.write('%s: %r\n' % (fname, avail))
outfile.close()
self.session.uploadWrapper(missing_log, self.uploadpath)
@ -5344,7 +5341,7 @@ enabled=1
def write_kojipkgs(self):
filename = os.path.join(self.repodir, 'kojipkgs')
datafile = open(filename, 'w')
datafile = file(filename, 'w')
try:
json.dump(self.kojipkgs, datafile, indent=4)
finally:
@ -5383,7 +5380,7 @@ class WaitrepoTask(BaseTaskHandler):
if not targets:
raise koji.GenericError("No build target for tag: %s" % taginfo['name'])
if isinstance(newer_than, six.string_types) and newer_than.lower() == "now":
if isinstance(newer_than, basestring) and newer_than.lower() == "now":
newer_than = start
if not isinstance(newer_than, (type(None), int, long, float)):
raise koji.GenericError("Invalid value for newer_than: %s" % newer_than)
@ -5546,7 +5543,7 @@ def get_options():
defaults[name] = config.getboolean('kojid', name)
elif name in ['plugin', 'plugins']:
defaults['plugin'] = value.split()
elif name in list(defaults.keys()):
elif name in defaults.keys():
defaults[name] = value
elif name.upper().startswith('RLIMIT_'):
defaults[name.upper()] = value
@ -5634,9 +5631,9 @@ if __name__ == "__main__":
# authenticate using SSL client certificates
session.ssl_login(options.cert, None,
options.serverca)
except koji.AuthError as e:
except koji.AuthError, e:
quit("Error: Unable to log in: %s" % e)
except six.moves.xmlrpc_client.ProtocolError:
except xmlrpclib.ProtocolError:
quit("Error: Unable to connect to server %s" % (options.server))
elif options.user:
try:
@ -5644,7 +5641,7 @@ if __name__ == "__main__":
session.login()
except koji.AuthError:
quit("Error: Unable to log in. Bad credentials?")
except six.moves.xmlrpc_client.ProtocolError:
except xmlrpclib.ProtocolError:
quit("Error: Unable to connect to server %s" % (options.server))
elif 'krbV' in sys.modules:
krb_principal = options.krb_principal
@ -5654,9 +5651,9 @@ if __name__ == "__main__":
session.krb_login(principal=krb_principal,
keytab=options.keytab,
ccache=options.ccache)
except krbV.Krb5Error as e:
except krbV.Krb5Error, e:
quit("Kerberos authentication failed: '%s' (%s)" % (e.args[1], e.args[0]))
except socket.error as e:
except socket.error, e:
quit("Could not connect to Kerberos authentication service: '%s'" % e.args[1])
else:
quit("No username/password supplied and Kerberos missing or not configured")
@ -5670,7 +5667,7 @@ if __name__ == "__main__":
#make sure it works
try:
ret = session.echo("OK")
except six.moves.xmlrpc_client.ProtocolError:
except xmlrpclib.ProtocolError:
quit("Error: Unable to connect to server %s" % (options.server))
if ret != ["OK"]:
quit("Error: incorrect server response: %r" % (ret))

View file

@ -21,7 +21,6 @@
# Largely borrowed from the mergerepo script included in createrepo and
# written by Seth Vidal
from __future__ import absolute_import
import createrepo
import os.path
import rpmUtils.miscutils
@ -239,7 +238,7 @@ class RepoMerge(object):
include_srpms[srpm_name] = (pkg.sourcerpm, pkg.repoid)
pkgorigins = os.path.join(self.yumbase.conf.cachedir, 'pkgorigins')
origins = open(pkgorigins, 'w')
origins = file(pkgorigins, 'w')
seen_rpms = {}
for repo in repos:
@ -284,7 +283,7 @@ def main(args):
opts = parse_args(args)
if opts.blocked:
blocked_fo = open(opts.blocked)
blocked_fo = file(opts.blocked)
blocked_list = blocked_fo.readlines()
blocked_fo.close()
blocked = dict([(b.strip(), 1) for b in blocked_list])

View file

@ -1863,7 +1863,7 @@ def _import_comps(session, filename, tag, options):
if pkg.type == libcomps.PACKAGE_TYPE_CONDITIONAL:
pkgopts['requires'] = pkg.requires
for k in pkgopts.keys():
if isinstance(pkgopts[k], unicode):
if six.PY2 and isinstance(pkgopts[k], unicode):
pkgopts[k] = str(pkgopts[k])
s_opts = ', '.join(["'%s': %r" % (k, pkgopts[k]) for k in sorted(list(pkgopts.keys()))])
print(" Package: %s: {%s}" % (pkg.name, s_opts))
@ -1896,7 +1896,7 @@ def _import_comps_alt(session, filename, tag, options):
if ptype == 'conditional':
pkgopts['requires'] = pdata[pkg]
for k in pkgopts.keys():
if isinstance(pkgopts[k], unicode):
if six.PY2 and isinstance(pkgopts[k], unicode):
pkgopts[k] = str(pkgopts[k])
s_opts = ', '.join(["'%s': %r" % (k, pkgopts[k]) for k in sorted(list(pkgopts.keys()))])
print(" Package: %s: {%s}" % (pkg, s_opts))

View file

@ -22,7 +22,6 @@
# Mike Bonnet <mikeb@redhat.com>
# Cristian Balint <cbalint@redhat.com>
from __future__ import absolute_import
import base64
import calendar
import cgi
@ -54,12 +53,9 @@ import tarfile
import tempfile
import time
import types
import six.moves.xmlrpc_client
import xmlrpclib
import zipfile
from koji.context import context
from six.moves import range
from six.moves import zip
import six
try:
import json
@ -401,7 +397,7 @@ class Task(object):
if xml_request.find('<?xml', 0, 10) == -1:
#handle older base64 encoded data
xml_request = base64.decodestring(xml_request)
params, method = six.moves.xmlrpc_client.loads(xml_request)
params, method = xmlrpclib.loads(xml_request)
return params
def getResult(self, raise_fault=True):
@ -420,8 +416,8 @@ class Task(object):
try:
# If the result is a Fault, then loads will raise it
# This is normally what we want to happen
result, method = six.moves.xmlrpc_client.loads(xml_result)
except six.moves.xmlrpc_client.Fault as fault:
result, method = xmlrpclib.loads(xml_result)
except xmlrpclib.Fault, fault:
if raise_fault:
raise
# Note that you can't really return a fault over xmlrpc, except by
@ -452,7 +448,7 @@ class Task(object):
if task['request'].find('<?xml', 0, 10) == -1:
#handle older base64 encoded data
task['request'] = base64.decodestring(task['request'])
task['request'] = six.moves.xmlrpc_client.loads(task['request'])[0]
task['request'] = xmlrpclib.loads(task['request'])[0]
return results
def runCallbacks(self, cbtype, old_info, attr, new_val):
@ -494,7 +490,7 @@ def make_task(method, arglist, **opts):
r = _fetchSingle(q, opts)
if not r:
raise koji.GenericError("Invalid parent task: %(parent)s" % opts)
pdata = dict(list(zip(fields, r)))
pdata = dict(zip(fields, r))
if pdata['state'] != koji.TASK_STATES['OPEN']:
raise koji.GenericError("Parent task (id %(parent)s) is not open" % opts)
#default to a higher priority than parent
@ -563,7 +559,7 @@ def make_task(method, arglist, **opts):
raise koji.GenericError("invalid channel policy")
# encode xmlrpc request
opts['request'] = six.moves.xmlrpc_client.dumps(tuple(arglist), methodname=method,
opts['request'] = xmlrpclib.dumps(tuple(arglist), methodname=method,
allow_none=1)
opts['state'] = koji.TASK_STATES['FREE']
opts['method'] = method
@ -605,7 +601,7 @@ def readGlobalInheritance(event=None):
""" % (",".join(fields), eventCondition(event))
c.execute(q, locals())
#convert list of lists into a list of dictionaries
return [dict(list(zip(fields, x))) for x in c.fetchall()]
return [dict(zip(fields, x)) for x in c.fetchall()]
def readInheritanceData(tag_id, event=None):
c = context.cnx.cursor()
@ -616,7 +612,7 @@ def readInheritanceData(tag_id, event=None):
""" % (",".join(fields), eventCondition(event))
c.execute(q, locals())
#convert list of lists into a list of dictionaries
data = [dict(list(zip(fields, x))) for x in c.fetchall()]
data = [dict(zip(fields, x)) for x in c.fetchall()]
# include the current tag_id as child_id, so we can retrace the inheritance chain later
for datum in data:
datum['child_id'] = tag_id
@ -631,7 +627,7 @@ def readDescendantsData(tag_id, event=None):
""" % (",".join(fields), eventCondition(event))
c.execute(q, locals())
#convert list of lists into a list of dictionaries
data = [dict(list(zip(fields, x))) for x in c.fetchall()]
data = [dict(zip(fields, x)) for x in c.fetchall()]
return data
@ -671,12 +667,12 @@ def _writeInheritanceData(tag_id, changes, clear=False):
data[parent_id] = link
break
if clear:
for link in six.itervalues(data):
for link in data.itervalues():
if not link.get('is_update'):
link['delete link'] = True
link['is_update'] = True
changed = False
for link in six.itervalues(data):
for link in data.itervalues():
if link.get('is_update'):
changed = True
break
@ -686,17 +682,17 @@ def _writeInheritanceData(tag_id, changes, clear=False):
return
#check for duplicate priorities
pri_index = {}
for link in six.itervalues(data):
for link in data.itervalues():
if link.get('delete link'):
continue
pri_index.setdefault(link['priority'], []).append(link)
for pri, dups in six.iteritems(pri_index):
for pri, dups in pri_index.iteritems():
if len(dups) <= 1:
continue
#oops, duplicate entries for a single priority
dup_ids = [link['parent_id'] for link in dups]
raise koji.GenericError("Inheritance priorities must be unique (pri %s: %r )" % (pri, dup_ids))
for parent_id, link in six.iteritems(data):
for parent_id, link in data.iteritems():
if not link.get('is_update'):
continue
# revoke old values
@ -704,7 +700,7 @@ def _writeInheritanceData(tag_id, changes, clear=False):
clauses=['tag_id=%(tag_id)s', 'parent_id = %(parent_id)s'])
update.make_revoke()
update.execute()
for parent_id, link in six.iteritems(data):
for parent_id, link in data.iteritems():
if not link.get('is_update'):
continue
# skip rest if we are just deleting
@ -862,7 +858,7 @@ def _direct_pkglist_add(taginfo, pkginfo, owner, block, extra_arches, force,
tag_id = tag['id']
pkg = lookup_package(pkginfo, strict=False)
if not pkg:
if not isinstance(pkginfo, six.string_types):
if not isinstance(pkginfo, basestring):
raise koji.GenericError("Invalid package: %s" % pkginfo)
if owner is not None:
owner = get_user(owner, strict=True)['id']
@ -1303,14 +1299,14 @@ def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, late
joins.append('LEFT OUTER JOIN rpmsigs on rpminfo.id = rpmsigs.rpm_id')
if arch:
data['arch'] = arch
if isinstance(arch, six.string_types):
if isinstance(arch, basestring):
clauses.append('rpminfo.arch = %(arch)s')
elif isinstance(arch, (list, tuple)):
clauses.append('rpminfo.arch IN %(arch)s')
else:
raise koji.GenericError('invalid arch option: %s' % arch)
fields, aliases = list(zip(*fields))
fields, aliases = zip(*fields)
query = QueryProcessor(tables=tables, joins=joins, clauses=clauses,
columns=fields, aliases=aliases, values=data, transform=_fix_rpm_row)
@ -1972,7 +1968,7 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True
groups.setdefault(grp_id, group)
if incl_pkgs:
for group in six.itervalues(groups):
for group in groups.itervalues():
group['packagelist'] = {}
fields = ('group_id', 'tag_id', 'package', 'blocked', 'type', 'basearchonly', 'requires')
q = """
@ -1994,7 +1990,7 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True
if incl_reqs:
# and now the group reqs
for group in six.itervalues(groups):
for group in groups.itervalues():
group['grouplist'] = {}
fields = ('group_id', 'tag_id', 'req_id', 'blocked', 'type', 'is_metapkg', 'name')
q = """SELECT %s FROM group_req_listing JOIN groups on req_id = id
@ -2089,7 +2085,7 @@ def remove_host_from_channel(hostname, channel_name):
def rename_channel(old, new):
"""Rename a channel"""
context.session.assertPerm('admin')
if not isinstance(new, six.string_types):
if not isinstance(new, basestring):
raise koji.GenericError("new channel name must be a string")
cinfo = get_channel(old, strict=True)
dup_check = get_channel(new, strict=False)
@ -2143,7 +2139,7 @@ def get_ready_hosts():
""" % ','.join(fields)
# XXX - magic number in query
c.execute(q)
hosts = [dict(list(zip(aliases, row))) for row in c.fetchall()]
hosts = [dict(zip(aliases, row)) for row in c.fetchall()]
for host in hosts:
q = """SELECT channel_id FROM host_channels WHERE host_id=%(id)s"""
c.execute(q, host)
@ -2160,7 +2156,7 @@ def get_all_arches():
#in a perfect world, this list would only include canonical
#arches, but not all admins will understand that.
ret[koji.canonArch(arch)] = 1
return list(ret.keys())
return ret.keys()
def get_active_tasks(host=None):
"""Return data on tasks that are yet to be run"""
@ -2330,7 +2326,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None):
groupsdir = "%s/groups" % (repodir)
koji.ensuredir(groupsdir)
comps = koji.generate_comps(groups, expand_groups=True)
fo = open("%s/comps.xml" % groupsdir, 'w')
fo = file("%s/comps.xml" % groupsdir, 'w')
fo.write(comps)
fo.close()
@ -2349,7 +2345,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None):
top_relpath = koji.util.relpath(koji.pathinfo.topdir, archdir)
top_link = os.path.join(archdir, 'toplink')
os.symlink(top_relpath, top_link)
pkglist[repoarch] = open(os.path.join(archdir, 'pkglist'), 'w')
pkglist[repoarch] = file(os.path.join(archdir, 'pkglist'), 'w')
#NOTE - rpms is now an iterator
for rpminfo in rpms:
if not with_debuginfo and koji.is_debuginfo(rpminfo['name']):
@ -2374,7 +2370,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None):
#write blocked package lists
for repoarch in repo_arches:
blocklist = open(os.path.join(repodir, repoarch, 'blocklist'), 'w')
blocklist = file(os.path.join(repodir, repoarch, 'blocklist'), 'w')
for pkg in blocks:
blocklist.write(pkg['package_name'])
blocklist.write('\n')
@ -2410,7 +2406,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None):
os.symlink(relpath, destlink)
except:
log_error('Error linking %s to %s' % (destlink, relpath))
for artifact_dir, artifacts in six.iteritems(artifact_dirs):
for artifact_dir, artifacts in artifact_dirs.iteritems():
_write_maven_repo_metadata(artifact_dir, artifacts)
koji.plugin.run_callbacks('postRepoInit', tag=tinfo, with_src=with_src, with_debuginfo=with_debuginfo,
@ -2422,7 +2418,7 @@ def _write_maven_repo_metadata(destdir, artifacts):
# group_id and artifact_id should be the same for all entries,
# so we're really only comparing versions.
artifacts = sorted(artifacts, cmp=lambda a, b: rpm.labelCompare(a, b))
artifactinfo = dict(list(zip(['group_id', 'artifact_id', 'version'], artifacts[-1])))
artifactinfo = dict(zip(['group_id', 'artifact_id', 'version'], artifacts[-1]))
artifactinfo['timestamp'] = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
contents = """<?xml version="1.0"?>
<metadata>
@ -2441,7 +2437,7 @@ def _write_maven_repo_metadata(destdir, artifacts):
</versioning>
</metadata>
""" % datetime.datetime.now().strftime('%Y%m%d%H%M%S')
mdfile = open(os.path.join(destdir, 'maven-metadata.xml'), 'w')
mdfile = file(os.path.join(destdir, 'maven-metadata.xml'), 'w')
mdfile.write(contents)
mdfile.close()
_generate_maven_metadata(destdir)
@ -2549,7 +2545,7 @@ def repo_references(repo_id):
'host_id': 'host_id',
'create_event': 'create_event',
'state': 'state'}
fields, aliases = list(zip(*list(fields.items())))
fields, aliases = zip(*fields.items())
values = {'repo_id': repo_id}
clauses = ['repo_id=%(repo_id)s', 'retire_event IS NULL']
query = QueryProcessor(columns=fields, aliases=aliases, tables=['standard_buildroot'],
@ -2796,7 +2792,7 @@ def lookup_name(table, info, strict=False, create=False):
q = """SELECT id,name FROM %s WHERE id=%%(info)d""" % table
elif isinstance(info, str):
q = """SELECT id,name FROM %s WHERE name=%%(info)s""" % table
elif isinstance(info, six.text_type):
elif isinstance(info, unicode):
info = koji.fixEncoding(info)
q = """SELECT id,name FROM %s WHERE name=%%(info)s""" % table
else:
@ -2914,7 +2910,7 @@ def _create_tag(name, parent=None, arches=None, perm=None, locked=False, maven_s
# add extra data
if extra is not None:
for key, value in six.iteritems(extra):
for key, value in extra.iteritems():
data = {
'tag_id': tag_id,
'key': key,
@ -2971,15 +2967,15 @@ def get_tag(tagInfo, strict=False, event=None):
'tag_config.maven_include_all': 'maven_include_all'
}
clauses = [eventCondition(event, table='tag_config')]
if isinstance(tagInfo, six.integer_types):
if isinstance(tagInfo, (int, long)):
clauses.append("tag.id = %(tagInfo)i")
elif isinstance(tagInfo, six.string_types):
elif isinstance(tagInfo, basestring):
clauses.append("tag.name = %(tagInfo)s")
else:
raise koji.GenericError('invalid type for tagInfo: %s' % type(tagInfo))
data = {'tagInfo': tagInfo}
fields, aliases = list(zip(*list(fields.items())))
fields, aliases = zip(*fields.items())
query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,
joins=joins, clauses=clauses, values=data)
result = query.executeOne()
@ -3199,7 +3195,7 @@ def get_external_repos(info=None, url=None, event=None, queryOpts=None):
if info is not None:
if isinstance(info, str):
clauses.append('name = %(info)s')
elif isinstance(info, six.integer_types):
elif isinstance(info, (int, long)):
clauses.append('id = %(info)i')
else:
raise koji.GenericError('invalid type for lookup: %s' % type(info))
@ -3521,7 +3517,7 @@ def get_build(buildInfo, strict=False):
('users.id', 'owner_id'), ('users.name', 'owner_name'),
('build.source', 'source'),
('build.extra', 'extra'))
fields, aliases = list(zip(*fields))
fields, aliases = zip(*fields)
joins = ['events ON build.create_event = events.id',
'package on build.pkg_id = package.id',
'volume on build.volume_id = volume.id',
@ -3638,7 +3634,7 @@ def get_rpm(rpminfo, strict=False, multi=False):
)
# we can look up by id or NVRA
data = None
if isinstance(rpminfo, six.integer_types):
if isinstance(rpminfo, (int, long)):
data = {'id': rpminfo}
elif isinstance(rpminfo, str):
data = koji.parse_NVRA(rpminfo)
@ -3756,7 +3752,7 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID
else:
raise koji.GenericError('invalid type for "arches" parameter: %s' % type(arches))
fields, aliases = list(zip(*fields))
fields, aliases = zip(*fields)
query = QueryProcessor(columns=fields, aliases=aliases,
tables=['rpminfo'], joins=joins, clauses=clauses,
values=locals(), transform=_fix_rpm_row, opts=queryOpts)
@ -4081,7 +4077,7 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
clauses.append('archiveinfo.btype_id = %(btype_id)s')
values['btype_id'] = btype['id']
columns, aliases = list(zip(*fields))
columns, aliases = zip(*fields)
ret = QueryProcessor(tables=tables, columns=columns, aliases=aliases, joins=joins,
transform=_fix_archive_row,
clauses=clauses, values=values, opts=queryOpts).execute()
@ -4400,7 +4396,7 @@ def _multiRow(query, values, fields):
as a list of maps. Each map in the list will have a key for each
element in the "fields" list. If there are no results, an empty
list will be returned."""
return [dict(list(zip(fields, row))) for row in _fetchMulti(query, values)]
return [dict(zip(fields, row)) for row in _fetchMulti(query, values)]
def _singleRow(query, values, fields, strict=False):
"""Return a single row from "query". Named parameters can be
@ -4412,7 +4408,7 @@ def _singleRow(query, values, fields, strict=False):
returned."""
row = _fetchSingle(query, values, strict)
if row:
return dict(list(zip(fields, row)))
return dict(zip(fields, row))
else:
#strict enforced by _fetchSingle
return None
@ -4964,7 +4960,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)
import_rpm_file(fn, binfo, rpminfo)
add_rpm_sig(rpminfo['id'], koji.rip_rpm_sighdr(fn))
if logs:
for key, files in six.iteritems(logs):
for key, files in logs.iteritems():
if not key:
key = None
for relpath in files:
@ -5133,7 +5129,7 @@ class CG_Importer(object):
if metadata is None:
#default to looking for uploaded file
metadata = 'metadata.json'
if not isinstance(metadata, (str, six.text_type)):
if not isinstance(metadata, (str, unicode)):
raise koji.GenericError("Invalid metadata value: %r" % metadata)
if metadata.endswith('.json'):
# handle uploaded metadata
@ -5141,7 +5137,7 @@ class CG_Importer(object):
path = os.path.join(workdir, directory, metadata)
if not os.path.exists(path):
raise koji.GenericError("No such file: %s" % metadata)
fo = open(path, 'rt')
fo = open(path, 'rb')
metadata = fo.read()
fo.close()
self.raw_metadata = metadata
@ -5191,7 +5187,7 @@ class CG_Importer(object):
datetime.datetime.fromtimestamp(float(metadata['build']['end_time'])).isoformat(' ')
owner = metadata['build'].get('owner', None)
if owner:
if not isinstance(owner, six.string_types):
if not isinstance(owner, basestring):
raise koji.GenericError("Invalid owner format (expected username): %s" % owner)
buildinfo['owner'] = get_user(owner, strict=True)['id']
self.buildinfo = buildinfo
@ -5532,14 +5528,14 @@ def add_external_rpm(rpminfo, external_repo, strict=True):
#sanity check rpminfo
dtypes = (
('name', six.string_types),
('version', six.string_types),
('release', six.string_types),
('name', basestring),
('version', basestring),
('release', basestring),
('epoch', (int, type(None))),
('arch', six.string_types),
('arch', basestring),
('payloadhash', str),
('size', int),
('buildtime', six.integer_types))
('buildtime', (int, long)))
for field, allowed in dtypes:
if field not in rpminfo:
raise koji.GenericError("%s field missing: %r" % (field, rpminfo))
@ -5973,7 +5969,7 @@ def check_old_image_files(old):
(img_path, img_size, old['filesize']))
# old images always used sha256 hashes
sha256sum = hashlib.sha256()
image_fo = open(img_path, 'r')
image_fo = file(img_path, 'r')
while True:
data = image_fo.read(1048576)
sha256sum.update(data)
@ -6125,7 +6121,7 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
filename = koji.fixEncoding(os.path.basename(filepath))
archiveinfo['filename'] = filename
archiveinfo['size'] = os.path.getsize(filepath)
archivefp = open(filepath)
archivefp = file(filepath)
m = md5_constructor()
while True:
contents = archivefp.read(8192)
@ -6265,14 +6261,14 @@ def _generate_maven_metadata(mavendir):
sumfile = mavenfile + ext
if sumfile not in mavenfiles:
sum = sum_constr()
fobj = open('%s/%s' % (mavendir, mavenfile))
fobj = file('%s/%s' % (mavendir, mavenfile))
while True:
content = fobj.read(8192)
if not content:
break
sum.update(content)
fobj.close()
sumobj = open('%s/%s' % (mavendir, sumfile), 'w')
sumobj = file('%s/%s' % (mavendir, sumfile), 'w')
sumobj.write(sum.hexdigest())
sumobj.close()
@ -6328,7 +6324,7 @@ def add_rpm_sig(an_rpm, sighdr):
# - write to fs
sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))
koji.ensuredir(os.path.dirname(sigpath))
fo = open(sigpath, 'wb')
fo = file(sigpath, 'wb')
fo.write(sighdr)
fo.close()
koji.plugin.run_callbacks('postRPMSign', sigkey=sigkey, sighash=sighash, build=binfo, rpm=rinfo)
@ -6344,7 +6340,7 @@ def _scan_sighdr(sighdr, fn):
sig_start, sigsize = koji.find_rpm_sighdr(fn)
hdr_start = sig_start + sigsize
hdrsize = koji.rpm_hdr_size(fn, hdr_start)
inp = open(fn, 'rb')
inp = file(fn, 'rb')
outp = tempfile.TemporaryFile(mode='w+b')
#before signature
outp.write(inp.read(sig_start))
@ -6381,7 +6377,7 @@ def check_rpm_sig(an_rpm, sigkey, sighdr):
koji.splice_rpm_sighdr(sighdr, rpm_path, temp)
ts = rpm.TransactionSet()
ts.setVSFlags(0) #full verify
fo = open(temp, 'rb')
fo = file(temp, 'rb')
hdr = ts.hdrFromFdno(fo.fileno())
fo.close()
except:
@ -6444,7 +6440,7 @@ def write_signed_rpm(an_rpm, sigkey, force=False):
else:
os.unlink(signedpath)
sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))
fo = open(sigpath, 'rb')
fo = file(sigpath, 'rb')
sighdr = fo.read()
fo.close()
koji.ensuredir(os.path.dirname(signedpath))
@ -6677,7 +6673,7 @@ def query_history(tables=None, **kwargs):
fields['creator.id = %(editor)i'] = '_created_by'
fields['revoker.id = %(editor)i'] = '_revoked_by'
elif arg == 'after':
if not isinstance(value, six.string_types):
if not isinstance(value, basestring):
value = datetime.datetime.fromtimestamp(value).isoformat(' ')
data['after'] = value
clauses.append('ev1.time > %(after)s OR ev2.time > %(after)s')
@ -6692,7 +6688,7 @@ def query_history(tables=None, **kwargs):
fields[c_test] = '_created_after_event'
fields[r_test] = '_revoked_after_event'
elif arg == 'before':
if not isinstance(value, six.string_types):
if not isinstance(value, basestring):
value = datetime.datetime.fromtimestamp(value).isoformat(' ')
data['before'] = value
clauses.append('ev1.time < %(before)s OR ev2.time < %(before)s')
@ -6708,7 +6704,7 @@ def query_history(tables=None, **kwargs):
fields[r_test] = '_revoked_before_event'
if skip:
continue
fields, aliases = list(zip(*list(fields.items())))
fields, aliases = zip(*fields.items())
query = QueryProcessor(columns=fields, aliases=aliases, tables=[table],
joins=joins, clauses=clauses, values=data)
ret[table] = query.iterate()
@ -6847,7 +6843,7 @@ def build_references(build_id, limit=None):
idx.setdefault(row['id'], row)
if limit is not None and len(idx) > limit:
break
ret['rpms'] = list(idx.values())
ret['rpms'] = idx.values()
ret['component_of'] = []
# find images/archives that contain the build rpms
@ -6878,7 +6874,7 @@ def build_references(build_id, limit=None):
idx.setdefault(row['id'], row)
if limit is not None and len(idx) > limit:
break
ret['archives'] = list(idx.values())
ret['archives'] = idx.values()
# find images/archives that contain the build archives
fields = ['archive_id']
@ -7202,7 +7198,7 @@ def get_notification_recipients(build, tag_id, state):
#FIXME - if tag_id is None, we don't have a good way to get the package owner.
# using all package owners from all tags would be way overkill.
emails_uniq = list(dict([(x, 1) for x in emails]).keys())
emails_uniq = dict([(x, 1) for x in emails]).keys()
return emails_uniq
def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_success=False, failure_msg=''):
@ -7225,7 +7221,7 @@ def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_s
from_tag = get_tag(from_id)
for email in get_notification_recipients(build, from_tag['id'], state):
recipients[email] = 1
recipients_uniq = list(recipients.keys())
recipients_uniq = recipients.keys()
if len(recipients_uniq) > 0 and not (is_successful and ignore_success):
task_id = make_task('tagNotification', [recipients_uniq, is_successful, tag_id, from_id, build_id, user_id, ignore_success, failure_msg])
return task_id
@ -7439,8 +7435,8 @@ class InsertProcessor(object):
if not self.data and not self.rawdata:
return "-- incomplete update: no assigns"
parts = ['INSERT INTO %s ' % self.table]
columns = list(self.data.keys())
columns.extend(list(self.rawdata.keys()))
columns = self.data.keys()
columns.extend(self.rawdata.keys())
parts.append("(%s) " % ', '.join(columns))
values = []
for key in columns:
@ -7483,7 +7479,7 @@ class InsertProcessor(object):
del data['create_event']
del data['creator_id']
clauses = ["%s = %%(%s)s" % (k, k) for k in data]
query = QueryProcessor(columns=list(data.keys()), tables=[self.table],
query = QueryProcessor(columns=data.keys(), tables=[self.table],
clauses=clauses, values=data)
if query.execute():
return True
@ -7602,7 +7598,7 @@ class QueryProcessor(object):
if columns and aliases:
if len(columns) != len(aliases):
raise Exception('column and alias lists must be the same length')
self.colsByAlias = dict(list(zip(aliases, columns)))
self.colsByAlias = dict(zip(aliases, columns))
else:
self.colsByAlias = {}
self.tables = tables
@ -7862,7 +7858,7 @@ def policy_get_pkg(data):
if not pkginfo:
#for some operations (e.g. adding a new package), the package
#entry may not exist yet
if isinstance(data['package'], six.string_types):
if isinstance(data['package'], basestring):
return {'id' : None, 'name' : data['package']}
else:
raise koji.GenericError("Invalid package: %s" % data['package'])
@ -8144,7 +8140,7 @@ class UserInGroupTest(koji.policy.BaseSimpleTest):
return False
groups = koji.auth.get_user_groups(user['id'])
args = self.str.split()[1:]
for group_id, group in six.iteritems(groups):
for group_id, group in groups.iteritems():
for pattern in args:
if fnmatch.fnmatch(group, pattern):
return True
@ -8782,7 +8778,7 @@ class RootExports(object):
# we will accept offset and size as strings to work around xmlrpc limits
offset = koji.decode_int(offset)
size = koji.decode_int(size)
if isinstance(md5sum, six.string_types):
if isinstance(md5sum, basestring):
# this case is for backwards compatibility
verify = "md5"
digest = md5sum
@ -8800,7 +8796,7 @@ class RootExports(object):
fn = get_upload_path(path, name, create=True, volume=volume)
try:
st = os.lstat(fn)
except OSError as e:
except OSError, e:
if e.errno == errno.ENOENT:
pass
else:
@ -8814,7 +8810,7 @@ class RootExports(object):
# but we allow .log files to be uploaded multiple times to support
# realtime log-file viewing
raise koji.GenericError("file already exists: %s" % fn)
fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0o666)
fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0666)
# log_error("fd=%r" %fd)
try:
if offset == 0 or (offset == -1 and size == len(contents)):
@ -8869,7 +8865,7 @@ class RootExports(object):
data = {}
try:
fd = os.open(fn, os.O_RDONLY)
except OSError as e:
except OSError, e:
if e.errno == errno.ENOENT:
return None
else:
@ -8877,7 +8873,7 @@ class RootExports(object):
try:
try:
fcntl.lockf(fd, fcntl.LOCK_SH|fcntl.LOCK_NB)
except IOError as e:
except IOError, e:
raise koji.LockError(e)
st = os.fstat(fd)
if not stat.S_ISREG(st.st_mode):
@ -8916,7 +8912,7 @@ class RootExports(object):
if not os.path.isfile(filePath):
raise koji.GenericError('no file "%s" output by task %i' % (fileName, taskID))
# Let the caller handler any IO or permission errors
f = open(filePath, 'r')
f = file(filePath, 'r')
if isinstance(offset, str):
offset = int(offset)
if offset != None and offset > 0:
@ -9342,9 +9338,9 @@ class RootExports(object):
if before:
if isinstance(before, datetime.datetime):
before = calendar.timegm(before.utctimetuple())
elif isinstance(before, (str, six.text_type)):
elif isinstance(before, (str, unicode)):
before = koji.util.parseTime(before)
elif isinstance(before, six.integer_types):
elif isinstance(before, (int, long)):
pass
else:
raise koji.GenericError('invalid type for before: %s' % type(before))
@ -9352,9 +9348,9 @@ class RootExports(object):
if after:
if isinstance(after, datetime.datetime):
after = calendar.timegm(after.utctimetuple())
elif isinstance(after, (str, six.text_type)):
elif isinstance(after, (str, unicode)):
after = koji.util.parseTime(after)
elif isinstance(after, six.integer_types):
elif isinstance(after, (int, long)):
pass
else:
raise koji.GenericError('invalid type for after: %s' % type(after))
@ -9443,7 +9439,7 @@ class RootExports(object):
def listTagged(self, tag, event=None, inherit=False, prefix=None, latest=False, package=None, owner=None, type=None):
"""List builds tagged with tag"""
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
#lookup tag id
tag = get_tag_id(tag, strict=True)
results = readTaggedBuilds(tag, event, inherit=inherit, latest=latest, package=package, owner=owner, type=type)
@ -9454,14 +9450,14 @@ class RootExports(object):
def listTaggedRPMS(self, tag, event=None, inherit=False, latest=False, package=None, arch=None, rpmsigs=False, owner=None, type=None):
"""List rpms and builds within tag"""
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
#lookup tag id
tag = get_tag_id(tag, strict=True)
return readTaggedRPMS(tag, event=event, inherit=inherit, latest=latest, package=package, arch=arch, rpmsigs=rpmsigs, owner=owner, type=type)
def listTaggedArchives(self, tag, event=None, inherit=False, latest=False, package=None, type=None):
"""List archives and builds within a tag"""
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
tag = get_tag_id(tag, strict=True)
return readTaggedArchives(tag, event=event, inherit=inherit, latest=latest, package=package, type=type)
@ -9618,14 +9614,14 @@ class RootExports(object):
def getLatestBuilds(self, tag, event=None, package=None, type=None):
"""List latest builds for tag (inheritance enabled)"""
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
#lookup tag id
tag = get_tag_id(tag, strict=True)
return readTaggedBuilds(tag, event, inherit=True, latest=True, package=package, type=type)
def getLatestRPMS(self, tag, package=None, arch=None, event=None, rpmsigs=False, type=None):
"""List latest RPMS for tag (inheritance enabled)"""
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
#lookup tag id
tag = get_tag_id(tag, strict=True)
return readTaggedRPMS(tag, package=package, arch=arch, event=event, inherit=True, latest=True, rpmsigs=rpmsigs, type=type)
@ -9685,13 +9681,13 @@ class RootExports(object):
def getInheritanceData(self, tag, event=None):
"""Return inheritance data for tag"""
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
#lookup tag id
tag = get_tag_id(tag, strict=True)
return readInheritanceData(tag, event)
def setInheritanceData(self, tag, data, clear=False):
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
#lookup tag id
tag = get_tag_id(tag, strict=True)
context.session.assertPerm('admin')
@ -9702,7 +9698,7 @@ class RootExports(object):
stops = {}
if jumps is None:
jumps = {}
if not isinstance(tag, six.integer_types):
if not isinstance(tag, (int, long)):
#lookup tag id
tag = get_tag_id(tag, strict=True)
for mapping in [stops, jumps]:
@ -9729,7 +9725,7 @@ class RootExports(object):
- buildroot_id
If no build has the given ID, or the build generated no RPMs, an empty list is returned."""
if not isinstance(build, six.integer_types):
if not isinstance(build, (int, long)):
#lookup build id
build = self.findBuildID(build, strict=True)
return self.listRPMs(buildID=build)
@ -9953,9 +9949,9 @@ class RootExports(object):
userID = get_user(userID, strict=True)['id']
if pkgID is not None:
pkgID = get_package_id(pkgID, strict=True)
result_list = list(readPackageList(tagID=tagID, userID=userID, pkgID=pkgID,
result_list = readPackageList(tagID=tagID, userID=userID, pkgID=pkgID,
inherit=inherited, with_dups=with_dups,
event=event).values())
event=event).values()
if with_dups:
# when with_dups=True, readPackageList returns a list of list of dicts
# convert it to a list of dicts for consistency
@ -10120,7 +10116,7 @@ class RootExports(object):
return taginfo
def getRepo(self, tag, state=None, event=None, dist=False):
if isinstance(tag, six.integer_types):
if isinstance(tag, (int, long)):
id = tag
else:
id = get_tag_id(tag, strict=True)
@ -10386,8 +10382,8 @@ class RootExports(object):
if val.find('<?xml', 0, 10) == -1:
#handle older base64 encoded data
val = base64.decodestring(val)
data, method = six.moves.xmlrpc_client.loads(val)
except six.moves.xmlrpc_client.Fault as fault:
data, method = xmlrpclib.loads(val)
except xmlrpclib.Fault, fault:
data = fault
task[f] = data
yield task
@ -10434,7 +10430,7 @@ class RootExports(object):
#XXX hard-coded interval
c = context.cnx.cursor()
c.execute(q, koji.TASK_STATES)
return [dict(list(zip([f[1] for f in fields], row))) for row in c.fetchall()]
return [dict(zip([f[1] for f in fields], row)) for row in c.fetchall()]
def resubmitTask(self, taskID):
"""Retry a canceled or failed task, using the same parameter as the original task.
@ -10622,7 +10618,7 @@ class RootExports(object):
buildinfo = get_build(build)
if not buildinfo:
raise koji.GenericError('build does not exist: %s' % build)
elif isinstance(ts, six.moves.xmlrpc_client.DateTime):
elif isinstance(ts, xmlrpclib.DateTime):
#not recommended
#the xmlrpclib.DateTime class is almost useless
try:
@ -11140,7 +11136,7 @@ class BuildRoot(object):
('checksum_type', 'checksum_type'),
('project_dep', 'project_dep'),
]
columns, aliases = list(zip(*fields))
columns, aliases = zip(*fields)
query = QueryProcessor(tables=tables, columns=columns,
joins=joins, clauses=clauses,
values=self.data,
@ -11318,7 +11314,7 @@ class Host(object):
WHERE host_id = %%(host_id)s AND state = %%(st_open)s
""" % (",".join(fields))
c.execute(q, locals())
tasks = [dict(list(zip(fields, x))) for x in c.fetchall()]
tasks = [dict(zip(fields, x)) for x in c.fetchall()]
for task in tasks:
id = task['id']
if task['waiting']:
@ -11380,7 +11376,7 @@ class Host(object):
""" % (",".join(fields))
c.execute(q, locals())
for data in c.fetchall():
data = dict(list(zip(fields, data)))
data = dict(zip(fields, data))
# XXX - we should do some pruning here, but for now...
# check arch
if data['arch'] not in arches:
@ -11552,7 +11548,7 @@ class HostExports(object):
safer_move(fn, dest)
os.symlink(dest, fn)
if logs:
for key, files in six.iteritems(logs):
for key, files in logs.iteritems():
if key:
logdir = "%s/logs/%s" % (dir, key)
else:
@ -11575,7 +11571,7 @@ class HostExports(object):
scratchdir = koji.pathinfo.scratch()
username = get_user(task.getOwner())['name']
destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
for reldir, files in list(results['files'].items()) + [('', results['logs'])]:
for reldir, files in results['files'].items() + [('', results['logs'])]:
for filename in files:
if reldir:
relpath = os.path.join(reldir, filename)
@ -11607,7 +11603,7 @@ class HostExports(object):
scratchdir = koji.pathinfo.scratch()
username = get_user(task.getOwner())['name']
destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
for relpath in list(results['output'].keys()) + results['logs']:
for relpath in results['output'].keys() + results['logs']:
filename = os.path.join(koji.pathinfo.task(results['task_id']), relpath)
dest = os.path.join(destdir, relpath)
koji.ensuredir(os.path.dirname(dest))
@ -11763,7 +11759,7 @@ class HostExports(object):
maven_buildroot_id = maven_results['buildroot_id']
maven_task_dir = koji.pathinfo.task(maven_task_id)
# import the build output
for relpath, files in six.iteritems(maven_results['files']):
for relpath, files in maven_results['files'].iteritems():
dir_maven_info = maven_info
poms = [f for f in files if f.endswith('.pom')]
if len(poms) == 0:
@ -11911,7 +11907,7 @@ class HostExports(object):
task_dir = koji.pathinfo.task(results['task_id'])
# import the build output
for relpath, metadata in six.iteritems(results['output']):
for relpath, metadata in results['output'].iteritems():
archivetype = get_archive_type(relpath)
if not archivetype:
# Unknown archive type, fail the build
@ -12135,9 +12131,9 @@ class HostExports(object):
extra_deps = []
task_deps = {}
for dep in extra_deps:
if isinstance(dep, six.integer_types):
if isinstance(dep, (int, long)):
task_output = list_task_output(dep, stat=True)
for filepath, filestats in six.iteritems(task_output):
for filepath, filestats in task_output.iteritems():
if os.path.splitext(filepath)[1] in ['.log', '.md5', '.sha1']:
continue
tokens = filepath.split('/')
@ -12170,7 +12166,7 @@ class HostExports(object):
logger.error("Current build is %s, new build is %s.", idx_build, archive['build_id'])
maven_build_index[archive['group_id']][archive['artifact_id']][archive['version']] = archive['build_id']
ignore.extend(list(task_deps.values()))
ignore.extend(task_deps.values())
SNAPSHOT_RE = re.compile(r'-\d{8}\.\d{6}-\d+')
ignore_by_label = {}
@ -12223,7 +12219,7 @@ class HostExports(object):
if build_id:
build = get_build(build_id)
logger.error("g:a:v supplied by build %(nvr)s", build)
logger.error("Build supplies %i archives: %r", len(build_archives), list(build_archives.keys()))
logger.error("Build supplies %i archives: %r", len(build_archives), build_archives.keys())
if tag_archive:
logger.error("Size mismatch, br: %i, db: %i", fileinfo['size'], tag_archive['size'])
raise koji.BuildrootError('Unknown file in build environment: %s, size: %s' % \
@ -12301,7 +12297,7 @@ class HostExports(object):
repodir = koji.pathinfo.repo(repo_id, rinfo['tag_name'])
workdir = koji.pathinfo.work()
if not rinfo['dist']:
for arch, (uploadpath, files) in six.iteritems(data):
for arch, (uploadpath, files) in data.iteritems():
archdir = "%s/%s" % (repodir, koji.canonArch(arch))
if not os.path.isdir(archdir):
raise koji.GenericError("Repo arch directory missing: %s" % archdir)
@ -12430,7 +12426,7 @@ class HostExports(object):
logger.debug("os.link(%r, %r)", rpmpath, l_dst)
try:
os.link(rpmpath, l_dst)
except OSError as ose:
except OSError, ose:
if ose.errno == 18:
shutil.copy2(
rpmpath, os.path.join(archdir, bnplet, bnp))
@ -12478,11 +12474,11 @@ def get_upload_path(reldir, name, create=False, volume=None):
# assuming login was asserted earlier
u_fn = os.path.join(udir, '.user')
if os.path.exists(u_fn):
user_id = int(open(u_fn, 'r').read())
user_id = int(file(u_fn, 'r').read())
if context.session.user_id != user_id:
raise koji.GenericError("Invalid upload directory, not owner: %s" % orig_reldir)
else:
fo = open(u_fn, 'w')
fo = file(u_fn, 'w')
fo.write(str(context.session.user_id))
fo.close()
return os.path.join(udir, name)
@ -12523,11 +12519,11 @@ def handle_upload(environ):
size = 0
chksum = sum_cls()
inf = environ['wsgi.input']
fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0o666)
fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0666)
try:
try:
fcntl.lockf(fd, fcntl.LOCK_EX|fcntl.LOCK_NB)
except IOError as e:
except IOError, e:
raise koji.LockError(e)
if offset == -1:
offset = os.lseek(fd, 0, 2)

View file

@ -18,9 +18,7 @@
# Authors:
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
from __future__ import division
from six.moves.configparser import RawConfigParser
from ConfigParser import RawConfigParser
import datetime
import inspect
import logging
@ -31,8 +29,8 @@ import traceback
import types
import pprint
import resource
import six.moves.xmlrpc_client
from six.moves.xmlrpc_client import getparser, dumps, Fault
import xmlrpclib
from xmlrpclib import getparser, dumps, Fault
from koji.server import WSGIWrapper
import koji
@ -42,13 +40,12 @@ import koji.plugin
import koji.policy
import koji.util
from koji.context import context
from six.moves import range
# Workaround to allow xmlrpclib deal with iterators
class Marshaller(six.moves.xmlrpc_client.Marshaller):
class Marshaller(xmlrpclib.Marshaller):
dispatch = six.moves.xmlrpc_client.Marshaller.dispatch.copy()
dispatch = xmlrpclib.Marshaller.dispatch.copy()
def dump_generator(self, value, write):
dump = self.__dump
@ -64,7 +61,7 @@ class Marshaller(six.moves.xmlrpc_client.Marshaller):
self.dump_string(value, write)
dispatch[datetime.datetime] = dump_datetime
six.moves.xmlrpc_client.Marshaller = Marshaller
xmlrpclib.Marshaller = Marshaller
class HandlerRegistry(object):
@ -112,7 +109,7 @@ class HandlerRegistry(object):
Handlers are functions marked with one of the decorators defined in koji.plugin
"""
for v in six.itervalues(vars(plugin)):
for v in vars(plugin).itervalues():
if isinstance(v, type):
#skip classes
continue
@ -132,7 +129,7 @@ class HandlerRegistry(object):
if ret:
return ret
ret = tuple(inspect.getargspec(func))
if inspect.ismethod(func) and func.__self__:
if inspect.ismethod(func) and func.im_self:
# bound method, remove first arg
args, varargs, varkw, defaults = ret
if args:
@ -159,17 +156,17 @@ class HandlerRegistry(object):
def _getFuncArgs(self, func):
args = []
for x in range(0, func.__code__.co_argcount):
if x == 0 and func.__code__.co_varnames[x] == "self":
for x in range(0, func.func_code.co_argcount):
if x == 0 and func.func_code.co_varnames[x] == "self":
continue
if func.__defaults__ and func.__code__.co_argcount - x <= len(func.__defaults__):
args.append((func.__code__.co_varnames[x], func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)]))
if func.func_defaults and func.func_code.co_argcount - x <= len(func.func_defaults):
args.append((func.func_code.co_varnames[x], func.func_defaults[x - func.func_code.co_argcount + len(func.func_defaults)]))
else:
args.append(func.__code__.co_varnames[x])
args.append(func.func_code.co_varnames[x])
return args
def system_listMethods(self):
return list(self.funcs.keys())
return self.funcs.keys()
def system_methodSignature(self, method):
#it is not possible to autogenerate this data
@ -245,7 +242,7 @@ class ModXMLRPCRequestHandler(object):
# wrap response in a singleton tuple
response = (response,)
response = dumps(response, methodresponse=1, allow_none=1)
except Fault as fault:
except Fault, fault:
self.traceback = True
response = dumps(fault)
except:
@ -339,7 +336,7 @@ class ModXMLRPCRequestHandler(object):
for call in calls:
try:
result = self._dispatch(call['methodName'], call['params'])
except Fault as fault:
except Fault, fault:
results.append({'faultCode': fault.faultCode, 'faultString': fault.faultString})
except:
# transform unknown exceptions into XML-RPC Faults
@ -515,7 +512,7 @@ def load_config(environ):
opts['policy'] = dict(config.items('policy'))
else:
opts['policy'] = {}
for pname, text in six.iteritems(_default_policies):
for pname, text in _default_policies.iteritems():
opts['policy'].setdefault(pname, text)
# use configured KojiDir
if opts.get('KojiDir') is not None:
@ -577,14 +574,14 @@ def get_policy(opts, plugins):
for plugin_name in opts.get('Plugins', '').split():
alltests.append(koji.policy.findSimpleTests(vars(plugins.get(plugin_name))))
policy = {}
for pname, text in six.iteritems(opts['policy']):
for pname, text in opts['policy'].iteritems():
#filter/merge tests
merged = {}
for tests in alltests:
# tests can be limited to certain policies by setting a class variable
for name, test in six.iteritems(tests):
for name, test in tests.iteritems():
if hasattr(test, 'policy'):
if isinstance(test.policy, six.string_types):
if isinstance(test.policy, basestring):
if pname != test.policy:
continue
elif pname not in test.policy:
@ -690,7 +687,7 @@ def handler(req):
def get_memory_usage():
pagesize = resource.getpagesize()
statm = [pagesize*int(y)//1024 for y in "".join(open("/proc/self/statm").readlines()).strip().split()]
statm = [pagesize*int(y)/1024 for y in "".join(open("/proc/self/statm").readlines()).strip().split()]
size, res, shr, text, lib, data, dirty = statm
return res - shr

View file

@ -20,14 +20,11 @@
# This library and program is heavily based on rpmdiff from the rpmlint package
# It was modified to be used as standalone library for the Koji project.
from __future__ import absolute_import
import rpm
import os
import itertools
import sys, getopt
from six.moves import zip
import six
class Rpmdiff:
@ -114,8 +111,8 @@ class Rpmdiff:
old_files_dict = self.__fileIteratorToDict(old.fiFromHeader())
new_files_dict = self.__fileIteratorToDict(new.fiFromHeader())
files = list(set(itertools.chain(six.iterkeys(old_files_dict),
six.iterkeys(new_files_dict))))
files = list(set(itertools.chain(old_files_dict.iterkeys(),
new_files_dict.iterkeys())))
files.sort()
for f in files:
@ -178,8 +175,8 @@ class Rpmdiff:
if not isinstance(oldflags, list): oldflags = [ oldflags ]
if not isinstance(newflags, list): newflags = [ newflags ]
o = list(zip(old[name], oldflags, old[name[:-1]+'VERSION']))
n = list(zip(new[name], newflags, new[name[:-1]+'VERSION']))
o = zip(old[name], oldflags, old[name[:-1]+'VERSION'])
n = zip(new[name], newflags, new[name[:-1]+'VERSION'])
if name == 'PROVIDES': # filter our self provide
oldNV = (old['name'], rpm.RPMSENSE_EQUAL,
@ -228,7 +225,7 @@ def main():
ignore_tags = []
try:
opts, args = getopt.getopt(sys.argv[1:], "hi:", ["help", "ignore="])
except getopt.GetoptError as e:
except getopt.GetoptError, e:
print("Error: %s" % e)
_usage()

View file

@ -24,10 +24,7 @@
# - request data
# - auth data
from __future__ import absolute_import
import six.moves._thread
from six.moves import range
import six
import thread
class _data(object):
pass
@ -38,7 +35,7 @@ class ThreadLocal(object):
# should probably be getattribute, but easier to debug this way
def __getattr__(self, key):
id = six.moves._thread.get_ident()
id = thread.get_ident()
tdict = object.__getattribute__(self, '_tdict')
if id not in tdict:
raise AttributeError(key)
@ -46,7 +43,7 @@ class ThreadLocal(object):
return object.__getattribute__(data, key)
def __setattr__(self, key, value):
id = six.moves._thread.get_ident()
id = thread.get_ident()
tdict = object.__getattribute__(self, '_tdict')
if id not in tdict:
tdict[id] = _data()
@ -54,7 +51,7 @@ class ThreadLocal(object):
return object.__setattr__(data, key, value)
def __delattr__(self, key):
id = six.moves._thread.get_ident()
id = thread.get_ident()
tdict = object.__getattribute__(self, '_tdict')
if id not in tdict:
raise AttributeError(key)
@ -65,14 +62,14 @@ class ThreadLocal(object):
return ret
def __str__(self):
id = six.moves._thread.get_ident()
id = thread.get_ident()
tdict = object.__getattribute__(self, '_tdict')
return "(current thread: %s) {" % id + \
", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in six.iteritems(tdict)]) + \
", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in tdict.iteritems()]) + \
"}"
def _threadclear(self):
id = six.moves._thread.get_ident()
id = thread.get_ident()
tdict = object.__getattribute__(self, '_tdict')
if id not in tdict:
return
@ -100,8 +97,8 @@ if __name__ == '__main__':
context._threadclear()
print(context)
for x in range(1, 10):
six.moves._thread.start_new_thread(test, ())
for x in xrange(1, 10):
thread.start_new_thread(test, ())
time.sleep(4)
print('')

View file

@ -20,8 +20,6 @@
# Mike McLean <mikem@redhat.com>
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
from __future__ import division
import koji
import koji.tasks
from koji.tasks import safe_rmtree
@ -29,16 +27,14 @@ from koji.util import md5_constructor, adler32_constructor, parseStatus
import os
import signal
import logging
import urlparse
from fnmatch import fnmatch
import base64
import time
import sys
import traceback
import errno
import six.moves.xmlrpc_client
from six.moves import range
import six.moves.urllib
import six
import xmlrpclib
def incremental_upload(session, fname, fd, path, retries=5, logger=None):
@ -115,7 +111,7 @@ def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, a
flags = os.O_CREAT | os.O_WRONLY
if append:
flags |= os.O_APPEND
fd = os.open(outfile, flags, 0o666)
fd = os.open(outfile, flags, 0666)
os.dup2(fd, 1)
if logerror:
os.dup2(fd, 2)
@ -146,7 +142,7 @@ def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, a
if not outfd:
try:
outfd = open(outfile, 'r')
outfd = file(outfile, 'r')
except IOError:
# will happen if the forked process has not created the logfile yet
continue
@ -247,7 +243,7 @@ class SCM(object):
# replace the scheme with http:// so that the urlparse works in all cases
dummyurl = self.url.replace(scheme, 'http://', 1)
dummyscheme, netloc, path, params, query, fragment = six.moves.urllib.parse.urlparse(dummyurl)
dummyscheme, netloc, path, params, query, fragment = urlparse.urlparse(dummyurl)
user = None
userhost = netloc.split('@')
@ -529,7 +525,7 @@ class TaskManager(object):
"""Attempt to shut down cleanly"""
for task_id in self.pids.keys():
self.cleanupTask(task_id)
self.session.host.freeTasks(list(self.tasks.keys()))
self.session.host.freeTasks(self.tasks.keys())
self.session.host.updateHost(task_load=0.0, ready=False)
def updateBuildroots(self, nolocal=False):
@ -560,14 +556,14 @@ class TaskManager(object):
#task not running - expire the buildroot
#TODO - consider recycling hooks here (with strong sanity checks)
self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br)
self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, list(self.tasks.keys())))
self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, self.tasks.keys()))
self.session.host.setBuildRootState(id, st_expired)
continue
if nolocal:
return
local_br = self._scanLocalBuildroots()
# get info on local_only buildroots (most likely expired)
local_only = [id for id in six.iterkeys(local_br) if id not in db_br]
local_only = [id for id in local_br.iterkeys() if id not in db_br]
if local_only:
missed_br = self.session.listBuildroots(buildrootID=tuple(local_only))
#get all the task info in one call
@ -611,7 +607,7 @@ class TaskManager(object):
rootdir = "%s/root" % topdir
try:
st = os.lstat(rootdir)
except OSError as e:
except OSError, e:
if e.errno == errno.ENOENT:
rootdir = None
else:
@ -632,13 +628,13 @@ class TaskManager(object):
#also remove the config
try:
os.unlink(data['cfg'])
except OSError as e:
except OSError, e:
self.logger.warn("%s: can't remove config: %s" % (desc, e))
elif age > 120:
if rootdir:
try:
flist = os.listdir(rootdir)
except OSError as e:
except OSError, e:
self.logger.warn("%s: can't list rootdir: %s" % (desc, e))
continue
if flist:
@ -665,10 +661,10 @@ class TaskManager(object):
fn = "%s/%s" % (configdir, f)
if not os.path.isfile(fn):
continue
fo = open(fn, 'r')
fo = file(fn, 'r')
id = None
name = None
for n in range(10):
for n in xrange(10):
# data should be in first few lines
line = fo.readline()
if line.startswith('# Koji buildroot id:'):
@ -799,7 +795,7 @@ class TaskManager(object):
# Note: we may still take an assigned task below
#sort available capacities for each of our bins
avail = {}
for bin in six.iterkeys(bins):
for bin in bins.iterkeys():
avail[bin] = [host['capacity'] - host['task_load'] for host in bin_hosts[bin]]
avail[bin].sort()
avail[bin].reverse()
@ -831,7 +827,7 @@ class TaskManager(object):
#accept this task)
bin_avail = avail.get(bin, [0])
self.logger.debug("available capacities for bin: %r" % bin_avail)
median = bin_avail[(len(bin_avail)-1)//2]
median = bin_avail[(len(bin_avail)-1)/2]
self.logger.debug("ours: %.2f, median: %.2f" % (our_avail, median))
if not self.checkRelAvail(bin_avail, our_avail):
#decline for now and give the upper half a chance
@ -849,7 +845,7 @@ class TaskManager(object):
Check our available capacity against the capacity of other hosts in this bin.
Return True if we should take a task, False otherwise.
"""
median = bin_avail[(len(bin_avail)-1)//2]
median = bin_avail[(len(bin_avail)-1)/2]
self.logger.debug("ours: %.2f, median: %.2f" % (avail, median))
if avail >= median:
return True
@ -866,7 +862,7 @@ class TaskManager(object):
prefix = "Task %i (pid %i)" % (task_id, pid)
try:
(childpid, status) = os.waitpid(pid, os.WNOHANG)
except OSError as e:
except OSError, e:
#check errno
if e.errno != errno.ECHILD:
#should not happen
@ -907,7 +903,7 @@ class TaskManager(object):
try:
os.kill(pid, sig)
except OSError as e:
except OSError, e:
# process probably went away, we'll find out on the next iteration
self.logger.info('Error sending signal %i to %s (pid %i, taskID %i): %s' %
(sig, execname, pid, task_id, e))
@ -931,14 +927,14 @@ class TaskManager(object):
proc_path = '/proc/%i/stat' % pid
if not os.path.isfile(proc_path):
return None
proc_file = open(proc_path)
proc_file = file(proc_path)
procstats = [not field.isdigit() and field or int(field) for field in proc_file.read().split()]
proc_file.close()
cmd_path = '/proc/%i/cmdline' % pid
if not os.path.isfile(cmd_path):
return None
cmd_file = open(cmd_path)
cmd_file = file(cmd_path)
procstats[1] = cmd_file.read().replace('\0', ' ').strip()
cmd_file.close()
if not procstats[1]:
@ -1042,7 +1038,7 @@ class TaskManager(object):
raise IOError("No such directory: %s" % br_path)
fs_stat = os.statvfs(br_path)
available = fs_stat.f_bavail * fs_stat.f_bsize
availableMB = available // 1024**2
availableMB = available / 1024 / 1024
self.logger.debug("disk space available in '%s': %i MB", br_path, availableMB)
if availableMB < self.options.minspace:
self.status = "Insufficient disk space: %i MB, %i MB required" % (availableMB, self.options.minspace)
@ -1192,12 +1188,12 @@ class TaskManager(object):
try:
response = (handler.run(),)
# note that we wrap response in a singleton tuple
response = six.moves.xmlrpc_client.dumps(response, methodresponse=1, allow_none=1)
response = xmlrpclib.dumps(response, methodresponse=1, allow_none=1)
self.logger.info("RESPONSE: %r" % response)
self.session.host.closeTask(handler.id, response)
return
except six.moves.xmlrpc_client.Fault as fault:
response = six.moves.xmlrpc_client.dumps(fault)
except xmlrpclib.Fault, fault:
response = xmlrpclib.dumps(fault)
tb = ''.join(traceback.format_exception(*sys.exc_info())).replace(r"\n", "\n")
self.logger.warn("FAULT:\n%s" % tb)
except (SystemExit, koji.tasks.ServerExit, KeyboardInterrupt):
@ -1216,7 +1212,7 @@ class TaskManager(object):
if issubclass(e_class, koji.GenericError):
#just pass it through
tb = str(e)
response = six.moves.xmlrpc_client.dumps(six.moves.xmlrpc_client.Fault(faultCode, tb))
response = xmlrpclib.dumps(xmlrpclib.Fault(faultCode, tb))
# if we get here, then we're handling an exception, so fail the task
self.session.host.failTask(handler.id, response)

View file

@ -21,7 +21,6 @@
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
import logging
import sys
import psycopg2
@ -34,7 +33,7 @@ import psycopg2
# del psycopg2.extensions.string_types[1266]
import time
import traceback
from . import context
import context
import re
POSITIONAL_RE = re.compile(r'%[a-z]')

View file

@ -19,13 +19,11 @@
# Mike McLean <mikem@redhat.com>
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
import imp
import koji
import logging
import sys
import traceback
import six
# the available callback hooks and a list
# of functions to be called for each event
@ -87,7 +85,7 @@ class PluginTracker(object):
return self.plugins.get(name)
def pathlist(self, path):
if isinstance(path, six.string_types):
if isinstance(path, basestring):
return [path]
else:
return path

View file

@ -17,10 +17,8 @@
# Authors:
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
import fnmatch
import koji
import six
class BaseSimpleTest(object):
@ -291,7 +289,7 @@ class SimpleRuleSet(object):
index[name] = 1
index = {}
_recurse(self.ruleset, index)
return list(index.keys())
return index.keys()
def _apply(self, rules, data, top=False):
for tests, negate, action in rules:
@ -354,7 +352,7 @@ def findSimpleTests(namespace):
namespace = (namespace,)
ret = {}
for ns in namespace:
for key, value in six.iteritems(ns):
for key, value in ns.iteritems():
if value is BaseSimpleTest:
# skip this abstract base class if we encounter it
# this module contains generic tests, so it is valid to include it

View file

@ -19,7 +19,6 @@
# Authors:
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
import sys
import traceback
from koji.util import LazyDict

View file

@ -20,19 +20,17 @@
# Mike McLean <mikem@redhat.com>
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
import koji
import koji.util
import os
import logging
import six.moves.xmlrpc_client
import xmlrpclib
import signal
import urllib2
import shutil
import random
import time
import pprint
from six.moves import range
import six.moves.urllib
def scan_mounts(topdir):
"""Search path for mountpoints"""
@ -234,15 +232,13 @@ class BaseTaskHandler(object):
if all:
if failany:
failed = False
task_error = None
for task in finished:
if task in canfail:
# no point in checking
continue
try:
self.session.getTaskResult(task)
except (koji.GenericError, six.moves.xmlrpc_client.Fault) as te:
task_error = te
except (koji.GenericError, xmlrpclib.Fault), task_error:
self.logger.info("task %s failed or was canceled" % task)
failed = True
break
@ -313,10 +309,10 @@ class BaseTaskHandler(object):
return fn
self.logger.debug("Downloading %s", relpath)
url = "%s/%s" % (self.options.topurl, relpath)
fsrc = six.moves.urllib.request.urlopen(url)
fsrc = urllib2.urlopen(url)
if not os.path.exists(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
fdst = open(fn, 'w')
fdst = file(fn, 'w')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
@ -365,7 +361,7 @@ class BaseTaskHandler(object):
else:
# no overlap
raise koji.BuildError("host %s (%s) does not support any arches of tag %s (%s)" % \
(host['name'], ', '.join(sorted(host_arches)), tag['name'], ', '.join(sorted(tag_arches))))
(host['name'], ', '.join(host_arches), tag['name'], ', '.join(tag_arches)))
def getRepo(self, tag):
"""
@ -406,7 +402,7 @@ class SleepTask(BaseTaskHandler):
class ForkTask(BaseTaskHandler):
Methods = ['fork']
def handler(self, n=5, m=37):
for i in range(n):
for i in xrange(n):
os.spawnvp(os.P_NOWAIT, 'sleep', ['sleep', str(m)])
class WaitTestTask(BaseTaskHandler):
@ -421,7 +417,7 @@ class WaitTestTask(BaseTaskHandler):
_taskWeight = 0.1
def handler(self, count, seconds=10):
tasks = []
for i in range(count):
for i in xrange(count):
task_id = self.subtask(method='sleep', arglist=[seconds], label=str(i), parent=self.id)
tasks.append(task_id)
bad_task = self.subtask('sleep', ['BAD_ARG'], label='bad')

View file

@ -1,9 +1,8 @@
# kojid plugin
from __future__ import absolute_import
import commands
import koji
import six.moves.configparser
import ConfigParser
import os
import platform
compat_mode = False
@ -57,7 +56,7 @@ class RunRootTask(tasks.BaseTaskHandler):
return res
def _read_config(self):
cp = six.moves.configparser.SafeConfigParser()
cp = ConfigParser.SafeConfigParser()
cp.read(CONFIG_FILE)
self.config = {
'default_mounts': [],
@ -85,7 +84,7 @@ class RunRootTask(tasks.BaseTaskHandler):
'fstype': cp.get(section_name, 'fstype'),
'options': cp.get(section_name, 'options'),
})
except six.moves.configparser.NoOptionError:
except ConfigParser.NoOptionError:
raise koji.GenericError("bad config: missing options in %s section" % section_name)
count += 1

View file

@ -1,9 +1,8 @@
from __future__ import absolute_import
import fnmatch
import os
import sys
import tarfile
import six.moves.configparser
import ConfigParser
import koji
import koji.tasks as tasks
@ -28,7 +27,7 @@ def omit_paths3(tarinfo):
def read_config():
global config
cp = six.moves.configparser.SafeConfigParser()
cp = ConfigParser.SafeConfigParser()
cp.read(CONFIG_FILE)
config = {
'path_filters': [],

View file

@ -5,11 +5,10 @@
# Authors:
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
from koji.plugin import callbacks, callback, ignore_error
import logging
@callback(*list(callbacks.keys()))
@callback(*callbacks.keys())
@ignore_error
def echo(cbtype, *args, **kws):
logging.getLogger('koji.plugin.echo').info('Called the %s callback, args: %s; kws: %s',

View file

@ -4,10 +4,9 @@
# Authors:
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
from koji import PluginError
from koji.plugin import callbacks, callback, ignore_error
import six.moves.configparser
import ConfigParser
import logging
import qpid.messaging
import qpid.messaging.transports
@ -79,7 +78,7 @@ def get_sender():
session = None
target = None
config = six.moves.configparser.SafeConfigParser()
config = ConfigParser.SafeConfigParser()
config.read(CONFIG_FILE)
if not config.has_option('broker', 'timeout'):
config.set('broker', 'timeout', '60')

View file

@ -5,11 +5,10 @@
# Authors:
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
import koji
from koji.plugin import callback, ignore_error
from koji.context import context
import six.moves.configparser
import ConfigParser
import logging
import json
import random
@ -247,7 +246,7 @@ def send_queued_msgs(cbtype, *args, **kws):
log = logging.getLogger('koji.plugin.protonmsg')
global CONFIG
if not CONFIG:
conf = six.moves.configparser.SafeConfigParser()
conf = ConfigParser.SafeConfigParser()
with open(CONFIG_FILE) as conffile:
conf.readfp(conffile)
CONFIG = conf

View file

@ -5,11 +5,10 @@
# Authors:
# Mike Bonnet <mikeb@redhat.com>
from __future__ import absolute_import
import koji
from koji.context import context
from koji.plugin import callback
import six.moves.configparser
import ConfigParser
import fnmatch
import os
import shutil
@ -31,7 +30,7 @@ def maven_import(cbtype, *args, **kws):
filepath = kws['filepath']
if not config:
config = six.moves.configparser.SafeConfigParser()
config = ConfigParser.SafeConfigParser()
config.read(CONFIG_FILE)
name_patterns = config.get('patterns', 'rpm_names').split()
for pattern in name_patterns:
@ -52,7 +51,7 @@ def maven_import(cbtype, *args, **kws):
shutil.rmtree(tmpdir)
def expand_rpm(filepath, tmpdir):
devnull = open('/dev/null', 'r+')
devnull = file('/dev/null', 'r+')
rpm2cpio = subprocess.Popen(['/usr/bin/rpm2cpio', filepath],
stdout=subprocess.PIPE,
stdin=devnull, stderr=devnull,

View file

@ -3,7 +3,6 @@
# plugin has a config file. This hub plugin has no config file.
from __future__ import absolute_import
from koji.context import context
from koji.plugin import export
import koji

View file

@ -1,6 +1,5 @@
from __future__ import absolute_import
import sys
import six.moves.configparser
import ConfigParser
import koji
from koji.context import context
from koji.plugin import export
@ -29,7 +28,7 @@ def saveFailedTree(buildrootID, full=False, **opts):
# read configuration only once
if config is None:
config = six.moves.configparser.SafeConfigParser()
config = ConfigParser.SafeConfigParser()
config.read(CONFIG_FILE)
allowed_methods = config.get('permissions', 'allowed_methods').split()
if len(allowed_methods) == 1 and allowed_methods[0] == '*':

View file

@ -6,9 +6,6 @@
# Authors:
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
from six.moves import zip
import six
try:
import krbV
except ImportError: # pragma: no cover
@ -16,7 +13,7 @@ except ImportError: # pragma: no cover
import koji
from koji.util import LazyDict, LazyValue
import koji.policy
import six.moves.configparser
import ConfigParser
from email.MIMEText import MIMEText
import fnmatch
import optparse
@ -26,7 +23,7 @@ import smtplib
import socket # for socket.error
import sys
import time
import six.moves.xmlrpc_client # for ProtocolError and Fault
import xmlrpclib # for ProtocolError and Fault
OptionParser = optparse.OptionParser
@ -115,7 +112,7 @@ def get_options():
defaults = parser.get_default_values()
config = six.moves.configparser.ConfigParser()
config = ConfigParser.ConfigParser()
cf = getattr(options, 'config_file', None)
if cf:
if not os.access(cf, os.F_OK):
@ -199,7 +196,7 @@ def get_options():
if len(parts) < 2:
continue
options.key_aliases[parts[0].upper()] = parts[1]
except ValueError as e:
except ValueError, e:
print(e)
parser.error(_("Invalid key alias data in config: %s") % config.get('main','key_aliases'))
@ -340,7 +337,7 @@ def warn(msg):
def ensure_connection(session):
try:
ret = session.getAPIVersion()
except six.moves.xmlrpc_client.ProtocolError:
except xmlrpclib.ProtocolError:
error(_("Error: Unable to connect to server"))
if ret != koji.API_VERSION:
warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
@ -374,9 +371,9 @@ def activate_session(session):
session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
else:
session.krb_login(proxyuser=options.runas)
except krbV.Krb5Error as e:
except krbV.Krb5Error, e:
error(_("Kerberos authentication failed: %s (%s)") % (e.args[1], e.args[0]))
except socket.error as e:
except socket.error, e:
warn(_("Could not connect to Kerberos authentication service: '%s'") % e.args[1])
if not options.noauth and not session.logged_in:
error(_("Error: unable to log in, no authentication methods available"))
@ -455,7 +452,7 @@ def handle_trash():
continue
try:
refs = session.buildReferences(binfo['id'], limit=10)
except six.moves.xmlrpc_client.Fault:
except xmlrpclib.Fault:
print("[%i/%i] Error checking references for %s. Skipping" % (i, N, nvr))
continue
#XXX - this is more data than we need
@ -536,7 +533,7 @@ def handle_trash():
by_owner = {}
for binfo in to_trash:
by_owner.setdefault(binfo['owner_name'], []).append(binfo)
owners = list(by_owner.keys())
owners = by_owner.keys()
owners.sort()
for owner_name in owners:
builds = [(b['nvr'], b) for b in by_owner[owner_name]]
@ -558,7 +555,7 @@ def handle_trash():
#best we can do currently
owner = binfo['owner_id']
else:
owner = max([(n, k) for k, n in six.iteritems(count)])[1]
owner = max([(n, k) for k, n in count.iteritems()])[1]
session.packageListAdd(trashcan_tag, binfo['name'], owner)
session.tagBuildBypass(trashcan_tag, binfo['id'], force=True)
@ -657,7 +654,7 @@ def handle_delete(just_salvage=False):
session.untagBuildBypass(trashcan_tag, binfo['id'])
try:
session.deleteBuild(binfo['id'])
except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
except (xmlrpclib.Fault, koji.GenericError), e:
print("Warning: deletion failed: %s" % e)
#server issue
pass
@ -736,7 +733,7 @@ def read_policies(fn=None):
The expected format as follows
test [params] [&& test [params] ...] :: (keep|untag|skip)
"""
fo = open(fn, 'r')
fo = file(fn, 'r')
tests = koji.policy.findSimpleTests(globals())
ret = koji.policy.SimpleRuleSet(fo, tests)
fo.close()
@ -771,7 +768,7 @@ def get_build_sigs(build, cache=False):
for sig in sigs:
if sig['sigkey']:
keys.setdefault(sig['sigkey'], 1)
ret = build_sig_cache[build] = list(keys.keys())
ret = build_sig_cache[build] = keys.keys()
return ret
def handle_prune():
@ -831,7 +828,7 @@ def handle_prune():
pkghist.setdefault(h['name'] + '-' + h['version'], []).append(h)
else:
pkghist.setdefault(h['name'], []).append(h)
pkgs = list(pkghist.keys())
pkgs = pkghist.keys()
pkgs.sort()
for pkg in pkgs:
if not check_package(pkg):
@ -876,7 +873,7 @@ def handle_prune():
try:
session.untagBuildBypass(taginfo['id'], entry['build_id'], force=bypass)
untagged.setdefault(nvr, {})[tagname] = 1
except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
except (xmlrpclib.Fault, koji.GenericError), e:
print("Warning: untag operation failed: %s" % e)
pass
# if action == 'keep' do nothing
@ -910,7 +907,7 @@ def handle_prune():
print("Deleting untagged build: %s" % nvr)
try:
session.deleteBuild(build_id, strict=False)
except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
except (xmlrpclib.Fault, koji.GenericError), e:
print("Warning: deletion failed: %s" % e)
#server issue
pass

View file

@ -22,16 +22,12 @@
# Dennis Gilmore <dennis@ausil.us>
# Karsten Hopp <karsten@redhat.com>
from __future__ import absolute_import
from six.moves import range
from six.moves import zip
import six
try:
import krbV
except ImportError: # pragma: no cover
pass
import koji
import six.moves.configparser
import ConfigParser
import fnmatch
import optparse
import os
@ -42,8 +38,9 @@ import socket # for socket.error and socket.setdefaulttimeout
import string
import sys
import time
import six.moves.xmlrpc_client # for ProtocolError and Fault
import six.moves.urllib
import urllib2
import urlgrabber.grabber as grabber
import xmlrpclib # for ProtocolError and Fault
import rpm
# koji.fp.o keeps stalling, probably network errors...
@ -163,7 +160,7 @@ def get_options():
(options, args) = parser.parse_args()
defaults = parser.get_default_values()
config = six.moves.configparser.ConfigParser()
config = ConfigParser.ConfigParser()
cf = getattr(options, 'config_file', None)
if cf:
if not os.access(cf, os.F_OK):
@ -299,7 +296,7 @@ def warn(msg):
def ensure_connection(session):
try:
ret = session.getAPIVersion()
except six.moves.xmlrpc_client.ProtocolError:
except xmlrpclib.ProtocolError:
error(_("Error: Unable to connect to server"))
if ret != koji.API_VERSION:
warn(_("WARNING: The server is at API version %d and the client is at "
@ -329,9 +326,9 @@ def activate_session(session):
session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
else:
session.krb_login(proxyuser=options.runas)
except krbV.Krb5Error as e:
except krbV.Krb5Error, e:
error(_("Kerberos authentication failed: '%s' (%s)") % (e.args[1], e.args[0]))
except socket.error as e:
except socket.error, e:
warn(_("Could not connect to Kerberos authentication service: '%s'") % e.args[1])
if not options.noauth and not session.logged_in:
error(_("Error: unable to log in"))
@ -448,10 +445,10 @@ class TrackedBuild(object):
url = "%s/%s" % (pathinfo.build(self.info), pathinfo.rpm(self.srpm))
log("Downloading %s" % url)
#XXX - this is not really the right place for this
fsrc = six.moves.urllib.request.urlopen(url)
fsrc = urllib2.urlopen(url)
fn = "%s/%s.src.rpm" % (options.workpath, self.nvr)
koji.ensuredir(os.path.dirname(fn))
fdst = open(fn, 'w')
fdst = file(fn, 'w')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
@ -498,7 +495,7 @@ class TrackedBuild(object):
log("Warning: some rpms for %s lacked buildroots:" % self.nvr)
for rinfo in bad:
log(" %(name)s-%(version)s-%(release)s.%(arch)s" % rinfo)
return list(brs.keys())
return brs.keys()
def getDeps(self):
buildroots = self.getBuildroots()
@ -547,7 +544,7 @@ class TrackedBuild(object):
# changes happened during the build startup and some subtasks got the old
# repo and others the new one.
base = []
for name, brlist in six.iteritems(bases):
for name, brlist in bases.iteritems():
#We want to determine for each name if that package was present
#in /all/ the buildroots or just some.
#Because brlist is constructed only from elements of buildroots, we
@ -557,12 +554,12 @@ class TrackedBuild(object):
#each buildroot had this as a base package
base.append(name)
if len(tags) > 1:
log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, list(tags.keys())))
counts = [(n, tag) for tag, n in six.iteritems(tags)]
log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, tags.keys()))
counts = [(n, tag) for tag, n in tags.iteritems()]
sort(counts)
tag = counts[-1][1]
else:
tag = list(tags.keys())[0]
tag = tags.keys()[0]
# due bugs in used tools mainline koji instance could store empty buildroot infos for builds
if len(builds) == 0:
self.setState("noroot")
@ -662,10 +659,8 @@ class BuildTracker(object):
return grey
return default
def rpmvercmp(self, evr1, evr2):
def rpmvercmp(self, (e1, v1, r1), (e2, v2, r2)):
"""find out which build is newer"""
(e1, v1, r1) = evr1
(e2, v2, r2) = evr2
rc = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
if rc == 1:
#first evr wins
@ -866,7 +861,7 @@ class BuildTracker(object):
taginfo = remote.getTag(tag)
builds = remote.listTagged(taginfo['id'], latest=True)
for build in builds:
for retry in range(10):
for retry in xrange(10):
try:
self.scanBuild(build['id'], tag=tag)
if options.first_one:
@ -885,13 +880,13 @@ class BuildTracker(object):
if options.link_imports:
#bit of a hack, but faster than uploading
dst = "%s/%s/%s" % (koji.pathinfo.work(), serverdir, fn)
old_umask = os.umask(0o02)
old_umask = os.umask(002)
try:
koji.ensuredir(os.path.dirname(dst))
os.chown(os.path.dirname(dst), 48, 48) #XXX - hack
log ("Downloading %s to %s" % (url, dst))
fsrc = six.moves.urllib.request.urlopen(url)
fdst = open(fn, 'w')
fsrc = urllib2.urlopen(url)
fdst = file(fn, 'w')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
@ -903,8 +898,8 @@ class BuildTracker(object):
koji.ensuredir(options.workpath)
dst = "%s/%s" % (options.workpath, fn)
log ("Downloading %s to %s..." % (url, dst))
fsrc = six.moves.urllib.request.urlopen(url)
fdst = open(dst, 'w')
fsrc = urllib2.urlopen(url)
fdst = file(dst, 'w')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
@ -1008,7 +1003,7 @@ class BuildTracker(object):
for pkg in session.listPackages(pkgID=name):
owners.setdefault(pkg['owner_id'], []).append(pkg)
if owners:
order = [(len(v), k) for k, v in six.iteritems(owners)]
order = [(len(v), k) for k, v in owners.iteritems()]
order.sort()
owner = order[-1][1]
else:
@ -1124,7 +1119,7 @@ class BuildTracker(object):
log("-- %s --" % time.asctime())
self.report_brief()
for state in ('broken', 'noroot', 'blocked'):
builds = list(self.state_idx[state].values())
builds = self.state_idx[state].values()
not_replaced = [b for b in builds if not b.substitute]
n_replaced = len(builds) - len(not_replaced)
log("%s: %i (+%i replaced)" % (state, len(not_replaced), n_replaced))
@ -1154,7 +1149,7 @@ class BuildTracker(object):
nvr = dep.substitute
problem_counts.setdefault(nvr, 0)
problem_counts[nvr] += 1
order = [(c, nvr) for (nvr, c) in six.iteritems(problem_counts)]
order = [(c, nvr) for (nvr, c) in problem_counts.iteritems()]
if order:
order.sort()
order.reverse()
@ -1165,7 +1160,7 @@ class BuildTracker(object):
def report_brief(self):
N = len(self.builds)
states = list(self.state_idx.keys())
states = self.state_idx.keys()
states.sort()
parts = ["%s: %i" % (s, len(self.state_idx[s])) for s in states]
parts.append("total: %i" % N)
@ -1237,7 +1232,7 @@ class BuildTracker(object):
ret = False
if options.max_jobs and len(self.state_idx['pending']) >= options.max_jobs:
return ret
missing = [(b.order, b.id, b) for b in six.itervalues(self.state_idx['missing'])]
missing = [(b.order, b.id, b) for b in self.state_idx['missing'].itervalues()]
missing.sort()
for order, build_id, build in missing:
if not self.checkBuildDeps(build):
@ -1305,7 +1300,7 @@ def main(args):
if options.logfile:
filename = options.logfile
try:
logfile = os.open(filename,os.O_CREAT|os.O_RDWR|os.O_APPEND, 0o777)
logfile = os.open(filename,os.O_CREAT|os.O_RDWR|os.O_APPEND, 0777)
except:
logfile = None
if logfile is not None:

View file

@ -20,8 +20,6 @@
# Authors:
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
import six
try:
import krbV
except ImportError: # pragma: no cover
@ -31,7 +29,7 @@ import os
import koji
from koji.util import rmtree, parseStatus
from optparse import OptionParser
from six.moves.configparser import ConfigParser
from ConfigParser import ConfigParser
import errno
import fnmatch
import logging
@ -87,7 +85,7 @@ class ManagedRepo(object):
tags = {self.tag_id : 1}
for x in order:
tags[x['parent_id']] = 1
self.taglist = list(tags.keys())
self.taglist = tags.keys()
def expire(self):
"""Mark the repo expired"""
@ -148,7 +146,7 @@ class ManagedRepo(object):
#also check dir age. We do this because a repo can be created from an older event
#and should not be removed based solely on that event's timestamp.
mtime = os.stat(path).st_mtime
except OSError as e:
except OSError, e:
if e.errno == 2:
# No such file or directory, so the repo either never existed,
# or has already been deleted, so allow it to be marked deleted.
@ -200,9 +198,9 @@ class RepoManager(object):
def printState(self):
self.logger.debug('Tracking %i repos, %i child processes', len(self.repos), len(self.delete_pids))
for tag_id, task_id in six.iteritems(self.tasks):
for tag_id, task_id in self.tasks.iteritems():
self.logger.debug("Tracking task %s for tag %s", task_id, tag_id)
for pid, desc in six.iteritems(self.delete_pids):
for pid, desc in self.delete_pids.iteritems():
self.logger.debug("Delete job %s: %r", pid, desc)
def rmtree(self, path):
@ -228,7 +226,7 @@ class RepoManager(object):
prefix = "pid %i (%s)" % (pid, self.delete_pids.get(pid))
try:
(childpid, status) = os.waitpid(pid, os.WNOHANG)
except OSError as e:
except OSError, e:
if e.errno != errno.ECHILD:
#should not happen
raise
@ -263,7 +261,7 @@ class RepoManager(object):
for pid in self.delete_pids:
try:
os.kill(pid, sig)
except OSError as e:
except OSError, e:
if e.errno != errno.ESRCH:
logger.error("Unable to kill process %s", pid)
@ -298,7 +296,7 @@ class RepoManager(object):
if session is None:
session = self.session
to_check = []
repo_ids = list(self.repos.keys())
repo_ids = self.repos.keys()
for repo_id in repo_ids:
repo = self.repos.get(repo_id)
if repo is None:
@ -533,8 +531,8 @@ class RepoManager(object):
tag_repos = {}
for repo in self.repos.values():
tag_repos.setdefault(repo.tag_id, []).append(repo)
self.logger.debug("Needed tags: %r" % list(tags.keys()))
self.logger.debug("Current tags: %r" % list(tag_repos.keys()))
self.logger.debug("Needed tags: %r" % tags.keys())
self.logger.debug("Current tags: %r" % tag_repos.keys())
#we need to determine:
# - which tags need a new repo
@ -542,7 +540,7 @@ class RepoManager(object):
#self.checkCurrentRepos now runs continually in a separate thread
regen = []
expire_times = {}
for tag_id in six.iterkeys(tags):
for tag_id in tags.iterkeys():
covered = False
for repo in tag_repos.get(tag_id,[]):
if repo.current:

View file

@ -26,15 +26,14 @@
# kojiwind --install
# in a cygwin shell.
from __future__ import absolute_import
from optparse import OptionParser
from six.moves.configparser import ConfigParser
from ConfigParser import ConfigParser
import os
import subprocess
import sys
import tempfile
import time
import six.moves.xmlrpc_client
import xmlrpclib
import base64
import hashlib
import logging
@ -43,7 +42,6 @@ import threading
import re
import glob
import zipfile
import six
MANAGER_PORT = 7000
@ -303,7 +301,7 @@ class WindowsBuild(object):
"""Download the file from buildreq, at filepath, into the basedir"""
destpath = os.path.join(basedir, fileinfo['localpath'])
ensuredir(os.path.dirname(destpath))
destfile = open(destpath, 'w')
destfile = file(destpath, 'w')
offset = 0
checksum = hashlib.md5()
while True:
@ -562,7 +560,7 @@ def upload_file(server, prefix, path):
"""upload a single file to the vmd"""
logger = logging.getLogger('koji.vm')
destpath = os.path.join(prefix, path)
fobj = open(destpath, 'r')
fobj = file(destpath, 'r')
offset = 0
sum = hashlib.md5()
while True:
@ -588,13 +586,13 @@ def get_mgmt_server():
macaddr, gateway = find_net_info()
logger.debug('found MAC address %s, connecting to %s:%s',
macaddr, gateway, MANAGER_PORT)
server = six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' %
server = xmlrpclib.ServerProxy('http://%s:%s/' %
(gateway, MANAGER_PORT), allow_none=True)
# we would set a timeout on the socket here, but that is apparently not
# supported by python/cygwin/Windows
task_port = server.getPort(macaddr)
logger.debug('found task-specific port %s', task_port)
return six.moves.xmlrpc_client.ServerProxy('http://%s:%s/' % (gateway, task_port), allow_none=True)
return xmlrpclib.ServerProxy('http://%s:%s/' % (gateway, task_port), allow_none=True)
def get_options():
"""handle usage and parse options"""
@ -616,7 +614,7 @@ def setup_logging(opts):
if opts.debug:
level = logging.DEBUG
logger.setLevel(level)
logfd = open(logfile, 'w')
logfd = file(logfile, 'w')
handler = logging.StreamHandler(logfd)
handler.setLevel(level)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s'))
@ -641,11 +639,11 @@ def stream_logs(server, handler, builds):
logpath = os.path.join(build.source_dir, relpath)
if logpath not in logs:
logs[logpath] = (relpath, None)
for log, (relpath, fd) in six.iteritems(logs):
for log, (relpath, fd) in logs.iteritems():
if not fd:
if os.path.isfile(log):
try:
fd = open(log, 'r')
fd = file(log, 'r')
logs[log] = (relpath, fd)
except:
log_local('Error opening %s' % log)

View file

@ -1,7 +1,7 @@
#import koji
#from kojiweb import util
#from pprint import pformat
#import six.moves.urllib
#import urllib
#attr _PASSTHROUGH = ['archiveID', 'fileOrder', 'fileStart', 'buildrootOrder', 'buildrootStart']
@ -97,7 +97,7 @@
</tr>
#for $file in $files
<tr class="$util.rowToggle($self)">
<td><a href="fileinfo?archiveID=$archive.id&filename=$six.moves.urllib.parse.quote($file.name)">$file.name</a></td><td>$file.size</td>
<td><a href="fileinfo?archiveID=$archive.id&filename=$urllib.quote($file.name)">$file.name</a></td><td>$file.size</td>
</tr>
#end for
</table>

View file

@ -1,12 +1,12 @@
#from kojiweb import util
#import six.moves.urllib
#import urllib
#import datetime
#include "includes/header.chtml"
#if $rpm
<h4>Information for file <a href="fileinfo?rpmID=$rpm.id&amp;filename=$six.moves.urllib.parse.quote($file.name)">$file.name</a></h4>
<h4>Information for file <a href="fileinfo?rpmID=$rpm.id&amp;filename=$urllib.quote($file.name)">$file.name</a></h4>
#elif $archive
<h4>Information for file <a href="fileinfo?archiveID=$archive.id&amp;filename=$six.moves.urllib.parse.quote($file.name)">$file.name</a></h4>
<h4>Information for file <a href="fileinfo?archiveID=$archive.id&amp;filename=$urllib.quote($file.name)">$file.name</a></h4>
#end if
<table>

View file

@ -20,13 +20,12 @@
# Mike Bonnet <mikeb@redhat.com>
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
import os
import os.path
import re
import sys
import mimetypes
import six.moves.http_cookies
import Cookie
import datetime
import logging
import time
@ -37,9 +36,6 @@ from kojiweb.util import _initValues
from kojiweb.util import _genHTML
from kojiweb.util import _getValidTokens
from koji.util import sha1_constructor
from six.moves import range
from six.moves import zip
import six
# Convenience definition of a commonly-used sort function
_sortbyname = kojiweb.util.sortByKeyFunc('name')
@ -57,7 +53,7 @@ def _setUserCookie(environ, user):
shasum = sha1_constructor(value)
shasum.update(options['Secret'].value)
value = "%s:%s" % (shasum.hexdigest(), value)
cookies = six.moves.http_cookies.SimpleCookie()
cookies = Cookie.SimpleCookie()
cookies['user'] = value
c = cookies['user'] #morsel instance
c['secure'] = True
@ -70,7 +66,7 @@ def _setUserCookie(environ, user):
environ['koji.headers'].append(['Cache-Control', 'no-cache="set-cookie"'])
def _clearUserCookie(environ):
cookies = six.moves.http_cookies.SimpleCookie()
cookies = Cookie.SimpleCookie()
cookies['user'] = ''
c = cookies['user'] #morsel instance
c['path'] = os.path.dirname(environ['SCRIPT_NAME'])
@ -80,7 +76,7 @@ def _clearUserCookie(environ):
def _getUserCookie(environ):
options = environ['koji.options']
cookies = six.moves.http_cookies.SimpleCookie(environ.get('HTTP_COOKIE', ''))
cookies = Cookie.SimpleCookie(environ.get('HTTP_COOKIE', ''))
if 'user' not in cookies:
return None
value = cookies['user'].value
@ -683,7 +679,7 @@ def taskinfo(environ, taskID):
values['pathinfo'] = pathinfo
paths = [] # (volume, relpath) tuples
for relname, volumes in six.iteritems(server.listTaskOutput(task['id'], all_volumes=True)):
for relname, volumes in server.listTaskOutput(task['id'], all_volumes=True).iteritems():
paths += [(volume, relname) for volume in volumes]
values['output'] = sorted(paths, cmp = _sortByExtAndName)
if environ['koji.currentUser']:
@ -702,8 +698,8 @@ def taskstatus(environ, taskID):
return ''
files = server.listTaskOutput(taskID, stat=True, all_volumes=True)
output = '%i:%s\n' % (task['id'], koji.TASK_STATES[task['state']])
for filename, volumes_data in six.iteritems(files):
for volume, file_stats in six.iteritems(volumes_data):
for filename, volumes_data in files.iteritems():
for volume, file_stats in volumes_data.iteritems():
output += '%s:%s:%s\n' % (volume, filename, file_stats['st_size'])
return output
@ -813,7 +809,7 @@ def tags(environ, start=None, order=None, childID=None):
return _genHTML(environ, 'tags.chtml')
_PREFIX_CHARS = [chr(char) for char in list(range(48, 58)) + list(range(97, 123))]
_PREFIX_CHARS = [chr(char) for char in range(48, 58) + range(97, 123)]
def packages(environ, tagID=None, userID=None, order='package_name', start=None, prefix=None, inherited='1'):
values = _initValues(environ, 'Packages', 'packages')
@ -2116,7 +2112,7 @@ def buildsbytarget(environ, days='7', start=None, order='-builds'):
if builds > maxBuilds:
maxBuilds = builds
kojiweb.util.paginateList(values, list(targets.values()), start, 'targets', 'target', order)
kojiweb.util.paginateList(values, targets.values(), start, 'targets', 'target', order)
values['order'] = order

View file

@ -2,7 +2,7 @@
#from kojiweb import util
#from pprint import pformat
#import time
#import six.moves.urllib
#import urllib
#attr _PASSTHROUGH = ['rpmID', 'fileOrder', 'fileStart', 'buildrootOrder', 'buildrootStart']
@ -237,7 +237,7 @@
</tr>
#for $file in $files
<tr class="$util.rowToggle($self)">
<td><a href="fileinfo?rpmID=$rpm.id&amp;filename=$six.moves.urllib.parse.quote($file.name)">$util.escapeHTML($file.name)</a></td><td>$file.size</td>
<td><a href="fileinfo?rpmID=$rpm.id&amp;filename=$urllib.quote($file.name)">$util.escapeHTML($file.name)</a></td><td>$file.size</td>
</tr>
#end for
</table>

View file

@ -1,5 +1,5 @@
#from kojiweb import util
#import six.moves.urllib
#import urllib
#include "includes/header.chtml"
@ -38,7 +38,7 @@
<tr class="$util.rowToggle($self)">
<td>$result.id</td>
#set $quoted = $result.copy()
#silent $quoted['name'] = $six.moves.urllib.parse.quote($quoted['name'])
#silent $quoted['name'] = $urllib.quote($quoted['name'])
<td><a href="${infoURL % $quoted}">$result.name</a></td>
</tr>
#end for

View file

@ -1,6 +1,6 @@
#import koji
#from kojiweb import util
#import six.moves.urllib
#import urllib
#import cgi
#def printValue($key, $value, $sep=', ')
@ -424,9 +424,9 @@ $value
<th>Output</th>
<td>
#for $volume, $filename in $output
<a href="$pathinfo.task($task.id, volume=$volume)/$six.moves.urllib.parse.quote($filename)">$filename</a>
<a href="$pathinfo.task($task.id, volume=$volume)/$urllib.quote($filename)">$filename</a>
#if $filename.endswith('.log')
(<a href="getfile?taskID=$task.id&volume=$volume&name=$six.moves.urllib.parse.quote($filename)&offset=-4000">tail</a>)
(<a href="getfile?taskID=$task.id&volume=$volume&name=$urllib.quote($filename)&offset=-4000">tail</a>)
#end if
<br/>
#end for

View file

@ -19,7 +19,6 @@
# Authors:
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
import cgi
import inspect
import koji
@ -30,10 +29,9 @@ import pprint
import sys
import traceback
from six.moves.configparser import RawConfigParser
from ConfigParser import RawConfigParser
from koji.server import WSGIWrapper, ServerError, ServerRedirect
from koji.util import dslice
import six
class URLNotFound(ServerError):
@ -423,14 +421,14 @@ class Dispatcher(object):
else:
# last one wins
headers[key] = (name, value)
if isinstance(result, six.string_types):
if isinstance(result, basestring):
headers.setdefault('content-length', ('Content-Length', str(len(result))))
headers.setdefault('content-type', ('Content-Type', 'text/html'))
headers = list(headers.values()) + extra
headers = headers.values() + extra
self.logger.debug("Headers:")
self.logger.debug(koji.util.LazyString(pprint.pformat, [headers]))
start_response(status, headers)
if isinstance(result, six.string_types):
if isinstance(result, basestring):
result = [result]
return result

View file

@ -20,8 +20,6 @@
# Mike Bonnet <mikeb@redhat.com>
# Mike McLean <mikem@redhat.com>
from __future__ import absolute_import
from __future__ import division
import Cheetah.Template
import datetime
import koji
@ -31,11 +29,9 @@ import stat
#a bunch of exception classes that explainError needs
from socket import error as socket_error
from socket import sslerror as socket_sslerror
from six.moves.xmlrpc_client import ProtocolError
from xmlrpclib import ProtocolError
from xml.parsers.expat import ExpatError
import cgi
from six.moves import range
import six
class NoSuchException(Exception):
pass
@ -98,7 +94,7 @@ class DecodeUTF8(Cheetah.Filters.Filter):
def filter(self, *args, **kw):
"""Convert all strs to unicode objects"""
result = super(DecodeUTF8, self).filter(*args, **kw)
if isinstance(result, six.text_type):
if isinstance(result, unicode):
pass
else:
result = result.decode('utf-8', 'replace')
@ -153,7 +149,7 @@ def _genHTML(environ, fileName):
def _truncTime():
now = datetime.datetime.now()
# truncate to the nearest 15 minutes
return now.replace(minute=(now.minute // 15 * 15), second=0, microsecond=0)
return now.replace(minute=(now.minute / 15 * 15), second=0, microsecond=0)
def _genToken(environ, tstamp=None):
if 'koji.currentLogin' in environ and environ['koji.currentLogin']:
@ -358,9 +354,9 @@ def _populateValues(values, dataName, prefix, data, totalRows, start, count, pag
values[(prefix and prefix + 'Count' or 'count')] = count
values[(prefix and prefix + 'Range' or 'range')] = pageSize
values[(prefix and prefix + 'Order' or 'order')] = order
currentPage = start // pageSize
currentPage = start / pageSize
values[(prefix and prefix + 'CurrentPage' or 'currentPage')] = currentPage
totalPages = totalRows // pageSize
totalPages = totalRows / pageSize
if totalRows % pageSize > 0:
totalPages += 1
pages = [page for page in range(0, totalPages) if (abs(page - currentPage) < 100 or ((page + 1) % 100 == 0))]