python-modernize -f libmodernize.fixes.fix_dict_six
parent b96092334d
commit bcee24e50c
20 changed files with 130 additions and 112 deletions
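This commit applies the python-modernize fix_dict_six fixer: Python 2 dict iterator methods (iteritems, itervalues, iterkeys) become their six equivalents, and keys()/values()/items() results are wrapped in list() wherever a real list is required, so the same code runs on Python 2 and 3. A minimal sketch of the pattern, using an illustrative dict rather than anything from the koji sources:

    import six

    subtasks = {'x86_64': 101, 'i686': 102}  # illustrative stand-in

    # Python 2 only:
    #     for arch, task_id in subtasks.iteritems(): ...
    #     task_ids = subtasks.values()   # a list on py2, a view object on py3

    # Portable forms produced by the fixer:
    for arch, task_id in six.iteritems(subtasks):   # lazy iteration on 2 and 3
        print(arch, task_id)
    task_ids = list(subtasks.values())              # an explicit list on 2 and 3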
@@ -21,6 +21,8 @@
 # Mike McLean <mikem@redhat.com>
 # Mike Bonnet <mikeb@redhat.com>

+from __future__ import absolute_import
+import six
 try:
     import krbV
 except ImportError:  # pragma: no cover
@@ -1054,7 +1056,7 @@ class BuildTask(BaseTaskHandler):
                 archdict[a] = 1
         if not archdict:
             raise koji.BuildError("No matching arches were found")
-        return archdict.keys()
+        return list(archdict.keys())

     def choose_taskarch(self, arch, srpm, build_tag):
@@ -1115,7 +1117,7 @@ class BuildTask(BaseTaskHandler):
         self.logger.debug("Waiting on subtasks...")

         # wait for subtasks to finish
-        results = self.wait(subtasks.values(), all=True, failany=failany)
+        results = self.wait(list(subtasks.values()), all=True, failany=failany)

         # finalize import
         # merge data into needed args for completeBuild call
@@ -1123,7 +1125,7 @@ class BuildTask(BaseTaskHandler):
         brmap = {}
         logs = {}
         built_srpm = None
-        for (arch, task_id) in subtasks.iteritems():
+        for (arch, task_id) in six.iteritems(subtasks):
             result = results[task_id]
             self.logger.debug("DEBUG: %r : %r " % (arch,result,))
             brootid = result['brootid']
@@ -1631,7 +1633,7 @@ class BuildMavenTask(BaseBuildTask):
             for filepath in logs:
                 self.uploadFile(os.path.join(outputdir, filepath),
                                 relPath=os.path.dirname(filepath))
-            for relpath, files in output_files.iteritems():
+            for relpath, files in six.iteritems(output_files):
                 for filename in files:
                     self.uploadFile(os.path.join(outputdir, relpath, filename),
                                     relPath=relpath)
@@ -2097,7 +2099,7 @@ class ChainMavenTask(MultiPlatformTask):
                 running[task_id] = package
                 del todo[package]
             try:
-                results = self.wait(running.keys())
+                results = self.wait(list(running.keys()))
             except (xmlrpclib.Fault, koji.GenericError) as e:
                 # One task has failed, wait for the rest to complete before the
                 # chainmaven task fails. self.wait(all=True) should thrown an exception.
@@ -2150,8 +2152,8 @@ class ChainMavenTask(MultiPlatformTask):
         have the same keys and those keys have the same values. If a value is
         list, it will be considered equal to a list with the same values in
         a different order."""
-        akeys = a.keys()
-        bkeys = b.keys()
+        akeys = list(a.keys())
+        bkeys = list(b.keys())
         if sorted(akeys) != sorted(bkeys):
             return False
         for key in akeys:
@@ -2312,7 +2314,7 @@ class BuildBaseImageTask(BuildImageTask):
                 canfail.append(subtasks[arch])
         self.logger.debug("Got image subtasks: %r" % (subtasks))
         self.logger.debug("Waiting on image subtasks (%s can fail)..." % canfail)
-        results = self.wait(subtasks.values(), all=True, failany=True, canfail=canfail)
+        results = self.wait(list(subtasks.values()), all=True, failany=True, canfail=canfail)

         # if everything failed, fail even if all subtasks are in canfail
         self.logger.debug('subtask results: %r', results)
@@ -2621,7 +2623,7 @@ class BuildLiveMediaTask(BuildImageTask):

         self.logger.debug("Got image subtasks: %r", subtasks)
         self.logger.debug("Waiting on livemedia subtasks...")
-        results = self.wait(subtasks.values(), all=True, failany=True, canfail=canfail)
+        results = self.wait(list(subtasks.values()), all=True, failany=True, canfail=canfail)

         # if everything failed, fail even if all subtasks are in canfail
         self.logger.debug('subtask results: %r', results)
@@ -2657,7 +2659,7 @@ class BuildLiveMediaTask(BuildImageTask):
             wrapper_tasks[arch] = self.subtask('wrapperRPM', arglist,
                                                label='wrapper %s' % arch, arch='noarch')

-        results2 = self.wait(wrapper_tasks.values(), all=True, failany=True)
+        results2 = self.wait(list(wrapper_tasks.values()), all=True, failany=True)
         self.logger.debug('wrapper results: %r', results2)

         # add wrapper rpm results into main results
@@ -2919,7 +2921,7 @@ class ImageTask(BaseTaskHandler):
         }

         # Duplicated with pungi/util.py _apply_substitutions
-        for k, v in sorted(substitutions.items(), key=lambda x: len(x[0]), reverse=True):
+        for k, v in sorted(list(substitutions.items()), key=lambda x: len(x[0]), reverse=True):
             if k in name:
                 name = name.replace(k, v)
             if k in version:
@@ -3716,7 +3718,7 @@ class BaseImageTask(OzImageTask):
         if len(formats) == 0:
             # we only want a raw disk image (no format option given)
             f_dict['raw'] = True
-        elif 'raw' not in f_dict.keys():
+        elif 'raw' not in list(f_dict.keys()):
             f_dict['raw'] = False
         self.logger.debug('Image delivery plan: %s' % f_dict)
         return f_dict
@@ -4955,7 +4957,7 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
     def uniq(self, items):
         """Remove duplicates from the list of items, and sort the list."""
         m = dict(zip(items, [1] * len(items)))
-        l = m.keys()
+        l = list(m.keys())
         l.sort()
         return l

@@ -5000,8 +5002,8 @@ class NewRepoTask(BaseTaskHandler):
         # gather subtask results
         data = {}
         if subtasks:
-            results = self.wait(subtasks.values(), all=True, failany=True)
-            for (arch, task_id) in subtasks.iteritems():
+            results = self.wait(list(subtasks.values()), all=True, failany=True)
+            for (arch, task_id) in six.iteritems(subtasks):
                 data[arch] = results[task_id]
                 self.logger.debug("DEBUG: %r : %r " % (arch,data[arch],))

@@ -5154,7 +5156,7 @@ class NewDistRepoTask(BaseTaskHandler):
                 method='createdistrepo', arglist=arglist, label=arch,
                 parent=self.id, arch='noarch')
         if len(subtasks) > 0 and task_opts['multilib']:
-            self.wait(subtasks.values(), all=True, failany=True)
+            self.wait(list(subtasks.values()), all=True, failany=True)
             for arch in arch32s:
                 # move the 32-bit task output to the final resting place
                 # so the 64-bit arches can use it for multilib
@@ -5168,8 +5170,8 @@ class NewDistRepoTask(BaseTaskHandler):
                 method='createdistrepo', arglist=arglist, label=arch,
                 parent=self.id, arch='noarch')
         # wait for 64-bit subtasks to finish
-        self.wait(subtasks.values(), all=True, failany=True)
-        for (arch, task_id) in subtasks.iteritems():
+        self.wait(list(subtasks.values()), all=True, failany=True)
+        for (arch, task_id) in six.iteritems(subtasks):
             if task_opts['multilib'] and arch in arch32s:
                 # already moved above
                 continue
@@ -5505,7 +5507,7 @@ enabled=1
         # select our rpms
         selected = {}
         for rpm_id in rpm_idx:
-            avail_keys = rpm_idx[rpm_id].keys()
+            avail_keys = list(rpm_idx[rpm_id].keys())
             best_key = self.pick_key(keys, avail_keys)
             if best_key is None:
                 # we lack a matching key for this rpm
@@ -5569,7 +5571,7 @@ enabled=1
         fmt = '%(name)s-%(version)s-%(release)s.%(arch)s'
         filenames = [[fmt % selected[r], r] for r in sig_missing]
         for fname, rpm_id in sorted(filenames):
-            avail = rpm_idx.get(rpm_id, {}).keys()
+            avail = list(rpm_idx.get(rpm_id, {}).keys())
             outfile.write('%s: %r\n' % (fname, avail))
         outfile.close()
         self.session.uploadWrapper(missing_log, self.uploadpath)
@@ -5823,7 +5825,7 @@ def get_options():
             defaults[name] = config.getboolean('kojid', name)
         elif name in ['plugin', 'plugins']:
             defaults['plugin'] = value.split()
-        elif name in defaults.keys():
+        elif name in list(defaults.keys()):
             defaults[name] = value
         elif name.upper().startswith('RLIMIT_'):
             defaults[name.upper()] = value
@@ -695,12 +695,12 @@ def _writeInheritanceData(tag_id, changes, clear=False):
                 data[parent_id] = link
                 break
     if clear:
-        for link in data.itervalues():
+        for link in six.itervalues(data):
             if not link.get('is_update'):
                 link['delete link'] = True
                 link['is_update'] = True
     changed = False
-    for link in data.itervalues():
+    for link in six.itervalues(data):
         if link.get('is_update'):
             changed = True
             break
@@ -710,17 +710,17 @@ def _writeInheritanceData(tag_id, changes, clear=False):
         return
     #check for duplicate priorities
     pri_index = {}
-    for link in data.itervalues():
+    for link in six.itervalues(data):
         if link.get('delete link'):
             continue
         pri_index.setdefault(link['priority'], []).append(link)
-    for pri, dups in pri_index.iteritems():
+    for pri, dups in six.iteritems(pri_index):
         if len(dups) <= 1:
             continue
         #oops, duplicate entries for a single priority
         dup_ids = [link['parent_id'] for link in dups]
         raise koji.GenericError("Inheritance priorities must be unique (pri %s: %r )" % (pri, dup_ids))
-    for parent_id, link in data.iteritems():
+    for parent_id, link in six.iteritems(data):
         if not link.get('is_update'):
             continue
         # revoke old values
@@ -728,7 +728,7 @@ def _writeInheritanceData(tag_id, changes, clear=False):
                                  clauses=['tag_id=%(tag_id)s', 'parent_id = %(parent_id)s'])
         update.make_revoke()
         update.execute()
-    for parent_id, link in data.iteritems():
+    for parent_id, link in six.iteritems(data):
         if not link.get('is_update'):
             continue
         # skip rest if we are just deleting
@@ -1993,7 +1993,7 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True
         groups.setdefault(grp_id, group)

     if incl_pkgs:
-        for group in groups.itervalues():
+        for group in six.itervalues(groups):
             group['packagelist'] = {}
             fields = ('group_id', 'tag_id', 'package', 'blocked', 'type', 'basearchonly', 'requires')
             q = """
@@ -2015,7 +2015,7 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True

     if incl_reqs:
         # and now the group reqs
-        for group in groups.itervalues():
+        for group in six.itervalues(groups):
             group['grouplist'] = {}
             fields = ('group_id', 'tag_id', 'req_id', 'blocked', 'type', 'is_metapkg', 'name')
             q = """SELECT %s FROM group_req_listing JOIN groups on req_id = id
@@ -2212,7 +2212,7 @@ def get_all_arches():
         #in a perfect world, this list would only include canonical
         #arches, but not all admins will undertand that.
         ret[koji.canonArch(arch)] = 1
-    return ret.keys()
+    return list(ret.keys())

 def get_active_tasks(host=None):
     """Return data on tasks that are yet to be run"""
@@ -2465,7 +2465,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None):
                 os.symlink(relpath, destlink)
             except:
                 log_error('Error linking %s to %s' % (destlink, relpath))
-    for artifact_dir, artifacts in artifact_dirs.iteritems():
+    for artifact_dir, artifacts in six.iteritems(artifact_dirs):
         _write_maven_repo_metadata(artifact_dir, artifacts)

     koji.plugin.run_callbacks('postRepoInit', tag=tinfo, with_src=with_src, with_debuginfo=with_debuginfo,
@@ -2618,7 +2618,7 @@ def repo_references(repo_id):
               'host_id': 'host_id',
               'create_event': 'create_event',
               'state': 'state'}
-    fields, aliases = zip(*fields.items())
+    fields, aliases = zip(*list(fields.items()))
     values = {'repo_id': repo_id}
     clauses = ['repo_id=%(repo_id)s', 'retire_event IS NULL']
     query = QueryProcessor(columns=fields, aliases=aliases, tables=['standard_buildroot'],
@@ -2996,7 +2996,7 @@ def _create_tag(name, parent=None, arches=None, perm=None, locked=False, maven_s

     # add extra data
     if extra is not None:
-        for key, value in extra.iteritems():
+        for key, value in six.iteritems(extra):
             data = {
                 'tag_id': tag_id,
                 'key': key,
@@ -3061,7 +3061,7 @@ def get_tag(tagInfo, strict=False, event=None):
         raise koji.GenericError('invalid type for tagInfo: %s' % type(tagInfo))

     data = {'tagInfo': tagInfo}
-    fields, aliases = zip(*fields.items())
+    fields, aliases = zip(*list(fields.items()))
     query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,
                            joins=joins, clauses=clauses, values=data)
     result = query.executeOne()
@@ -4623,7 +4623,7 @@ def get_host(hostInfo, strict=False, event=None):
         raise koji.GenericError('invalid type for hostInfo: %s' % type(hostInfo))

     data = {'hostInfo': hostInfo}
-    fields, aliases = zip(*fields.items())
+    fields, aliases = zip(*list(fields.items()))
     query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,
                            joins=joins, clauses=clauses, values=data)
     result = query.executeOne()
@@ -4780,7 +4780,7 @@ def list_channels(hostID=None, event=None):
     """List channels. If hostID is specified, only list
     channels associated with the host with that ID."""
     fields = {'channels.id': 'id', 'channels.name': 'name'}
-    columns, aliases = zip(*fields.items())
+    columns, aliases = zip(*list(fields.items()))
     if hostID:
         tables = ['host_channels']
         joins = ['channels ON channels.id = host_channels.channel_id']
@@ -5198,7 +5198,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)

     policy_data = {
         'package': build['name'],
-        'buildroots': brmap.values(),
+        'buildroots': list(brmap.values()),
         'import': True,
         'import_type': 'rpm',
     }
@@ -5238,7 +5238,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)
         import_rpm_file(fn, binfo, rpminfo)
         add_rpm_sig(rpminfo['id'], koji.rip_rpm_sighdr(fn))
     if logs:
-        for key, files in logs.iteritems():
+        for key, files in six.iteritems(logs):
             if not key:
                 key = None
             for relpath in files:
@@ -6882,7 +6882,7 @@ def query_history(tables=None, **kwargs):
                 fields[r_test] = '_revoked_before_event'
         if skip:
             continue
-        fields, aliases = zip(*fields.items())
+        fields, aliases = zip(*list(fields.items()))
         query = QueryProcessor(columns=fields, aliases=aliases, tables=[table],
                                joins=joins, clauses=clauses, values=data)
         ret[table] = query.iterate()
@@ -7021,7 +7021,7 @@ def build_references(build_id, limit=None):
             idx.setdefault(row['id'], row)
             if limit is not None and len(idx) > limit:
                 break
-        ret['rpms'] = idx.values()
+        ret['rpms'] = list(idx.values())

     ret['component_of'] = []
     # find images/archives that contain the build rpms
@@ -7052,7 +7052,7 @@ def build_references(build_id, limit=None):
             idx.setdefault(row['id'], row)
             if limit is not None and len(idx) > limit:
                 break
-        ret['archives'] = idx.values()
+        ret['archives'] = list(idx.values())

     # find images/archives that contain the build archives
     fields = ['archive_id']
@@ -7394,7 +7394,7 @@ def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_s
         from_tag = get_tag(from_id)
         for email in get_notification_recipients(build, from_tag['id'], state):
             recipients[email] = 1
-    recipients_uniq = recipients.keys()
+    recipients_uniq = list(recipients.keys())
     if len(recipients_uniq) > 0 and not (is_successful and ignore_success):
         task_id = make_task('tagNotification', [recipients_uniq, is_successful, tag_id, from_id, build_id, user_id, ignore_success, failure_msg])
         return task_id
@@ -7622,8 +7622,8 @@ class InsertProcessor(object):
         if not self.data and not self.rawdata:
             return "-- incomplete update: no assigns"
         parts = ['INSERT INTO %s ' % self.table]
-        columns = self.data.keys()
-        columns.extend(self.rawdata.keys())
+        columns = list(self.data.keys())
+        columns.extend(list(self.rawdata.keys()))
         parts.append("(%s) " % ', '.join(columns))
         values = []
         for key in columns:
@@ -7666,7 +7666,7 @@ class InsertProcessor(object):
         del data['create_event']
         del data['creator_id']
         clauses = ["%s = %%(%s)s" % (k, k) for k in data]
-        query = QueryProcessor(columns=data.keys(), tables=[self.table],
+        query = QueryProcessor(columns=list(data.keys()), tables=[self.table],
                                clauses=clauses, values=data)
         if query.execute():
             return True
@@ -8333,7 +8333,7 @@ class UserInGroupTest(koji.policy.BaseSimpleTest):
             return False
         groups = koji.auth.get_user_groups(user['id'])
         args = self.str.split()[1:]
-        for group_id, group in groups.iteritems():
+        for group_id, group in six.iteritems(groups):
             for pattern in args:
                 if fnmatch.fnmatch(group, pattern):
                     return True
@@ -10189,9 +10189,9 @@ class RootExports(object):
             userID = get_user(userID, strict=True)['id']
         if pkgID is not None:
             pkgID = get_package_id(pkgID, strict=True)
-        result_list = readPackageList(tagID=tagID, userID=userID, pkgID=pkgID,
+        result_list = list(readPackageList(tagID=tagID, userID=userID, pkgID=pkgID,
                                       inherit=inherited, with_dups=with_dups,
-                                      event=event).values()
+                                      event=event).values())
         if with_dups:
             # when with_dups=True, readPackageList returns a list of list of dicts
             # convert it to a list of dicts for consistency
@@ -10791,7 +10791,7 @@ class RootExports(object):
             'host_config.enabled': 'enabled',
         }
         tables = ['host_config']
-        fields, aliases = zip(*fields.items())
+        fields, aliases = zip(*list(fields.items()))
         query = QueryProcessor(columns=fields, aliases=aliases,
                                tables=tables, joins=joins, clauses=clauses, values=locals())
         return query.execute()
@@ -11844,7 +11844,7 @@ class HostExports(object):
             safer_move(fn, dest)
             os.symlink(dest, fn)
         if logs:
-            for key, files in logs.iteritems():
+            for key, files in six.iteritems(logs):
                 if key:
                     logdir = "%s/logs/%s" % (dir, key)
                 else:
@@ -11867,7 +11867,7 @@ class HostExports(object):
         scratchdir = koji.pathinfo.scratch()
         username = get_user(task.getOwner())['name']
         destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
-        for reldir, files in results['files'].items() + [('', results['logs'])]:
+        for reldir, files in list(results['files'].items()) + [('', results['logs'])]:
             for filename in files:
                 if reldir:
                     relpath = os.path.join(reldir, filename)
@@ -11899,7 +11899,7 @@ class HostExports(object):
         scratchdir = koji.pathinfo.scratch()
         username = get_user(task.getOwner())['name']
         destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
-        for relpath in results['output'].keys() + results['logs']:
+        for relpath in list(results['output'].keys()) + results['logs']:
             filename = os.path.join(koji.pathinfo.task(results['task_id']), relpath)
             dest = os.path.join(destdir, relpath)
             koji.ensuredir(os.path.dirname(dest))
@@ -12088,7 +12088,7 @@ class HostExports(object):
         maven_task_id = maven_results['task_id']
         maven_buildroot_id = maven_results['buildroot_id']
         maven_task_dir = koji.pathinfo.task(maven_task_id)
-        for relpath, files in maven_results['files'].iteritems():
+        for relpath, files in six.iteritems(maven_results['files']):
             dir_maven_info = maven_info
             poms = [f for f in files if f.endswith('.pom')]
             if len(poms) == 0:
@@ -12252,7 +12252,7 @@ class HostExports(object):

         task_dir = koji.pathinfo.task(results['task_id'])
         # import the build output
-        for relpath, metadata in results['output'].iteritems():
+        for relpath, metadata in six.iteritems(results['output']):
             archivetype = get_archive_type(relpath)
             if not archivetype:
                 # Unknown archive type, fail the build
@@ -12480,7 +12480,7 @@ class HostExports(object):
         for dep in extra_deps:
             if isinstance(dep, (int, long)):
                 task_output = list_task_output(dep, stat=True)
-                for filepath, filestats in task_output.iteritems():
+                for filepath, filestats in six.iteritems(task_output):
                     if os.path.splitext(filepath)[1] in ['.log', '.md5', '.sha1']:
                         continue
                     tokens = filepath.split('/')
@@ -12513,7 +12513,7 @@ class HostExports(object):
                     logger.error("Current build is %s, new build is %s.", idx_build, archive['build_id'])
                 maven_build_index[archive['group_id']][archive['artifact_id']][archive['version']] = archive['build_id']

-        ignore.extend(task_deps.values())
+        ignore.extend(list(task_deps.values()))

         SNAPSHOT_RE = re.compile(r'-\d{8}\.\d{6}-\d+')
         ignore_by_label = {}
@@ -12566,7 +12566,7 @@ class HostExports(object):
                 if build_id:
                     build = get_build(build_id)
                     logger.error("g:a:v supplied by build %(nvr)s", build)
-                    logger.error("Build supplies %i archives: %r", len(build_archives), build_archives.keys())
+                    logger.error("Build supplies %i archives: %r", len(build_archives), list(build_archives.keys()))
                 if tag_archive:
                     logger.error("Size mismatch, br: %i, db: %i", fileinfo['size'], tag_archive['size'])
                 raise koji.BuildrootError('Unknown file in build environment: %s, size: %s' % \
@@ -12651,7 +12651,7 @@ class HostExports(object):
         repodir = koji.pathinfo.repo(repo_id, rinfo['tag_name'])
         workdir = koji.pathinfo.work()
         if not rinfo['dist']:
-            for arch, (uploadpath, files) in data.iteritems():
+            for arch, (uploadpath, files) in six.iteritems(data):
                 archdir = "%s/%s" % (repodir, koji.canonArch(arch))
                 if not os.path.isdir(archdir):
                     raise koji.GenericError("Repo arch directory missing: %s" % archdir)
@@ -40,6 +40,7 @@ import koji.util
 from koji.xmlrpcplus import getparser, dumps, Fault, ExtendedMarshaller
 from koji.context import context
 from six.moves import range
+import six


 class Marshaller(ExtendedMarshaller):
@@ -98,7 +99,7 @@ class HandlerRegistry(object):

         Handlers are functions marked with one of the decorators defined in koji.plugin
         """
-        for v in vars(plugin).itervalues():
+        for v in six.itervalues(vars(plugin)):
             if isinstance(v, type):
                 #skip classes
                 continue
@@ -155,7 +156,7 @@ class HandlerRegistry(object):
         return args

     def system_listMethods(self):
-        return self.funcs.keys()
+        return list(self.funcs.keys())

     def system_methodSignature(self, method):
         #it is not possible to autogenerate this data
@@ -477,7 +478,7 @@ def load_config(environ):
         opts['policy'] = dict(config.items('policy'))
     else:
         opts['policy'] = {}
-    for pname, text in _default_policies.iteritems():
+    for pname, text in six.iteritems(_default_policies):
         opts['policy'].setdefault(pname, text)
     # use configured KojiDir
     if opts.get('KojiDir') is not None:
@@ -545,12 +546,12 @@ def get_policy(opts, plugins):
             continue
         alltests.append(koji.policy.findSimpleTests(vars(plugin)))
     policy = {}
-    for pname, text in opts['policy'].iteritems():
+    for pname, text in six.iteritems(opts['policy']):
         #filter/merge tests
         merged = {}
         for tests in alltests:
             # tests can be limited to certain policies by setting a class variable
-            for name, test in tests.iteritems():
+            for name, test in six.iteritems(tests):
                 if hasattr(test, 'policy'):
                     if isinstance(test.policy, basestring):
                         if pname != test.policy:
@@ -108,7 +108,7 @@ class Session(object):
                   'user_id': 'user_id',
                   }
         # sort for stability (unittests)
-        fields, aliases = list(zip(*list(sorted(fields.items(), key=lambda x: x[1]))))
+        fields, aliases = list(zip(*list(sorted(list(fields.items()), key=lambda x: x[1]))))
         q = """
         SELECT %s FROM sessions
         WHERE id = %%(id)i
@@ -27,6 +27,7 @@
 from __future__ import absolute_import
 import thread
 from six.moves import range
+import six

 class _data(object):
     pass
@@ -67,7 +68,7 @@ class ThreadLocal(object):
         id = thread.get_ident()
         tdict = object.__getattribute__(self, '_tdict')
         return "(current thread: %s) {" % id + \
-            ", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in tdict.iteritems()]) + \
+            ", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in six.iteritems(tdict)]) + \
             "}"

     def _threadclear(self):
@@ -38,6 +38,7 @@ import sys
 import traceback
 import errno
 from six.moves import range
+import six


 def incremental_upload(session, fname, fd, path, retries=5, logger=None):
@@ -582,7 +583,7 @@ class TaskManager(object):
         """Attempt to shut down cleanly"""
         for task_id in self.pids.keys():
             self.cleanupTask(task_id)
-        self.session.host.freeTasks(self.tasks.keys())
+        self.session.host.freeTasks(list(self.tasks.keys()))
         self.session.host.updateHost(task_load=0.0, ready=False)

     def updateBuildroots(self, nolocal=False):
@@ -613,14 +614,14 @@ class TaskManager(object):
                 #task not running - expire the buildroot
                 #TODO - consider recycling hooks here (with strong sanity checks)
                 self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br)
-                self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, self.tasks.keys()))
+                self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, list(self.tasks.keys())))
                 self.session.host.setBuildRootState(id, st_expired)
                 continue
         if nolocal:
             return
         local_br = self._scanLocalBuildroots()
         # get info on local_only buildroots (most likely expired)
-        local_only = [id for id in local_br.iterkeys() if id not in db_br]
+        local_only = [id for id in six.iterkeys(local_br) if id not in db_br]
         if local_only:
             missed_br = self.session.listBuildroots(buildrootID=tuple(local_only))
             #get all the task info in one call
@@ -852,7 +853,7 @@ class TaskManager(object):
         # Note: we may still take an assigned task below
         #sort available capacities for each of our bins
         avail = {}
-        for bin in bins.iterkeys():
+        for bin in six.iterkeys(bins):
             avail[bin] = [host['capacity'] - host['task_load'] for host in bin_hosts[bin]]
             avail[bin].sort()
             avail[bin].reverse()
@@ -17,10 +17,12 @@
 # Authors:
 #       Mike McLean <mikem@redhat.com>

+from __future__ import absolute_import
 import fnmatch
 import logging

 import koji
+import six


 class BaseSimpleTest(object):
@@ -292,7 +294,7 @@ class SimpleRuleSet(object):
                     index[name] = 1
         index = {}
         _recurse(self.ruleset, index)
-        return index.keys()
+        return list(index.keys())

     def _apply(self, rules, data, top=False):
         for tests, negate, action in rules:
@@ -361,7 +363,7 @@ def findSimpleTests(namespace):
         namespace = (namespace,)
     ret = {}
     for ns in namespace:
-        for key, value in ns.iteritems():
+        for key, value in six.iteritems(ns):
             if value is BaseSimpleTest:
                 # skip this abstract base class if we encounter it
                 # this module contains generic tests, so it is valid to include it
@@ -18,11 +18,13 @@
 # This library and program is heavily based on rpmdiff from the rpmlint package
 # It was modified to be used as standalone library for the Koji project.

+from __future__ import absolute_import
 import hashlib
 import json
 import rpm
 import os
 import itertools
+import six

 class Rpmdiff:

@@ -113,8 +115,8 @@ class Rpmdiff:

         old_files_dict = self.__fileIteratorToDict(old.fiFromHeader())
         new_files_dict = self.__fileIteratorToDict(new.fiFromHeader())
-        files = list(set(itertools.chain(old_files_dict.iterkeys(),
-                                         new_files_dict.iterkeys())))
+        files = list(set(itertools.chain(six.iterkeys(old_files_dict),
+                                         six.iterkeys(new_files_dict))))
         files.sort()
         self.old_data['files'] = old_files_dict
         self.new_data['files'] = new_files_dict
@@ -8,7 +8,7 @@
 from koji.plugin import callbacks, callback, ignore_error
 import logging

-@callback(*callbacks.keys())
+@callback(*list(callbacks.keys()))
 @ignore_error
 def echo(cbtype, *args, **kws):
     logging.getLogger('koji.plugin.echo').info('Called the %s callback, args: %s; kws: %s',
@@ -487,7 +487,7 @@ Finished: Thu Jan 1 00:50:00 1970
         }

         # need ordered dict to get same results
-        files = collections.OrderedDict(sorted(files.items(),
+        files = collections.OrderedDict(sorted(list(files.items()),
                                                key=lambda t: t[0]))

         list_task_output_mock.side_effect = [[], files, {}]
@@ -86,7 +86,7 @@ class RepoManagerTest(unittest.TestCase):

         self.mgr.checkTasks()
         # should have removed the close tasks
-        self.assertEqual(self.mgr.tasks.keys(), [101, 102])
+        self.assertEqual(list(self.mgr.tasks.keys()), [101, 102])

     @mock.patch('time.sleep')
     def test_regen_loop(self, sleep):
@@ -7,6 +7,7 @@ import os
 import resource
 import six.moves.configparser
 import time
+import six
 try:
     import unittest2 as unittest
 except ImportError:
@@ -795,24 +796,24 @@ class MavenUtilTestCase(unittest.TestCase):

         name, release, date = 'fedora', 26, datetime.now().strftime('%Y%m%d')
         data = {'name': name, 'release': release, 'date': date}
-        six.assertCountEqual(self, data.items(), ldict.items())
-        six.assertCountEqual(self, data.items(), [v for v in ldict.iteritems()])
+        six.assertCountEqual(self, list(data.items()), list(ldict.items()))
+        six.assertCountEqual(self, list(data.items()), [v for v in six.iteritems(ldict)])

         name, release, date = 'rhel', 7, '20171012'
-        six.assertCountEqual(self, [name, release, date], ldict.values())
-        six.assertCountEqual(self, [name, release, date], [v for v in ldict.itervalues()])
+        six.assertCountEqual(self, [name, release, date], list(ldict.values()))
+        six.assertCountEqual(self, [name, release, date], [v for v in six.itervalues(ldict)])

         data = {'name': name, 'release': release, 'date': date}
         self.assertEqual(name, ldict.pop('name'))
         data.pop('name')
-        six.assertCountEqual(self, data.items(), ldict.items())
+        six.assertCountEqual(self, list(data.items()), list(ldict.items()))

         (key, value) = ldict.popitem()
         data.pop(key)
-        six.assertCountEqual(self, data.items(), ldict.items())
+        six.assertCountEqual(self, list(data.items()), list(ldict.items()))

         ldict_copy = ldict.copy()
-        six.assertCountEqual(self, data.items(), ldict_copy.items())
+        six.assertCountEqual(self, list(data.items()), list(ldict_copy.items()))

     def test_LazyRecord(self):
         """Test LazyRecord object"""
@@ -878,7 +879,7 @@ class MavenUtilTestCase(unittest.TestCase):

         actual = koji.util.eventFromOpts(session, opts)
         self.assertNotEqual(None, actual)
-        six.assertCountEqual(self, expect.items(), actual.items())
+        six.assertCountEqual(self, list(expect.items()), list(actual.items()))

         # no event is matched case
         opts = mock.MagicMock(event=0, ts=0, repo=0)
@@ -80,7 +80,7 @@ class FakeConfigParser(object):
         return

     def sections(self):
-        return self.CONFIG.keys()
+        return list(self.CONFIG.keys())

     def has_option(self, section, key):
         return section in self.CONFIG and key in self.CONFIG[section]
util/koji-gc: 10 changes
@@ -6,6 +6,8 @@
 # Authors:
 #       Mike McLean <mikem@redhat.com>

+from __future__ import absolute_import
+import six
 try:
     import krbV
 except ImportError:  # pragma: no cover
@@ -535,7 +537,7 @@ def handle_trash():
     by_owner = {}
     for binfo in to_trash:
         by_owner.setdefault(binfo['owner_name'], []).append(binfo)
-    owners = by_owner.keys()
+    owners = list(by_owner.keys())
     owners.sort()
     for owner_name in owners:
         builds = [(b['nvr'], b) for b in by_owner[owner_name]]
@@ -557,7 +559,7 @@ def handle_trash():
             #best we can do currently
             owner = binfo['owner_id']
         else:
-            owner = max([(n, k) for k, n in count.iteritems()])[1]
+            owner = max([(n, k) for k, n in six.iteritems(count)])[1]
         session.packageListAdd(trashcan_tag, binfo['name'], owner)
         session.tagBuildBypass(trashcan_tag, binfo['id'], force=True)

@@ -770,7 +772,7 @@ def get_build_sigs(build, cache=False):
     for sig in sigs:
         if sig['sigkey']:
             keys.setdefault(sig['sigkey'], 1)
-    ret = build_sig_cache[build] = keys.keys()
+    ret = build_sig_cache[build] = list(keys.keys())
     return ret

 def handle_prune():
@@ -830,7 +832,7 @@ def handle_prune():
                 pkghist.setdefault(h['name'] + '-' + h['version'], []).append(h)
             else:
                 pkghist.setdefault(h['name'], []).append(h)
-    pkgs = pkghist.keys()
+    pkgs = list(pkghist.keys())
     pkgs.sort()
     for pkg in pkgs:
         if not check_package(pkg):
@@ -24,6 +24,7 @@

 from __future__ import absolute_import
 from six.moves import range
+import six
 try:
     import krbV
 except ImportError:  # pragma: no cover
@@ -497,7 +498,7 @@ class TrackedBuild(object):
             log("Warning: some rpms for %s lacked buildroots:" % self.nvr)
             for rinfo in bad:
                 log("  %(name)s-%(version)s-%(release)s.%(arch)s" % rinfo)
-        return brs.keys()
+        return list(brs.keys())

     def getDeps(self):
         buildroots = self.getBuildroots()
@@ -546,7 +547,7 @@ class TrackedBuild(object):
         # changes happened during the build startup and some subtasks got the old
         # repo and others the new one.
         base = []
-        for name, brlist in bases.iteritems():
+        for name, brlist in six.iteritems(bases):
             #We want to determine for each name if that package was present
             #in /all/ the buildroots or just some.
             #Because brlist is constructed only from elements of buildroots, we
@@ -556,12 +557,12 @@ class TrackedBuild(object):
                 #each buildroot had this as a base package
                 base.append(name)
         if len(tags) > 1:
-            log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, tags.keys()))
-            counts = [(n, tag) for tag, n in tags.iteritems()]
+            log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, list(tags.keys())))
+            counts = [(n, tag) for tag, n in six.iteritems(tags)]
             sort(counts)
             tag = counts[-1][1]
         else:
-            tag = tags.keys()[0]
+            tag = list(tags.keys())[0]
         # due bugs in used tools mainline koji instance could store empty buildroot infos for builds
         if len(builds) == 0:
             self.setState("noroot")
@@ -1005,7 +1006,7 @@ class BuildTracker(object):
         for pkg in session.listPackages(pkgID=name):
             owners.setdefault(pkg['owner_id'], []).append(pkg)
         if owners:
-            order = [(len(v), k) for k, v in owners.iteritems()]
+            order = [(len(v), k) for k, v in six.iteritems(owners)]
             order.sort()
             owner = order[-1][1]
         else:
@@ -1121,7 +1122,7 @@ class BuildTracker(object):
         log("-- %s --" % time.asctime())
         self.report_brief()
         for state in ('broken', 'noroot', 'blocked'):
-            builds = self.state_idx[state].values()
+            builds = list(self.state_idx[state].values())
             not_replaced = [b for b in builds if not b.substitute]
             n_replaced = len(builds) - len(not_replaced)
             log("%s: %i (+%i replaced)" % (state, len(not_replaced), n_replaced))
@@ -1151,7 +1152,7 @@ class BuildTracker(object):
                     nvr = dep.substitute
                 problem_counts.setdefault(nvr, 0)
                 problem_counts[nvr] += 1
-        order = [(c, nvr) for (nvr, c) in problem_counts.iteritems()]
+        order = [(c, nvr) for (nvr, c) in six.iteritems(problem_counts)]
         if order:
             order.sort()
             order.reverse()
@@ -1162,7 +1163,7 @@ class BuildTracker(object):

     def report_brief(self):
         N = len(self.builds)
-        states = self.state_idx.keys()
+        states = list(self.state_idx.keys())
         states.sort()
         parts = ["%s: %i" % (s, len(self.state_idx[s])) for s in states]
         parts.append("total: %i" % N)
@@ -1234,7 +1235,7 @@ class BuildTracker(object):
         ret = False
         if options.max_jobs and len(self.state_idx['pending']) >= options.max_jobs:
             return ret
-        missing = [(b.order, b.id, b) for b in self.state_idx['missing'].itervalues()]
+        missing = [(b.order, b.id, b) for b in six.itervalues(self.state_idx['missing'])]
         missing.sort()
         for order, build_id, build in missing:
             if not self.checkBuildDeps(build):
util/kojira: 12 changes
@@ -20,6 +20,7 @@
 # Authors:
 #       Mike McLean <mikem@redhat.com>

+from __future__ import absolute_import
 import sys
 import os
 import koji
@@ -34,6 +35,7 @@ import signal
 import time
 import threading
 import traceback
+import six



@@ -83,7 +85,7 @@ class ManagedRepo(object):
         tags = {self.tag_id : 1}
         for x in order:
             tags[x['parent_id']] = 1
-        self.taglist = tags.keys()
+        self.taglist = list(tags.keys())

     @property
     def dist(self):
@@ -216,9 +218,9 @@ class RepoManager(object):

     def printState(self):
         self.logger.debug('Tracking %i repos, %i child processes', len(self.repos), len(self.delete_pids))
-        for tag_id, task_id in self.tasks.iteritems():
+        for tag_id, task_id in six.iteritems(self.tasks):
             self.logger.debug("Tracking task %s for tag %s", task_id, tag_id)
-        for pid, desc in self.delete_pids.iteritems():
+        for pid, desc in six.iteritems(self.delete_pids):
             self.logger.debug("Delete job %s: %r", pid, desc)

     def rmtree(self, path):
@@ -312,7 +314,7 @@ class RepoManager(object):
     def checkCurrentRepos(self):
         """Determine which repos are current"""
         to_check = []
-        repo_ids = self.repos.keys()
+        repo_ids = list(self.repos.keys())
         for repo_id in repo_ids:
             repo = self.repos.get(repo_id)
             if repo is None:
@@ -658,7 +660,7 @@ class RepoManager(object):

         debuginfo_pat = self.options.debuginfo_tags.split()
         src_pat = self.options.source_tags.split()
-        order = self.needed_tags.values()
+        order = list(self.needed_tags.values())
         order.sort(key=lambda t:t['score'])
         for tag in order:
             if running_tasks >= self.options.max_repo_tasks:
@@ -26,6 +26,7 @@
 #       kojiwind --install
 # in a cygwin shell.

+from __future__ import absolute_import
 from optparse import OptionParser
 from ConfigParser import ConfigParser
 import os
@@ -42,6 +43,7 @@ import threading
 import re
 import glob
 import zipfile
+import six

 MANAGER_PORT = 7000

@@ -639,7 +641,7 @@ def stream_logs(server, handler, builds):
             logpath = os.path.join(build.source_dir, relpath)
             if logpath not in logs:
                 logs[logpath] = (relpath, None)
-        for log, (relpath, fd) in logs.iteritems():
+        for log, (relpath, fd) in six.iteritems(logs):
             if not fd:
                 if os.path.isfile(log):
                     try:
@@ -150,7 +150,7 @@ def get_options():
             defaults[name] = config.getboolean('kojivmd', name)
         elif name in ['plugin', 'plugins']:
             defaults['plugin'] = value.split()
-        elif name in defaults.keys():
+        elif name in list(defaults.keys()):
             defaults[name] = value
         else:
             quit("unknown config option: %s" % name)
@@ -314,7 +314,7 @@ class WinBuildTask(MultiPlatformTask):
                                   strict=False)
         # winspec and patches options are urls
         # verify the urls before passing them to the VM
-        for url in [source_url] + subopts.values():
+        for url in [source_url] + list(subopts.values()):
             scm = SCM(url)
             scm.assert_allowed(self.options.allowed_scms)

@@ -38,6 +38,7 @@ from kojiweb.util import _genHTML
 from kojiweb.util import _getValidTokens
 from koji.util import sha1_constructor
 from six.moves import range
+import six

 # Convenience definition of a commonly-used sort function
 _sortbyname = kojiweb.util.sortByKeyFunc('name')
@@ -683,7 +684,7 @@ def taskinfo(environ, taskID):
     values['pathinfo'] = pathinfo

     paths = [] # (volume, relpath) tuples
-    for relname, volumes in server.listTaskOutput(task['id'], all_volumes=True).iteritems():
+    for relname, volumes in six.iteritems(server.listTaskOutput(task['id'], all_volumes=True)):
         paths += [(volume, relname) for volume in volumes]
     values['output'] = sorted(paths, key = _sortByExtAndName)
     if environ['koji.currentUser']:
@@ -702,8 +703,8 @@ def taskstatus(environ, taskID):
         return ''
     files = server.listTaskOutput(taskID, stat=True, all_volumes=True)
     output = '%i:%s\n' % (task['id'], koji.TASK_STATES[task['state']])
-    for filename, volumes_data in files.iteritems():
-        for volume, file_stats in volumes_data.iteritems():
+    for filename, volumes_data in six.iteritems(files):
+        for volume, file_stats in six.iteritems(volumes_data):
             output += '%s:%s:%s\n' % (volume, filename, file_stats['st_size'])
     return output

@@ -2122,7 +2123,7 @@ def buildsbytarget(environ, days='7', start=None, order='-builds'):
         if builds > maxBuilds:
             maxBuilds = builds

-    kojiweb.util.paginateList(values, targets.values(), start, 'targets', 'target', order)
+    kojiweb.util.paginateList(values, list(targets.values()), start, 'targets', 'target', order)

     values['order'] = order

@@ -399,7 +399,7 @@ class Dispatcher(object):
         if isinstance(result, basestring):
             headers.setdefault('content-length', ('Content-Length', str(len(result))))
             headers.setdefault('content-type', ('Content-Type', 'text/html'))
-        headers = headers.values() + extra
+        headers = list(headers.values()) + extra
         self.logger.debug("Headers:")
         self.logger.debug(koji.util.LazyString(pprint.pformat, [headers]))
         start_response(status, headers)
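Most of the list() wrapping above is defensive: on Python 3, keys(), values(), and items() return view objects rather than lists, so code that concatenates, sorts in place, or indexes the result breaks without it. A small sketch of the failure mode, using illustrative values rather than real koji data:

    output = {'build.log': 1, 'root.log': 2}  # illustrative stand-in
    logs = ['mock_output.log']

    # relpaths = output.keys() + logs       # TypeError on Python 3 (view + list)
    relpaths = list(output.keys()) + logs   # works on both Python 2 and 3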