workaround unnecessary list conversions
This commit is contained in:
parent
56095e8018
commit
caae1a0a8b
18 changed files with 126 additions and 95 deletions
|
|
@ -39,7 +39,7 @@ import logging
|
|||
import logging.handlers
|
||||
from koji.daemon import incremental_upload, log_output, TaskManager, SCM
|
||||
from koji.tasks import ServerExit, ServerRestart, BaseTaskHandler, MultiPlatformTask
|
||||
from koji.util import parseStatus, isSuccess, dslice, dslice_ex
|
||||
from koji.util import parseStatus, isSuccess, dslice, dslice_ex, to_list
|
||||
import multilib.multilib as multilib
|
||||
import os
|
||||
import pwd
|
||||
|
|
@ -1057,7 +1057,7 @@ class BuildTask(BaseTaskHandler):
|
|||
archdict[a] = 1
|
||||
if not archdict:
|
||||
raise koji.BuildError("No matching arches were found")
|
||||
return list(archdict.keys())
|
||||
return to_list(archdict.keys())
|
||||
|
||||
|
||||
def choose_taskarch(self, arch, srpm, build_tag):
|
||||
|
|
@ -1118,7 +1118,7 @@ class BuildTask(BaseTaskHandler):
|
|||
self.logger.debug("Waiting on subtasks...")
|
||||
|
||||
# wait for subtasks to finish
|
||||
results = self.wait(list(subtasks.values()), all=True, failany=failany)
|
||||
results = self.wait(to_list(subtasks.values()), all=True, failany=failany)
|
||||
|
||||
# finalize import
|
||||
# merge data into needed args for completeBuild call
|
||||
|
|
@ -2100,7 +2100,7 @@ class ChainMavenTask(MultiPlatformTask):
|
|||
running[task_id] = package
|
||||
del todo[package]
|
||||
try:
|
||||
results = self.wait(list(running.keys()))
|
||||
results = self.wait(to_list(running.keys()))
|
||||
except (six.moves.xmlrpc_client.Fault, koji.GenericError) as e:
|
||||
# One task has failed, wait for the rest to complete before the
|
||||
# chainmaven task fails. self.wait(all=True) should throw an exception.
|
||||
|
|
@ -2153,8 +2153,8 @@ class ChainMavenTask(MultiPlatformTask):
|
|||
have the same keys and those keys have the same values. If a value is
|
||||
list, it will be considered equal to a list with the same values in
|
||||
a different order."""
|
||||
akeys = list(a.keys())
|
||||
bkeys = list(b.keys())
|
||||
akeys = to_list(a.keys())
|
||||
bkeys = to_list(b.keys())
|
||||
if sorted(akeys) != sorted(bkeys):
|
||||
return False
|
||||
for key in akeys:
|
||||
|
|
@ -2315,7 +2315,7 @@ class BuildBaseImageTask(BuildImageTask):
|
|||
canfail.append(subtasks[arch])
|
||||
self.logger.debug("Got image subtasks: %r" % (subtasks))
|
||||
self.logger.debug("Waiting on image subtasks (%s can fail)..." % canfail)
|
||||
results = self.wait(list(subtasks.values()), all=True, failany=True, canfail=canfail)
|
||||
results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail)
|
||||
|
||||
# if everything failed, fail even if all subtasks are in canfail
|
||||
self.logger.debug('subtask results: %r', results)
|
||||
|
|
@ -2624,7 +2624,7 @@ class BuildLiveMediaTask(BuildImageTask):
|
|||
|
||||
self.logger.debug("Got image subtasks: %r", subtasks)
|
||||
self.logger.debug("Waiting on livemedia subtasks...")
|
||||
results = self.wait(list(subtasks.values()), all=True, failany=True, canfail=canfail)
|
||||
results = self.wait(to_list(subtasks.values()), all=True, failany=True, canfail=canfail)
|
||||
|
||||
# if everything failed, fail even if all subtasks are in canfail
|
||||
self.logger.debug('subtask results: %r', results)
|
||||
|
|
@ -2660,7 +2660,7 @@ class BuildLiveMediaTask(BuildImageTask):
|
|||
wrapper_tasks[arch] = self.subtask('wrapperRPM', arglist,
|
||||
label='wrapper %s' % arch, arch='noarch')
|
||||
|
||||
results2 = self.wait(list(wrapper_tasks.values()), all=True, failany=True)
|
||||
results2 = self.wait(to_list(wrapper_tasks.values()), all=True, failany=True)
|
||||
self.logger.debug('wrapper results: %r', results2)
|
||||
|
||||
# add wrapper rpm results into main results
|
||||
|
|
@ -2922,7 +2922,7 @@ class ImageTask(BaseTaskHandler):
|
|||
}
|
||||
|
||||
# Duplicated with pungi/util.py _apply_substitutions
|
||||
for k, v in sorted(list(substitutions.items()), key=lambda x: len(x[0]), reverse=True):
|
||||
for k, v in sorted(to_list(substitutions.items()), key=lambda x: len(x[0]), reverse=True):
|
||||
if k in name:
|
||||
name = name.replace(k, v)
|
||||
if k in version:
|
||||
|
|
@ -3719,7 +3719,7 @@ class BaseImageTask(OzImageTask):
|
|||
if len(formats) == 0:
|
||||
# we only want a raw disk image (no format option given)
|
||||
f_dict['raw'] = True
|
||||
elif 'raw' not in list(f_dict.keys()):
|
||||
elif 'raw' not in f_dict.keys():
|
||||
f_dict['raw'] = False
|
||||
self.logger.debug('Image delivery plan: %s' % f_dict)
|
||||
return f_dict
|
||||
|
|
@ -4958,7 +4958,7 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
|
|||
def uniq(self, items):
|
||||
"""Remove duplicates from the list of items, and sort the list."""
|
||||
m = dict(zip(items, [1] * len(items)))
|
||||
l = list(m.keys())
|
||||
l = to_list(m.keys())
|
||||
l.sort()
|
||||
return l
|
||||
|
||||
|
|
@ -5003,7 +5003,7 @@ class NewRepoTask(BaseTaskHandler):
|
|||
# gather subtask results
|
||||
data = {}
|
||||
if subtasks:
|
||||
results = self.wait(list(subtasks.values()), all=True, failany=True)
|
||||
results = self.wait(to_list(subtasks.values()), all=True, failany=True)
|
||||
for (arch, task_id) in six.iteritems(subtasks):
|
||||
data[arch] = results[task_id]
|
||||
self.logger.debug("DEBUG: %r : %r " % (arch,data[arch],))
|
||||
|
|
@ -5157,7 +5157,7 @@ class NewDistRepoTask(BaseTaskHandler):
|
|||
method='createdistrepo', arglist=arglist, label=arch,
|
||||
parent=self.id, arch='noarch')
|
||||
if len(subtasks) > 0 and task_opts['multilib']:
|
||||
self.wait(list(subtasks.values()), all=True, failany=True)
|
||||
self.wait(to_list(subtasks.values()), all=True, failany=True)
|
||||
for arch in arch32s:
|
||||
# move the 32-bit task output to the final resting place
|
||||
# so the 64-bit arches can use it for multilib
|
||||
|
|
@ -5171,7 +5171,7 @@ class NewDistRepoTask(BaseTaskHandler):
|
|||
method='createdistrepo', arglist=arglist, label=arch,
|
||||
parent=self.id, arch='noarch')
|
||||
# wait for 64-bit subtasks to finish
|
||||
self.wait(list(subtasks.values()), all=True, failany=True)
|
||||
self.wait(to_list(subtasks.values()), all=True, failany=True)
|
||||
for (arch, task_id) in six.iteritems(subtasks):
|
||||
if task_opts['multilib'] and arch in arch32s:
|
||||
# already moved above
|
||||
|
|
@ -5508,7 +5508,7 @@ enabled=1
|
|||
# select our rpms
|
||||
selected = {}
|
||||
for rpm_id in rpm_idx:
|
||||
avail_keys = list(rpm_idx[rpm_id].keys())
|
||||
avail_keys = to_list(rpm_idx[rpm_id].keys())
|
||||
best_key = self.pick_key(keys, avail_keys)
|
||||
if best_key is None:
|
||||
# we lack a matching key for this rpm
|
||||
|
|
@ -5572,7 +5572,7 @@ enabled=1
|
|||
fmt = '%(name)s-%(version)s-%(release)s.%(arch)s'
|
||||
filenames = [[fmt % selected[r], r] for r in sig_missing]
|
||||
for fname, rpm_id in sorted(filenames):
|
||||
avail = list(rpm_idx.get(rpm_id, {}).keys())
|
||||
avail = to_list(rpm_idx.get(rpm_id, {}).keys())
|
||||
outfile.write('%s: %r\n' % (fname, avail))
|
||||
outfile.close()
|
||||
self.session.uploadWrapper(missing_log, self.uploadpath)
|
||||
|
|
@ -5661,7 +5661,7 @@ class WaitrepoTask(BaseTaskHandler):
|
|||
|
||||
if isinstance(newer_than, six.string_types) and newer_than.lower() == "now":
|
||||
newer_than = start
|
||||
if not isinstance(newer_than, list(six.integer_types) + [type(None), float]):
|
||||
if not isinstance(newer_than, six.integer_types + (type(None), float)):
|
||||
raise koji.GenericError("Invalid value for newer_than: %s" % newer_than)
|
||||
|
||||
if newer_than and nvrs:
|
||||
|
|
@ -5826,7 +5826,7 @@ def get_options():
|
|||
defaults[name] = config.getboolean('kojid', name)
|
||||
elif name in ['plugin', 'plugins']:
|
||||
defaults['plugin'] = value.split()
|
||||
elif name in list(defaults.keys()):
|
||||
elif name in to_list(defaults.keys()):
|
||||
defaults[name] = value
|
||||
elif name.upper().startswith('RLIMIT_'):
|
||||
defaults[name.upper()] = value
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ except ImportError: # pragma: no cover
|
|||
yumcomps = None
|
||||
|
||||
import koji
|
||||
from koji.util import md5_constructor
|
||||
from koji.util import md5_constructor, to_list
|
||||
from koji_cli.lib import _, OptionParser, activate_session, parse_arches, \
|
||||
_unique_path, _running_in_bg, _progress_callback, watch_tasks, \
|
||||
arg_filter, linked_upload, list_task_output_all_volumes, \
|
||||
|
|
@ -654,9 +654,9 @@ def handle_maven_build(options, session, args):
|
|||
section=build_opts.section)
|
||||
except ValueError as e:
|
||||
parser.error(e.args[0])
|
||||
opts = list(params.values())[0]
|
||||
opts = to_list(params.values())[0]
|
||||
if opts.pop('type', 'maven') != 'maven':
|
||||
parser.error(_("Section %s does not contain a maven-build config") % list(params.keys())[0])
|
||||
parser.error(_("Section %s does not contain a maven-build config") % to_list(params.keys())[0])
|
||||
source = opts.pop('scmurl')
|
||||
else:
|
||||
source = args[1]
|
||||
|
|
@ -715,9 +715,9 @@ def handle_wrapper_rpm(options, session, args):
|
|||
section=build_opts.section)
|
||||
except ValueError as e:
|
||||
parser.error(e.args[0])
|
||||
opts = list(params.values())[0]
|
||||
opts = to_list(params.values())[0]
|
||||
if opts.get('type') != 'wrapper':
|
||||
parser.error(_("Section %s does not contain a wrapper-rpm config") % list(params.keys())[0])
|
||||
parser.error(_("Section %s does not contain a wrapper-rpm config") % to_list(params.keys())[0])
|
||||
url = opts['scmurl']
|
||||
package = opts['buildrequires'][0]
|
||||
target_info = session.getBuildTarget(target, strict=True)
|
||||
|
|
@ -1146,7 +1146,7 @@ def handle_import(goptions, session, args):
|
|||
nvr = "%(name)s-%(version)s-%(release)s" % koji.parse_NVRA(data['sourcerpm'])
|
||||
to_import.setdefault(nvr,[]).append((path,data))
|
||||
builds_missing = False
|
||||
nvrs = list(to_import.keys())
|
||||
nvrs = to_list(to_import.keys())
|
||||
nvrs.sort()
|
||||
for nvr in nvrs:
|
||||
to_import[nvr].sort()
|
||||
|
|
@ -1374,7 +1374,7 @@ def _import_comps(session, filename, tag, options):
|
|||
for k in pkgopts.keys():
|
||||
if six.PY2 and isinstance(pkgopts[k], unicode):
|
||||
pkgopts[k] = str(pkgopts[k])
|
||||
s_opts = ', '.join(["'%s': %r" % (k, pkgopts[k]) for k in sorted(list(pkgopts.keys()))])
|
||||
s_opts = ', '.join(["'%s': %r" % (k, pkgopts[k]) for k in sorted(pkgopts.keys())])
|
||||
print(" Package: %s: {%s}" % (pkg.name, s_opts))
|
||||
session.groupPackageListAdd(tag, group.id, pkg.name, force=force, **pkgopts)
|
||||
# libcomps does not support group dependencies
|
||||
|
|
@ -1407,7 +1407,7 @@ def _import_comps_alt(session, filename, tag, options): # no cover 3.x
|
|||
for k in pkgopts.keys():
|
||||
if six.PY2 and isinstance(pkgopts[k], unicode):
|
||||
pkgopts[k] = str(pkgopts[k])
|
||||
s_opts = ', '.join(["'%s': %r" % (k, pkgopts[k]) for k in sorted(list(pkgopts.keys()))])
|
||||
s_opts = ', '.join(["'%s': %r" % (k, pkgopts[k]) for k in sorted(pkgopts.keys())])
|
||||
print(" Package: %s: {%s}" % (pkg, s_opts))
|
||||
session.groupPackageListAdd(tag, group.groupid, pkg, force=force, **pkgopts)
|
||||
#yum.comps does not support group dependencies
|
||||
|
|
@ -1617,7 +1617,7 @@ def handle_prune_signed_copies(options, session, args):
|
|||
#that the build was recently untagged from
|
||||
tags.setdefault(entry['tag_name'], 1)
|
||||
if options.debug:
|
||||
print("Tags: %s" % list(tags.keys()))
|
||||
print("Tags: %s" % to_list(tags.keys()))
|
||||
for tag_name in tags:
|
||||
if tag_name == options.trashcan_tag:
|
||||
if options.debug:
|
||||
|
|
@ -1834,7 +1834,7 @@ def handle_prune_signed_copies(options, session, args):
|
|||
except OSError as e:
|
||||
print("Error removing %s: %s" % (signedpath, e))
|
||||
if len(sigdirs) == 1:
|
||||
dir = list(sigdirs.keys())[0]
|
||||
dir = to_list(sigdirs.keys())[0]
|
||||
if options.test:
|
||||
print("Would have removed dir: %s" % dir)
|
||||
else:
|
||||
|
|
@ -4134,7 +4134,7 @@ def _print_histline(entry, **kwargs):
|
|||
else:
|
||||
return '%s.name' % key
|
||||
if edit:
|
||||
keys = list(x.keys())
|
||||
keys = to_list(x.keys())
|
||||
keys.sort()
|
||||
y = other[-1]
|
||||
for key in keys:
|
||||
|
|
@ -4149,7 +4149,7 @@ def _print_histline(entry, **kwargs):
|
|||
continue
|
||||
print(" %s: %s -> %s" % (key, x[key], y[key]))
|
||||
elif create and options.verbose and table != 'tag_listing':
|
||||
keys = list(x.keys())
|
||||
keys = to_list(x.keys())
|
||||
keys.sort()
|
||||
# the table keys have already been represented in the base format string
|
||||
also_hidden = list(_table_keys[table])
|
||||
|
|
@ -4636,9 +4636,7 @@ def anon_handle_taginfo(goptions, session, args):
|
|||
print("Include all Maven archives?: %s" % (info['maven_include_all'] and 'yes' or 'no'))
|
||||
if 'extra' in info:
|
||||
print("Tag options:")
|
||||
keys = list(info['extra'].keys())
|
||||
keys.sort()
|
||||
for key in keys:
|
||||
for key in sorted(info['extra'].keys()):
|
||||
print(" %s : %s" % (key, pprint.pformat(info['extra'][key])))
|
||||
dest_targets = session.getBuildTargets(destTagID=info['id'], **event_opts)
|
||||
build_targets = session.getBuildTargets(buildTagID=info['id'], **event_opts)
|
||||
|
|
@ -6830,8 +6828,7 @@ def anon_handle_wait_repo(options, session, args):
|
|||
targets = session.getBuildTargets(destTagID=tag_info['id'])
|
||||
if targets:
|
||||
maybe = {}.fromkeys([t['build_tag_name'] for t in targets])
|
||||
maybe = list(maybe.keys())
|
||||
maybe.sort()
|
||||
maybe = sorted(maybe.keys())
|
||||
print("Suggested tags: %s" % ', '.join(maybe))
|
||||
return 1
|
||||
tag_id = tag_info['id']
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ except ImportError: # pragma: no cover
|
|||
krbV = None
|
||||
|
||||
import koji
|
||||
from koji.util import to_list
|
||||
|
||||
# fix OptionParser for python 2.3 (optparse version 1.4.1+)
|
||||
# code taken from optparse version 1.5a2
|
||||
|
|
@ -94,7 +95,7 @@ categories = {
|
|||
def get_epilog_str(progname=None):
|
||||
if progname is None:
|
||||
progname = os.path.basename(sys.argv[0]) or 'koji'
|
||||
categories_ordered=', '.join(sorted(['all'] + list(categories.keys())))
|
||||
categories_ordered=', '.join(sorted(['all'] + to_list(categories.keys())))
|
||||
epilog_str = '''
|
||||
Try "%(progname)s --help" for help about global options
|
||||
Try "%(progname)s help" to get all available commands
|
||||
|
|
@ -301,7 +302,7 @@ def watch_tasks(session, tasklist, quiet=False, poll_interval=60):
|
|||
rv = 1
|
||||
for child in session.getTaskChildren(task_id):
|
||||
child_id = child['id']
|
||||
if not child_id in list(tasks.keys()):
|
||||
if not child_id in tasks.keys():
|
||||
tasks[child_id] = TaskWatcher(child_id, session, task.level + 1, quiet=quiet)
|
||||
tasks[child_id].update()
|
||||
# If we found new children, go through the list again,
|
||||
|
|
|
|||
|
|
@ -62,6 +62,7 @@ from koji.util import md5_constructor
|
|||
from koji.util import multi_fnmatch
|
||||
from koji.util import safer_move
|
||||
from koji.util import sha1_constructor
|
||||
from koji.util import to_list
|
||||
from six.moves import range
|
||||
logger = logging.getLogger('koji.hub')
|
||||
|
||||
|
|
@ -2050,17 +2051,17 @@ def readTagGroups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True,
|
|||
Blocked packages/groups can alternatively also be listed if incl_blocked is set to True
|
||||
"""
|
||||
groups = get_tag_groups(tag, event, inherit, incl_pkgs, incl_reqs)
|
||||
groups = list(groups.values())
|
||||
groups = to_list(groups.values())
|
||||
for group in groups:
|
||||
#filter blocked entries and collapse to a list
|
||||
if 'packagelist' in group:
|
||||
if incl_blocked:
|
||||
group['packagelist'] = list(group['packagelist'].values())
|
||||
group['packagelist'] = to_list(group['packagelist'].values())
|
||||
else:
|
||||
group['packagelist'] = [x for x in group['packagelist'].values() if not x['blocked']]
|
||||
if 'grouplist' in group:
|
||||
if incl_blocked:
|
||||
group['grouplist'] = list(group['grouplist'].values())
|
||||
group['grouplist'] = to_list(group['grouplist'].values())
|
||||
else:
|
||||
group['grouplist'] = [x for x in group['grouplist'].values() if not x['blocked']]
|
||||
#filter blocked entries and collapse to a list
|
||||
|
|
@ -2212,7 +2213,7 @@ def get_all_arches():
|
|||
#in a perfect world, this list would only include canonical
|
||||
#arches, but not all admins will understand that.
|
||||
ret[koji.canonArch(arch)] = 1
|
||||
return list(ret.keys())
|
||||
return to_list(ret.keys())
|
||||
|
||||
def get_active_tasks(host=None):
|
||||
"""Return data on tasks that are yet to be run"""
|
||||
|
|
@ -2618,7 +2619,7 @@ def repo_references(repo_id):
|
|||
'host_id': 'host_id',
|
||||
'create_event': 'create_event',
|
||||
'state': 'state'}
|
||||
fields, aliases = zip(*list(fields.items()))
|
||||
fields, aliases = zip(*fields.items())
|
||||
values = {'repo_id': repo_id}
|
||||
clauses = ['repo_id=%(repo_id)s', 'retire_event IS NULL']
|
||||
query = QueryProcessor(columns=fields, aliases=aliases, tables=['standard_buildroot'],
|
||||
|
|
@ -3061,7 +3062,7 @@ def get_tag(tagInfo, strict=False, event=None):
|
|||
raise koji.GenericError('invalid type for tagInfo: %s' % type(tagInfo))
|
||||
|
||||
data = {'tagInfo': tagInfo}
|
||||
fields, aliases = zip(*list(fields.items()))
|
||||
fields, aliases = zip(*fields.items())
|
||||
query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,
|
||||
joins=joins, clauses=clauses, values=data)
|
||||
result = query.executeOne()
|
||||
|
|
@ -4623,7 +4624,7 @@ def get_host(hostInfo, strict=False, event=None):
|
|||
raise koji.GenericError('invalid type for hostInfo: %s' % type(hostInfo))
|
||||
|
||||
data = {'hostInfo': hostInfo}
|
||||
fields, aliases = zip(*list(fields.items()))
|
||||
fields, aliases = zip(*fields.items())
|
||||
query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,
|
||||
joins=joins, clauses=clauses, values=data)
|
||||
result = query.executeOne()
|
||||
|
|
@ -4780,7 +4781,7 @@ def list_channels(hostID=None, event=None):
|
|||
"""List channels. If hostID is specified, only list
|
||||
channels associated with the host with that ID."""
|
||||
fields = {'channels.id': 'id', 'channels.name': 'name'}
|
||||
columns, aliases = zip(*list(fields.items()))
|
||||
columns, aliases = zip(*fields.items())
|
||||
if hostID:
|
||||
tables = ['host_channels']
|
||||
joins = ['channels ON channels.id = host_channels.channel_id']
|
||||
|
|
@ -5198,7 +5199,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None)
|
|||
|
||||
policy_data = {
|
||||
'package': build['name'],
|
||||
'buildroots': list(brmap.values()),
|
||||
'buildroots': to_list(brmap.values()),
|
||||
'import': True,
|
||||
'import_type': 'rpm',
|
||||
}
|
||||
|
|
@ -6882,7 +6883,7 @@ def query_history(tables=None, **kwargs):
|
|||
fields[r_test] = '_revoked_before_event'
|
||||
if skip:
|
||||
continue
|
||||
fields, aliases = zip(*list(fields.items()))
|
||||
fields, aliases = zip(*fields.items())
|
||||
query = QueryProcessor(columns=fields, aliases=aliases, tables=[table],
|
||||
joins=joins, clauses=clauses, values=data)
|
||||
ret[table] = query.iterate()
|
||||
|
|
@ -7021,7 +7022,7 @@ def build_references(build_id, limit=None):
|
|||
idx.setdefault(row['id'], row)
|
||||
if limit is not None and len(idx) > limit:
|
||||
break
|
||||
ret['rpms'] = list(idx.values())
|
||||
ret['rpms'] = to_list(idx.values())
|
||||
|
||||
ret['component_of'] = []
|
||||
# find images/archives that contain the build rpms
|
||||
|
|
@ -7052,7 +7053,7 @@ def build_references(build_id, limit=None):
|
|||
idx.setdefault(row['id'], row)
|
||||
if limit is not None and len(idx) > limit:
|
||||
break
|
||||
ret['archives'] = list(idx.values())
|
||||
ret['archives'] = to_list(idx.values())
|
||||
|
||||
# find images/archives that contain the build archives
|
||||
fields = ['archive_id']
|
||||
|
|
@ -7394,7 +7395,7 @@ def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_s
|
|||
from_tag = get_tag(from_id)
|
||||
for email in get_notification_recipients(build, from_tag['id'], state):
|
||||
recipients[email] = 1
|
||||
recipients_uniq = list(recipients.keys())
|
||||
recipients_uniq = to_list(recipients.keys())
|
||||
if len(recipients_uniq) > 0 and not (is_successful and ignore_success):
|
||||
task_id = make_task('tagNotification', [recipients_uniq, is_successful, tag_id, from_id, build_id, user_id, ignore_success, failure_msg])
|
||||
return task_id
|
||||
|
|
@ -7622,8 +7623,8 @@ class InsertProcessor(object):
|
|||
if not self.data and not self.rawdata:
|
||||
return "-- incomplete update: no assigns"
|
||||
parts = ['INSERT INTO %s ' % self.table]
|
||||
columns = list(self.data.keys())
|
||||
columns.extend(list(self.rawdata.keys()))
|
||||
columns = to_list(self.data.keys())
|
||||
columns.extend(to_list(self.rawdata.keys()))
|
||||
parts.append("(%s) " % ', '.join(columns))
|
||||
values = []
|
||||
for key in columns:
|
||||
|
|
@ -7666,7 +7667,7 @@ class InsertProcessor(object):
|
|||
del data['create_event']
|
||||
del data['creator_id']
|
||||
clauses = ["%s = %%(%s)s" % (k, k) for k in data]
|
||||
query = QueryProcessor(columns=list(data.keys()), tables=[self.table],
|
||||
query = QueryProcessor(columns=to_list(data.keys()), tables=[self.table],
|
||||
clauses=clauses, values=data)
|
||||
if query.execute():
|
||||
return True
|
||||
|
|
@ -10791,7 +10792,7 @@ class RootExports(object):
|
|||
'host_config.enabled': 'enabled',
|
||||
}
|
||||
tables = ['host_config']
|
||||
fields, aliases = zip(*list(fields.items()))
|
||||
fields, aliases = zip(*fields.items())
|
||||
query = QueryProcessor(columns=fields, aliases=aliases,
|
||||
tables=tables, joins=joins, clauses=clauses, values=locals())
|
||||
return query.execute()
|
||||
|
|
@ -11867,7 +11868,7 @@ class HostExports(object):
|
|||
scratchdir = koji.pathinfo.scratch()
|
||||
username = get_user(task.getOwner())['name']
|
||||
destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
|
||||
for reldir, files in list(results['files'].items()) + [('', results['logs'])]:
|
||||
for reldir, files in to_list(results['files'].items()) + [('', results['logs'])]:
|
||||
for filename in files:
|
||||
if reldir:
|
||||
relpath = os.path.join(reldir, filename)
|
||||
|
|
@ -11899,7 +11900,7 @@ class HostExports(object):
|
|||
scratchdir = koji.pathinfo.scratch()
|
||||
username = get_user(task.getOwner())['name']
|
||||
destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
|
||||
for relpath in list(results['output'].keys()) + results['logs']:
|
||||
for relpath in to_list(results['output'].keys()) + results['logs']:
|
||||
filename = os.path.join(koji.pathinfo.task(results['task_id']), relpath)
|
||||
dest = os.path.join(destdir, relpath)
|
||||
koji.ensuredir(os.path.dirname(dest))
|
||||
|
|
@ -12513,7 +12514,7 @@ class HostExports(object):
|
|||
logger.error("Current build is %s, new build is %s.", idx_build, archive['build_id'])
|
||||
maven_build_index[archive['group_id']][archive['artifact_id']][archive['version']] = archive['build_id']
|
||||
|
||||
ignore.extend(list(task_deps.values()))
|
||||
ignore.extend(task_deps.values())
|
||||
|
||||
SNAPSHOT_RE = re.compile(r'-\d{8}\.\d{6}-\d+')
|
||||
ignore_by_label = {}
|
||||
|
|
@ -12566,7 +12567,7 @@ class HostExports(object):
|
|||
if build_id:
|
||||
build = get_build(build_id)
|
||||
logger.error("g:a:v supplied by build %(nvr)s", build)
|
||||
logger.error("Build supplies %i archives: %r", len(build_archives), list(build_archives.keys()))
|
||||
logger.error("Build supplies %i archives: %r", len(build_archives), to_list(build_archives.keys()))
|
||||
if tag_archive:
|
||||
logger.error("Size mismatch, br: %i, db: %i", fileinfo['size'], tag_archive['size'])
|
||||
raise koji.BuildrootError('Unknown file in build environment: %s, size: %s' % \
|
||||
|
|
|
|||
|
|
@ -157,7 +157,7 @@ class HandlerRegistry(object):
|
|||
return args
|
||||
|
||||
def system_listMethods(self):
|
||||
return list(self.funcs.keys())
|
||||
return koji.util.to_list(self.funcs.keys())
|
||||
|
||||
def system_methodSignature(self, method):
|
||||
#it is not possible to autogenerate this data
|
||||
|
|
|
|||
|
|
@ -450,7 +450,7 @@ def decode_args2(args, names, strict=True):
|
|||
args, opts = decode_args(*args)
|
||||
if strict and len(names) < len(args):
|
||||
raise TypeError("Expecting at most %i arguments" % len(names))
|
||||
ret = dict(list(zip(names, args)))
|
||||
ret = dict(zip(names, args))
|
||||
ret.update(opts)
|
||||
return ret
|
||||
|
||||
|
|
@ -1166,7 +1166,7 @@ def parse_pom(path=None, contents=None):
|
|||
xml.sax.parseString(contents, handler)
|
||||
|
||||
for field in fields:
|
||||
if field not in list(values.keys()):
|
||||
if field not in util.to_list(values.keys()):
|
||||
raise GenericError('could not extract %s from POM: %s' % (field, (path or '<contents>')))
|
||||
return values
|
||||
|
||||
|
|
@ -2179,7 +2179,7 @@ class ClientSession(object):
|
|||
# decode and decrypt the login info
|
||||
sinfo_priv = base64.decodestring(sinfo_enc)
|
||||
sinfo_str = ac.rd_priv(sinfo_priv)
|
||||
sinfo = dict(list(zip(['session-id', 'session-key'], sinfo_str.split())))
|
||||
sinfo = dict(zip(['session-id', 'session-key'], sinfo_str.split()))
|
||||
|
||||
if not sinfo:
|
||||
self.logger.warn('No session info received')
|
||||
|
|
@ -3061,7 +3061,7 @@ def fixEncodingRecurse(value, fallback='iso8859-15', remove_nonprintable=False):
|
|||
if isinstance(value, tuple):
|
||||
return tuple([fixEncodingRecurse(x, fallback=fallback, remove_nonprintable=remove_nonprintable) for x in value])
|
||||
elif isinstance(value, list):
|
||||
return list([fixEncodingRecurse(x, fallback=fallback, remove_nonprintable=remove_nonprintable) for x in value])
|
||||
return [fixEncodingRecurse(x, fallback=fallback, remove_nonprintable=remove_nonprintable) for x in value]
|
||||
elif isinstance(value, dict):
|
||||
ret = {}
|
||||
for k in value:
|
||||
|
|
|
|||
11
koji/auth.py
11
koji/auth.py
|
|
@ -34,6 +34,7 @@ from .context import context
|
|||
from six.moves import range
|
||||
from six.moves import zip
|
||||
import six
|
||||
from .util import to_list
|
||||
|
||||
# 1 - load session if provided
|
||||
# - check uri for session id
|
||||
|
|
@ -108,7 +109,7 @@ class Session(object):
|
|||
'user_id': 'user_id',
|
||||
}
|
||||
# sort for stability (unittests)
|
||||
fields, aliases = list(zip(*list(sorted(list(fields.items()), key=lambda x: x[1]))))
|
||||
fields, aliases = zip(*sorted(fields.items(), key=lambda x: x[1]))
|
||||
q = """
|
||||
SELECT %s FROM sessions
|
||||
WHERE id = %%(id)i
|
||||
|
|
@ -120,7 +121,7 @@ class Session(object):
|
|||
row = c.fetchone()
|
||||
if not row:
|
||||
raise koji.AuthError('Invalid session or bad credentials')
|
||||
session_data = dict(list(zip(aliases, row)))
|
||||
session_data = dict(zip(aliases, row))
|
||||
#check for expiration
|
||||
if session_data['expired']:
|
||||
raise koji.AuthExpired('session "%i" has expired' % id)
|
||||
|
|
@ -158,7 +159,7 @@ class Session(object):
|
|||
fields = ('name', 'status', 'usertype')
|
||||
q = """SELECT %s FROM users WHERE id=%%(user_id)s""" % ','.join(fields)
|
||||
c.execute(q, session_data)
|
||||
user_data = dict(list(zip(fields, c.fetchone())))
|
||||
user_data = dict(zip(fields, c.fetchone()))
|
||||
|
||||
if user_data['status'] != koji.USER_STATUS['NORMAL']:
|
||||
raise koji.AuthError('logins by %s are not allowed' % user_data['name'])
|
||||
|
|
@ -537,7 +538,7 @@ class Session(object):
|
|||
def getPerms(self):
|
||||
if not self.logged_in:
|
||||
return []
|
||||
return list(self.perms.keys())
|
||||
return to_list(self.perms.keys())
|
||||
|
||||
def hasPerm(self, name):
|
||||
if not self.logged_in:
|
||||
|
|
@ -709,7 +710,7 @@ def get_user_data(user_id):
|
|||
row = c.fetchone()
|
||||
if not row:
|
||||
return None
|
||||
return dict(list(zip(fields, row)))
|
||||
return dict(zip(fields, row))
|
||||
|
||||
def login(*args, **opts):
|
||||
return context.session.login(*args, **opts)
|
||||
|
|
|
|||
|
|
@ -26,7 +26,8 @@ import koji
|
|||
import koji.tasks
|
||||
import koji.xmlrpcplus
|
||||
from koji.tasks import safe_rmtree
|
||||
from koji.util import md5_constructor, adler32_constructor, parseStatus, dslice
|
||||
from koji.util import md5_constructor, adler32_constructor, parseStatus, \
|
||||
dslice, to_list
|
||||
import os
|
||||
import signal
|
||||
import logging
|
||||
|
|
@ -584,7 +585,7 @@ class TaskManager(object):
|
|||
"""Attempt to shut down cleanly"""
|
||||
for task_id in self.pids.keys():
|
||||
self.cleanupTask(task_id)
|
||||
self.session.host.freeTasks(list(self.tasks.keys()))
|
||||
self.session.host.freeTasks(to_list(self.tasks.keys()))
|
||||
self.session.host.updateHost(task_load=0.0, ready=False)
|
||||
|
||||
def updateBuildroots(self, nolocal=False):
|
||||
|
|
@ -615,7 +616,7 @@ class TaskManager(object):
|
|||
#task not running - expire the buildroot
|
||||
#TODO - consider recycling hooks here (with strong sanity checks)
|
||||
self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br)
|
||||
self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, list(self.tasks.keys())))
|
||||
self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, to_list(self.tasks.keys())))
|
||||
self.session.host.setBuildRootState(id, st_expired)
|
||||
continue
|
||||
if nolocal:
|
||||
|
|
|
|||
|
|
@ -24,6 +24,8 @@ import logging
|
|||
import koji
|
||||
import six
|
||||
|
||||
from koji.util import to_list
|
||||
|
||||
|
||||
class BaseSimpleTest(object):
|
||||
"""Abstract base class for simple tests"""
|
||||
|
|
@ -294,7 +296,7 @@ class SimpleRuleSet(object):
|
|||
index[name] = 1
|
||||
index = {}
|
||||
_recurse(self.ruleset, index)
|
||||
return list(index.keys())
|
||||
return to_list(index.keys())
|
||||
|
||||
def _apply(self, rules, data, top=False):
|
||||
for tests, negate, action in rules:
|
||||
|
|
|
|||
16
koji/util.py
16
koji/util.py
|
|
@ -182,7 +182,7 @@ class DataWalker(object):
|
|||
if isinstance(value, tuple):
|
||||
value = tuple([self._walk(x) for x in value])
|
||||
elif isinstance(value, list):
|
||||
value = list([self._walk(x) for x in value])
|
||||
value = [self._walk(x) for x in value]
|
||||
elif isinstance(value, dict):
|
||||
ret = {}
|
||||
for k in value:
|
||||
|
|
@ -730,3 +730,17 @@ def parse_maven_chain(confs, scratch=False):
|
|||
except ValueError:
|
||||
raise ValueError('No possible build order, missing/circular dependencies')
|
||||
return builds
|
||||
|
||||
def to_list(l):
|
||||
"""
|
||||
Helper function for py2/py3 compatibility used e.g. in
|
||||
list(dict.keys())
|
||||
|
||||
Don't use it for structures like list(zip(x, y)), where six.moves.zip is
|
||||
used, so it is always an iterator.
|
||||
"""
|
||||
|
||||
if isinstance(l, list):
|
||||
return l
|
||||
else:
|
||||
return list(l)
|
||||
|
|
|
|||
|
|
@ -7,9 +7,10 @@
|
|||
|
||||
from __future__ import absolute_import
|
||||
from koji.plugin import callbacks, callback, ignore_error
|
||||
from koji.util import to_list
|
||||
import logging
|
||||
|
||||
@callback(*list(callbacks.keys()))
|
||||
@callback(*to_list(callbacks.keys()))
|
||||
@ignore_error
|
||||
def echo(cbtype, *args, **kws):
|
||||
logging.getLogger('koji.plugin.echo').info('Called the %s callback, args: %s; kws: %s',
|
||||
|
|
|
|||
|
|
@ -968,6 +968,19 @@ class MavenUtilTestCase(unittest.TestCase):
|
|||
self.assertNotEqual(copy.digest(), chksum.digest())
|
||||
self.assertEqual(614401368, chksum.digest())
|
||||
|
||||
def test_to_list(self):
|
||||
l = [1, 2, 3]
|
||||
|
||||
r = koji.util.to_list(l)
|
||||
self.assertEqual(l, r)
|
||||
|
||||
it = iter(l)
|
||||
r = koji.util.to_list(it)
|
||||
self.assertEqual(l, r)
|
||||
|
||||
with self.assertRaises(TypeError):
|
||||
koji.util.to_list(1)
|
||||
|
||||
|
||||
class TestRmtree(unittest.TestCase):
|
||||
@patch('koji.util._rmtree')
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ try:
|
|||
except ImportError: # pragma: no cover
|
||||
krbV = None
|
||||
import koji
|
||||
from koji.util import LazyDict, LazyValue
|
||||
from koji.util import LazyDict, LazyValue, to_list
|
||||
import koji.policy
|
||||
import six.moves.configparser
|
||||
from email.MIMEText import MIMEText
|
||||
|
|
@ -537,7 +537,7 @@ def handle_trash():
|
|||
by_owner = {}
|
||||
for binfo in to_trash:
|
||||
by_owner.setdefault(binfo['owner_name'], []).append(binfo)
|
||||
owners = list(by_owner.keys())
|
||||
owners = to_list(by_owner.keys())
|
||||
owners.sort()
|
||||
for owner_name in owners:
|
||||
builds = [(b['nvr'], b) for b in by_owner[owner_name]]
|
||||
|
|
@ -772,7 +772,7 @@ def get_build_sigs(build, cache=False):
|
|||
for sig in sigs:
|
||||
if sig['sigkey']:
|
||||
keys.setdefault(sig['sigkey'], 1)
|
||||
ret = build_sig_cache[build] = list(keys.keys())
|
||||
ret = build_sig_cache[build] = to_list(keys.keys())
|
||||
return ret
|
||||
|
||||
def handle_prune():
|
||||
|
|
@ -832,7 +832,7 @@ def handle_prune():
|
|||
pkghist.setdefault(h['name'] + '-' + h['version'], []).append(h)
|
||||
else:
|
||||
pkghist.setdefault(h['name'], []).append(h)
|
||||
pkgs = list(pkghist.keys())
|
||||
pkgs = to_list(pkghist.keys())
|
||||
pkgs.sort()
|
||||
for pkg in pkgs:
|
||||
if not check_package(pkg):
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ try:
|
|||
except ImportError: # pragma: no cover
|
||||
krbV = None
|
||||
import koji
|
||||
from koji.util import import to_list
|
||||
import six.moves.configparser
|
||||
import fnmatch
|
||||
import optparse
|
||||
|
|
@ -498,7 +499,7 @@ class TrackedBuild(object):
|
|||
log("Warning: some rpms for %s lacked buildroots:" % self.nvr)
|
||||
for rinfo in bad:
|
||||
log(" %(name)s-%(version)s-%(release)s.%(arch)s" % rinfo)
|
||||
return list(brs.keys())
|
||||
return to_list(brs.keys())
|
||||
|
||||
def getDeps(self):
|
||||
buildroots = self.getBuildroots()
|
||||
|
|
@ -557,12 +558,12 @@ class TrackedBuild(object):
|
|||
#each buildroot had this as a base package
|
||||
base.append(name)
|
||||
if len(tags) > 1:
|
||||
log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, list(tags.keys())))
|
||||
log("Warning: found multiple buildroot tags for %s: %s" % (self.nvr, to_list(tags.keys())))
|
||||
counts = [(n, tag) for tag, n in six.iteritems(tags)]
|
||||
sort(counts)
|
||||
tag = counts[-1][1]
|
||||
else:
|
||||
tag = list(tags.keys())[0]
|
||||
tag = to_list(tags.keys())[0]
|
||||
# due bugs in used tools mainline koji instance could store empty buildroot infos for builds
|
||||
if len(builds) == 0:
|
||||
self.setState("noroot")
|
||||
|
|
@ -1122,7 +1123,7 @@ class BuildTracker(object):
|
|||
log("-- %s --" % time.asctime())
|
||||
self.report_brief()
|
||||
for state in ('broken', 'noroot', 'blocked'):
|
||||
builds = list(self.state_idx[state].values())
|
||||
builds = to_list(self.state_idx[state].values())
|
||||
not_replaced = [b for b in builds if not b.substitute]
|
||||
n_replaced = len(builds) - len(not_replaced)
|
||||
log("%s: %i (+%i replaced)" % (state, len(not_replaced), n_replaced))
|
||||
|
|
@ -1163,8 +1164,7 @@ class BuildTracker(object):
|
|||
|
||||
def report_brief(self):
|
||||
N = len(self.builds)
|
||||
states = list(self.state_idx.keys())
|
||||
states.sort()
|
||||
states = sorted(self.state_idx.keys())
|
||||
parts = ["%s: %i" % (s, len(self.state_idx[s])) for s in states]
|
||||
parts.append("total: %i" % N)
|
||||
log (' '.join(parts))
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ from __future__ import division
|
|||
import sys
|
||||
import os
|
||||
import koji
|
||||
from koji.util import rmtree, parseStatus
|
||||
from koji.util import rmtree, parseStatus, to_list
|
||||
from optparse import OptionParser
|
||||
from six.moves.configparser import ConfigParser
|
||||
import errno
|
||||
|
|
@ -86,7 +86,7 @@ class ManagedRepo(object):
|
|||
tags = {self.tag_id : 1}
|
||||
for x in order:
|
||||
tags[x['parent_id']] = 1
|
||||
self.taglist = list(tags.keys())
|
||||
self.taglist = to_list(tags.keys())
|
||||
|
||||
@property
|
||||
def dist(self):
|
||||
|
|
@ -315,7 +315,7 @@ class RepoManager(object):
|
|||
def checkCurrentRepos(self):
|
||||
"""Determine which repos are current"""
|
||||
to_check = []
|
||||
repo_ids = list(self.repos.keys())
|
||||
repo_ids = to_list(self.repos.keys())
|
||||
for repo_id in repo_ids:
|
||||
repo = self.repos.get(repo_id)
|
||||
if repo is None:
|
||||
|
|
@ -661,8 +661,7 @@ class RepoManager(object):
|
|||
|
||||
debuginfo_pat = self.options.debuginfo_tags.split()
|
||||
src_pat = self.options.source_tags.split()
|
||||
order = list(self.needed_tags.values())
|
||||
order.sort(key=lambda t:t['score'])
|
||||
order = sorted(self.needed_tags.values(), key=lambda t: t['score'])
|
||||
for tag in order:
|
||||
if running_tasks >= self.options.max_repo_tasks:
|
||||
self.logger.info("Maximum number of repo tasks reached")
|
||||
|
|
|
|||
|
|
@ -152,7 +152,7 @@ def get_options():
|
|||
defaults[name] = config.getboolean('kojivmd', name)
|
||||
elif name in ['plugin', 'plugins']:
|
||||
defaults['plugin'] = value.split()
|
||||
elif name in list(defaults.keys()):
|
||||
elif name in defaults.keys():
|
||||
defaults[name] = value
|
||||
else:
|
||||
quit("unknown config option: %s" % name)
|
||||
|
|
@ -316,7 +316,7 @@ class WinBuildTask(MultiPlatformTask):
|
|||
strict=False)
|
||||
# winspec and patches options are urls
|
||||
# verify the urls before passing them to the VM
|
||||
for url in [source_url] + list(subopts.values()):
|
||||
for url in [source_url] + koji.util.to_list(subopts.values()):
|
||||
scm = SCM(url)
|
||||
scm.assert_allowed(self.options.allowed_scms)
|
||||
|
||||
|
|
|
|||
|
|
@ -33,6 +33,7 @@ import logging
|
|||
import time
|
||||
import koji
|
||||
import kojiweb.util
|
||||
from koji.util import to_list
|
||||
from koji.server import ServerRedirect
|
||||
from kojiweb.util import _initValues
|
||||
from kojiweb.util import _genHTML
|
||||
|
|
@ -2124,7 +2125,7 @@ def buildsbytarget(environ, days='7', start=None, order='-builds'):
|
|||
if builds > maxBuilds:
|
||||
maxBuilds = builds
|
||||
|
||||
kojiweb.util.paginateList(values, list(targets.values()), start, 'targets', 'target', order)
|
||||
kojiweb.util.paginateList(values, to_list(targets.values()), start, 'targets', 'target', order)
|
||||
|
||||
values['order'] = order
|
||||
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ import traceback
|
|||
|
||||
from six.moves.configparser import RawConfigParser
|
||||
from koji.server import ServerError, ServerRedirect
|
||||
from koji.util import dslice
|
||||
from koji.util import dslice, to_list
|
||||
import six
|
||||
|
||||
|
||||
|
|
@ -401,7 +401,7 @@ class Dispatcher(object):
|
|||
if isinstance(result, six.string_types):
|
||||
headers.setdefault('content-length', ('Content-Length', str(len(result))))
|
||||
headers.setdefault('content-type', ('Content-Type', 'text/html'))
|
||||
headers = list(headers.values()) + extra
|
||||
headers = to_list(headers.values()) + extra
|
||||
self.logger.debug("Headers:")
|
||||
self.logger.debug(koji.util.LazyString(pprint.pformat, [headers]))
|
||||
start_response(status, headers)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue