flake8: apply all remaining E7 rules

Yuming Zhu 2020-02-26 04:49:48 +08:00
parent 6ed30954b2
commit f1ba6cefd7
24 changed files with 112 additions and 127 deletions

View file

@ -1,6 +1,6 @@
[flake8]
select = I,C,F,E1,E2,E3,E4,E502,E70,E71
ignore = E266
select = I,C,F,E1,E2,E3,E4,E502,E7
ignore = E266,E731
exclude =
.git,
__pycache__,
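
With select widened from E70,E71 to the full E7 family and E731 joining E266 on the ignore list, the hunks below apply the handful of mechanical fixes this enables: bare except clauses become except BaseException (E722), type comparisons become isinstance() checks (E721), a few assigned lambdas become defs (E731, even though that code is now on the ignore list), ambiguous single-letter names are renamed (E741), and several .sort() calls are folded into sorted(). A minimal sketch of the corrected patterns, using hypothetical names rather than code from any file in this commit:

# Illustrative sketch only -- hypothetical names, not taken from the koji tree.
import sys
import traceback


def describe(value):
    # E721: use isinstance() instead of comparing type(value) == str.
    if isinstance(value, str):
        return value
    return repr(value)


def read_first_line(path):
    try:
        with open(path) as fo:
            return fo.readline()
    except BaseException:
        # E722: a bare "except:" is rejected; "except BaseException:" keeps the
        # same catch-everything behaviour while satisfying the checker.
        traceback.print_exc(file=sys.stderr)
        return None


# E731 flags an assigned lambda such as "sortkey = lambda x: x['name']";
# a def is the usual replacement.
def sortkey(entry):
    return entry['name']


# E741: ambiguous single-letter names ("l") are renamed, e.g. to "pos".
pos = 0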

View file

@ -150,7 +150,7 @@ def main(options, session):
tm.restart_pending = True
signal.signal(signal.SIGTERM, shutdown)
signal.signal(signal.SIGUSR1, restart)
while 1:
while True:
try:
taken = False
tm.updateBuildroots()
@ -167,7 +167,7 @@ def main(options, session):
break
except koji.RetryError:
raise
except:
except BaseException:
# XXX - this is a little extreme
# log the exception and continue
logger.error(''.join(traceback.format_exception(*sys.exc_info())))
@ -485,7 +485,7 @@ class BuildRoot(object):
fd.close()
fd = open(fpath, 'rb')
logs[fname] = (fd, stat_info.st_ino, stat_info.st_size or size, fpath)
except:
except BaseException:
self.logger.error("Error reading mock log: %s", fpath)
self.logger.error(''.join(traceback.format_exception(*sys.exc_info())))
continue
@ -530,7 +530,7 @@ class BuildRoot(object):
os.setregid(gid, gid)
os.setreuid(uid, uid)
os.execvp(cmd[0], cmd)
except:
except BaseException:
# diediedie
print("Failed to exec mock")
print(''.join(traceback.format_exception(*sys.exc_info())))
@ -792,7 +792,7 @@ class BuildRoot(object):
with koji.openRemoteFile(repomdpath, **opts) as fo:
try:
repodata = repoMDObject.RepoMD('ourrepo', fo)
except:
except BaseException:
raise koji.BuildError("Unable to parse repomd.xml file for %s" % os.path.join(repodir, self.br_arch))
data = repodata.getData('origin')
pkgorigins = data.location[1]
@ -1050,7 +1050,7 @@ class BuildTask(BaseTaskHandler):
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
except:
except BaseException:
if not self.opts.get('scratch'):
# scratch builds do not get imported
self.session.host.failBuild(self.id, build_id)
@ -1528,7 +1528,7 @@ class MavenTask(MultiPlatformTask):
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
except:
except BaseException:
if not self.opts.get('scratch'):
# scratch builds do not get imported
self.session.host.failBuild(self.id, self.build_id)
@ -2030,7 +2030,7 @@ class WrapperRPMTask(BaseBuildTask):
buildroot.build(srpm)
except (SystemExit, ServerExit, KeyboardInterrupt):
raise
except:
except BaseException:
if self.new_build_id:
self.session.host.failBuild(self.id, self.new_build_id)
raise
@ -2074,7 +2074,7 @@ class WrapperRPMTask(BaseBuildTask):
self.uploadFile(os.path.join(resultdir, rpm_fn))
except (SystemExit, ServerExit, KeyboardInterrupt):
raise
except:
except BaseException:
if self.new_build_id:
self.session.host.failBuild(self.id, self.new_build_id)
raise
@ -2101,7 +2101,7 @@ class WrapperRPMTask(BaseBuildTask):
relsrpm, relrpms, brmap, {'noarch': rellogs})
except (SystemExit, ServerExit, KeyboardInterrupt):
raise
except:
except BaseException:
self.session.host.failBuild(self.id, self.new_build_id)
raise
if not opts.get('skip_tag'):
@ -2272,7 +2272,7 @@ class ChainMavenTask(MultiPlatformTask):
for key in akeys:
aval = a.get(key)
bval = b.get(key)
if type(aval) != type(bval):
if not isinstance(aval, type(bval)):
return False
if isinstance(aval, dict):
if not self.dicts_equal(aval, bval):
@ -2487,7 +2487,7 @@ class BuildBaseImageTask(BuildImageTask):
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
except:
except BaseException:
if not opts.get('scratch'):
# scratch builds do not get imported
if bld_info:
@ -2572,7 +2572,7 @@ class BuildApplianceTask(BuildImageTask):
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
except:
except BaseException:
if not opts.get('scratch'):
# scratch builds do not get imported
if bld_info:
@ -2657,7 +2657,7 @@ class BuildLiveCDTask(BuildImageTask):
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
except:
except BaseException:
if not opts.get('scratch'):
# scratch builds do not get imported
if bld_info:
@ -2794,7 +2794,7 @@ class BuildLiveMediaTask(BuildImageTask):
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
except:
except BaseException:
if not opts.get('scratch'):
# scratch builds do not get imported
if bld_info:
@ -4555,7 +4555,7 @@ class BuildIndirectionImageTask(OzImageTask):
return self._do_indirection(opts, base_factory_image, utility_factory_image,
indirection_template, tlog, ozlog, fhandler,
bld_info, target_info, bd)
except:
except BaseException:
if not opts.get('scratch'):
# scratch builds do not get imported
if bld_info:
@ -5013,7 +5013,7 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
result = None
try:
result = self.session.getTaskResult(task_id)
except:
except BaseException:
excClass, result = sys.exc_info()[:2]
if hasattr(result, 'faultString'):
result = result.faultString
@ -5114,8 +5114,7 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
for taskinfo in task_data.values():
for srpmfile in taskinfo['srpms']:
srpms.append(srpmfile)
srpms = self.uniq(srpms)
srpms.sort()
srpms = sorted(self.uniq(srpms))
if srpms:
output = "SRPMS:\r\n"
@ -5182,9 +5181,8 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
def uniq(self, items):
"""Remove duplicates from the list of items, and sort the list."""
m = dict(zip(items, [1] * len(items)))
l = to_list(m.keys())
l.sort()
return l
s = sorted(to_list(m.keys()))
return s
class NewRepoTask(BaseTaskHandler):

View file

@ -335,7 +335,7 @@ if __name__ == "__main__":
rv = 0
except (KeyboardInterrupt, SystemExit):
rv = 1
except:
except BaseException:
if options.debug:
raise
else:
@ -344,6 +344,6 @@ if __name__ == "__main__":
logger.error("%s: %s" % (exctype.__name__, value))
try:
session.logout()
except:
except BaseException:
pass
sys.exit(rv)

View file

@ -1162,8 +1162,7 @@ def handle_import(goptions, session, args):
nvr = "%(name)s-%(version)s-%(release)s" % koji.parse_NVRA(data['sourcerpm'])
to_import.setdefault(nvr, []).append((path, data))
builds_missing = False
nvrs = to_list(to_import.keys())
nvrs.sort()
nvrs = sorted(to_list(to_import.keys()))
for nvr in nvrs:
to_import[nvr].sort()
for path, data in to_import[nvr]:
@ -2358,8 +2357,7 @@ def anon_handle_latest_build(goptions, session, args):
print("%s %s %s" % ("-" * 40, "-" * 20, "-" * 16))
options.quiet = True
output = [fmt % x for x in data]
output.sort()
output = sorted([fmt % x for x in data])
for line in output:
print(line)
@ -2379,7 +2377,7 @@ def anon_handle_list_api(goptions, session, args):
# older servers may not provide argdesc
expanded = []
for arg in x['args']:
if type(arg) is str:
if isinstance(arg, str):
expanded.append(arg)
else:
expanded.append('%s=%s' % (arg[0], arg[1]))
@ -2493,8 +2491,7 @@ def anon_handle_list_tagged(goptions, session, args):
print("%-40s %-20s %s" % ("Build", "Tag", "Built by"))
print("%s %s %s" % ("-" * 40, "-" * 20, "-" * 16))
output = [fmt % x for x in data]
output.sort()
output = sorted([fmt % x for x in data])
for line in output:
print(line)
@ -2519,8 +2516,7 @@ def anon_handle_list_buildroot(goptions, session, args):
data = session.listRPMs(**opts)
fmt = "%(nvr)s.%(arch)s"
order = [(fmt % x, x) for x in data]
order.sort()
order = sorted([(fmt % x, x) for x in data])
for nvra, rinfo in order:
if options.verbose and rinfo.get('is_update'):
print(nvra, "[update]")
@ -2573,8 +2569,7 @@ def anon_handle_list_untagged(goptions, session, args):
if options.show_references:
fmt = fmt + " %(refs)s"
output = [fmt % x for x in data]
output.sort()
output = sorted([fmt % x for x in data])
for line in output:
print(line)
@ -2612,8 +2607,7 @@ def anon_handle_list_groups(goptions, session, args):
opts['event'] = event['id']
event['timestr'] = time.asctime(time.localtime(event['ts']))
print("Querying at event %(id)i (%(timestr)s)" % event)
tmp_list = [(x['name'], x) for x in session.getTagGroups(args[0], **opts)]
tmp_list.sort()
tmp_list = sorted([(x['name'], x) for x in session.getTagGroups(args[0], **opts)])
groups = [x[1] for x in tmp_list]
tags_cache = {}
@ -2631,8 +2625,7 @@ def anon_handle_list_groups(goptions, session, args):
if len(args) > 1 and group['name'] != args[1]:
continue
print("%s [%s]" % (group['name'], get_cached_tag(group['tag_id'])))
groups = [(x['name'], x) for x in group['grouplist']]
groups.sort()
groups = sorted([(x['name'], x) for x in group['grouplist']])
for x in [x[1] for x in groups]:
x['tag_name'] = get_cached_tag(x['tag_id'])
print_group_list_req_group(x)
@ -2794,8 +2787,7 @@ def anon_handle_list_hosts(goptions, session, args):
opts['ready'] = options.ready
if options.enabled is not None:
opts['enabled'] = options.enabled
tmp_list = [(x['name'], x) for x in session.listHosts(**opts)]
tmp_list.sort()
tmp_list = sorted([(x['name'], x) for x in session.listHosts(**opts)])
hosts = [x[1] for x in tmp_list]
def yesno(x):
@ -3011,7 +3003,7 @@ def anon_handle_list_builds(goptions, session, args):
dt = dateutil.parser.parse(val)
ts = time.mktime(dt.timetuple())
setattr(options, opt, ts)
except:
except BaseException:
parser.error(_("Invalid time specification: %s") % val)
if options.before:
opts['completeBefore'] = getattr(options, 'before')
@ -3879,8 +3871,7 @@ def anon_handle_list_targets(goptions, session, args):
if not options.quiet:
print("%-30s %-30s %-30s" % ('Name', 'Buildroot', 'Destination'))
print("-" * 93)
tmp_list = [(x['name'], x) for x in session.getBuildTargets(options.name)]
tmp_list.sort()
tmp_list = sorted([(x['name'], x) for x in session.getBuildTargets(options.name)])
targets = [x[1] for x in tmp_list]
for target in targets:
print(fmt % target)
@ -4278,8 +4269,7 @@ def _print_histline(entry, **kwargs):
else:
return '%s.name' % key
if edit:
keys = to_list(x.keys())
keys.sort()
keys = sorted(to_list(x.keys()))
y = other[-1]
for key in keys:
if key in hidden_fields:
@ -4293,8 +4283,7 @@ def _print_histline(entry, **kwargs):
continue
print(" %s: %s -> %s" % (key, x[key], y[key]))
elif create and options.verbose and table != 'tag_listing':
keys = to_list(x.keys())
keys.sort()
keys = sorted(to_list(x.keys()))
# the table keys have already been represented in the base format string
also_hidden = list(_table_keys[table])
also_hidden.extend([get_nkey(k) for k in also_hidden])
@ -4385,7 +4374,7 @@ def anon_handle_list_history(goptions, session, args):
dt = dateutil.parser.parse(val)
ts = time.mktime(dt.timetuple())
setattr(options, opt, ts)
except:
except BaseException:
parser.error(_("Invalid time specification: %s") % val)
for opt in ('package', 'tag', 'build', 'editor', 'user', 'permission',
'cg', 'external_repo', 'build_target', 'group', 'before',
@ -4768,8 +4757,7 @@ def anon_handle_taginfo(goptions, session, args):
print('')
print("Tag: %(name)s [%(id)d]" % info)
print("Arches: %(arches)s" % info)
group_list = [x['name'] for x in session.getTagGroups(info['id'], **event_opts)]
group_list.sort()
group_list = sorted([x['name'] for x in session.getTagGroups(info['id'], **event_opts)])
print("Groups: " + ', '.join(group_list))
if info.get('locked'):
print('LOCKED')
@ -6811,8 +6799,10 @@ def anon_handle_download_task(options, session, args):
base_task = session.getTaskInfo(base_task_id)
if not base_task:
error(_('No such task: #%i') % base_task_id)
def check_downloadable(task):
return task["method"] == "buildArch"
check_downloadable = lambda task: task["method"] == "buildArch"
downloadable_tasks = []
if check_downloadable(base_task):

View file

@ -498,14 +498,14 @@ def download_file(url, relpath, quiet=False, noprogress=False, size=None, num=No
response.raise_for_status()
length = int(response.headers.get('content-length') or 0)
with open(relpath, 'wb') as f:
l = 0
pos = 0
for chunk in response.iter_content(chunk_size=65536):
l += len(chunk)
pos += len(chunk)
f.write(chunk)
if not (quiet or noprogress):
_download_progress(length, l)
_download_progress(length, pos)
if not length and not (quiet or noprogress):
_download_progress(l, l)
_download_progress(pos, pos)
if not (quiet or noprogress):
print('')

View file

@ -2634,7 +2634,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None, with_separa
relpath = os.path.relpath(srcdir, dest_parent)
try:
os.symlink(relpath, destlink)
except:
except BaseException:
log_error('Error linking %s to %s' % (destlink, relpath))
for artifact_dir, artifacts in six.iteritems(artifact_dirs):
_write_maven_repo_metadata(artifact_dir, artifacts)
@ -4934,7 +4934,7 @@ def list_task_output(taskID, stat=False, all_volumes=False, strict=False):
# raise error if task doesn't exist
try:
Task(taskID).getInfo(strict=True)
except:
except BaseException:
raise koji.GenericError("Task doesn't exist")
if stat or all_volumes:
@ -7288,10 +7288,10 @@ def check_rpm_sig(an_rpm, sigkey, sighdr):
ts.setVSFlags(0) # full verify
with open(temp, 'rb') as fo:
hdr = ts.hdrFromFdno(fo.fileno())
except:
except BaseException:
try:
os.unlink(temp)
except:
except BaseException:
pass
raise
raw_key = koji.get_header_field(hdr, 'siggpg')

View file

@ -240,7 +240,7 @@ class ModXMLRPCRequestHandler(object):
except Fault as fault:
self.traceback = True
response = dumps(fault, marshaller=Marshaller)
except:
except BaseException:
self.traceback = True
# report exception back to server
e_class, e = sys.exc_info()[:2]

View file

@ -303,10 +303,10 @@ class GenericError(Exception):
def __str__(self):
try:
return str(self.args[0]['args'][0])
except:
except BaseException:
try:
return str(self.args[0])
except:
except BaseException:
return str(self.__dict__)
# END kojikamid dup #
@ -1727,7 +1727,7 @@ def format_exc_plus():
# COULD cause any exception, so we MUST catch any...:
try:
rv += "%s\n" % value
except:
except BaseException:
rv += "<ERROR WHILE PRINTING VALUE>\n"
return rv
@ -2265,7 +2265,7 @@ class VirtualMethod(object):
self.__session._apidoc = dict(
[(f["name"], f) for f in self.__func("_listapi", [], {})]
)
except:
except BaseException:
self.__session._apidoc = {}
funcdoc = self.__session._apidoc.get(self.__name)
@ -2651,7 +2651,7 @@ class ClientSession(object):
if self.__dict__:
try:
self.logout()
except:
except BaseException:
pass
def callMethod(self, name, *args, **opts):
@ -3283,7 +3283,7 @@ class DBHandler(logging.Handler):
# self.cnx.commit()
# XXX - committing here is most likely wrong, but we need to set commit_pending or something
# ...and this is really the wrong place for that
except:
except BaseException:
self.handleError(record)

View file

@ -238,7 +238,7 @@ def _try_read_cpuinfo():
mounted). """
try:
return open("/proc/cpuinfo", "r")
except:
except BaseException:
return []
@ -248,7 +248,7 @@ def _parse_auxv():
# In case we can't open and read /proc/self/auxv, just return
try:
data = open("/proc/self/auxv", "rb").read()
except:
except BaseException:
return
# Define values from /usr/include/elf.h
@ -323,7 +323,7 @@ def getCanonPPCArch(arch):
try:
if platform.startswith("power") and int(platform[5:].rstrip('+')) >= 7:
return "ppc64p7"
except:
except BaseException:
pass
if machine is None:
@ -388,7 +388,7 @@ def getCanonArch(skipRpmPlatform=0):
f.close()
(arch, vendor, opersys) = line.split("-", 2)
return arch
except:
except BaseException:
pass
arch = os.uname()[4]

View file

@ -95,7 +95,7 @@ class Session(object):
raise koji.AuthError('%s not specified in session args' % field)
try:
callnum = args['callnum'][0]
except:
except BaseException:
callnum = None
# lookup the session
c = context.cnx.cursor()

View file

@ -138,7 +138,7 @@ def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, a
if env:
environ.update(env)
os.execvpe(path, args, environ)
except:
except BaseException:
msg = ''.join(traceback.format_exception(*sys.exc_info()))
if fd:
try:
@ -147,7 +147,7 @@ def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, a
else:
os.write(fd, msg)
os.close(fd)
except:
except BaseException:
pass
print(msg)
os._exit(1)
@ -166,7 +166,7 @@ def log_output(session, path, args, outfile, uploadpath, cwd=None, logerror=0, a
except IOError:
# will happen if the forked process has not created the logfile yet
continue
except:
except BaseException:
print('Error reading log file: %s' % outfile)
print(''.join(traceback.format_exception(*sys.exc_info())))
@ -1133,7 +1133,7 @@ class TaskManager(object):
try:
self.session.logoutChild(session_id)
del self.subsessions[task_id]
except:
except BaseException:
# not much we can do about it
pass
if wait:
@ -1233,7 +1233,7 @@ class TaskManager(object):
valid_host = handler.checkHost(self.hostdata)
except (SystemExit, KeyboardInterrupt):
raise
except:
except BaseException:
valid_host = False
self.logger.warn('Error during host check')
self.logger.warn(''.join(traceback.format_exception(*sys.exc_info())))
@ -1316,7 +1316,7 @@ class TaskManager(object):
# freeing this task will allow the pending restart to take effect
self.session.host.freeTasks([handler.id])
return
except:
except BaseException:
tb = ''.join(traceback.format_exception(*sys.exc_info()))
self.logger.warn("TRACEBACK: %s" % tb)
# report exception back to server

View file

@ -105,7 +105,8 @@ class CursorWrapper:
if hasattr(self.cursor, "mogrify"):
quote = self.cursor.mogrify
else:
quote = lambda a, b: a % b
def quote(a, b):
return a % b
try:
return quote(operation, parameters)
except Exception:

View file

@ -199,7 +199,7 @@ def run_callbacks(cbtype, *args, **kws):
cb_args, cb_kwargs = _fix_cb_args(func, args, kws, cache)
try:
func(cbtype, *cb_args, **cb_kwargs)
except:
except BaseException:
msg = 'Error running %s callback from %s' % (cbtype, func.__module__)
if getattr(func, 'failure_is_an_option', False):
logging.getLogger('koji.plugin').warn(msg, exc_info=True)

View file

@ -71,7 +71,7 @@ class Rpmdiff:
except AttributeError:
try:
PREREQ_FLAG = rpm.RPMSENSE_PREREQ
except:
except BaseException:
# (proyvind): This seems ugly, but then again so does
# this whole check as well.
PREREQ_FLAG = False
@ -119,9 +119,8 @@ class Rpmdiff:
old_files_dict = self.__fileIteratorToDict(old.fiFromHeader())
new_files_dict = self.__fileIteratorToDict(new.fiFromHeader())
files = list(set(itertools.chain(six.iterkeys(old_files_dict),
six.iterkeys(new_files_dict))))
files.sort()
files = sorted(set(itertools.chain(six.iterkeys(old_files_dict),
six.iterkeys(new_files_dict))))
self.old_data['files'] = old_files_dict
self.new_data['files'] = new_files_dict

View file

@ -79,7 +79,7 @@ def safe_rmtree(path, unmount=False, strict=True):
logger.debug("Removing: %s" % path)
try:
os.remove(path)
except:
except BaseException:
if strict:
raise
else:

View file

@ -449,7 +449,7 @@ Build: %%(name)s-%%(version)s-%%(release)s
s.login(options.smtp_user, options.smtp_pass)
s.sendmail(msg['From'], msg['To'], msg.as_string())
s.quit()
except:
except BaseException:
print("FAILED: Sending warning notice to %s" % msg['To'])
@ -571,12 +571,10 @@ def handle_trash():
by_owner = {}
for binfo in to_trash:
by_owner.setdefault(binfo['owner_name'], []).append(binfo)
owners = to_list(by_owner.keys())
owners.sort()
owners = sorted(to_list(by_owner.keys()))
mcall = koji.MultiCallSession(session, batch=1000)
for owner_name in owners:
builds = [(b['nvr'], b) for b in by_owner[owner_name]]
builds.sort()
builds = sorted([(b['nvr'], b) for b in by_owner[owner_name]])
send_warning_notice(owner_name, [x[1] for x in builds])
for nvr, binfo in builds:
if options.test:
@ -646,8 +644,7 @@ def handle_delete(just_salvage=False):
"""
print("Getting list of builds in trash...")
trashcan_tag = options.trashcan_tag
trash = [(b['nvr'], b) for b in session.listTagged(trashcan_tag)]
trash.sort()
trash = sorted([(b['nvr'], b) for b in session.listTagged(trashcan_tag)])
print("...got %i builds" % len(trash))
# XXX - it would be better if there were more appropriate server calls for this
grace_period = options.grace_period
@ -902,8 +899,7 @@ def handle_prune():
pkghist.setdefault(h['name'] + '-' + h['version'], []).append(h)
else:
pkghist.setdefault(h['name'], []).append(h)
pkgs = to_list(pkghist.keys())
pkgs.sort()
pkgs = sorted(to_list(pkghist.keys()))
for pkg in pkgs:
if not check_package(pkg):
# if options.debug:
@ -1010,7 +1006,7 @@ if __name__ == "__main__":
if options.exit_on_lock:
try:
session.logout()
except:
except BaseException:
pass
sys.exit(1)
os.close(lock_fd)
@ -1041,7 +1037,7 @@ if __name__ == "__main__":
# print("%s: %s" % (exctype, value))
try:
session.logout()
except:
except BaseException:
pass
if not options.skip_main:
sys.exit(rv)

View file

@ -450,7 +450,7 @@ class TrackedBuild(object):
# XXX - Move SCM class out of kojid and use it to check for scm url
if src.startswith('cvs:'):
return src
except:
except BaseException:
pass
# otherwise fail
return None
@ -980,8 +980,7 @@ class BuildTracker(object):
for pkg in session.listPackages(pkgID=name):
owners.setdefault(pkg['owner_id'], []).append(pkg)
if owners:
order = [(len(v), k) for k, v in six.iteritems(owners)]
order.sort()
order = sorted([(len(v), k) for k, v in six.iteritems(owners)])
owner = order[-1][1]
else:
# just use ourselves
@ -1207,8 +1206,7 @@ class BuildTracker(object):
ret = False
if options.max_jobs and len(self.state_idx['pending']) >= options.max_jobs:
return ret
missing = [(b.order, b.id, b) for b in six.itervalues(self.state_idx['missing'])]
missing.sort()
missing = sorted([(b.order, b.id, b) for b in six.itervalues(self.state_idx['missing'])])
for order, build_id, build in missing:
if not self.checkBuildDeps(build):
continue
@ -1276,7 +1274,7 @@ def main(args):
filename = options.logfile
try:
logfile = os.open(filename, os.O_CREAT | os.O_RDWR | os.O_APPEND, 0o777)
except:
except BaseException:
logfile = None
if logfile is not None:
log("logging to %s" % filename)
@ -1330,6 +1328,6 @@ if __name__ == "__main__":
# log ("%s: %s" % (exctype, value))
try:
session.logout()
except:
except BaseException:
pass
sys.exit(rv)

View file

@ -75,7 +75,7 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
if opts['scratch']:
cursor.execute("INSERT INTO temp_scratch_tasks VALUES (%s)", (task_id,))
ids.append(task_id)
except:
except BaseException:
continue
parents = ids

View file

@ -408,7 +408,7 @@ class RepoManager(object):
while True:
self.checkCurrentRepos()
time.sleep(self.options.sleeptime)
except:
except BaseException:
self.logger.exception('Error in currency checker thread')
raise
finally:
@ -423,7 +423,7 @@ class RepoManager(object):
while True:
self.regenRepos()
time.sleep(self.options.sleeptime)
except:
except BaseException:
self.logger.exception('Error in regen thread')
raise
finally:
@ -851,7 +851,7 @@ def main(options, session):
except SystemExit:
logger.warn("Shutting down")
break
except:
except BaseException:
# log the exception and continue
logger.error(''.join(traceback.format_exception(*sys.exc_info())))
try:
@ -1000,7 +1000,7 @@ if __name__ == "__main__":
try:
logfile = open(options.logfile, "w")
logfile.close()
except:
except BaseException:
sys.stderr.write("Cannot create logfile: %s\n" % options.logfile)
sys.exit(1)
if not os.access(options.logfile, os.W_OK):

View file

@ -687,7 +687,7 @@ def stream_logs(server, handler, builds):
try:
fd = open(log, 'r')
logs[log] = (relpath, fd)
except:
except BaseException:
log_local('Error opening %s' % log)
continue
else:
@ -701,7 +701,7 @@ def stream_logs(server, handler, builds):
del contents
try:
server.uploadDirect(relpath, offset, size, digest, data)
except:
except BaseException:
log_local('error uploading %s' % relpath)
time.sleep(1)
@ -717,14 +717,14 @@ def fail(server, handler):
logfd.flush()
upload_file(server, os.path.dirname(logfile),
os.path.basename(logfile))
except:
except BaseException:
log_local('error calling upload_file()')
while True:
try:
# this is the very last thing we do, keep trying as long as we can
server.failTask(tb)
break
except:
except BaseException:
log_local('error calling server.failTask()')
sys.exit(1)
@ -794,7 +794,7 @@ def main():
results['logs'].append(os.path.basename(logfile))
server.closeTask(results)
except:
except BaseException:
fail(server, handler)
sys.exit(0)

View file

@ -241,7 +241,7 @@ def main(options, session):
break
except koji.RetryError:
raise
except:
except BaseException:
# XXX - this is a little extreme
# log the exception and continue
logger.error('Error in main loop', exc_info=True)
@ -293,7 +293,7 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):
self.close_request(conn)
except socket.timeout:
pass
except:
except BaseException:
self.logger.error('Error handling requests', exc_info=True)
if sys.version_info[:2] <= (2, 4):
@ -310,7 +310,7 @@ class DaemonXMLRPCServer(six.moves.xmlrpc_server.SimpleXMLRPCServer):
response = six.moves.xmlrpc_client.dumps(response, methodresponse=1, allow_none=True)
except six.moves.xmlrpc_client.Fault as fault:
response = six.moves.xmlrpc_client.dumps(fault)
except:
except BaseException:
# report exception back to server
response = six.moves.xmlrpc_client.dumps(
six.moves.xmlrpc_client.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value))
@ -407,7 +407,7 @@ class WinBuildTask(MultiPlatformTask):
except (SystemExit, ServerExit, KeyboardInterrupt):
# we do not trap these
raise
except:
except BaseException:
if not opts.get('scratch'):
# scratch builds do not get imported
self.session.host.failBuild(self.id, build_id)
@ -1037,7 +1037,7 @@ class VMTaskManager(TaskManager):
if os.path.isfile(disk):
os.unlink(disk)
self.logger.debug('Removed disk file %s for VM %s', disk, vm_name)
except:
except BaseException:
self.logger.error('Error removing disk file %s for VM %s', disk, vm_name,
exc_info=True)
return False
@ -1054,7 +1054,7 @@ class VMTaskManager(TaskManager):
"""
vms = self.libvirt_conn.listDefinedDomains() + self.libvirt_conn.listDomainsID()
for vm_name in vms:
if type(vm_name) == int:
if isinstance(vm_name, int):
vm_name = self.libvirt_conn.lookupByID(vm_name).name()
if vm_name.startswith(VMExecTask.CLONE_PREFIX):
self.cleanupVM(vm_name)

View file

@ -42,8 +42,11 @@ from koji.server import ServerRedirect
from koji.util import to_list
from kojiweb.util import _genHTML, _getValidTokens, _initValues
# Convenience definition of a commonly-used sort function
_sortbyname = lambda x: x['name']
def _sortbyname(x):
return x['name']
# loggers
authlogger = logging.getLogger('koji.auth')
@ -694,7 +697,7 @@ def taskinfo(environ, taskID):
if task['state'] in (koji.TASK_STATES['CLOSED'], koji.TASK_STATES['FAILED']):
try:
result = server.getTaskResult(task['id'])
except:
except BaseException:
excClass, exc = sys.exc_info()[:2]
values['result'] = exc
values['excClass'] = excClass
@ -728,7 +731,7 @@ def taskinfo(environ, taskID):
try:
values['params_parsed'] = _genHTML(environ, 'taskinfo_params.chtml')
except:
except BaseException:
values['params_parsed'] = None
return _genHTML(environ, 'taskinfo.chtml')
@ -2393,7 +2396,7 @@ def search(environ, start=None, order=None):
if match == 'regexp':
try:
re.compile(terms)
except:
except BaseException:
values['error'] = 'Invalid regular expression'
return _genHTML(environ, 'search.chtml')

View file

@ -205,7 +205,7 @@ class Dispatcher(object):
args = inspect.getargspec(val)
if not args[0] or args[0][0] != 'environ':
continue
except:
except BaseException:
tb_str = ''.join(traceback.format_exception(*sys.exc_info()))
self.logger.error(tb_str)
self.handler_index[name] = val

View file

@ -45,7 +45,7 @@ class NoSuchException(Exception):
try:
# pyOpenSSL might not be around
from OpenSSL.SSL import Error as SSL_Error
except:
except BaseException:
SSL_Error = NoSuchException