flake8: apply E2 rules except E266

parent ce1f9928af, commit 05340b146b

28 changed files with 707 additions and 706 deletions
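The E2 series in pycodestyle (the checks flake8 runs) governs whitespace: around operators (E221-E228), after commas and colons (E231, E241), before punctuation and inside brackets (E201-E203), around keyword/default '=' (E251), and before and inside comments (E261-E266). The sketch below is illustrative only and is not code from this commit; all names in it are made up. It shows the before/after shape of the fixes in the diff that follows:

    # before: each line trips the E2 check named in its comment
    i, j = 2, 3
    ofs = 16 + i*16 + j*4         # E226: missing whitespace around arithmetic operator
    mask = 0x01|0x02              # E227: missing whitespace around bitwise operator
    arches = ["sparc64v"]
    arches.insert(1,"sparc64")    # E231: missing whitespace after comma
    opts = {'root' : 'name'}      # E203: whitespace before ':'

    # after: the corrected forms, as applied throughout this commit
    ofs = 16 + i * 16 + j * 4
    mask = 0x01 | 0x02
    arches.insert(1, "sparc64")
    opts = {'root': 'name'}

Inline comments are also E2 territory: E261 wants two spaces before a trailing '#' and E262 wants the comment text to start with '# ', which is why lines like "if not scheme: #pragma: no cover" gain a space on both sides of the '#'. E266 (a block comment starting with '##'), the one rule this commit deliberately skips, is the sole exception.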
koji/__init__.py (100 changed lines)
@@ -108,13 +108,13 @@ RPM_FILEDIGESTALGO_IDS = {
     # Taken from RFC 4880
     # A missing algo ID means md5
     None: 'MD5',
-    1:    'MD5',
-    2:    'SHA1',
-    3:    'RIPEMD160',
-    8:    'SHA256',
-    9:    'SHA384',
-    10:   'SHA512',
-    11:   'SHA224'
+    1: 'MD5',
+    2: 'SHA1',
+    3: 'RIPEMD160',
+    8: 'SHA256',
+    9: 'SHA384',
+    10: 'SHA512',
+    11: 'SHA224'
     }

 # rpm 4.12 introduces optional deps, but they can also be backported in some
@@ -288,7 +288,7 @@ DEFAULT_AUTH_TIMEOUT = 60
 # BEGIN kojikamid dup #

 # Exceptions
-PythonImportError = ImportError # will be masked by koji's one
+PythonImportError = ImportError  # will be masked by koji's one

 class GenericError(Exception):
     """Base class for our custom exceptions"""
@@ -640,8 +640,8 @@ class RawHeader(object):
         for i in range(il):
             entry = []
             for j in range(4):
-                ofs = 16 + i*16 + j*4
-                data = [_ord(x) for x in self.header[ofs:ofs+4]]
+                ofs = 16 + i * 16 + j * 4
+                data = [_ord(x) for x in self.header[ofs:ofs + 4]]
                 entry.append(multibyte(data))

             # print("Tag: %d, Type: %d, Offset: %x, Count: %d" % tuple(entry))
@@ -693,7 +693,7 @@ class RawHeader(object):
                 # integer
                 n = 1 << (dtype - 2)
                 for i in range(count):
-                    data = [_ord(x) for x in self.header[pos:pos+n]]
+                    data = [_ord(x) for x in self.header[pos:pos + n]]
                     print("%r" % data)
                     num = multibyte(data)
                     print("Int(%d): %d" % (n, num))
@@ -702,23 +702,23 @@ class RawHeader(object):
             elif dtype == 6:
                 # string (null terminated)
                 end = self.header.find(six.b('\0'), pos)
-                print("String(%d): %r" % (end-pos, self.header[pos:end]))
+                print("String(%d): %r" % (end - pos, self.header[pos:end]))
                 next = end + 1
             elif dtype == 7:
-                print("Data: %s" % hex_string(self.header[pos:pos+count]))
-                next = pos+count
+                print("Data: %s" % hex_string(self.header[pos:pos + count]))
+                next = pos + count
             elif dtype == 8:
                 # string array
                 for i in range(count):
                     end = self.header.find(six.b('\0'), pos)
-                    print("String(%d): %r" % (end-pos, self.header[pos:end]))
+                    print("String(%d): %r" % (end - pos, self.header[pos:end]))
                     pos = end + 1
                 next = pos
             elif dtype == 9:
                 # unicode string array
                 for i in range(count):
                     end = self.header.find(six.b('\0'), pos)
-                    print("i18n(%d): %r" % (end-pos, self.header[pos:end]))
+                    print("i18n(%d): %r" % (end - pos, self.header[pos:end]))
                     pos = end + 1
                 next = pos
             else:
@@ -746,7 +746,7 @@ class RawHeader(object):
         if dtype >= 2 and dtype <= 5:
             n = 1 << (dtype - 2)
             # n-byte integer
-            data = [_ord(x) for x in self.header[pos:pos+n]]
+            data = [_ord(x) for x in self.header[pos:pos + n]]
             return multibyte(data)
         elif dtype == 6:
             # string (null terminated)
@@ -754,7 +754,7 @@ class RawHeader(object):
             return self.header[pos:end]
         elif dtype == 7:
             # raw data
-            return self.header[pos:pos+count]
+            return self.header[pos:pos + count]
         else:
             # XXX - not all valid data types are handled
             raise GenericError("Unable to read header data type: %x" % dtype)
@@ -806,7 +806,7 @@ def __parse_packet_header(pgp_packet):
             offset = 1
             length = len(pgp_packet) - offset
         else:
-            (fmt, offset) = {0:('>B', 2), 1:('>H', 3), 2:('>I', 5)}[len_type]
+            (fmt, offset) = {0: ('>B', 2), 1: ('>H', 3), 2: ('>I', 5)}[len_type]
             length = struct.unpack(fmt, pgp_packet[1:offset])[0]
     else:
         tag = byte0 & 0x3F
@@ -843,8 +843,8 @@ def __subpacket_key_ids(subs):
             length = struct.unpack('>I', subs[1:5])[0]
             off = 5
         if _ord(subs[off]) == 16:
-            res.append(subs[off+1 : off+length])
-        subs = subs[off+length:]
+            res.append(subs[off + 1: off + length])
+        subs = subs[off + length:]
     return res

 def get_sigpacket_key_id(sigpacket):
@@ -858,9 +858,9 @@ def get_sigpacket_key_id(sigpacket):
         sub_len = struct.unpack('>H', sigpacket[4:6])[0]
         off = 6 + sub_len
         key_ids = __subpacket_key_ids(sigpacket[6:off])
-        sub_len = struct.unpack('>H', sigpacket[off : off+2])[0]
+        sub_len = struct.unpack('>H', sigpacket[off: off + 2])[0]
         off += 2
-        key_ids += __subpacket_key_ids(sigpacket[off : off+sub_len])
+        key_ids += __subpacket_key_ids(sigpacket[off: off + sub_len])
     if len(key_ids) != 1:
         raise NotImplementedError(
             'Unexpected number of key IDs: %s' % len(key_ids))
@@ -907,7 +907,7 @@ def get_rpm_header(f, ts=None):
         raise GenericError("rpm's python bindings are not installed")
     if ts is None:
         ts = rpm.TransactionSet()
-        ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS)
+        ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS)
     if isinstance(f, six.string_types):
         fo = open(f, "rb")
     else:
@@ -1007,8 +1007,8 @@ def parse_NVR(nvr):
     p1 = nvr.rfind("-", 0, p2)
     if p1 == -1 or p1 == p2 - 1:
         raise GenericError("invalid format: %s" % nvr)
-    ret['release'] = nvr[p2+1:]
-    ret['version'] = nvr[p1+1:p2]
+    ret['release'] = nvr[p2 + 1:]
+    ret['version'] = nvr[p1 + 1:p2]
     ret['name'] = nvr[:p1]
     epochIndex = ret['name'].find(':')
     if epochIndex == -1:
@@ -1031,7 +1031,7 @@ def parse_NVRA(nvra):
     p3 = nvra.rfind(".")
     if p3 == -1 or p3 == len(nvra) - 1:
         raise GenericError("invalid format: %s" % nvra)
-    arch = nvra[p3+1:]
+    arch = nvra[p3 + 1:]
     ret = parse_NVR(nvra[:p3])
     ret['arch'] = arch
     if arch == 'src':
@@ -1427,7 +1427,7 @@ def generate_comps(groups, expand_groups=False):
         if expand_groups and g['grouplist']:
             # add a requires entry for all packages in groups required by buildgroup
             need = [req['name'] for req in g['grouplist']]
-            seen_grp = {g['name'] : 1}
+            seen_grp = {g['name']: 1}
             seen_pkg = {}
             for p in g['packagelist']:
                 seen_pkg[p['package']] = 1
@@ -1503,9 +1503,9 @@ def genMockConfig(name, arch, managed=False, repoid=None, tag_name=None, **opts)
     # rely on the mock defaults being correct
     # and only includes changes from the defaults here
     config_opts = {
-        'root' : name,
-        'basedir' : mockdir,
-        'target_arch' : opts.get('target_arch', arch),
+        'root': name,
+        'basedir': mockdir,
+        'target_arch': opts.get('target_arch', arch),
         'chroothome': '/builddir',
         # Use the group data rather than a generated rpm
         'chroot_setup_cmd': 'groupinstall %s' % opts.get('install_group', 'build'),
|
@ -1578,9 +1578,9 @@ name=build
|
|||
}
|
||||
|
||||
macros = {
|
||||
'%_rpmfilename' : '%%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm',
|
||||
'%vendor' : opts.get('vendor', 'Koji'),
|
||||
'%packager' : opts.get('packager', 'Koji'),
|
||||
'%_rpmfilename': '%%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm',
|
||||
'%vendor': opts.get('vendor', 'Koji'),
|
||||
'%packager': opts.get('packager', 'Koji'),
|
||||
'%distribution': opts.get('distribution', 'Unknown')
|
||||
}
|
||||
|
||||
|
|
@@ -1747,18 +1747,18 @@ def config_directory_contents(dir_name, strict=False):

 def read_config(profile_name, user_config=None):
     config_defaults = {
-        'server' : 'http://localhost/kojihub',
-        'weburl' : 'http://localhost/koji',
-        'topurl' : None,
-        'pkgurl' : None,
-        'topdir' : '/mnt/koji',
-        'max_retries' : None,
+        'server': 'http://localhost/kojihub',
+        'weburl': 'http://localhost/koji',
+        'topurl': None,
+        'pkgurl': None,
+        'topdir': '/mnt/koji',
+        'max_retries': None,
         'retry_interval': None,
-        'anon_retry' : None,
-        'offline_retry' : None,
-        'offline_retry_interval' : None,
-        'timeout' : DEFAULT_REQUEST_TIMEOUT,
-        'auth_timeout' : DEFAULT_AUTH_TIMEOUT,
+        'anon_retry': None,
+        'offline_retry': None,
+        'offline_retry_interval': None,
+        'timeout': DEFAULT_REQUEST_TIMEOUT,
+        'auth_timeout': DEFAULT_AUTH_TIMEOUT,
         'use_fast_upload': False,
         'upload_blocksize': 1048576,
         'poll_interval': 6,
@@ -2109,7 +2109,7 @@ def is_requests_cert_error(e):
 def is_cert_error(e):
     """Determine if an OpenSSL error is due to a bad cert"""

-    if SSL_Error is None: #pragma: no cover
+    if SSL_Error is None:  # pragma: no cover
         # import failed, so we can't determine
         raise Exception("OpenSSL library did not load")
     if not isinstance(e, SSL_Error):
@@ -2980,14 +2980,14 @@ class ClientSession(object):
         tries = 0
         while True:
             if debug:
-                self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" %(path, name, sz, digest, offset))
+                self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" % (path, name, sz, digest, offset))
             if self.callMethod('uploadFile', path, name, sz, digest, offset, data, **volopts):
                 break
             if tries <= retries:
                 tries += 1
                 continue
             else:
-                raise GenericError("Error uploading file %s, offset %d" %(path, offset))
+                raise GenericError("Error uploading file %s, offset %d" % (path, offset))
             if size == 0:
                 break
             ofs += size
@@ -3127,7 +3127,7 @@ class MultiCallSession(object):
             self._session.logger.debug(
                 "MultiCall with batch size %i, calls/groups(%i/%i)",
                 batch, len(calls), round(len(calls) // batch))
-            batches = [calls[i:i+batch] for i in range(0, len(calls), batch)]
+            batches = [calls[i:i + batch] for i in range(0, len(calls), batch)]
         else:
             batches = [calls]
         results = []
@@ -3502,7 +3502,7 @@ def add_file_logger(logger, fn):
         return
     if not os.access(fn, os.W_OK):
         return
-    handler = logging.handlers.RotatingFileHandler(fn, maxBytes=1024*1024*10, backupCount=5)
+    handler = logging.handlers.RotatingFileHandler(fn, maxBytes=1024 * 1024 * 10, backupCount=5)
     handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s'))
     logging.getLogger(logger).addHandler(handler)


koji/arch.py (52 changed lines)
@@ -10,14 +10,14 @@ import rpm
 _ppc64_native_is_best = True

 # dict mapping arch -> ( multicompat, best personality, biarch personality )
-multilibArches = { "x86_64": ( "athlon", "x86_64", "athlon" ),
-                   "sparc64v": ( "sparcv9v", "sparcv9v", "sparc64v" ),
-                   "sparc64": ( "sparcv9", "sparcv9", "sparc64" ),
-                   "ppc64": ( "ppc", "ppc", "ppc64" ),
-                   "s390x": ( "s390", "s390x", "s390" ),
-                   }
+multilibArches = {"x86_64": ("athlon", "x86_64", "athlon"),
+                  "sparc64v": ("sparcv9v", "sparcv9v", "sparc64v"),
+                  "sparc64": ("sparcv9", "sparcv9", "sparc64"),
+                  "ppc64": ("ppc", "ppc", "ppc64"),
+                  "s390x": ("s390", "s390x", "s390"),
+                  }
 if _ppc64_native_is_best:
-    multilibArches["ppc64"] = ( "ppc", "ppc64", "ppc64" )
+    multilibArches["ppc64"] = ("ppc", "ppc64", "ppc64")

 arches = {
     # ia32
@@ -34,7 +34,7 @@ arches = {
     "ia32e": "x86_64",

     # ppc64le
-    "ppc64le":  "noarch",
+    "ppc64le": "noarch",

     # ppc
     "ppc64p7": "ppc64",
@@ -56,16 +56,16 @@ arches = {
     "sparc": "noarch",

     # alpha
-    "alphaev7":   "alphaev68",
-    "alphaev68":  "alphaev67",
-    "alphaev67":  "alphaev6",
-    "alphaev6":   "alphapca56",
+    "alphaev7": "alphaev68",
+    "alphaev68": "alphaev67",
+    "alphaev67": "alphaev6",
+    "alphaev6": "alphapca56",
     "alphapca56": "alphaev56",
-    "alphaev56":  "alphaev5",
-    "alphaev5":   "alphaev45",
-    "alphaev45":  "alphaev4",
-    "alphaev4":   "alpha",
-    "alpha":      "noarch",
+    "alphaev56": "alphaev5",
+    "alphaev5": "alphaev45",
+    "alphaev45": "alphaev4",
+    "alphaev4": "alpha",
+    "alpha": "noarch",

     # arm
     "armv7l": "armv6l",
@@ -124,7 +124,7 @@ def canCoinstall(arch1, arch2):
     # if both are a multlibarch then we can't coinstall (x86_64, ia32e)
     # if both are not multilibarches then we can't coinstall (i386, i686)

-    if 'noarch' in [arch1, arch2]: # noarch can never coinstall
+    if 'noarch' in [arch1, arch2]:  # noarch can never coinstall
         return False

     if isMultiLibArch(arch=arch1) == isMultiLibArch(arch=arch2):
@@ -153,7 +153,7 @@ def isMultiLibArch(arch=None):
     if arch is None:
         arch = canonArch

-    if arch not in arches: # or we could check if it is noarch
+    if arch not in arches:  # or we could check if it is noarch
         return 0

     if arch in multilibArches:
@@ -219,7 +219,7 @@ def getArchList(thisarch=None):
     # hack hack hack
     # sparc64v is also sparc64 compat
     if archlist[0] == "sparc64v":
-        archlist.insert(1,"sparc64")
+        archlist.insert(1, "sparc64")

     # if we're a weirdo arch - add noarch on there.
     if len(archlist) == 1 and archlist[0] == thisarch:
@@ -366,7 +366,7 @@ def getCanonX86_64Arch(arch):
         return "ia32e"
     return arch

-def getCanonArch(skipRpmPlatform = 0):
+def getCanonArch(skipRpmPlatform=0):
     if not skipRpmPlatform and os.access("/etc/rpm/platform", os.R_OK):
         try:
             f = open("/etc/rpm/platform", "r")
@@ -398,11 +398,11 @@ def getCanonArch(skipRpmPlatform = 0):
 canonArch = getCanonArch()

 # this gets you the "compat" arch of a biarch pair
-def getMultiArchInfo(arch = canonArch):
+def getMultiArchInfo(arch=canonArch):
     if arch in multilibArches:
         return multilibArches[arch]
     if arch in arches and arches[arch] != "noarch":
-        return getMultiArchInfo(arch = arches[arch])
+        return getMultiArchInfo(arch=arches[arch])
     return None

 # get the best usual userspace arch for the arch we're on. this is
@@ -430,7 +430,7 @@ def getBaseArch(myarch=None):
     if not myarch:
         myarch = canonArch

-    if myarch not in arches: # this is dumb, but <shrug>
+    if myarch not in arches:  # this is dumb, but <shrug>
         return myarch

     if myarch.startswith("sparc64"):
@@ -485,7 +485,7 @@ class ArchStorage(object):
         self.basearch = getBaseArch(myarch=self.canonarch)
         self.archlist = getArchList(thisarch=self.canonarch)

-        if not archlist_includes_compat_arch: # - do we bother including i686 and below on x86_64
+        if not archlist_includes_compat_arch:  # - do we bother including i686 and below on x86_64
             limit_archlist = []
             for a in self.archlist:
                 if isMultiLibArch(a) or a == 'noarch':
@@ -495,7 +495,7 @@ class ArchStorage(object):
         self.bestarch = getBestArch(myarch=self.canonarch)
         self.compatarches = getMultiArchInfo(arch=self.canonarch)
         self.multilib = isMultiLibArch(arch=self.canonarch)
-        self.legit_multi_arches = legitMultiArchesInSameLib(arch = self.canonarch)
+        self.legit_multi_arches = legitMultiArchesInSameLib(arch=self.canonarch)

     def get_best_arch_from_list(self, archlist, fromarch=None):
         if not fromarch:

koji/auth.py (10 changed lines)
@@ -321,7 +321,7 @@ class Session(object):
             srvkt = krbV.Keytab(name=context.opts.get('AuthKeytab'), context=ctx)

             ac = krbV.AuthContext(context=ctx)
-            ac.flags = krbV.KRB5_AUTH_CONTEXT_DO_SEQUENCE|krbV.KRB5_AUTH_CONTEXT_DO_TIME
+            ac.flags = krbV.KRB5_AUTH_CONTEXT_DO_SEQUENCE | krbV.KRB5_AUTH_CONTEXT_DO_TIME
             conninfo = self.getConnInfo()
             ac.addrs = conninfo

@@ -537,8 +537,8 @@ class Session(object):

         # generate a random key
         alnum = string.ascii_letters + string.digits
-        key = "%s-%s" %(user_id,
-                    ''.join([random.choice(alnum) for x in range(1, 20)]))
+        key = "%s-%s" % (user_id,
+                         ''.join([random.choice(alnum) for x in range(1, 20)]))
         # use sha? sha.new(phrase).hexdigest()

         # get a session id
@@ -556,7 +556,7 @@ class Session(object):
         context.cnx.commit()

         # return session info
-        return {'session-id' : session_id, 'session-key' : key}
+        return {'session-id': session_id, 'session-key': key}

     def subsession(self):
         "Create a subsession"
@@ -607,7 +607,7 @@ class Session(object):
             return None
         c = context.cnx.cursor()
         q = """SELECT id FROM host WHERE user_id = %(uid)d"""
-        c.execute(q, {'uid' : self.user_id})
+        c.execute(q, {'uid': self.user_id})
         r = c.fetchone()
         c.close()
         if r:

koji/context.py
@@ -67,8 +67,8 @@ class ThreadLocal(object):
     def __str__(self):
         id = six.moves._thread.get_ident()
         tdict = object.__getattribute__(self, '_tdict')
-        return "(current thread: %s) {" % id + \
-            ", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in six.iteritems(tdict)]) + \
+        return "(current thread: %s) {" % id + \
+            ", ".join(["%s : %s" % (k, v.__dict__) for (k, v) in six.iteritems(tdict)]) + \
             "}"

     def _threadclear(self):

koji/daemon.py
@@ -307,7 +307,7 @@ class SCM(object):
         # check for validity: params should be empty, query may be empty, everything else should be populated
         if params:
             raise koji.GenericError('Unable to parse SCM URL: %s . Params element %s should be empty.' % (self.url, params))
-        if not scheme: #pragma: no cover
+        if not scheme:  # pragma: no cover
             # should not happen because of is_scm_url check earlier
             raise koji.GenericError('Unable to parse SCM URL: %s . Could not find the scheme element.' % self.url)
         if not fragment:
@@ -699,7 +699,7 @@ class TaskManager(object):
             # can lead to a world of hurt.
             # We remove the rootdir contents but leave the rootdir unless it
             # is really old
-            if age > 3600*24:
+            if age > 3600 * 24:
                 # dir untouched for a day
                 self.logger.info("Removing buildroot: %s" % desc)
                 if topdir and safe_rmtree(topdir, unmount=True, strict=False) != 0:
@@ -850,8 +850,8 @@ class TaskManager(object):
         self.logger.debug(" hosts: %r" % hosts)
         self.logger.debug(" tasks: %r" % tasks)
         # now we organize this data into channel-arch bins
-        bin_hosts = {} #hosts indexed by bin
-        bins = {} #bins for this host
+        bin_hosts = {}  # hosts indexed by bin
+        bins = {}  # bins for this host
         our_avail = None
         for host in hosts:
             host['bins'] = []

koji/plugin.py
@@ -34,28 +34,28 @@ from koji.util import encode_datetime_recurse
 # the available callback hooks and a list
 # of functions to be called for each event
 callbacks = {
-    'prePackageListChange':   [],
-    'postPackageListChange':  [],
-    'preTaskStateChange':     [],
-    'postTaskStateChange':    [],
-    'preBuildStateChange':    [],
-    'postBuildStateChange':   [],
-    'preImport':              [],
-    'postImport':             [],
-    'preRPMSign':             [],
-    'postRPMSign':            [],
-    'preTag':                 [],
-    'postTag':                [],
-    'preUntag':               [],
-    'postUntag':              [],
-    'preRepoInit':            [],
-    'postRepoInit':           [],
-    'preRepoDone':            [],
-    'postRepoDone':           [],
-    'preCommit':              [],
-    'postCommit':             [],
-    'preSCMCheckout':         [],
-    'postSCMCheckout':        [],
+    'prePackageListChange': [],
+    'postPackageListChange': [],
+    'preTaskStateChange': [],
+    'postTaskStateChange': [],
+    'preBuildStateChange': [],
+    'postBuildStateChange': [],
+    'preImport': [],
+    'postImport': [],
+    'preRPMSign': [],
+    'postRPMSign': [],
+    'preTag': [],
+    'postTag': [],
+    'preUntag': [],
+    'postUntag': [],
+    'preRepoInit': [],
+    'postRepoInit': [],
+    'preRepoDone': [],
+    'postRepoDone': [],
+    'preCommit': [],
+    'postCommit': [],
+    'preSCMCheckout': [],
+    'postSCMCheckout': [],
     }

 class PluginTracker(object):

koji/policy.py
@@ -288,7 +288,7 @@ class SimpleRuleSet(object):
                 raise Exception("bad policy line: %s" % line)
             negate = True
         tests = line[:pos]
-        action = line[pos+2:]
+        action = line[pos + 2:]
         tests = [self.get_test_handler(x) for x in tests.split('&&')]
         action = action.strip()
         # just return action = { for nested rules

koji/rpmdiff.py
@@ -40,53 +40,53 @@ class Rpmdiff:

     # constants

-    TAGS = ( rpm.RPMTAG_NAME, rpm.RPMTAG_SUMMARY,
-             rpm.RPMTAG_DESCRIPTION, rpm.RPMTAG_GROUP,
-             rpm.RPMTAG_LICENSE, rpm.RPMTAG_URL,
-             rpm.RPMTAG_PREIN, rpm.RPMTAG_POSTIN,
-             rpm.RPMTAG_PREUN, rpm.RPMTAG_POSTUN)
+    TAGS = (rpm.RPMTAG_NAME, rpm.RPMTAG_SUMMARY,
+            rpm.RPMTAG_DESCRIPTION, rpm.RPMTAG_GROUP,
+            rpm.RPMTAG_LICENSE, rpm.RPMTAG_URL,
+            rpm.RPMTAG_PREIN, rpm.RPMTAG_POSTIN,
+            rpm.RPMTAG_PREUN, rpm.RPMTAG_POSTUN)

-    PRCO = ( 'REQUIRES', 'PROVIDES', 'CONFLICTS', 'OBSOLETES')
+    PRCO = ('REQUIRES', 'PROVIDES', 'CONFLICTS', 'OBSOLETES')

     # {fname : (size, mode, mtime, flags, dev, inode,
     #           nlink, state, vflags, user, group, digest)}
-    __FILEIDX = [ ['S', 0],
-                  ['M', 1],
-                  ['5', 11],
-                  ['D', 4],
-                  ['N', 6],
-                  ['L', 7],
-                  ['V', 8],
-                  ['U', 9],
-                  ['G', 10],
-                  ['F', 3],
-                  ['T', 2] ]
+    __FILEIDX = [['S', 0],
+                 ['M', 1],
+                 ['5', 11],
+                 ['D', 4],
+                 ['N', 6],
+                 ['L', 7],
+                 ['V', 8],
+                 ['U', 9],
+                 ['G', 10],
+                 ['F', 3],
+                 ['T', 2]]

     try:
         if rpm.RPMSENSE_SCRIPT_PRE:
-            PREREQ_FLAG=rpm.RPMSENSE_PREREQ|rpm.RPMSENSE_SCRIPT_PRE|\
-                        rpm.RPMSENSE_SCRIPT_POST|rpm.RPMSENSE_SCRIPT_PREUN|\
+            PREREQ_FLAG = rpm.RPMSENSE_PREREQ | rpm.RPMSENSE_SCRIPT_PRE |\
+                        rpm.RPMSENSE_SCRIPT_POST | rpm.RPMSENSE_SCRIPT_PREUN |\
                         rpm.RPMSENSE_SCRIPT_POSTUN
     except AttributeError:
         try:
-            PREREQ_FLAG=rpm.RPMSENSE_PREREQ
+            PREREQ_FLAG = rpm.RPMSENSE_PREREQ
         except:
             # (proyvind): This seems ugly, but then again so does
             # this whole check as well.
-            PREREQ_FLAG=False
+            PREREQ_FLAG = False

     DEPFORMAT = '%-12s%s %s %s %s'
     FORMAT = '%-12s%s'

-    ADDED   = 'added'
+    ADDED = 'added'
     REMOVED = 'removed'

     # code starts here

     def __init__(self, old, new, ignore=None):
         self.result = []
-        self.old_data = { 'tags': {}, 'ignore': ignore }
-        self.new_data = { 'tags': {}, 'ignore': ignore }
+        self.old_data = {'tags': {}, 'ignore': ignore}
+        self.new_data = {'tags': {}, 'ignore': ignore}
         if ignore is None:
             ignore = set()
         else:
@@ -111,7 +111,7 @@ class Rpmdiff:
             self.__add(self.FORMAT, ('S.5........', tagname))

         # compare Provides, Requires, ...
-        for tag in self.PRCO:
+        for tag in self.PRCO:
             self.__comparePRCOs(old, new, tag)

         # compare the files
@@ -183,16 +183,16 @@ class Rpmdiff:

     # compare Provides, Requires, Conflicts, Obsoletes
     def __comparePRCOs(self, old, new, name):
-        oldflags = old[name[:-1]+'FLAGS']
-        newflags = new[name[:-1]+'FLAGS']
+        oldflags = old[name[:-1] + 'FLAGS']
+        newflags = new[name[:-1] + 'FLAGS']
         # fix buggy rpm binding not returning list for single entries
-        if not isinstance(oldflags, list): oldflags = [ oldflags ]
-        if not isinstance(newflags, list): newflags = [ newflags ]
+        if not isinstance(oldflags, list): oldflags = [oldflags]
+        if not isinstance(newflags, list): newflags = [newflags]

-        o = list(zip(old[name], oldflags, old[name[:-1]+'VERSION']))
-        n = list(zip(new[name], newflags, new[name[:-1]+'VERSION']))
+        o = list(zip(old[name], oldflags, old[name[:-1] + 'VERSION']))
+        n = list(zip(new[name], newflags, new[name[:-1] + 'VERSION']))

-        if name == 'PROVIDES': # filter our self provide
+        if name == 'PROVIDES':  # filter our self provide
             oldNV = (old['name'], rpm.RPMSENSE_EQUAL,
                      "%s-%s" % (old['version'], old['release']))
             newNV = (new['name'], rpm.RPMSENSE_EQUAL,

koji/tasks.py
@@ -144,121 +144,121 @@ def parse_task_params(method, params):
 LEGACY_SIGNATURES = {
     # key is method name, value is list of possible signatures
     # signatures are like getargspec -- args, varargs, keywords, defaults
-    'chainbuild' : [
+    'chainbuild': [
         [['srcs', 'target', 'opts'], None, None, (None,)],
     ],
-    'waitrepo' : [
+    'waitrepo': [
         [['tag', 'newer_than', 'nvrs'], None, None, (None, None)],
     ],
-    'createLiveMedia' : [
+    'createLiveMedia': [
         [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
     ],
-    'createAppliance' : [
+    'createAppliance': [
         [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
     ],
-    'livecd' : [
+    'livecd': [
         [['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],
     ],
-    'buildNotification' : [
+    'buildNotification': [
         [['recipients', 'build', 'target', 'weburl'], None, None, None],
     ],
-    'buildMaven' : [
+    'buildMaven': [
         [['url', 'build_tag', 'opts'], None, None, (None,)],
     ],
-    'build' : [
+    'build': [
         [['src', 'target', 'opts'], None, None, (None,)],
     ],
-    'buildSRPMFromSCM' : [
+    'buildSRPMFromSCM': [
         [['url', 'build_tag', 'opts'], None, None, (None,)],
     ],
-    'rebuildSRPM' : [
+    'rebuildSRPM': [
         [['srpm', 'build_tag', 'opts'], None, None, (None,)],
     ],
-    'createrepo' : [
+    'createrepo': [
         [['repo_id', 'arch', 'oldrepo'], None, None, None],
     ],
-    'livemedia' : [
+    'livemedia': [
         [['name', 'version', 'arches', 'target', 'ksfile', 'opts'], None, None, (None,)],
     ],
-    'indirectionimage' : [
+    'indirectionimage': [
         [['opts'], None, None, None],
     ],
-    'wrapperRPM' : [
+    'wrapperRPM': [
         [['spec_url', 'build_target', 'build', 'task', 'opts'], None, None, (None,)],
     ],
-    'createLiveCD' : [
+    'createLiveCD': [
         [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'ksfile', 'opts'], None, None, (None,)],
     ],
-    'appliance' : [
+    'appliance': [
         [['name', 'version', 'arch', 'target', 'ksfile', 'opts'], None, None, (None,)],
     ],
-    'image' : [
+    'image': [
         [['name', 'version', 'arches', 'target', 'inst_tree', 'opts'], None, None, (None,)],
     ],
-    'tagBuild' : [
+    'tagBuild': [
         [['tag_id', 'build_id', 'force', 'fromtag', 'ignore_success'], None, None, (False, None, False)],
     ],
-    'chainmaven' : [
+    'chainmaven': [
         [['builds', 'target', 'opts'], None, None, (None,)],
     ],
-    'newRepo' : [
+    'newRepo': [
         [['tag', 'event', 'src', 'debuginfo', 'separate_src'], None, None, (None, False, False, False)],
     ],
-    'createImage' : [
+    'createImage': [
         [['name', 'version', 'release', 'arch', 'target_info', 'build_tag', 'repo_info', 'inst_tree', 'opts'], None, None, (None,)],
     ],
-    'tagNotification' : [
+    'tagNotification': [
         [['recipients', 'is_successful', 'tag_info', 'from_info', 'build_info', 'user_info', 'ignore_success', 'failure_msg'], None, None, (None, '')],
     ],
-    'buildArch' : [
+    'buildArch': [
         [['pkg', 'root', 'arch', 'keep_srpm', 'opts'], None, None, (None,)],
     ],
-    'maven' : [
+    'maven': [
         [['url', 'target', 'opts'], None, None, (None,)],
     ],
-    'waittest' : [
+    'waittest': [
         [['count', 'seconds'], None, None, (10,)],
     ],
-    'default' : [
+    'default': [
         [[], 'args', 'opts', None],
     ],
-    'shutdown' : [
+    'shutdown': [
         [[], None, None, None],
     ],
-    'restartVerify' : [
+    'restartVerify': [
         [['task_id', 'host'], None, None, None],
     ],
-    'someMethod' : [
+    'someMethod': [
         [[], 'args', None, None],
     ],
-    'restart' : [
+    'restart': [
         [['host'], None, None, None],
     ],
-    'fork' : [
+    'fork': [
         [['n', 'm'], None, None, (5, 37)],
     ],
-    'sleep' : [
+    'sleep': [
         [['n'], None, None, None],
     ],
-    'dependantTask' : [
+    'dependantTask': [
         [['wait_list', 'task_list'], None, None, None],
     ],
-    'subtask' : [
+    'subtask': [
         [['n'], None, None, (4,)],
     ],
-    'restartHosts' : [
+    'restartHosts': [
         [['options'], None, None, (None,)],
     ],
-    'runroot' : [
+    'runroot': [
         [['root', 'arch', 'command', 'keep', 'packages', 'mounts', 'repo_id', 'skip_setarch', 'weight', 'upload_logs', 'new_chroot'], None, None, (False, [], [], None, False, None, None, False)],
     ],
-    'distRepo' : [
+    'distRepo': [
         [['tag', 'repo_id', 'keys', 'task_opts'], None, None, None],
     ],
-    'createdistrepo' : [
+    'createdistrepo': [
         [['tag', 'repo_id', 'arch', 'keys', 'opts'], None, None, None],
     ],
-    'saveFailedTree' : [
+    'saveFailedTree': [
         [['buildrootID', 'full'], None, None, (False,)],
     ],
     }
@@ -278,7 +278,7 @@ class BaseTaskHandler(object):
     Foreground = False

     def __init__(self, id, method, params, session, options, workdir=None):
-        self.id = id #task id
+        self.id = id  # task id
         if method not in self.Methods:
             raise koji.GenericError('method "%s" is not supported' % method)
         self.method = method
@@ -641,7 +641,7 @@ class SubtaskTask(BaseTaskHandler):
     def handler(self, n=4):
         if n > 0:
             task_id = self.session.host.subtask(method='subtask',
-                                                arglist=[n-1],
+                                                arglist=[n - 1],
                                                 label='foo',
                                                 parent=self.id)
             self.wait(task_id)
@@ -715,14 +715,14 @@ class RestartHostsTask(BaseTaskHandler):
         hostquery = {'enabled': True}
         if 'channel' in options:
             chan = self.session.getChannel(options['channel'], strict=True)
-            hostquery['channelID']= chan['id']
+            hostquery['channelID'] = chan['id']
         if 'arches' in options:
             hostquery['arches'] = options['arches']
         hosts = self.session.listHosts(**hostquery)
         if not hosts:
             raise koji.GenericError("No matching hosts")

-        timeout = options.get('timeout', 3600*24)
+        timeout = options.get('timeout', 3600 * 24)

         # fire off the subtasks
         this_host = self.session.host.getID()

koji/util.py
@@ -532,8 +532,8 @@ def eventFromOpts(session, opts):
     if repo:
         rinfo = session.repoInfo(repo)
         if rinfo:
-            return {'id' : rinfo['create_event'],
-                    'ts' : rinfo['create_ts']}
+            return {'id': rinfo['create_event'],
+                    'ts': rinfo['create_ts']}
     return None

@@ -664,7 +664,7 @@ class adler32_constructor(object):
         return dup

     digest_size = 4
-    block_size = 1 #I think
+    block_size = 1  # I think


 def tsort(parts):
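
To re-check the tree the way this commit was made, something along the lines of `flake8 --select=E2 --ignore=E266 koji/` should reproduce the rule set: flake8 resolves overlapping select/ignore prefixes by specificity, so E266 stays suppressed while the rest of the E2 series is enforced. Exact flag spelling can vary between flake8 versions, so treat this invocation as a sketch rather than the project's actual CI command.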