PR#3589: Replace _multiRow, _singleRow, _singleValue with QP
Merges #3589 https://pagure.io/koji/pull-request/3589 Fixes: #3581 https://pagure.io/koji/issue/3581 Replace _multiRow and _singleRow with QP
This commit is contained in:
commit
4ffcbc65f5
8 changed files with 272 additions and 203 deletions
358
hub/kojihub.py
358
hub/kojihub.py
|
|
@ -75,7 +75,7 @@ from koji.util import (
|
|||
multi_fnmatch,
|
||||
safer_move,
|
||||
)
|
||||
from koji.db import (
|
||||
from koji.db import ( # noqa: F401
|
||||
BulkInsertProcessor,
|
||||
DeleteProcessor,
|
||||
InsertProcessor,
|
||||
|
|
@ -85,11 +85,12 @@ from koji.db import (
|
|||
_applyQueryOpts,
|
||||
_dml,
|
||||
_fetchSingle,
|
||||
_multiRow,
|
||||
_singleRow,
|
||||
_multiRow, # needed for backward compatibility, will be removed in Koji 1.36
|
||||
_singleRow, # needed for backward compatibility, will be removed in Koji 1.36
|
||||
_singleValue,
|
||||
get_event,
|
||||
nextval,
|
||||
currval,
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -736,7 +737,7 @@ def make_task(method, arglist, **opts):
|
|||
idata['host_id'] = opts['assign']
|
||||
insert = InsertProcessor('task', data=idata)
|
||||
insert.execute()
|
||||
task_id = _singleValue("SELECT currval('task_id_seq')", strict=True)
|
||||
task_id = currval('task_id_seq')
|
||||
opts['id'] = task_id
|
||||
koji.plugin.run_callbacks(
|
||||
'postTaskStateChange', attribute='state', old=None, new='FREE', info=opts)
|
||||
|
|
@ -2285,15 +2286,15 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True
|
|||
evcondition = eventCondition(event)
|
||||
|
||||
# First get the list of groups
|
||||
fields = ('name', 'group_id', 'tag_id', 'blocked', 'exported', 'display_name',
|
||||
'is_default', 'uservisible', 'description', 'langonly', 'biarchonly',)
|
||||
q = """
|
||||
SELECT %s FROM group_config JOIN groups ON group_id = id
|
||||
WHERE %s AND tag_id = %%(tagid)s
|
||||
""" % (",".join(fields), evcondition)
|
||||
columns = ['name', 'group_id', 'tag_id', 'blocked', 'exported', 'display_name',
|
||||
'is_default', 'uservisible', 'description', 'langonly', 'biarchonly']
|
||||
groups = {}
|
||||
for tagid in taglist:
|
||||
for group in _multiRow(q, locals(), fields):
|
||||
query = QueryProcessor(tables=['group_config'], columns=columns,
|
||||
joins=['groups ON group_id = id'],
|
||||
clauses=[evcondition, 'tag_id = %(tagid)s'],
|
||||
values={'tagid': tagid})
|
||||
for group in query.execute():
|
||||
grp_id = group['group_id']
|
||||
# we only take the first entry for group as we go through inheritance
|
||||
groups.setdefault(grp_id, group)
|
||||
|
|
@ -2301,13 +2302,12 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True
|
|||
if incl_pkgs:
|
||||
for group in groups.values():
|
||||
group['packagelist'] = {}
|
||||
fields = ('group_id', 'tag_id', 'package', 'blocked', 'type', 'basearchonly', 'requires')
|
||||
q = """
|
||||
SELECT %s FROM group_package_listing
|
||||
WHERE %s AND tag_id = %%(tagid)s
|
||||
""" % (",".join(fields), evcondition)
|
||||
columns = ['group_id', 'tag_id', 'package', 'blocked', 'type', 'basearchonly', 'requires']
|
||||
for tagid in taglist:
|
||||
for grp_pkg in _multiRow(q, locals(), fields):
|
||||
query = QueryProcessor(tables=['group_package_listing'], columns=columns,
|
||||
clauses=[evcondition, 'tag_id = %(tagid)s'],
|
||||
values={'tagid': tagid})
|
||||
for grp_pkg in query.execute():
|
||||
grp_id = grp_pkg['group_id']
|
||||
if grp_id not in groups:
|
||||
# tag does not have this group
|
||||
|
|
@ -2323,12 +2323,13 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True
|
|||
# and now the group reqs
|
||||
for group in groups.values():
|
||||
group['grouplist'] = {}
|
||||
fields = ('group_id', 'tag_id', 'req_id', 'blocked', 'type', 'is_metapkg', 'name')
|
||||
q = """SELECT %s FROM group_req_listing JOIN groups on req_id = id
|
||||
WHERE %s AND tag_id = %%(tagid)s
|
||||
""" % (",".join(fields), evcondition)
|
||||
columns = ['group_id', 'tag_id', 'req_id', 'blocked', 'type', 'is_metapkg', 'name']
|
||||
for tagid in taglist:
|
||||
for grp_req in _multiRow(q, locals(), fields):
|
||||
query = QueryProcessor(tables=['group_req_listing'], columns=columns,
|
||||
joins=['groups on req_id = id'],
|
||||
clauses=[evcondition, 'tag_id = %(tagid)s'],
|
||||
values={'tagid': tagid})
|
||||
for grp_req in query.execute():
|
||||
grp_id = grp_req['group_id']
|
||||
if grp_id not in groups:
|
||||
# tag does not have this group
|
||||
|
|
@ -2992,7 +2993,7 @@ def repo_info(repo_id, strict=False):
|
|||
:returns: dict (id, state, create_event, creation_time, tag_id, tag_name,
|
||||
dist)
|
||||
"""
|
||||
fields = (
|
||||
fields = [
|
||||
('repo.id', 'id'),
|
||||
('repo.state', 'state'),
|
||||
('repo.task_id', 'task_id'),
|
||||
|
|
@ -3002,12 +3003,12 @@ def repo_info(repo_id, strict=False):
|
|||
('repo.tag_id', 'tag_id'),
|
||||
('tag.name', 'tag_name'),
|
||||
('repo.dist', 'dist'),
|
||||
)
|
||||
q = """SELECT %s FROM repo
|
||||
JOIN tag ON tag_id=tag.id
|
||||
JOIN events ON repo.create_event = events.id
|
||||
WHERE repo.id = %%(repo_id)s""" % ','.join([f[0] for f in fields])
|
||||
return _singleRow(q, locals(), [f[1] for f in fields], strict=strict)
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
joins = ['tag ON tag_id=tag.id', 'events ON repo.create_event = events.id']
|
||||
query = QueryProcessor(tables=['repo'], columns=columns, aliases=aliases, joins=joins,
|
||||
clauses=['repo.id = %(repo_id)s'], values={'repo_id': repo_id})
|
||||
return query.executeOne(strict=strict)
|
||||
|
||||
|
||||
def repo_ready(repo_id):
|
||||
|
|
@ -3059,12 +3060,13 @@ def repo_expire_older(tag_id, event_id, dist=None):
|
|||
|
||||
def repo_references(repo_id):
|
||||
"""Return a list of buildroots that reference the repo"""
|
||||
fields = {
|
||||
'buildroot_id': 'id',
|
||||
'host_id': 'host_id',
|
||||
'create_event': 'create_event',
|
||||
'state': 'state'}
|
||||
fields, aliases = zip(*fields.items())
|
||||
fields = [
|
||||
('buildroot_id', 'id'),
|
||||
('host_id', 'host_id'),
|
||||
('create_event', 'create_event'),
|
||||
('state', 'state'),
|
||||
]
|
||||
fields, aliases = zip(*fields)
|
||||
query = QueryProcessor(
|
||||
tables=['standard_buildroot'],
|
||||
columns=fields, aliases=aliases,
|
||||
|
|
@ -3559,15 +3561,16 @@ def get_tag(tagInfo, strict=False, event=None, blocked=False):
|
|||
tables = ['tag_config']
|
||||
joins = ['tag ON tag.id = tag_config.tag_id',
|
||||
'LEFT OUTER JOIN permissions ON tag_config.perm_id = permissions.id']
|
||||
fields = {'tag.id': 'id',
|
||||
'tag.name': 'name',
|
||||
'tag_config.perm_id': 'perm_id',
|
||||
'permissions.name': 'perm',
|
||||
'tag_config.arches': 'arches',
|
||||
'tag_config.locked': 'locked',
|
||||
'tag_config.maven_support': 'maven_support',
|
||||
'tag_config.maven_include_all': 'maven_include_all',
|
||||
}
|
||||
fields = [
|
||||
('tag.id', 'id'),
|
||||
('tag.name', 'name'),
|
||||
('tag_config.perm_id', 'perm_id'),
|
||||
('permissions.name', 'perm'),
|
||||
('tag_config.arches', 'arches'),
|
||||
('tag_config.locked', 'locked'),
|
||||
('tag_config.maven_support', 'maven_support'),
|
||||
('tag_config.maven_include_all', 'maven_include_all')
|
||||
]
|
||||
|
||||
clause, values = name_or_id_clause('tag', tagInfo)
|
||||
clauses = [clause]
|
||||
|
|
@ -3585,13 +3588,13 @@ def get_tag(tagInfo, strict=False, event=None, blocked=False):
|
|||
# query point instantly before the revoke_event
|
||||
# (to get latest tag_config before deletion)
|
||||
event -= 1
|
||||
fields['tag_config.revoke_event'] = 'revoke_event'
|
||||
fields.append(('tag_config.revoke_event', 'revoke_event'))
|
||||
else:
|
||||
# if tag is not deleted, query event=None
|
||||
pass
|
||||
clauses.append(eventCondition(event, table='tag_config'))
|
||||
|
||||
fields, aliases = zip(*fields.items())
|
||||
fields, aliases = zip(*fields)
|
||||
query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,
|
||||
joins=joins, clauses=clauses, values=values)
|
||||
result = query.executeOne()
|
||||
|
|
@ -4091,17 +4094,17 @@ def get_tag_external_repos(tag_info=None, repo_info=None, event=None):
|
|||
joins = ['tag ON tag_external_repos.tag_id = tag.id',
|
||||
'external_repo ON tag_external_repos.external_repo_id = external_repo.id',
|
||||
'external_repo_config ON external_repo.id = external_repo_config.external_repo_id']
|
||||
fields = {
|
||||
'external_repo.id': 'external_repo_id',
|
||||
'external_repo.name': 'external_repo_name',
|
||||
'priority': 'priority',
|
||||
'tag.id': 'tag_id',
|
||||
'tag.name': 'tag_name',
|
||||
'url': 'url',
|
||||
'merge_mode': 'merge_mode',
|
||||
'arches': 'arches',
|
||||
}
|
||||
columns, aliases = zip(*fields.items())
|
||||
fields = [
|
||||
('external_repo.id', 'external_repo_id'),
|
||||
('external_repo.name', 'external_repo_name'),
|
||||
('priority', 'priority'),
|
||||
('tag.id', 'tag_id'),
|
||||
('tag.name', 'tag_name'),
|
||||
('url', 'url'),
|
||||
('merge_mode', 'merge_mode'),
|
||||
('arches', 'arches'),
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
|
||||
clauses = [eventCondition(event, table='tag_external_repos'),
|
||||
eventCondition(event, table='external_repo_config')]
|
||||
|
|
@ -4861,8 +4864,8 @@ def get_image_build(buildInfo, strict=False):
|
|||
build_id = find_build_id(buildInfo, strict=strict)
|
||||
if not build_id:
|
||||
return None
|
||||
query = QueryProcessor(tables=('image_builds',), columns=('build_id',),
|
||||
clauses=('build_id = %(build_id)i',),
|
||||
query = QueryProcessor(tables=['image_builds'], columns=['build_id'],
|
||||
clauses=['build_id = %(build_id)i'],
|
||||
values={'build_id': build_id})
|
||||
result = query.executeOne()
|
||||
if strict and not result:
|
||||
|
|
@ -5491,23 +5494,24 @@ def get_host(hostInfo, strict=False, event=None):
|
|||
tables = ['host_config']
|
||||
joins = ['host ON host.id = host_config.host_id']
|
||||
|
||||
fields = {'host.id': 'id',
|
||||
'host.user_id': 'user_id',
|
||||
'host.name': 'name',
|
||||
'host.ready': 'ready',
|
||||
'host.task_load': 'task_load',
|
||||
'host_config.arches': 'arches',
|
||||
'host_config.capacity': 'capacity',
|
||||
'host_config.description': 'description',
|
||||
'host_config.comment': 'comment',
|
||||
'host_config.enabled': 'enabled',
|
||||
}
|
||||
fields = [
|
||||
('host.id', 'id'),
|
||||
('host.user_id', 'user_id'),
|
||||
('host.name', 'name'),
|
||||
('host.ready', 'ready'),
|
||||
('host.task_load', 'task_load'),
|
||||
('host_config.arches', 'arches'),
|
||||
('host_config.capacity', 'capacity'),
|
||||
('host_config.description', 'description'),
|
||||
('host_config.comment', 'comment'),
|
||||
('host_config.enabled', 'enabled'),
|
||||
]
|
||||
clauses = [eventCondition(event, table='host_config')]
|
||||
|
||||
clause, values = name_or_id_clause('host', hostInfo)
|
||||
clauses.append(clause)
|
||||
|
||||
fields, aliases = zip(*fields.items())
|
||||
fields, aliases = zip(*fields)
|
||||
query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,
|
||||
joins=joins, clauses=clauses, values=values)
|
||||
result = query.executeOne()
|
||||
|
|
@ -5730,9 +5734,14 @@ def list_channels(hostID=None, event=None, enabled=None):
|
|||
[{'comment': 'test channel', 'description': 'container channel',
|
||||
'enabled': True, 'id': 20, 'name': 'container', 'container channel' }]
|
||||
"""
|
||||
fields = {'channels.id': 'id', 'channels.name': 'name', 'channels.description': 'description',
|
||||
'channels.enabled': 'enabled', 'channels.comment': 'comment'}
|
||||
columns, aliases = zip(*fields.items())
|
||||
fields = [
|
||||
('channels.id', 'id'),
|
||||
('channels.name', 'name'),
|
||||
('channels.description', 'description'),
|
||||
('channels.enabled', 'enabled'),
|
||||
('channels.comment', 'comment'),
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
if enabled is not None:
|
||||
if enabled:
|
||||
enable_clause = 'enabled IS TRUE'
|
||||
|
|
@ -7414,9 +7423,9 @@ def add_archive_type(name, description, extensions, compression_type=None):
|
|||
for ext in extensions.split(' '):
|
||||
if not ext.replace('.', '').isalnum():
|
||||
raise koji.GenericError(f'No such {ext} file extension')
|
||||
select = r"""SELECT id FROM archivetypes
|
||||
WHERE extensions ~* E'(\\s|^)%s(\\s|$)'""" % ext
|
||||
results = _multiRow(select, {}, ('id',))
|
||||
query = QueryProcessor(tables=['archivetypes'], columns=['id'],
|
||||
clauses=[f"extensions ~* E'(\\s|^){ext}(\\s|$)'"], values={})
|
||||
results = query.execute()
|
||||
if len(results) > 0:
|
||||
raise koji.GenericError(f'file extension {ext} already exists')
|
||||
insert = InsertProcessor('archivetypes', data=data)
|
||||
|
|
@ -7490,8 +7499,8 @@ def new_image_build(build_info):
|
|||
# We don't have to worry about updating an image build because the id is
|
||||
# the only thing we care about, and that should never change if a build
|
||||
# fails first and succeeds later on a resubmission.
|
||||
query = QueryProcessor(tables=('image_builds',), columns=('build_id',),
|
||||
clauses=('build_id = %(build_id)i',),
|
||||
query = QueryProcessor(tables=['image_builds'], columns=['build_id'],
|
||||
clauses=['build_id = %(build_id)i'],
|
||||
values={'build_id': build_info['id']})
|
||||
result = query.executeOne()
|
||||
if not result:
|
||||
|
|
@ -7506,11 +7515,9 @@ def new_typed_build(build_info, btype):
|
|||
"""Mark build as a given btype"""
|
||||
|
||||
btype_id = lookup_name('btype', btype, strict=True)['id']
|
||||
query = QueryProcessor(tables=('build_types',), columns=('build_id',),
|
||||
clauses=('build_id = %(build_id)i',
|
||||
'btype_id = %(btype_id)i'),
|
||||
values={'build_id': build_info['id'],
|
||||
'btype_id': btype_id})
|
||||
query = QueryProcessor(tables=['build_types'], columns=['build_id'],
|
||||
clauses=['build_id = %(build_id)i', 'btype_id = %(btype_id)i'],
|
||||
values={'build_id': build_info['id'], 'btype_id': btype_id})
|
||||
result = query.executeOne()
|
||||
if not result:
|
||||
insert = InsertProcessor('build_types')
|
||||
|
|
@ -8337,10 +8344,16 @@ def build_references(build_id, limit=None, lazy=False):
|
|||
ret = {}
|
||||
|
||||
# find tags
|
||||
q = """SELECT tag_id, tag.name FROM tag_listing JOIN tag on tag_id = tag.id
|
||||
WHERE build_id = %(build_id)i AND active = TRUE"""
|
||||
ret['tags'] = _multiRow(q, locals(), ('id', 'name'))
|
||||
|
||||
fields = [
|
||||
('tag_id', 'tag_id'),
|
||||
('tag.name', 'name')
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
query = QueryProcessor(tables=['tag_listing'], columns=columns, aliases=aliases,
|
||||
joins=['tag on tag_id = tag.id'],
|
||||
clauses=['build_id = %(build_id)i', 'active = TRUE'],
|
||||
values={'build_id': build_id})
|
||||
ret['tags'] = query.execute()
|
||||
if lazy and ret['tags']:
|
||||
return ret
|
||||
|
||||
|
|
@ -8348,11 +8361,11 @@ def build_references(build_id, limit=None, lazy=False):
|
|||
query = QueryProcessor(tables=['rpminfo'], columns=['id'],
|
||||
clauses=['build_id=%(build_id)i'],
|
||||
values={'build_id': build_id}, opts={'asList': True})
|
||||
build_rpm_ids = query.execute()
|
||||
build_rpm_ids = [(i,) for i in query.execute()]
|
||||
query = QueryProcessor(tables=['archiveinfo'], columns=['id'],
|
||||
clauses=['build_id=%(build_id)i'],
|
||||
values={'build_id': build_id}, opts={'asList': True})
|
||||
build_archive_ids = query.execute()
|
||||
build_archive_ids = [(i,) for i in query.execute()]
|
||||
if not build_archive_ids:
|
||||
build_archive_ids = []
|
||||
|
||||
|
|
@ -8372,7 +8385,7 @@ def build_references(build_id, limit=None, lazy=False):
|
|||
AND build.state = %(st_complete)i"""
|
||||
if limit is not None:
|
||||
q += "\nLIMIT %(limit)i"
|
||||
for (rpm_id,) in build_rpm_ids:
|
||||
for rpm_id in build_rpm_ids:
|
||||
for row in _multiRow(q, locals(), fields):
|
||||
idx.setdefault(row['id'], row)
|
||||
if limit is not None and len(idx) > limit:
|
||||
|
|
@ -8395,7 +8408,7 @@ def build_references(build_id, limit=None, lazy=False):
|
|||
qopts['limit'] = limit
|
||||
query = QueryProcessor(columns=fields, tables=['archive_rpm_components'],
|
||||
clauses=clauses, joins=joins, values=values, opts=qopts)
|
||||
for (rpm_id,) in build_rpm_ids:
|
||||
for rpm_id in build_rpm_ids:
|
||||
query.values['rpm_id'] = rpm_id
|
||||
archive_ids = [i[0] for i in query.execute()]
|
||||
ret['component_of'].extend(archive_ids)
|
||||
|
|
@ -8404,20 +8417,29 @@ def build_references(build_id, limit=None, lazy=False):
|
|||
return ret
|
||||
|
||||
# find archives whose buildroots we were in
|
||||
fields = ('id', 'type_id', 'type_name', 'build_id', 'filename')
|
||||
fields = [
|
||||
('archiveinfo.id', 'id'),
|
||||
('archiveinfo.type_id', 'type_id'),
|
||||
('archivetypes.name', 'type_name'),
|
||||
('archiveinfo.build_id', 'build_id'),
|
||||
('archiveinfo.filename', 'filename')
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
idx = {}
|
||||
q = """SELECT archiveinfo.id, archiveinfo.type_id, archivetypes.name, archiveinfo.build_id,
|
||||
archiveinfo.filename
|
||||
FROM buildroot_archives
|
||||
JOIN archiveinfo ON archiveinfo.buildroot_id = buildroot_archives.buildroot_id
|
||||
JOIN build ON archiveinfo.build_id = build.id
|
||||
JOIN archivetypes ON archivetypes.id = archiveinfo.type_id
|
||||
WHERE buildroot_archives.archive_id = %(archive_id)i
|
||||
AND build.state = %(st_complete)i"""
|
||||
opts = {}
|
||||
if limit is not None:
|
||||
q += "\nLIMIT %(limit)i"
|
||||
for (archive_id,) in build_archive_ids:
|
||||
for row in _multiRow(q, locals(), fields):
|
||||
opts = {'limit': limit}
|
||||
for archive_id in build_archive_ids:
|
||||
query = QueryProcessor(tables=['buildroot_archives'], columns=columns, aliases=aliases,
|
||||
joins=['archiveinfo ON archiveinfo.buildroot_id = '
|
||||
'buildroot_archives.buildroot_id',
|
||||
'build ON archiveinfo.build_id = build.id',
|
||||
'archivetypes ON archivetypes.id = archiveinfo.type_id'],
|
||||
clauses=['buildroot_archives.archive_id = %(archive_id)i',
|
||||
'build.state = %(st_complete)i'],
|
||||
values={'archive_id': archive_id, 'st_complete': st_complete},
|
||||
opts=opts)
|
||||
for row in query.execute():
|
||||
idx.setdefault(row['id'], row)
|
||||
if limit is not None and len(idx) > limit:
|
||||
break
|
||||
|
|
@ -8438,7 +8460,7 @@ def build_references(build_id, limit=None, lazy=False):
|
|||
qopts['limit'] = limit
|
||||
query = QueryProcessor(columns=fields, tables=['archive_components'],
|
||||
clauses=clauses, joins=joins, values=values, opts=qopts)
|
||||
for (archive_id,) in build_archive_ids:
|
||||
for archive_id in build_archive_ids:
|
||||
query.values['archive_id'] = archive_id
|
||||
archive_ids = [i[0] for i in query.execute()]
|
||||
ret['component_of'].extend(archive_ids)
|
||||
|
|
@ -8936,9 +8958,9 @@ def add_group_member(group, user, strict=True):
|
|||
# check to see if user is already a member
|
||||
data = {'user_id': uinfo['id'], 'group_id': ginfo['id']}
|
||||
table = 'user_groups'
|
||||
clauses = ('user_id = %(user_id)i', 'group_id = %(group_id)s')
|
||||
query = QueryProcessor(columns=['user_id'], tables=[table],
|
||||
clauses=('active = TRUE',) + clauses,
|
||||
clauses=['active = TRUE', 'user_id = %(user_id)i',
|
||||
'group_id = %(group_id)s'],
|
||||
values=data, opts={'rowlock': True})
|
||||
row = query.executeOne()
|
||||
if row:
|
||||
|
|
@ -9011,9 +9033,12 @@ def list_cgs():
|
|||
are permitted to import for this content generator.
|
||||
"""
|
||||
|
||||
fields = {'content_generator.id': 'id', 'content_generator.name': 'name',
|
||||
'users.name': 'user_name'}
|
||||
columns, aliases = zip(*fields.items())
|
||||
fields = [
|
||||
('content_generator.id', 'id'),
|
||||
('content_generator.name', 'name'),
|
||||
('users.name', 'user_name'),
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
tables = ['cg_users']
|
||||
joins = ['content_generator ON content_generator.id = cg_users.cg_id',
|
||||
'users ON users.id = cg_users.user_id']
|
||||
|
|
@ -10290,16 +10315,17 @@ class RootExports(object):
|
|||
if not context.session.logged_in:
|
||||
return None
|
||||
clauses = ['expired is FALSE']
|
||||
fields = {'user_id': 'user_id',
|
||||
'expired': 'expired',
|
||||
'master': 'master',
|
||||
'authtype': 'authtype',
|
||||
'callnum': 'callnum',
|
||||
"date_part('epoch', start_time)": 'start_time',
|
||||
'update_time': 'update_time',
|
||||
'exclusive': 'exclusive',
|
||||
}
|
||||
columns, aliases = zip(*fields.items())
|
||||
fields = [
|
||||
('user_id', 'user_id'),
|
||||
('expired', 'expired'),
|
||||
('master', 'master'),
|
||||
('authtype', 'authtype'),
|
||||
('callnum', 'callnum'),
|
||||
("date_part('epoch', start_time)", 'start_time'),
|
||||
('update_time', 'update_time'),
|
||||
('exclusive', 'exclusive'),
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
if details:
|
||||
columns += ('hostip', 'id')
|
||||
aliases += ('hostip', 'id')
|
||||
|
|
@ -10351,10 +10377,14 @@ class RootExports(object):
|
|||
|
||||
If no event with the given id exists, an error will be raised.
|
||||
"""
|
||||
fields = ('id', 'ts')
|
||||
values = {'id': id}
|
||||
q = """SELECT id, date_part('epoch', time) FROM events WHERE id = %(id)i"""
|
||||
return _singleRow(q, values, fields, strict=True)
|
||||
fields = [
|
||||
('id', 'id'),
|
||||
("date_part('epoch', time)", 'ts')
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
query = QueryProcessor(tables=['events'], columns=columns, aliases=aliases,
|
||||
clauses=['id = %(id)i'], values={'id': id})
|
||||
return query.executeOne(strict=True)
|
||||
|
||||
def getLastEvent(self, before=None):
|
||||
"""
|
||||
|
|
@ -10373,18 +10403,24 @@ class RootExports(object):
|
|||
When trying to find information about a specific event, the getEvent() method
|
||||
should be used.
|
||||
"""
|
||||
fields = ('id', 'ts')
|
||||
fields = [
|
||||
('id', 'id'),
|
||||
("date_part('epoch', time)", 'ts')
|
||||
]
|
||||
columns, aliases = zip(*fields)
|
||||
values = {}
|
||||
q = """SELECT id, date_part('epoch', time) FROM events"""
|
||||
clauses = []
|
||||
if before is not None:
|
||||
if not isinstance(before, NUMERIC_TYPES):
|
||||
raise koji.GenericError('Invalid type for before: %s' % type(before))
|
||||
# use the repr() conversion because it retains more precision than the
|
||||
# string conversion
|
||||
q += """ WHERE date_part('epoch', time) < %(before)r"""
|
||||
clauses = ["date_part('epoch', time) < %(before)r"]
|
||||
values['before'] = before
|
||||
q += """ ORDER BY id DESC LIMIT 1"""
|
||||
return _singleRow(q, values, fields, strict=True)
|
||||
opts = {'order': '-id', 'limit': 1}
|
||||
query = QueryProcessor(tables=['events'], columns=columns, aliases=aliases,
|
||||
clauses=clauses, values=values, opts=opts)
|
||||
return query.executeOne(strict=True)
|
||||
|
||||
evalPolicy = staticmethod(eval_policy)
|
||||
|
||||
|
|
@ -11904,19 +11940,17 @@ class RootExports(object):
|
|||
st_complete = koji.BUILD_STATES['COMPLETE']
|
||||
# we need to filter out builds without tasks (imports) as they'll reduce
|
||||
# time average. CG imported builds often contain *_koji_task_id instead.
|
||||
query = """SELECT date_part('epoch', avg(build.completion_time - events.time))
|
||||
FROM build
|
||||
JOIN events ON build.create_event = events.id
|
||||
WHERE build.pkg_id = %(packageID)i
|
||||
AND build.state = %(st_complete)i
|
||||
AND (
|
||||
build.task_id IS NOT NULL OR
|
||||
build.extra LIKE '%%' || 'koji_task_id' || '%%'
|
||||
)"""
|
||||
clauses = ['build.pkg_id = %(packageID)i', 'build.state = %(st_complete)i',
|
||||
"build.task_id IS NOT NULL OR build.extra LIKE '%%' || 'koji_task_id' || '%%'"]
|
||||
if age is not None:
|
||||
query += " AND build.completion_time > NOW() - '%s months'::interval" % int(age)
|
||||
|
||||
return _singleValue(query, locals())
|
||||
clauses.append(f"build.completion_time > NOW() - '{int(age)} months'::interval")
|
||||
query = QueryProcessor(tables=['build'],
|
||||
columns=["date_part('epoch', "
|
||||
"avg(build.completion_time - events.time))"],
|
||||
joins=['events ON build.create_event = events.id'],
|
||||
clauses=clauses,
|
||||
values={'packageID': packageID, 'st_complete': st_complete})
|
||||
return query.singleValue()
|
||||
|
||||
packageListAdd = staticmethod(pkglist_add)
|
||||
packageListRemove = staticmethod(pkglist_remove)
|
||||
|
|
@ -12547,7 +12581,7 @@ class RootExports(object):
|
|||
else:
|
||||
raise koji.GenericError('queryOpts.group is not available for this API')
|
||||
query = QueryProcessor(columns=fields, aliases=aliases,
|
||||
tables=('users',), joins=joins, clauses=clauses,
|
||||
tables=['users'], joins=joins, clauses=clauses,
|
||||
values=locals(), opts=queryOpts,
|
||||
enable_group=True, transform=xform_user_krb)
|
||||
return query.execute()
|
||||
|
|
@ -13073,19 +13107,20 @@ class RootExports(object):
|
|||
userID = get_user(userID, strict=True)['id']
|
||||
clauses.append('user_id = %(userID)i')
|
||||
|
||||
fields = {'host.id': 'id',
|
||||
'host.user_id': 'user_id',
|
||||
'host.name': 'name',
|
||||
'host.ready': 'ready',
|
||||
'host.task_load': 'task_load',
|
||||
'host_config.arches': 'arches',
|
||||
'host_config.capacity': 'capacity',
|
||||
'host_config.description': 'description',
|
||||
'host_config.comment': 'comment',
|
||||
'host_config.enabled': 'enabled',
|
||||
}
|
||||
fields = [
|
||||
('host.id', 'id'),
|
||||
('host.user_id', 'user_id'),
|
||||
('host.name', 'name'),
|
||||
('host.ready', 'ready'),
|
||||
('host.task_load', 'task_load'),
|
||||
('host_config.arches', 'arches'),
|
||||
('host_config.capacity', 'capacity'),
|
||||
('host_config.description', 'description'),
|
||||
('host_config.comment', 'comment'),
|
||||
('host_config.enabled', 'enabled'),
|
||||
]
|
||||
tables = ['host_config']
|
||||
fields, aliases = zip(*fields.items())
|
||||
fields, aliases = zip(*fields)
|
||||
query = QueryProcessor(columns=fields, aliases=aliases,
|
||||
tables=tables, joins=joins, clauses=clauses, values=locals())
|
||||
return query.execute()
|
||||
|
|
@ -13095,14 +13130,13 @@ class RootExports(object):
|
|||
|
||||
The timestamp represents the last time the host with the given
|
||||
ID contacted the hub. Returns None if the host has never contacted
|
||||
the hub."""
|
||||
query = """SELECT update_time FROM sessions
|
||||
JOIN host ON sessions.user_id = host.user_id
|
||||
WHERE host.id = %(hostID)i
|
||||
ORDER BY update_time DESC
|
||||
LIMIT 1
|
||||
"""
|
||||
date = _singleValue(query, locals(), strict=False)
|
||||
the hub."""
|
||||
opts = {'order': '-update_time', 'limit': 1}
|
||||
query = QueryProcessor(tables=['sessions'], columns=['update_time'],
|
||||
joins=['host ON sessions.user_id = host.user_id'],
|
||||
clauses=['host.id = %(hostID)i'], values={'hostID': hostID},
|
||||
opts=opts)
|
||||
date = query.singleValue(strict=False)
|
||||
if ts and date is not None:
|
||||
return date.timestamp()
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -315,6 +315,12 @@ def nextval(sequence):
|
|||
return _singleValue("SELECT nextval(%(sequence)s)", data, strict=True)
|
||||
|
||||
|
||||
def currval(sequence):
|
||||
"""Get the current value for the given sequence"""
|
||||
data = {'sequence': sequence}
|
||||
return _singleValue("SELECT currval(%(sequence)s)", data, strict=True)
|
||||
|
||||
|
||||
class Savepoint(object):
|
||||
|
||||
def __init__(self, name):
|
||||
|
|
|
|||
|
|
@ -6,10 +6,10 @@ import koji
|
|||
import kojihub
|
||||
|
||||
IP = kojihub.InsertProcessor
|
||||
QP = kojihub.QueryProcessor
|
||||
|
||||
|
||||
class TestAddArchiveType(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
||||
self.context = mock.patch('kojihub.context').start()
|
||||
|
|
@ -23,7 +23,10 @@ class TestAddArchiveType(unittest.TestCase):
|
|||
self.insert_execute = mock.MagicMock()
|
||||
self.verify_name_internal = mock.patch('kojihub.verify_name_internal').start()
|
||||
self.get_archive_type = mock.patch('kojihub.get_archive_type').start()
|
||||
self._multiRow = mock.patch('kojihub._multiRow').start()
|
||||
self.QueryProcessor = mock.patch('kojihub.QueryProcessor',
|
||||
side_effect=self.getQuery).start()
|
||||
self.queries = []
|
||||
self.query_execute = mock.MagicMock()
|
||||
|
||||
def tearDown(self):
|
||||
mock.patch.stopall()
|
||||
|
|
@ -34,7 +37,14 @@ class TestAddArchiveType(unittest.TestCase):
|
|||
self.inserts.append(insert)
|
||||
return insert
|
||||
|
||||
def getQuery(self, *args, **kwargs):
|
||||
query = QP(*args, **kwargs)
|
||||
query.execute = self.query_execute
|
||||
self.queries.append(query)
|
||||
return query
|
||||
|
||||
def test_add_archive_type_valid_empty_compression_type(self):
|
||||
self.query_execute.side_effect = [[]]
|
||||
self.verify_name_internal.return_value = None
|
||||
self.get_archive_type.return_value = None
|
||||
kojihub.add_archive_type('deb', 'Debian package', 'deb')
|
||||
|
|
@ -50,6 +60,7 @@ class TestAddArchiveType(unittest.TestCase):
|
|||
self.context.session.assertPerm.assert_called_with('admin')
|
||||
|
||||
def test_add_archive_type_valid_with_compression_type(self):
|
||||
self.query_execute.side_effect = [[]]
|
||||
self.verify_name_internal.return_value = None
|
||||
self.get_archive_type.return_value = None
|
||||
kojihub.add_archive_type('jar', 'Jar package', 'jar', 'zip')
|
||||
|
|
|
|||
|
|
@ -3,43 +3,55 @@ import unittest
|
|||
|
||||
import kojihub
|
||||
|
||||
QP = kojihub.QueryProcessor
|
||||
|
||||
|
||||
class TestGetBuildType(unittest.TestCase):
|
||||
|
||||
@mock.patch('kojihub.get_build')
|
||||
@mock.patch('kojihub.QueryProcessor')
|
||||
def test_no_build(self, QueryProcessor, get_build):
|
||||
get_build.return_value = None
|
||||
def setUp(self):
|
||||
self.maxDiff = None
|
||||
self.QueryProcessor = mock.patch('kojihub.QueryProcessor',
|
||||
side_effect=self.getQuery).start()
|
||||
self.queries = []
|
||||
self.query_execute = mock.MagicMock()
|
||||
self.get_build = mock.patch('kojihub.get_build').start()
|
||||
self.get_maven_build = mock.patch('kojihub.get_maven_build').start()
|
||||
self.get_win_build = mock.patch('kojihub.get_win_build').start()
|
||||
self.get_image_build = mock.patch('kojihub.get_image_build').start()
|
||||
|
||||
def tearDown(self):
|
||||
mock.patch.stopall()
|
||||
|
||||
def getQuery(self, *args, **kwargs):
|
||||
query = QP(*args, **kwargs)
|
||||
query.execute = self.query_execute
|
||||
self.queries.append(query)
|
||||
return query
|
||||
|
||||
def test_no_build(self):
|
||||
self.get_build.return_value = None
|
||||
|
||||
# strict on
|
||||
kojihub.get_build_type('mytestbuild-1-1', strict=True)
|
||||
QueryProcessor.assert_not_called()
|
||||
get_build.assert_called_with('mytestbuild-1-1', strict=True)
|
||||
self.assertEqual(len(self.queries), 0)
|
||||
self.get_build.assert_called_with('mytestbuild-1-1', strict=True)
|
||||
|
||||
|
||||
@mock.patch('kojihub.get_maven_build')
|
||||
@mock.patch('kojihub.get_win_build')
|
||||
@mock.patch('kojihub.get_image_build')
|
||||
@mock.patch('kojihub.get_build')
|
||||
@mock.patch('kojihub.QueryProcessor')
|
||||
def test_has_build(self, QueryProcessor, get_build, get_image_build,
|
||||
get_win_build, get_maven_build):
|
||||
def test_has_build(self):
|
||||
typeinfo = {'maven': {'maven': 'foo'},
|
||||
'win': {'win': 'foo'},
|
||||
'image': {'image': 'foo'},
|
||||
'new_type': {'bar': 42}}
|
||||
binfo = {'id' : 1, 'extra' : {'typeinfo': {'new_type': typeinfo['new_type']}}}
|
||||
get_build.return_value = binfo
|
||||
get_maven_build.return_value = typeinfo['maven']
|
||||
get_win_build.return_value = typeinfo['win']
|
||||
get_image_build.return_value = typeinfo['image']
|
||||
binfo = {'id': 1, 'extra': {'typeinfo': {'new_type': typeinfo['new_type']}}}
|
||||
self.get_build.return_value = binfo
|
||||
self.get_maven_build.return_value = typeinfo['maven']
|
||||
self.get_win_build.return_value = typeinfo['win']
|
||||
self.get_image_build.return_value = typeinfo['image']
|
||||
|
||||
query = QueryProcessor.return_value
|
||||
query.execute.return_value = [['new_type']]
|
||||
self.query_execute.return_value = [['new_type']]
|
||||
|
||||
ret = kojihub.get_build_type('mytestbuild-1-1', strict=True)
|
||||
assert ret == typeinfo
|
||||
get_build.assert_called_with('mytestbuild-1-1', strict=True)
|
||||
get_maven_build.assert_called_with(binfo['id'], strict=False)
|
||||
get_win_build.assert_called_with(binfo['id'], strict=False)
|
||||
get_image_build.assert_called_with(binfo['id'], strict=False)
|
||||
self.get_build.assert_called_with('mytestbuild-1-1', strict=True)
|
||||
self.get_maven_build.assert_called_with(binfo['id'], strict=False)
|
||||
self.get_win_build.assert_called_with(binfo['id'], strict=False)
|
||||
self.get_image_build.assert_called_with(binfo['id'], strict=False)
|
||||
|
|
|
|||
|
|
@ -5,14 +5,24 @@ import sys
|
|||
|
||||
import kojihub
|
||||
|
||||
QP = kojihub.QueryProcessor
|
||||
|
||||
|
||||
class TestGetLastHostUpdate(unittest.TestCase):
|
||||
def getQuery(self, *args, **kwargs):
|
||||
query = QP(*args, **kwargs)
|
||||
query.singleValue = self.query_singleValue
|
||||
self.queries.append(query)
|
||||
return query
|
||||
|
||||
def setUp(self):
|
||||
self.exports = kojihub.RootExports()
|
||||
self.QueryProcessor = mock.patch('kojihub.QueryProcessor',
|
||||
side_effect=self.getQuery).start()
|
||||
self.queries = []
|
||||
self.query_singleValue = mock.MagicMock()
|
||||
|
||||
@mock.patch('kojihub._singleValue')
|
||||
def test_valid_ts(self, _singleValue):
|
||||
def test_valid_ts(self):
|
||||
expected = 1615875554.862938
|
||||
if sys.version_info[1] <= 6:
|
||||
dt = datetime.datetime.strptime(
|
||||
|
|
@ -20,19 +30,17 @@ class TestGetLastHostUpdate(unittest.TestCase):
|
|||
else:
|
||||
dt = datetime.datetime.strptime(
|
||||
"2021-03-16T06:19:14.862938+00:00", "%Y-%m-%dT%H:%M:%S.%f%z")
|
||||
_singleValue.return_value = dt
|
||||
self.query_singleValue.return_value = dt
|
||||
rv = self.exports.getLastHostUpdate(1, ts=True)
|
||||
self.assertEqual(rv, expected)
|
||||
|
||||
@mock.patch('kojihub._singleValue')
|
||||
def test_valid_datetime(self, _singleValue):
|
||||
def test_valid_datetime(self):
|
||||
if sys.version_info[1] <= 6:
|
||||
dt = datetime.datetime.strptime(
|
||||
"2021-03-16T06:19:14.862938+0000", "%Y-%m-%dT%H:%M:%S.%f%z")
|
||||
else:
|
||||
dt = datetime.datetime.strptime(
|
||||
"2021-03-16T06:19:14.862938+00:00", "%Y-%m-%dT%H:%M:%S.%f%z")
|
||||
expected = "2021-03-16T06:19:14.862938+00:00"
|
||||
_singleValue.return_value = dt
|
||||
self.query_singleValue.return_value = dt
|
||||
rv = self.exports.getLastHostUpdate(1)
|
||||
self.assertEqual(rv, dt)
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@ class TestNewBuild(unittest.TestCase):
|
|||
self.get_build = mock.patch('kojihub.get_build').start()
|
||||
self.recycle_build = mock.patch('kojihub.recycle_build').start()
|
||||
self.context = mock.patch('kojihub.context').start()
|
||||
self._singleValue = mock.patch('kojihub._singleValue').start()
|
||||
|
||||
def tearDown(self):
|
||||
mock.patch.stopall()
|
||||
|
|
|
|||
|
|
@ -27,6 +27,7 @@ class TestRepoFunctions(unittest.TestCase):
|
|||
self._dml = mock.patch('kojihub._dml').start()
|
||||
self.exports = kojihub.RootExports()
|
||||
self.get_tag = mock.patch('kojihub.get_tag').start()
|
||||
self.query_executeOne = mock.MagicMock()
|
||||
|
||||
def tearDown(self):
|
||||
mock.patch.stopall()
|
||||
|
|
@ -34,6 +35,7 @@ class TestRepoFunctions(unittest.TestCase):
|
|||
def getQuery(self, *args, **kwargs):
|
||||
query = QP(*args, **kwargs)
|
||||
query.execute = mock.MagicMock()
|
||||
query.executeOne = self.query_executeOne
|
||||
self.queries.append(query)
|
||||
return query
|
||||
|
||||
|
|
@ -78,8 +80,7 @@ class TestRepoFunctions(unittest.TestCase):
|
|||
if 'dist = %(dist)s' not in update.clauses:
|
||||
raise Exception('Missing dist condition')
|
||||
|
||||
@mock.patch('kojihub._singleRow')
|
||||
def test_repo_info(self, _singleRow):
|
||||
def test_repo_info(self):
|
||||
repo_row = {'id': 10,
|
||||
'state': 0,
|
||||
'task_id': 15,
|
||||
|
|
@ -90,7 +91,7 @@ class TestRepoFunctions(unittest.TestCase):
|
|||
'tag_id': 3,
|
||||
'tag_name': 'test-tag',
|
||||
'dist': False}
|
||||
_singleRow.return_value = repo_row
|
||||
self.query_executeOne.return_value = repo_row
|
||||
rv = kojihub.repo_info(3)
|
||||
self.assertEqual(rv, repo_row)
|
||||
|
||||
|
|
|
|||
|
|
@ -259,8 +259,7 @@ class TestGrouplist(unittest.TestCase):
|
|||
self.assertEqual(u.values['user_id'], uid)
|
||||
self.assertEqual(u.values['group_id'], gid)
|
||||
|
||||
@mock.patch('kojihub._multiRow')
|
||||
def test_get_group_members(self, _multiRow):
|
||||
def test_get_group_members(self):
|
||||
group, gid = 'test_group', 1
|
||||
|
||||
# no permission
|
||||
|
|
@ -300,4 +299,3 @@ class TestGrouplist(unittest.TestCase):
|
|||
self.assertEqual(len(self.queries), 1)
|
||||
self.assertEqual(len(self.inserts), 0)
|
||||
self.assertEqual(len(self.updates), 0)
|
||||
_multiRow.assert_not_called()
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue