Move database classes and functions from kojihub.py to koji/db.py

Move QueryProcessor, InsertProcessor, UpdateProcessor,
BulkInsertProcessor, _applyQueryOpts, get_event, _dml,
_fetchMulti, _fetchSingle, _singleValue, _multiRow, _singleRow

Update the koji-sweep-db script to use the DB Processors

Fixes: https://pagure.io/koji/issue/3466
This commit is contained in:
Jana Cupova 2022-08-18 07:06:08 +02:00 committed by Tomas Kopecek
parent fadda5b755
commit 1cfe6538db
30 changed files with 1170 additions and 1141 deletions

View file

@ -75,6 +75,22 @@ from koji.util import (
multi_fnmatch,
safer_move,
)
from koji.db import (
BulkInsertProcessor,
InsertProcessor,
QueryProcessor,
Savepoint,
UpdateProcessor,
_applyQueryOpts,
_dml,
_fetchMulti,
_fetchSingle,
_multiRow,
_singleRow,
_singleValue,
get_event,
nextval,
)
logger = logging.getLogger('koji.hub')
@ -5421,89 +5437,6 @@ def list_task_output(taskID, stat=False, all_volumes=False, strict=False):
return result
def _fetchMulti(query, values):
    """Run the query and return all rows.

    :param str query: SQL text, possibly containing %(name)s substitutions
    :param dict values: named parameters for the query
    :returns: list of row tuples (empty list if there are no matches)
    """
    c = context.cnx.cursor()
    try:
        c.execute(query, values)
        return c.fetchall()
    finally:
        # close the cursor even when execute() raises, so a failed query
        # does not leak the cursor for the rest of the request
        c.close()
def _fetchSingle(query, values, strict=False):
    """Run the query and return a single row.

    If strict is true, raise an error if the query returns more or less than
    one row."""
    rows = _fetchMulti(query, values)
    if not rows:
        if strict:
            raise koji.GenericError('query returned no rows')
        return None
    if strict and len(rows) > 1:
        raise koji.GenericError('multiple rows returned for a single row query')
    return rows[0]
def _multiRow(query, values, fields):
    """Return all rows from "query" as a list of maps.

    Named query parameters can be specified using the "values" map. Each map
    in the returned list has a key for each element of the "fields" list. An
    empty list is returned when there are no results."""
    rows = _fetchMulti(query, values)
    return [dict(zip(fields, row)) for row in rows]
def _singleRow(query, values, fields, strict=False):
    """Return a single row from "query" as a map keyed by "fields".

    Named parameters can be specified using the "values" map. If "strict" is
    true, a GenericError is raised when the query returns no rows or more than
    one row; otherwise None is returned for an empty result."""
    row = _fetchSingle(query, values, strict)
    if not row:
        # the strict no-rows case already raised inside _fetchSingle
        return None
    return dict(zip(fields, row))
def _singleValue(query, values=None, strict=True):
    """Perform a query that returns a single value.

    Note that unless strict is True a return value of None could mean either
    a single NULL value or zero rows returned."""
    params = {} if values is None else values
    row = _fetchSingle(query, params, strict)
    if not row:
        # strict handling for zero rows already happened in _fetchSingle
        return None
    if strict and len(row) > 1:
        raise koji.GenericError('multiple fields returned for a single value query')
    return row[0]
def _dml(operation, values, log_errors=True):
    """Run an insert, update, or delete. Return number of rows affected.

    If log_errors is False, errors will not be logged. That makes sense only
    for queries which are expected to fail (e.g. LOCK NOWAIT).
    """
    c = context.cnx.cursor()
    try:
        c.execute(operation, values, log_errors=log_errors)
        ret = c.rowcount
    finally:
        # release the cursor even when execute() raises
        c.close()
    logger.debug("Operation affected %s row(s)", ret)
    # mark the transaction dirty so the request handler commits it
    context.commit_pending = True
    return ret
def get_host(hostInfo, strict=False, event=None):
"""Get information about the given host. hostInfo may be
either a string (hostname) or int (host id). A map will be returned
@ -9096,38 +9029,6 @@ def assert_cg(cg, user=None):
raise koji.AuthError("Content generator access required (%s)" % cg['name'])
def get_event():
    """Get an event id for this transaction.

    We cache the result in context, so subsequent calls in the same
    transaction will get the same event. This cache is cleared between the
    individual calls in a multicall.
    See: https://pagure.io/koji/pull-request/74
    """
    if not hasattr(context, 'event_id'):
        # first call in this transaction -- ask the database for a new event
        context.event_id = _singleValue("SELECT get_event()")
    return context.event_id
def nextval(sequence):
    """Get the next value for the given sequence"""
    return _singleValue("SELECT nextval(%(sequence)s)",
                        {'sequence': sequence}, strict=True)
class Savepoint(object):
    """Create a database savepoint that can later be rolled back to."""

    def __init__(self, name):
        # NOTE(review): name is interpolated directly into the statement, so
        # callers must pass a literal identifier, never external input
        self.name = name
        _dml("SAVEPOINT %s" % name, {})

    def rollback(self):
        """Roll the transaction back to this savepoint."""
        _dml("ROLLBACK TO SAVEPOINT %s" % self.name, {})
def parse_json(value, desc=None, errstr=None):
if value is None:
return value
@ -9147,560 +9048,6 @@ def _fix_extra_field(row):
return row
class BulkInsertProcessor(object):
    def __init__(self, table, data=None, columns=None, strict=True, batch=1000):
        """Do bulk inserts - it has some limitations compared to
        InsertProcessor (no rawset, dup_check).

        set() is replaced with add_record() to avoid confusion

        table - name of the table
        data - list of dict per record
        columns - list/set of names of used columns - makes sense
                  mainly with strict=True
        strict - if True, all records must contain values for all columns.
                 if False, missing values will be inserted as NULLs
        batch - batch size for inserts (one statement per batch)
        """
        self.table = table
        self.data = []
        if columns is None:
            self.columns = set()
        else:
            self.columns = set(columns)
        if data is not None:
            self.data = data
            # remember every column seen in the initial data set
            for row in data:
                self.columns |= set(row.keys())
        self.strict = strict
        self.batch = batch

    def __str__(self):
        # render the insert statement for all buffered data (for debugging)
        if not self.data:
            return "-- incomplete insert: no data"
        query, params = self._get_insert(self.data)
        return query

    def _get_insert(self, data):
        """
        Generate one insert statement for the given data

        :param list data: list of rows (dict format) to insert
        :returns: (query, params)
        """
        if not data:
            # should not happen
            raise ValueError('no data for insert')
        parts = ['INSERT INTO %s ' % self.table]
        columns = sorted(self.columns)
        parts.append("(%s) " % ', '.join(columns))
        prepared_data = {}
        values = []
        i = 0
        for row in data:
            row_values = []
            for key in columns:
                if key in row:
                    # suffix each parameter name with the row index so every
                    # row gets distinct named parameters in one statement
                    row_key = '%s%d' % (key, i)
                    row_values.append("%%(%s)s" % row_key)
                    prepared_data[row_key] = row[key]
                elif self.strict:
                    raise koji.GenericError("Missing value %s in BulkInsert" % key)
                else:
                    # non-strict mode fills missing columns with NULL
                    row_values.append("NULL")
            values.append("(%s)" % ', '.join(row_values))
            i += 1
        parts.append("VALUES %s" % ', '.join(values))
        return ''.join(parts), prepared_data

    def __repr__(self):
        return "<BulkInsertProcessor: %r>" % vars(self)

    def add_record(self, **kwargs):
        """Set whole record via keyword args"""
        if not kwargs:
            raise koji.GenericError("Missing values in BulkInsert.add_record")
        self.data.append(kwargs)
        self.columns |= set(kwargs.keys())

    def execute(self):
        # a falsy batch size means a single statement for all records
        if not self.batch:
            self._one_insert(self.data)
        else:
            for i in range(0, len(self.data), self.batch):
                data = self.data[i:i + self.batch]
                self._one_insert(data)

    def _one_insert(self, data):
        # build and run one INSERT statement for the given slice of records
        query, params = self._get_insert(data)
        _dml(query, params)
class InsertProcessor(object):
    """Build an insert statement

    table - the table to insert into
    data - a dictionary of data to insert (keys = row names)
    rawdata - data to insert specified as sql expressions rather than python values

    does not support query inserts of "DEFAULT VALUES"
    """

    def __init__(self, table, data=None, rawdata=None):
        self.table = table
        self.data = {}
        if data:
            self.data.update(data)
        self.rawdata = {}
        if rawdata:
            self.rawdata.update(rawdata)

    def __str__(self):
        if not self.data and not self.rawdata:
            return "-- incomplete update: no assigns"
        parts = ['INSERT INTO %s ' % self.table]
        columns = sorted(list(self.data.keys()) + list(self.rawdata.keys()))
        parts.append("(%s) " % ', '.join(columns))
        values = []
        for key in columns:
            if key in self.data:
                # regular values become named query parameters
                values.append("%%(%s)s" % key)
            else:
                # raw values are embedded verbatim as SQL expressions
                values.append("(%s)" % self.rawdata[key])
        parts.append("VALUES (%s)" % ', '.join(values))
        return ''.join(parts)

    def __repr__(self):
        return "<InsertProcessor: %r>" % vars(self)

    def set(self, **kwargs):
        """Set data via keyword args"""
        self.data.update(kwargs)

    def rawset(self, **kwargs):
        """Set rawdata via keyword args"""
        self.rawdata.update(kwargs)

    def make_create(self, event_id=None, user_id=None):
        # fill in the standard creation columns for versioned tables,
        # defaulting to the current event and logged-in user
        if event_id is None:
            event_id = get_event()
        if user_id is None:
            context.session.assertLogin()
            user_id = context.session.user_id
        self.data['create_event'] = event_id
        self.data['creator_id'] = user_id

    def dup_check(self):
        """Check to see if the insert duplicates an existing row"""
        if self.rawdata:
            # raw SQL expressions cannot be compared against stored values
            logger.warning("Can't perform duplicate check")
            return None
        data = self.data.copy()
        if 'create_event' in self.data:
            # versioned table -- compare only against the active row,
            # ignoring the event/creator bookkeeping columns
            data['active'] = True
            del data['create_event']
            del data['creator_id']
        clauses = ["%s = %%(%s)s" % (k, k) for k in data]
        query = QueryProcessor(columns=list(data.keys()), tables=[self.table],
                               clauses=clauses, values=data)
        if query.execute():
            return True
        return False

    def execute(self):
        return _dml(str(self), self.data)
class UpdateProcessor(object):
    """Build an update statement

    table - the table to insert into
    data - a dictionary of data to insert (keys = row names)
    rawdata - data to insert specified as sql expressions rather than python values
    clauses - a list of where clauses which will be ANDed together
    values - dict of values used in clauses

    does not support the FROM clause
    """

    def __init__(self, table, data=None, rawdata=None, clauses=None, values=None):
        self.table = table
        self.data = {}
        if data:
            self.data.update(data)
        self.rawdata = {}
        if rawdata:
            self.rawdata.update(rawdata)
        self.clauses = []
        if clauses:
            self.clauses.extend(clauses)
        self.values = {}
        if values:
            self.values.update(values)

    def __str__(self):
        if not self.data and not self.rawdata:
            return "-- incomplete update: no assigns"
        parts = ['UPDATE %s SET ' % self.table]
        # SET values use "data."-prefixed parameter names so they cannot
        # collide with names in the where-clause values map (see get_values)
        assigns = ["%s = %%(data.%s)s" % (key, key) for key in self.data]
        assigns.extend(["%s = (%s)" % (key, self.rawdata[key]) for key in self.rawdata])
        parts.append(', '.join(sorted(assigns)))
        if self.clauses:
            parts.append('\nWHERE ')
            parts.append(' AND '.join(["( %s )" % c for c in sorted(self.clauses)]))
        return ''.join(parts)

    def __repr__(self):
        return "<UpdateProcessor: %r>" % vars(self)

    def get_values(self):
        """Returns unified values dict, including data"""
        ret = {}
        ret.update(self.values)
        for key in self.data:
            ret["data." + key] = self.data[key]
        return ret

    def set(self, **kwargs):
        """Set data via keyword args"""
        self.data.update(kwargs)

    def rawset(self, **kwargs):
        """Set rawdata via keyword args"""
        self.rawdata.update(kwargs)

    def make_revoke(self, event_id=None, user_id=None):
        """Add standard revoke options to the update"""
        if event_id is None:
            event_id = get_event()
        if user_id is None:
            context.session.assertLogin()
            user_id = context.session.user_id
        self.data['revoke_event'] = event_id
        self.data['revoker_id'] = user_id
        # deactivate the row; only currently-active rows are revoked
        self.rawdata['active'] = 'NULL'
        self.clauses.append('active = TRUE')

    def execute(self):
        return _dml(str(self), self.get_values())
class QueryProcessor(object):
    """
    Build a query from its components.
    - columns, aliases, tables: lists of the column names to retrieve,
      the tables to retrieve them from, and the key names to use when
      returning values as a map, respectively
    - joins: a list of joins in the form 'table1 ON table1.col1 = table2.col2', 'JOIN' will be
      prepended automatically; if extended join syntax (LEFT, OUTER, etc.) is required,
      it can be specified, and 'JOIN' will not be prepended
    - clauses: a list of where clauses in the form 'table1.col1 OPER table2.col2-or-variable';
      each clause will be surrounded by parentheses and all will be AND'ed together
    - values: the map that will be used to replace any substitution expressions in the query
    - transform: a function that will be called on each row (not compatible with
      countOnly or singleValue)
    - opts: a map of query options; currently supported options are:
        countOnly: if True, return an integer indicating how many results would have been
                   returned, rather than the actual query results
        order: a column or alias name to use in the 'ORDER BY' clause
        offset: an integer to use in the 'OFFSET' clause
        limit: an integer to use in the 'LIMIT' clause
        asList: if True, return results as a list of lists, where each list contains the
                column values in query order, rather than the usual list of maps
        rowlock: if True, use "FOR UPDATE" to lock the queried rows
        group: a column or alias name to use in the 'GROUP BY' clause
               (controlled by enable_group)
    - enable_group: if True, opts.group will be enabled
    """

    # number of rows fetched per FETCH when iterate() falls back to a
    # server-side cursor
    iterchunksize = 1000

    def __init__(self, columns=None, aliases=None, tables=None,
                 joins=None, clauses=None, values=None, transform=None,
                 opts=None, enable_group=False):
        self.columns = columns
        self.aliases = aliases
        if columns and aliases:
            if len(columns) != len(aliases):
                raise Exception('column and alias lists must be the same length')
            # reorder -- sort by alias while keeping each alias paired with
            # its column so output stays deterministic
            alias_table = sorted(zip(aliases, columns))
            self.aliases = [x[0] for x in alias_table]
            self.columns = [x[1] for x in alias_table]
            self.colsByAlias = dict(alias_table)
        else:
            self.colsByAlias = {}
            if columns:
                self.columns = sorted(columns)
            if aliases:
                self.aliases = sorted(aliases)
        self.tables = tables
        self.joins = joins
        if clauses:
            self.clauses = sorted(clauses)
        else:
            self.clauses = clauses
        # counter used to generate unique server-side cursor names
        self.cursors = 0
        if values:
            self.values = values
        else:
            self.values = {}
        self.transform = transform
        if opts:
            self.opts = opts
        else:
            self.opts = {}
        self.enable_group = enable_group

    def countOnly(self, count):
        # toggle countOnly mode after construction
        self.opts['countOnly'] = count

    def __str__(self):
        # template filled in via the local variables computed below
        query = \
            """
SELECT %(col_str)s
FROM %(table_str)s
%(join_str)s
%(clause_str)s
%(group_str)s
%(order_str)s
%(offset_str)s
%(limit_str)s
"""
        if self.opts.get('countOnly'):
            if self.opts.get('offset') \
                    or self.opts.get('limit') \
                    or (self.enable_group and self.opts.get('group')):
                # If we're counting with an offset and/or limit, we need
                # to wrap the offset/limited query and then count the results,
                # rather than trying to offset/limit the single row returned
                # by count(*). Because we're wrapping the query, we don't care
                # about the column values.
                col_str = '1'
            else:
                col_str = 'count(*)'
        else:
            col_str = self._seqtostr(self.columns)
        table_str = self._seqtostr(self.tables, sort=True)
        join_str = self._joinstr()
        clause_str = self._seqtostr(self.clauses, sep=')\n AND (')
        if clause_str:
            clause_str = ' WHERE (' + clause_str + ')'
        if self.enable_group:
            group_str = self._group()
        else:
            group_str = ''
        order_str = self._order()
        offset_str = self._optstr('offset')
        limit_str = self._optstr('limit')
        # substitute the pieces into the template by local-variable name
        query = query % locals()
        if self.opts.get('countOnly') and \
                (self.opts.get('offset') or
                 self.opts.get('limit') or
                 (self.enable_group and self.opts.get('group'))):
            # wrap the limited query and count its rows (see comment above)
            query = 'SELECT count(*)\nFROM (' + query + ') numrows'
        if self.opts.get('rowlock'):
            query += '\n FOR UPDATE'
        return query

    def __repr__(self):
        return '<QueryProcessor: ' \
               'columns=%r, aliases=%r, tables=%r, joins=%r, clauses=%r, values=%r, opts=%r>' % \
               (self.columns, self.aliases, self.tables, self.joins, self.clauses, self.values,
                self.opts)

    def _seqtostr(self, seq, sep=', ', sort=False):
        # join a (possibly None/empty) sequence into one string
        if seq:
            if sort:
                seq = sorted(seq)
            return sep.join(seq)
        else:
            return ''

    def _joinstr(self):
        if not self.joins:
            return ''
        result = ''
        for join in self.joins:
            if result:
                result += '\n'
            if re.search(r'\bjoin\b', join, re.IGNORECASE):
                # The join clause already contains the word 'join',
                # so don't prepend 'JOIN' to it
                result += '  ' + join
            else:
                result += '  JOIN ' + join
        return result

    def _order(self):
        # Don't bother sorting if we're just counting
        if self.opts.get('countOnly'):
            return ''
        order_opt = self.opts.get('order')
        if order_opt:
            order_exprs = []
            for order in order_opt.split(','):
                # a leading '-' requests descending order for that column
                if order.startswith('-'):
                    order = order[1:]
                    direction = ' DESC'
                else:
                    direction = ''
                # Check if we're ordering by alias first
                orderCol = self.colsByAlias.get(order)
                if orderCol:
                    pass
                elif order in self.columns:
                    orderCol = order
                else:
                    raise Exception('Invalid order: ' + order)
                order_exprs.append(orderCol + direction)
            return 'ORDER BY ' + ', '.join(order_exprs)
        else:
            return ''

    def _group(self):
        group_opt = self.opts.get('group')
        if group_opt:
            group_exprs = []
            for group in group_opt.split(','):
                if group:
                    group_exprs.append(group)
            return 'GROUP BY ' + ', '.join(group_exprs)
        else:
            return ''

    def _optstr(self, optname):
        # render 'LIMIT n' / 'OFFSET n' style fragments
        optval = self.opts.get(optname)
        if optval:
            return '%s %i' % (optname.upper(), optval)
        else:
            return ''

    def singleValue(self, strict=True):
        # self.transform not applied here
        return _singleValue(str(self), self.values, strict=strict)

    def execute(self):
        query = str(self)
        if self.opts.get('countOnly'):
            return _singleValue(query, self.values, strict=True)
        elif self.opts.get('asList'):
            if self.transform is None:
                return _fetchMulti(query, self.values)
            else:
                # if we're transforming, generate the dicts so the transform can modify
                fields = self.aliases or self.columns
                data = _multiRow(query, self.values, fields)
                data = [self.transform(row) for row in data]
                # and then convert back to lists
                data = [[row[f] for f in fields] for row in data]
                return data
        else:
            data = _multiRow(query, self.values, (self.aliases or self.columns))
            if self.transform is not None:
                data = [self.transform(row) for row in data]
            return data

    def iterate(self):
        # small or count-only queries are cheaper to run directly
        if self.opts.get('countOnly'):
            return self.execute()
        elif self.opts.get('limit') and self.opts['limit'] < self.iterchunksize:
            return self.execute()
        else:
            fields = self.aliases or self.columns
            fields = list(fields)
            # unique name per processor/process/use so cursors never collide
            cname = "qp_cursor_%s_%i_%i" % (id(self), os.getpid(), self.cursors)
            self.cursors += 1
            logger.debug('Setting up query iterator. cname=%r', cname)
            return self._iterate(cname, str(self), self.values.copy(), fields,
                                 self.iterchunksize, self.opts.get('asList'))

    def _iterate(self, cname, query, values, fields, chunksize, as_list=False):
        # We pass all this data into the generator so that the iterator works
        # from the snapshot when it was generated. Otherwise reuse of the processor
        # for similar queries could have unpredictable results.
        query = "DECLARE %s NO SCROLL CURSOR FOR %s" % (cname, query)
        c = context.cnx.cursor()
        c.execute(query, values)
        c.close()
        try:
            query = "FETCH %i FROM %s" % (chunksize, cname)
            while True:
                if as_list:
                    if self.transform is None:
                        buf = _fetchMulti(query, {})
                    else:
                        # if we're transforming, generate the dicts so the transform can modify
                        buf = _multiRow(query, self.values, fields)
                        buf = [self.transform(row) for row in buf]
                        # and then convert back to lists
                        buf = [[row[f] for f in fields] for row in buf]
                else:
                    buf = _multiRow(query, {}, fields)
                    if self.transform is not None:
                        buf = [self.transform(row) for row in buf]
                if not buf:
                    break
                for row in buf:
                    yield row
        finally:
            # always release the server-side cursor, even on early exit
            c = context.cnx.cursor()
            c.execute("CLOSE %s" % cname)
            c.close()

    def executeOne(self, strict=False):
        results = self.execute()
        if isinstance(results, list):
            if len(results) > 0:
                if strict and len(results) > 1:
                    raise koji.GenericError('multiple rows returned for a single row query')
                return results[0]
            elif strict:
                raise koji.GenericError('query returned no rows')
            else:
                return None
        # countOnly queries return a scalar -- pass it through unchanged
        return results
def _applyQueryOpts(results, queryOpts):
"""
Apply queryOpts to results in the same way QueryProcessor would.
results is a list of maps.
queryOpts is a map which may contain the following fields:
countOnly
order
offset
limit
Note:
- asList is supported by QueryProcessor but not by this method.
We don't know the original query order, and so don't have a way to
return a useful list. asList should be handled by the caller.
- group is supported by QueryProcessor but not by this method as well.
"""
if queryOpts is None:
queryOpts = {}
if queryOpts.get('order'):
order = queryOpts['order']
reverse = False
if order.startswith('-'):
order = order[1:]
reverse = True
results.sort(key=lambda o: o[order], reverse=reverse)
if queryOpts.get('offset'):
results = results[queryOpts['offset']:]
if queryOpts.get('limit'):
results = results[:queryOpts['limit']]
if queryOpts.get('countOnly'):
return len(results)
else:
return results
#
# Policy Test Handlers

View file

@ -344,7 +344,7 @@ class ModXMLRPCRequestHandler(object):
results and errors, and return those as a list."""
results = []
for call in calls:
savepoint = kojihub.Savepoint('multiCall_loop')
savepoint = koji.db.Savepoint('multiCall_loop')
try:
result = self._dispatch(call['methodName'], call['params'])
except Fault as fault:

View file

@ -1,9 +1,16 @@
PYVER_MAJOR := $(shell $(PYTHON) -c 'import sys; print(".".join(sys.version.split(".")[:1]))')
PACKAGE = $(shell basename `pwd`)
PYFILES = $(wildcard *.py)
PYSCRIPTS =
SUBDIRS =
PKGDIR = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")/$(PACKAGE)
ifeq ($(PYVER_MAJOR),2)
PYFILES=$(filter-out db.py,$(PYFILES))
else
PYFILES=$(PYFILES)
endif
_default:
@echo "nothing to make. try make install"

View file

@ -24,6 +24,8 @@
from __future__ import absolute_import
import logging
import koji
import os
# import psycopg2.extensions
# # don't convert timestamp fields to DateTime objects
# del psycopg2.extensions.string_types[1114]
@ -38,7 +40,9 @@ import traceback
import psycopg2
from . import context
import koji.context
context = koji.context.context
POSITIONAL_RE = re.compile(r'%[a-z]')
NAMED_RE = re.compile(r'%\(([^\)]+)\)[a-z]')
@ -53,7 +57,9 @@ _DBopts = None
# This probably doesn't need to be a ThreadLocal
# since Apache is not using threading,
# but play it safe anyway.
_DBconn = context.ThreadLocal()
_DBconn = koji.context.ThreadLocal()
logger = logging.getLogger('koji.db')
class DBWrapper:
@ -202,3 +208,677 @@ def connect():
_DBconn.conn = conn
return DBWrapper(conn)
def _dml(operation, values, log_errors=True):
    """Run an insert, update, or delete. Return number of rows affected

    If log is False, errors will not be logged. It makes sense only for
    queries which are expected to fail (LOCK NOWAIT)
    """
    c = context.cnx.cursor()
    c.execute(operation, values, log_errors=log_errors)
    ret = c.rowcount
    logger.debug("Operation affected %s row(s)", ret)
    c.close()
    # mark the transaction dirty so the request handler commits it
    context.commit_pending = True
    return ret
def _fetchMulti(query, values):
    """Run the query and return all rows.

    :param str query: SQL text, possibly containing %(name)s substitutions
    :param dict values: named parameters for the query
    :returns: list of row tuples (empty list if there are no matches)
    """
    # NOTE: leftover debug print statements removed -- they dumped the request
    # context to stdout on every single query
    c = context.cnx.cursor()
    c.execute(query, values)
    results = c.fetchall()
    c.close()
    return results
def _fetchSingle(query, values, strict=False):
    """Run the query and return a single row

    If strict is true, raise an error if the query returns more or less than
    one row."""
    results = _fetchMulti(query, values)
    numRows = len(results)
    if numRows == 0:
        if strict:
            raise koji.GenericError('query returned no rows')
        else:
            return None
    elif strict and numRows > 1:
        raise koji.GenericError('multiple rows returned for a single row query')
    else:
        # first (and in strict mode, only) row
        return results[0]
def _singleValue(query, values=None, strict=True):
    """Perform a query that returns a single value.

    Note that unless strict is True a return value of None could mean either
    a single NULL value or zero rows returned."""
    if values is None:
        values = {}
    row = _fetchSingle(query, values, strict)
    if row:
        if strict and len(row) > 1:
            raise koji.GenericError('multiple fields returned for a single value query')
        return row[0]
    else:
        # don't need to check strict here, since that was already handled by _fetchSingle()
        return None
def _multiRow(query, values, fields):
    """Return all rows from "query". Named query parameters
    can be specified using the "values" map. Results will be returned
    as a list of maps. Each map in the list will have a key for each
    element in the "fields" list. If there are no results, an empty
    list will be returned."""
    # pair each positional column value with its field name
    return [dict(zip(fields, row)) for row in _fetchMulti(query, values)]
def _singleRow(query, values, fields, strict=False):
    """Return a single row from "query". Named parameters can be
    specified using the "values" map. The result will be returned as
    as map. The map will have a key for each element in the "fields"
    list. If more than one row is returned and "strict" is true, a
    GenericError will be raised. If no rows are returned, and "strict"
    is True, a GenericError will be raised. Otherwise None will be
    returned."""
    row = _fetchSingle(query, values, strict)
    if row:
        return dict(zip(fields, row))
    else:
        # strict enforced by _fetchSingle
        return None
def get_event():
    """Get an event id for this transaction

    We cache the result in context, so subsequent calls in the same transaction will
    get the same event.
    This cache is cleared between the individual calls in a multicall.
    See: https://pagure.io/koji/pull-request/74
    """
    if hasattr(context, 'event_id'):
        # already fetched once in this transaction -- reuse it
        return context.event_id
    event_id = _singleValue("SELECT get_event()")
    context.event_id = event_id
    return event_id
def nextval(sequence):
    """Get the next value for the given sequence"""
    # the sequence name is passed as a bound parameter, not interpolated
    data = {'sequence': sequence}
    return _singleValue("SELECT nextval(%(sequence)s)", data, strict=True)
class Savepoint(object):
    """Create a database savepoint that can later be rolled back to."""

    def __init__(self, name):
        # NOTE(review): name is interpolated directly into the statement, so
        # callers must pass a literal identifier, never external input
        self.name = name
        _dml("SAVEPOINT %s" % name, {})

    def rollback(self):
        # undo everything since this savepoint was created
        _dml("ROLLBACK TO SAVEPOINT %s" % self.name, {})
class InsertProcessor(object):
    """Build an insert statement

    table - the table to insert into
    data - a dictionary of data to insert (keys = row names)
    rawdata - data to insert specified as sql expressions rather than python values

    does not support query inserts of "DEFAULT VALUES"
    """

    def __init__(self, table, data=None, rawdata=None):
        self.table = table
        self.data = {}
        if data:
            self.data.update(data)
        self.rawdata = {}
        if rawdata:
            self.rawdata.update(rawdata)

    def __str__(self):
        if not self.data and not self.rawdata:
            return "-- incomplete update: no assigns"
        parts = ['INSERT INTO %s ' % self.table]
        columns = sorted(list(self.data.keys()) + list(self.rawdata.keys()))
        parts.append("(%s) " % ', '.join(columns))
        values = []
        for key in columns:
            if key in self.data:
                # regular values become named query parameters
                values.append("%%(%s)s" % key)
            else:
                # raw values are embedded verbatim as SQL expressions
                values.append("(%s)" % self.rawdata[key])
        parts.append("VALUES (%s)" % ', '.join(values))
        return ''.join(parts)

    def __repr__(self):
        return "<InsertProcessor: %r>" % vars(self)

    def set(self, **kwargs):
        """Set data via keyword args"""
        self.data.update(kwargs)

    def rawset(self, **kwargs):
        """Set rawdata via keyword args"""
        self.rawdata.update(kwargs)

    def make_create(self, event_id=None, user_id=None):
        # fill in the standard creation columns for versioned tables,
        # defaulting to the current event and logged-in user
        if event_id is None:
            event_id = get_event()
        if user_id is None:
            context.session.assertLogin()
            user_id = context.session.user_id
        self.data['create_event'] = event_id
        self.data['creator_id'] = user_id

    def dup_check(self):
        """Check to see if the insert duplicates an existing row"""
        if self.rawdata:
            # raw SQL expressions cannot be compared against stored values
            logger.warning("Can't perform duplicate check")
            return None
        data = self.data.copy()
        if 'create_event' in self.data:
            # versioned table -- compare only against the active row,
            # ignoring the event/creator bookkeeping columns
            data['active'] = True
            del data['create_event']
            del data['creator_id']
        clauses = ["%s = %%(%s)s" % (k, k) for k in data]
        query = QueryProcessor(columns=list(data.keys()), tables=[self.table],
                               clauses=clauses, values=data)
        if query.execute():
            return True
        return False

    def execute(self):
        return _dml(str(self), self.data)
class UpdateProcessor(object):
    """Build an update statement

    table - the table to insert into
    data - a dictionary of data to insert (keys = row names)
    rawdata - data to insert specified as sql expressions rather than python values
    clauses - a list of where clauses which will be ANDed together
    values - dict of values used in clauses

    does not support the FROM clause
    """

    def __init__(self, table, data=None, rawdata=None, clauses=None, values=None):
        self.table = table
        self.data = {}
        if data:
            self.data.update(data)
        self.rawdata = {}
        if rawdata:
            self.rawdata.update(rawdata)
        self.clauses = []
        if clauses:
            self.clauses.extend(clauses)
        self.values = {}
        if values:
            self.values.update(values)

    def __str__(self):
        if not self.data and not self.rawdata:
            return "-- incomplete update: no assigns"
        parts = ['UPDATE %s SET ' % self.table]
        # SET values use "data."-prefixed parameter names so they cannot
        # collide with names in the where-clause values map (see get_values)
        assigns = ["%s = %%(data.%s)s" % (key, key) for key in self.data]
        assigns.extend(["%s = (%s)" % (key, self.rawdata[key]) for key in self.rawdata])
        parts.append(', '.join(sorted(assigns)))
        if self.clauses:
            parts.append('\nWHERE ')
            parts.append(' AND '.join(["( %s )" % c for c in sorted(self.clauses)]))
        return ''.join(parts)

    def __repr__(self):
        return "<UpdateProcessor: %r>" % vars(self)

    def get_values(self):
        """Returns unified values dict, including data"""
        ret = {}
        ret.update(self.values)
        for key in self.data:
            ret["data." + key] = self.data[key]
        return ret

    def set(self, **kwargs):
        """Set data via keyword args"""
        self.data.update(kwargs)

    def rawset(self, **kwargs):
        """Set rawdata via keyword args"""
        self.rawdata.update(kwargs)

    def make_revoke(self, event_id=None, user_id=None):
        """Add standard revoke options to the update"""
        if event_id is None:
            event_id = get_event()
        if user_id is None:
            context.session.assertLogin()
            user_id = context.session.user_id
        self.data['revoke_event'] = event_id
        self.data['revoker_id'] = user_id
        # deactivate the row; only currently-active rows are revoked
        self.rawdata['active'] = 'NULL'
        self.clauses.append('active = TRUE')

    def execute(self):
        return _dml(str(self), self.get_values())
class QueryProcessor(object):
"""
Build a query from its components.
- columns, aliases, tables: lists of the column names to retrieve,
the tables to retrieve them from, and the key names to use when
returning values as a map, respectively
- joins: a list of joins in the form 'table1 ON table1.col1 = table2.col2', 'JOIN' will be
prepended automatically; if extended join syntax (LEFT, OUTER, etc.) is required,
it can be specified, and 'JOIN' will not be prepended
- clauses: a list of where clauses in the form 'table1.col1 OPER table2.col2-or-variable';
each clause will be surrounded by parentheses and all will be AND'ed together
- values: the map that will be used to replace any substitution expressions in the query
- transform: a function that will be called on each row (not compatible with
countOnly or singleValue)
- opts: a map of query options; currently supported options are:
countOnly: if True, return an integer indicating how many results would have been
returned, rather than the actual query results
order: a column or alias name to use in the 'ORDER BY' clause
offset: an integer to use in the 'OFFSET' clause
limit: an integer to use in the 'LIMIT' clause
asList: if True, return results as a list of lists, where each list contains the
column values in query order, rather than the usual list of maps
rowlock: if True, use "FOR UPDATE" to lock the queried rows
group: a column or alias name to use in the 'GROUP BY' clause
(controlled by enable_group)
- enable_group: if True, opts.group will be enabled
"""
iterchunksize = 1000
def __init__(self, columns=None, aliases=None, tables=None,
joins=None, clauses=None, values=None, transform=None,
opts=None, enable_group=False):
self.columns = columns
self.aliases = aliases
if columns and aliases:
if len(columns) != len(aliases):
raise Exception('column and alias lists must be the same length')
# reorder
alias_table = sorted(zip(aliases, columns))
self.aliases = [x[0] for x in alias_table]
self.columns = [x[1] for x in alias_table]
self.colsByAlias = dict(alias_table)
else:
self.colsByAlias = {}
if columns:
self.columns = sorted(columns)
if aliases:
self.aliases = sorted(aliases)
self.tables = tables
self.joins = joins
if clauses:
self.clauses = sorted(clauses)
else:
self.clauses = clauses
self.cursors = 0
if values:
self.values = values
else:
self.values = {}
self.transform = transform
if opts:
self.opts = opts
else:
self.opts = {}
self.enable_group = enable_group
self.logger = logging.getLogger('koji.db')
def countOnly(self, count):
self.opts['countOnly'] = count
def __str__(self):
    """Render this processor as a complete SQL SELECT statement."""
    # The %(name)s placeholders below are filled from this method's local
    # variables via ``query % locals()`` near the end.
    query = \
        """
        SELECT %(col_str)s
        FROM %(table_str)s
        %(join_str)s
        %(clause_str)s
        %(group_str)s
        %(order_str)s
        %(offset_str)s
        %(limit_str)s
        """
    if self.opts.get('countOnly'):
        if self.opts.get('offset') \
                or self.opts.get('limit') \
                or (self.enable_group and self.opts.get('group')):
            # If we're counting with an offset and/or limit, we need
            # to wrap the offset/limited query and then count the results,
            # rather than trying to offset/limit the single row returned
            # by count(*). Because we're wrapping the query, we don't care
            # about the column values.
            col_str = '1'
        else:
            col_str = 'count(*)'
    else:
        col_str = self._seqtostr(self.columns)
    table_str = self._seqtostr(self.tables, sort=True)
    join_str = self._joinstr()
    clause_str = self._seqtostr(self.clauses, sep=')\n AND (')
    if clause_str:
        # each clause is parenthesized; all are ANDed together
        clause_str = ' WHERE (' + clause_str + ')'
    if self.enable_group:
        group_str = self._group()
    else:
        group_str = ''
    order_str = self._order()
    offset_str = self._optstr('offset')
    limit_str = self._optstr('limit')
    # substitute the computed fragments into the template above
    query = query % locals()
    if self.opts.get('countOnly') and \
            (self.opts.get('offset') or
             self.opts.get('limit') or
             (self.enable_group and self.opts.get('group'))):
        # wrap and count the sliced/grouped query (see comment above)
        query = 'SELECT count(*)\nFROM (' + query + ') numrows'
    if self.opts.get('rowlock'):
        # lock the selected rows for the duration of the transaction
        query += '\n FOR UPDATE'
    return query
def __repr__(self):
    """Debugging representation showing every query component."""
    return (f'<QueryProcessor: columns={self.columns!r}, aliases={self.aliases!r}, '
            f'tables={self.tables!r}, joins={self.joins!r}, clauses={self.clauses!r}, '
            f'values={self.values!r}, opts={self.opts!r}>')
def _seqtostr(self, seq, sep=', ', sort=False):
    """Join *seq* into a string with *sep*; return '' for an empty/None seq."""
    if not seq:
        return ''
    items = sorted(seq) if sort else seq
    return sep.join(items)
def _joinstr(self):
    """Render the JOIN section of the query, one join per line."""
    if not self.joins:
        return ''
    rendered = []
    for clause in self.joins:
        if re.search(r'\bjoin\b', clause, re.IGNORECASE):
            # the clause already spells out its own JOIN keyword
            rendered.append(' ' + clause)
        else:
            rendered.append(' JOIN ' + clause)
    return '\n'.join(rendered)
def _order(self):
    """Render the ORDER BY clause from opts['order'] ('' when absent).

    A leading '-' on a term means descending order; aliases are resolved
    to their column names and take precedence over raw column names.
    """
    # Don't bother sorting if we're just counting
    if self.opts.get('countOnly'):
        return ''
    order_opt = self.opts.get('order')
    if not order_opt:
        return ''
    exprs = []
    for term in order_opt.split(','):
        direction = ''
        if term.startswith('-'):
            term = term[1:]
            direction = ' DESC'
        # check alias mapping first, then plain column names
        col = self.colsByAlias.get(term)
        if not col:
            if term in self.columns:
                col = term
            else:
                raise Exception('Invalid order: ' + term)
        exprs.append(col + direction)
    return 'ORDER BY ' + ', '.join(exprs)
def _group(self):
    """Render the GROUP BY clause from opts['group'] ('' when absent)."""
    group_opt = self.opts.get('group')
    if not group_opt:
        return ''
    # drop empty terms produced by stray commas
    exprs = [term for term in group_opt.split(',') if term]
    return 'GROUP BY ' + ', '.join(exprs)
def _optstr(self, optname):
    """Render an integer option (offset/limit) as e.g. 'LIMIT 10'."""
    optval = self.opts.get(optname)
    return '%s %i' % (optname.upper(), optval) if optval else ''
def singleValue(self, strict=True):
    """Run the query and return a single scalar value.

    Note that self.transform is not applied here.
    """
    query = str(self)
    return _singleValue(query, self.values, strict=strict)
def execute(self):
    """Run the query and return the results.

    Returns an int in countOnly mode, a list of lists when opts['asList']
    is set, and a list of dicts (keyed by alias or column) otherwise.
    The transform function, when set, is applied to each row dict.
    """
    query = str(self)
    if self.opts.get('countOnly'):
        return _singleValue(query, self.values, strict=True)
    fields = self.aliases or self.columns
    if self.opts.get('asList'):
        if self.transform is None:
            return _fetchMulti(query, self.values)
        # when transforming, build dicts so the transform can modify rows,
        # then flatten back to lists in field order
        rows = _multiRow(query, self.values, fields)
        rows = [self.transform(row) for row in rows]
        return [[row[f] for f in fields] for row in rows]
    rows = _multiRow(query, self.values, fields)
    if self.transform is not None:
        rows = [self.transform(row) for row in rows]
    return rows
def iterate(self):
    """Return query results, lazily when the result set may be large.

    Small or count-only queries are executed directly; otherwise a
    server-side cursor generator is returned that fetches rows in chunks
    of self.iterchunksize.
    """
    if self.opts.get('countOnly'):
        return self.execute()
    limit = self.opts.get('limit')
    if limit and limit < self.iterchunksize:
        # result set is bounded and small; a cursor is not worth it
        return self.execute()
    fields = list(self.aliases or self.columns)
    # unique cursor name per processor instance and use
    cname = "qp_cursor_%s_%i_%i" % (id(self), os.getpid(), self.cursors)
    self.cursors += 1
    self.logger.debug('Setting up query iterator. cname=%r', cname)
    return self._iterate(cname, str(self), self.values.copy(), fields,
                         self.iterchunksize, self.opts.get('asList'))
def _iterate(self, cname, query, values, fields, chunksize, as_list=False):
    """Generator yielding rows from a named server-side cursor.

    :param str cname: name for the NO SCROLL cursor
    :param str query: the SELECT statement the cursor runs
    :param dict values: bind parameters for the DECLARE step
    :param list fields: column/alias names used to build row dicts
    :param int chunksize: rows fetched per FETCH round trip
    :param bool as_list: yield rows as lists instead of dicts
    """
    # We pass all this data into the generator so that the iterator works
    # from the snapshot when it was generated. Otherwise reuse of the processor
    # for similar queries could have unpredictable results.
    query = "DECLARE %s NO SCROLL CURSOR FOR %s" % (cname, query)
    c = context.cnx.cursor()
    c.execute(query, values)
    c.close()
    try:
        query = "FETCH %i FROM %s" % (chunksize, cname)
        while True:
            if as_list:
                if self.transform is None:
                    buf = _fetchMulti(query, {})
                else:
                    # if we're transforming, generate the dicts so the transform can modify
                    # NOTE(review): this branch passes self.values where the
                    # others pass {}; presumably harmless since the FETCH
                    # statement has no placeholders -- confirm
                    buf = _multiRow(query, self.values, fields)
                    buf = [self.transform(row) for row in buf]
                    # and then convert back to lists
                    buf = [[row[f] for f in fields] for row in buf]
            else:
                buf = _multiRow(query, {}, fields)
                if self.transform is not None:
                    buf = [self.transform(row) for row in buf]
            if not buf:
                # cursor exhausted
                break
            for row in buf:
                yield row
    finally:
        # always release the server-side cursor, even on error or early exit
        c = context.cnx.cursor()
        c.execute("CLOSE %s" % cname)
        c.close()
def executeOne(self, strict=False):
    """Run the query and return only the first result row.

    Returns None when there are no rows. With strict=True, raise
    GenericError when the query yields no rows or more than one row.
    Non-list results (e.g. countOnly mode) are passed through unchanged.
    """
    results = self.execute()
    if not isinstance(results, list):
        return results
    if strict and len(results) > 1:
        raise koji.GenericError('multiple rows returned for a single row query')
    if results:
        return results[0]
    if strict:
        raise koji.GenericError('query returned no rows')
    return None
class BulkInsertProcessor(object):
def __init__(self, table, data=None, columns=None, strict=True, batch=1000):
"""Do bulk inserts - it has some limitations compared to
InsertProcessor (no rawset, dup_check).
set() is replaced with add_record() to avoid confusion
table - name of the table
data - list of dict per record
columns - list/set of names of used columns - makes sense
mainly with strict=True
strict - if True, all records must contain values for all columns.
if False, missing values will be inserted as NULLs
batch - batch size for inserts (one statement per batch)
"""
self.table = table
self.data = []
if columns is None:
self.columns = set()
else:
self.columns = set(columns)
if data is not None:
self.data = data
for row in data:
self.columns |= set(row.keys())
self.strict = strict
self.batch = batch
def __str__(self):
if not self.data:
return "-- incomplete insert: no data"
query, params = self._get_insert(self.data)
return query
def _get_insert(self, data):
"""
Generate one insert statement for the given data
:param list data: list of rows (dict format) to insert
:returns: (query, params)
"""
if not data:
# should not happen
raise ValueError('no data for insert')
parts = ['INSERT INTO %s ' % self.table]
columns = sorted(self.columns)
parts.append("(%s) " % ', '.join(columns))
prepared_data = {}
values = []
i = 0
for row in data:
row_values = []
for key in columns:
if key in row:
row_key = '%s%d' % (key, i)
row_values.append("%%(%s)s" % row_key)
prepared_data[row_key] = row[key]
elif self.strict:
raise koji.GenericError("Missing value %s in BulkInsert" % key)
else:
row_values.append("NULL")
values.append("(%s)" % ', '.join(row_values))
i += 1
parts.append("VALUES %s" % ', '.join(values))
return ''.join(parts), prepared_data
def __repr__(self):
return "<BulkInsertProcessor: %r>" % vars(self)
def add_record(self, **kwargs):
"""Set whole record via keyword args"""
if not kwargs:
raise koji.GenericError("Missing values in BulkInsert.add_record")
self.data.append(kwargs)
self.columns |= set(kwargs.keys())
def execute(self):
if not self.batch:
self._one_insert(self.data)
else:
for i in range(0, len(self.data), self.batch):
data = self.data[i:i + self.batch]
self._one_insert(data)
def _one_insert(self, data):
query, params = self._get_insert(data)
_dml(query, params)
def _applyQueryOpts(results, queryOpts):
    """
    Apply queryOpts to results in the same way QueryProcessor would.

    results is a list of maps.
    queryOpts is a map which may contain the following fields:
        countOnly
        order
        offset
        limit

    Note:
    - asList is supported by QueryProcessor but not by this method.
      We don't know the original query order, and so don't have a way to
      return a useful list.  asList should be handled by the caller.
    - group is supported by QueryProcessor but not by this method as well.
    - the caller's results list is never modified; ordering and slicing
      produce a new list.
    """
    if queryOpts is None:
        queryOpts = {}
    order = queryOpts.get('order')
    if order:
        # a leading '-' requests descending order, as in QueryProcessor
        reverse = order.startswith('-')
        key = order[1:] if reverse else order
        # use sorted() rather than list.sort() so the caller's list is not
        # mutated as a side effect
        results = sorted(results, key=lambda row: row[key], reverse=reverse)
    if queryOpts.get('offset'):
        results = results[queryOpts['offset']:]
    if queryOpts.get('limit'):
        results = results[:queryOpts['limit']]
    if queryOpts.get('countOnly'):
        # count after offset/limit, matching QueryProcessor's wrapped count
        return len(results)
    return results

View file

@ -17,7 +17,8 @@ from proton.reactor import Container
import koji
from koji.context import context
from koji.plugin import callback, convert_datetime, ignore_error
from kojihub import QueryProcessor, InsertProcessor, get_build_type
from kojihub import get_build_type
from koji.db import QueryProcessor, InsertProcessor
CONFIG_FILE = '/etc/koji-hub/plugins/protonmsg.conf'
CONFIG = None

View file

@ -4,12 +4,12 @@
import sys
import koji
import koji.policy
from koji.db import QueryProcessor, nextval
from koji.context import context
from koji.plugin import callback, export
import koji.policy
sys.path.insert(0, "/usr/share/koji-hub/")
from kojihub import ( # noqa: E402
QueryProcessor,
_create_build_target,
_create_tag,
_delete_build_target,
@ -20,11 +20,11 @@ from kojihub import ( # noqa: E402
get_build_target,
get_tag,
get_user,
nextval,
policy_get_user,
readInheritanceData,
)
CONFIG_FILE = "/etc/koji-hub/plugins/sidetag.conf"
CONFIG = None
ALLOWED_SUFFIXES = []

View file

@ -30,9 +30,10 @@ class TestAddHost(unittest.TestCase):
side_effect=self.getUpdate).start()
self.updates = []
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
self.context.session.assertPerm = mock.MagicMock()
self.context.opts = {'HostPrincipalFormat': '-%s-'}
self.exports = kojihub.RootExports()

View file

@ -20,12 +20,13 @@ class TestAddHostToChannel(unittest.TestCase):
side_effect=self.getInsert).start()
self.inserts = []
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
self.context.session.assertPerm = mock.MagicMock()
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
self.context.opts = {'HostPrincipalFormat': '-%s-'}
self.exports = kojihub.RootExports()
self.get_channel = mock.patch('kojihub.get_channel').start()

View file

@ -17,6 +17,16 @@ class TestCGImporter(unittest.TestCase):
def setUp(self):
if not os.path.exists(self.TMP_PATH):
os.mkdir(self.TMP_PATH)
self.path_work = mock.patch('koji.pathinfo.work').start()
self.context_db = mock.patch('koji.db.context').start()
self.context = mock.patch('kojihub.context').start()
self.get_build = mock.patch('kojihub.get_build').start()
self.get_user = mock.patch('kojihub.get_user').start()
self.userinfo = {'id': 123}
self.rmtree = mock.patch('koji.util.rmtree').start()
self.lexists = mock.patch('os.path.lexists').start()
self.path_build = mock.patch('koji.pathinfo.build').start()
self.new_build = mock.patch('kojihub.new_build').start()
def tearDown(self):
if os.path.exists(self.TMP_PATH):
@ -41,33 +51,30 @@ class TestCGImporter(unittest.TestCase):
f"expected type <class 'str'>", str(ex.exception))
def test_get_metadata_is_none(self):
self.path_work.return_value = os.path.dirname(__file__)
x = kojihub.CG_Importer()
with self.assertRaises(GenericError) as ex:
x.get_metadata(None, '')
self.assertEqual('No such file: metadata.json', str(ex.exception))
@mock.patch("koji.pathinfo.work")
def test_get_metadata_missing_json_file(self, work):
work.return_value = os.path.dirname(__file__)
def test_get_metadata_missing_json_file(self):
self.path_work.return_value = os.path.dirname(__file__)
x = kojihub.CG_Importer()
with self.assertRaises(GenericError):
x.get_metadata('missing.json', 'cg_importer_json')
@mock.patch("koji.pathinfo.work")
def test_get_metadata_is_json_file(self, work):
work.return_value = os.path.dirname(__file__)
def test_get_metadata_is_json_file(self):
self.path_work.return_value = os.path.dirname(__file__)
x = kojihub.CG_Importer()
x.get_metadata('default.json', 'cg_importer_json')
assert x.raw_metadata
assert isinstance(x.raw_metadata, str)
@mock.patch('kojihub.context')
@mock.patch("koji.pathinfo.work")
def test_assert_cg_access(self, work, context):
work.return_value = os.path.dirname(__file__)
def test_assert_cg_access(self):
self.path_work.return_value = os.path.dirname(__file__)
cursor = mock.MagicMock()
context.session.user_id = 42
context.cnx.cursor.return_value = cursor
self.context.session.user_id = 42
self.context_db.cnx.cursor.return_value = cursor
cursor.fetchall.return_value = [(1, 'foo'), (2, 'bar')]
x = kojihub.CG_Importer()
x.get_metadata('default.json', 'cg_importer_json')
@ -75,17 +82,13 @@ class TestCGImporter(unittest.TestCase):
assert x.cg
assert isinstance(x.cg, int)
@mock.patch("kojihub.get_build")
@mock.patch("kojihub.get_user")
@mock.patch('kojihub.context')
@mock.patch("koji.pathinfo.work")
def test_prep_build(self, work, context, get_user, get_build):
work.return_value = os.path.dirname(__file__)
def test_prep_build(self):
self.path_work.return_value = os.path.dirname(__file__)
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
cursor.fetchall.return_value = [(1, 'foo'), (2, 'bar')]
get_user.return_value = {'id': 123}
get_build.return_value = {}
self.get_user.return_value = self.userinfo
self.get_build.return_value = {}
x = kojihub.CG_Importer()
x.get_metadata('default.json', 'cg_importer_json')
x.assert_cg_access()
@ -93,84 +96,72 @@ class TestCGImporter(unittest.TestCase):
assert x.buildinfo
assert isinstance(x.buildinfo, dict)
@mock.patch('koji.pathinfo.build')
@mock.patch('os.path.lexists')
@mock.patch('koji.util.rmtree')
def test_check_build_dir(self, rmtree, lexists, build):
def test_check_build_dir(self):
path = '/random_path/random_dir'
build.return_value = path
self.path_build.return_value = path
x = kojihub.CG_Importer()
# directory exists
lexists.return_value = True
self.lexists.return_value = True
with self.assertRaises(koji.GenericError):
x.check_build_dir(delete=False)
rmtree.assert_not_called()
self.rmtree.assert_not_called()
# directory exists + delete
lexists.return_value = True
self.lexists.return_value = True
x.check_build_dir(delete=True)
rmtree.assert_called_once_with(path)
self.rmtree.assert_called_once_with(path)
# directory doesn't exist
rmtree.reset_mock()
lexists.return_value = False
self.rmtree.reset_mock()
self.lexists.return_value = False
x.check_build_dir()
rmtree.assert_not_called()
self.rmtree.assert_not_called()
@mock.patch('kojihub.get_build')
@mock.patch("koji.pathinfo.work")
def test_prep_build_exists(self, work, get_build):
work.return_value = os.path.dirname(__file__)
get_build.return_value = True
def test_prep_build_exists(self):
self.path_work.return_value = os.path.dirname(__file__)
self.get_build.return_value = True
x = kojihub.CG_Importer()
x.get_metadata('default.json', 'cg_importer_json')
with self.assertRaises(GenericError):
x.prep_build()
@mock.patch("kojihub.get_user")
@mock.patch('kojihub.get_build')
@mock.patch('kojihub.new_build')
@mock.patch('kojihub.context')
@mock.patch("koji.pathinfo.work")
def test_get_build(self, work, context, new_build_id, get_build, get_user):
work.return_value = os.path.dirname(__file__)
def test_get_build(self):
self.path_work.return_value = os.path.dirname(__file__)
cursor = mock.MagicMock()
cursor.fetchall.return_value = [(1, 'foo'), (2, 'bar')]
context.cnx.cursor.return_value = cursor
new_build_id.return_value = 42
get_build.return_value = False
get_user.return_value = {'id': 123}
self.context_db.cnx.cursor.return_value = cursor
self.new_build.return_value = 42
self.get_build.return_value = False
self.get_user.return_value = self.userinfo
x = kojihub.CG_Importer()
x.get_metadata('default.json', 'cg_importer_json')
x.assert_cg_access()
x.prep_build()
x.prepped_outputs = []
get_build.return_value = {'id': 43, 'package_id': 1,
'package_name': 'testpkg',
'name': 'testpkg', 'version': '1.0.1e',
'release': '42.el7', 'epoch': None,
'nvr': 'testpkg-1.0.1-1.fc24',
'state': 'complete', 'task_id': 1,
'owner_id': 1, 'owner_name': 'jvasallo',
'volume_id': 'id-1212', 'volume_name': 'testvolume',
'creation_event_id': '', 'creation_time': '',
'creation_ts': 424242424242,
'start_time': None, 'start_ts': None,
'completion_time': None, 'completion_ts': None,
'source': 'https://example.com', 'extra': {}
}
new_build_id.return_value = 43
self.get_build.return_value = {'id': 43, 'package_id': 1,
'package_name': 'testpkg',
'name': 'testpkg', 'version': '1.0.1e',
'release': '42.el7', 'epoch': None,
'nvr': 'testpkg-1.0.1-1.fc24',
'state': 'complete', 'task_id': 1,
'owner_id': 1, 'owner_name': 'jvasallo',
'volume_id': 'id-1212', 'volume_name': 'testvolume',
'creation_event_id': '', 'creation_time': '',
'creation_ts': 424242424242,
'start_time': None, 'start_ts': None,
'completion_time': None, 'completion_ts': None,
'source': 'https://example.com', 'extra': {}
}
self.new_build.return_value = 43
x.get_build()
assert x.buildinfo
assert isinstance(x.buildinfo, dict)
@mock.patch("koji.pathinfo.build")
@mock.patch("koji.pathinfo.work")
def test_import_metadata(self, work, build):
work.return_value = os.path.dirname(__file__)
build.return_value = self.TMP_PATH
def test_import_metadata(self):
self.path_work.return_value = os.path.dirname(__file__)
self.path_build.return_value = self.TMP_PATH
x = kojihub.CG_Importer()
x.get_metadata('default.json', 'cg_importer_json')
x.import_metadata()
@ -280,23 +271,28 @@ class TestCGReservation(unittest.TestCase):
self.inserts = []
self.updates = []
self.context = mock.patch('kojihub.context').start()
self.context.session.user_id = 123456
self.context_db = mock.patch('koji.db.context').start()
self.context_db.session.user_id = 123456
self.mock_cursor = mock.MagicMock()
self.context.cnx.cursor.return_value = self.mock_cursor
self.context_db.cnx.cursor.return_value = self.mock_cursor
self.get_build = mock.patch('kojihub.get_build').start()
self.get_user = mock.patch('kojihub.get_user').start()
self.userinfo = {'id': 123456, 'name': 'username'}
self.new_build = mock.patch('kojihub.new_build').start()
self.lookup_name = mock.patch('kojihub.lookup_name').start()
self.assert_cg = mock.patch('kojihub.assert_cg').start()
self.get_reservation_token = mock.patch('kojihub.get_reservation_token').start()
self.run_callbacks = mock.patch('koji.plugin.run_callbacks').start()
def tearDown(self):
mock.patch.stopall()
@mock.patch("kojihub.new_build")
@mock.patch("kojihub.get_user")
@mock.patch("kojihub.lookup_name")
@mock.patch("kojihub.assert_cg")
def test_init_build_ok(self, assert_cg, lookup_name, get_user, new_build):
assert_cg.return_value = True
lookup_name.return_value = {'id': 21, 'name': 'cg_name'}
get_user.return_value = {'id': 123456, 'name': 'username'}
new_build.return_value = 654
def test_init_build_ok(self):
self.assert_cg.return_value = True
self.lookup_name.return_value = {'id': 21, 'name': 'cg_name'}
self.get_reservation_token.return_value = None
self.get_user.return_value = self.userinfo
self.new_build.return_value = 654
cg = 'content_generator_name'
self.mock_cursor.fetchone.side_effect = [
[333], # get pkg_id
@ -315,8 +311,8 @@ class TestCGReservation(unittest.TestCase):
kojihub.cg_init_build(cg, data)
lookup_name.assert_called_once_with('content_generator', cg, strict=True)
assert_cg.assert_called_once_with(cg)
self.lookup_name.assert_called_once_with('content_generator', cg, strict=True)
self.assert_cg.assert_called_once_with(cg)
self.assertEqual(1, len(self.inserts))
insert = self.inserts[0]
self.assertEqual(insert.table, 'build_reservations')
@ -324,18 +320,12 @@ class TestCGReservation(unittest.TestCase):
self.assertTrue('token' in insert.data)
self.assertEqual(insert.rawdata, {'created': 'NOW()'})
@mock.patch("koji.plugin.run_callbacks")
@mock.patch("kojihub.get_reservation_token")
@mock.patch("kojihub.lookup_name")
@mock.patch("kojihub.get_build")
@mock.patch("kojihub.assert_cg")
def test_uninit_build_ok(self, assert_cg, get_build, lookup_name, get_reservation_token,
run_callbacks):
assert_cg.return_value = True
def test_uninit_build_ok(self):
self.assert_cg.return_value = True
build_id = 1122
cg_id = 888
cg = 'content_generator_name'
get_build.side_effect = [
self.get_build.side_effect = [
{
'id': build_id,
'state': koji.BUILD_STATES['BUILDING'],
@ -349,18 +339,18 @@ class TestCGReservation(unittest.TestCase):
]
token = 'random_token'
get_reservation_token.return_value = {'build_id': build_id, 'token': token}
lookup_name.return_value = {'name': cg, 'id': cg_id}
self.get_reservation_token.return_value = {'build_id': build_id, 'token': token}
self.lookup_name.return_value = {'name': cg, 'id': cg_id}
kojihub.cg_refund_build(cg, build_id, token)
assert_cg.assert_called_once_with(cg)
get_build.assert_has_calls([
self.assert_cg.assert_called_once_with(cg)
self.get_build.assert_has_calls([
mock.call(build_id, strict=True),
mock.call(build_id, strict=True),
])
get_reservation_token.assert_called_once_with(build_id)
lookup_name.assert_called_once_with('content_generator', cg, strict=True)
self.get_reservation_token.assert_called_once_with(build_id)
self.lookup_name.assert_called_once_with('content_generator', cg, strict=True)
self.assertEqual(len(self.updates), 1)
update = self.updates[0]
@ -369,7 +359,7 @@ class TestCGReservation(unittest.TestCase):
self.assertEqual(update.data['state'], koji.BUILD_STATES['FAILED'])
self.assertEqual(update.rawdata, {'completion_time': 'NOW()'})
run_callbacks.assert_has_calls([
self.run_callbacks.assert_has_calls([
mock.call('preBuildStateChange', attribute='state',
old=koji.BUILD_STATES['BUILDING'],
new=koji.BUILD_STATES['FAILED'],

View file

@ -1,5 +1,4 @@
import copy
import json
import mock
import os
import os.path
@ -53,7 +52,7 @@ class TestCompleteImageBuild(unittest.TestCase):
self.pathinfo = koji.PathInfo(self.tempdir)
mock.patch('koji.pathinfo', new=self.pathinfo).start()
self.hostcalls = kojihub.HostExports()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
mock.patch('kojihub.Host').start()
self.Task = mock.patch('kojihub.Task').start()
self.Task.return_value.assertHost = mock.MagicMock()
@ -64,22 +63,22 @@ class TestCompleteImageBuild(unittest.TestCase):
mock.patch('kojihub.lookup_name', new=self.my_lookup_name).start()
mock.patch.object(kojihub.BuildRoot, 'load', new=self.my_buildroot_load).start()
mock.patch('kojihub.import_archive_internal',
new=self.my_import_archive_internal).start()
new=self.my_import_archive_internal).start()
self._dml = mock.patch('kojihub._dml').start()
mock.patch('kojihub.build_notification').start()
mock.patch('kojihub.assert_policy').start()
mock.patch('kojihub.check_volume_policy',
return_value={'id':0, 'name': 'DEFAULT'}).start()
return_value={'id': 0, 'name': 'DEFAULT'}).start()
self.set_up_callbacks()
self.rpms = {}
self.inserts = []
self.updates = []
mock.patch.object(kojihub.InsertProcessor, 'execute',
new=make_insert_grabber(self)).start()
new=make_insert_grabber(self)).start()
mock.patch.object(kojihub.BulkInsertProcessor, '_one_insert',
new=make_bulk_insert_grabber(self)).start()
new=make_bulk_insert_grabber(self)).start()
mock.patch.object(kojihub.UpdateProcessor, 'execute',
new=make_update_grabber(self)).start()
new=make_update_grabber(self)).start()
mock.patch('kojihub.nextval', new=self.my_nextval).start()
self.sequences = {}
@ -140,15 +139,14 @@ class TestCompleteImageBuild(unittest.TestCase):
def my_lookup_name(self, table, info, **kw):
if table == 'btype':
return {
'id': 'BTYPEID:%s' % info,
'name': info,
}
'id': 'BTYPEID:%s' % info,
'name': info,
}
else:
raise Exception("Cannot fake call")
def my_get_archive_type(self, *a, **kw):
return dict.fromkeys(['id', 'name', 'description', 'extensions'],
'ARCHIVETYPE')
return dict.fromkeys(['id', 'name', 'description', 'extensions'], 'ARCHIVETYPE')
@staticmethod
def my_buildroot_load(br, id):
@ -156,14 +154,15 @@ class TestCompleteImageBuild(unittest.TestCase):
br.id = id
br.is_standard = True
br.data = {
'br_type': koji.BR_TYPES['STANDARD'],
'id': id,
}
'br_type': koji.BR_TYPES['STANDARD'],
'id': id,
}
def my_import_archive_internal(self, *a, **kw):
# this is kind of odd, but we need this to fake the archiveinfo
share = {}
old_ip = kojihub.InsertProcessor
def my_ip(table, *a, **kw):
if table == 'archiveinfo':
data = kw['data']
@ -171,6 +170,7 @@ class TestCompleteImageBuild(unittest.TestCase):
share['archiveinfo'] = data
# TODO: need to add id
return old_ip(table, *a, **kw)
def my_ga(archive_id, **kw):
return share['archiveinfo']
with mock.patch('kojihub.InsertProcessor', new=my_ip):
@ -190,17 +190,17 @@ class TestCompleteImageBuild(unittest.TestCase):
def test_complete_image_build(self):
self.set_up_files('import_1')
buildinfo = {
'id': 137,
'task_id': 'TASK_ID',
'name': 'some-image',
'version': '1.2.3.4',
'release': '3',
'epoch': None,
'source': None,
'state': koji.BUILD_STATES['BUILDING'],
'volume_id': 0,
'extra': {},
}
'id': 137,
'task_id': 'TASK_ID',
'name': 'some-image',
'version': '1.2.3.4',
'release': '3',
'epoch': None,
'source': None,
'state': koji.BUILD_STATES['BUILDING'],
'volume_id': 0,
'extra': {},
}
image_info = {'build_id': buildinfo['id']}
self.get_build.return_value = buildinfo
self.get_image_build.return_value = image_info
@ -232,7 +232,7 @@ class TestCompleteImageBuild(unittest.TestCase):
'postImport', # build
'preBuildStateChange', # building -> completed
'postBuildStateChange',
]
]
self.assertEqual(cbtypes, cb_expect)
cb_idx = {}
for c in self.callbacks:
@ -246,10 +246,10 @@ class TestCompleteImageBuild(unittest.TestCase):
cb_idx.setdefault(key, [])
cb_idx[key].append(c[2])
key_expect = [
'postBuildStateChange', 'preBuildStateChange',
'preImport:archive', 'postImport:archive',
'preImport:image', 'postImport:image',
]
'postBuildStateChange', 'preBuildStateChange',
'preImport:archive', 'postImport:archive',
'preImport:image', 'postImport:image',
]
self.assertEqual(set(cb_idx.keys()), set(key_expect))
for key in ['preImport:image']:
callbacks = cb_idx[key]

View file

@ -1,5 +1,4 @@
import copy
import json
import mock
import os
import os.path
@ -23,6 +22,7 @@ class TestCompleteMavenBuild(unittest.TestCase):
mock.patch('koji.pathinfo', new=self.pathinfo).start()
self.hostcalls = kojihub.HostExports()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
self.context.opts = {'EnableMaven': True}
mock.patch('kojihub.Host').start()
self.Task = mock.patch('kojihub.Task').start()
@ -33,13 +33,13 @@ class TestCompleteMavenBuild(unittest.TestCase):
mock.patch('kojihub.lookup_name', new=self.my_lookup_name).start()
mock.patch.object(kojihub.BuildRoot, 'load', new=self.my_buildroot_load).start()
mock.patch('kojihub.import_archive_internal',
new=self.my_import_archive_internal).start()
mock.patch('kojihub._dml').start()
mock.patch('kojihub._fetchSingle').start()
new=self.my_import_archive_internal).start()
mock.patch('koji.db._dml').start()
mock.patch('koji.db._fetchSingle').start()
mock.patch('kojihub.build_notification').start()
mock.patch('kojihub.assert_policy').start()
mock.patch('kojihub.check_volume_policy',
return_value={'id':0, 'name': 'DEFAULT'}).start()
return_value={'id': 0, 'name': 'DEFAULT'}).start()
self.set_up_callbacks()
def tearDown(self):
@ -65,7 +65,7 @@ class TestCompleteMavenBuild(unittest.TestCase):
shutil.copy(src, dst)
self.maven_data = data
files = open(datadir + '/files', 'rt', encoding='utf-8').readlines()
files = [l.strip() for l in files]
files = [file.strip() for file in files]
self.expected_files = files
def my_lookup_name(self, table, info, **kw):
@ -80,18 +80,20 @@ class TestCompleteMavenBuild(unittest.TestCase):
br.id = id
br.is_standard = True
br.data = {
'br_type': koji.BR_TYPES['STANDARD'],
'id': id,
}
'br_type': koji.BR_TYPES['STANDARD'],
'id': id,
}
def my_import_archive_internal(self, *a, **kw):
# this is kind of odd, but we need this to fake the archiveinfo
share = {}
def my_ip(table, *a, **kw):
if table == 'archiveinfo':
share['archiveinfo'] = kw['data']
# TODO: need to add id
return mock.MagicMock()
def my_ga(archive_id, **kw):
return share['archiveinfo']
with mock.patch('kojihub.InsertProcessor', new=my_ip):
@ -141,7 +143,7 @@ class TestCompleteMavenBuild(unittest.TestCase):
'postImport',
'preBuildStateChange', # building -> completed
'postBuildStateChange',
]
]
self.assertEqual(cbtypes, cb_expect)
cb_idx = {}
@ -157,7 +159,12 @@ class TestCompleteMavenBuild(unittest.TestCase):
key = cbtype
cb_idx.setdefault(key, [])
cb_idx[key].append(c[2])
key_expect = ['postBuildStateChange', 'preBuildStateChange', 'preImport:archive', 'postImport:archive']
key_expect = [
'postBuildStateChange',
'preBuildStateChange',
'preImport:archive',
'postImport:archive'
]
self.assertEqual(set(cb_idx.keys()), set(key_expect))
# in this case, pre and post data is similar
for key in ['preImport:archive', 'postImport:archive']:

View file

@ -15,6 +15,7 @@ class TestCreateMavenBuild(unittest.TestCase):
self.exports = kojihub.RootExports()
self.session = mock.MagicMock()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
self.context.session.assertPerm = mock.MagicMock()
self.InsertProcessor = mock.patch('kojihub.InsertProcessor',
side_effect=self.getInsert).start()

View file

@ -28,10 +28,11 @@ class TestCreateTag(unittest.TestCase):
self.verify_name_internal = mock.patch('kojihub.verify_name_internal').start()
self.writeInheritanceData = mock.patch('kojihub._writeInheritanceData').start()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertPerm = mock.MagicMock()
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
def tearDown(self):
mock.patch.stopall()
@ -46,8 +47,8 @@ class TestCreateTag(unittest.TestCase):
self.get_tag.side_effect = [None, {'id': 1, 'name': 'parent-tag'}]
self.get_tag_id.return_value = 99
self.verify_name_internal.return_value = None
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
self.writeInheritanceData.return_value = None
kojihub.create_tag('newtag', parent='parent-tag')
@ -73,8 +74,8 @@ class TestCreateTag(unittest.TestCase):
self.get_tag.return_value = None
self.get_tag_id.return_value = 99
self.verify_name_internal.return_value = None
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
with self.assertRaises(koji.GenericError):
kojihub.create_tag('newtag', arches=u'ěšč')

View file

@ -20,10 +20,11 @@ class TestDeleteTag(unittest.TestCase):
self.updates = []
self.get_tag = mock.patch('kojihub.get_tag').start()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertPerm = mock.MagicMock()
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
def tearDown(self):
mock.patch.stopall()
@ -37,8 +38,8 @@ class TestDeleteTag(unittest.TestCase):
def test_good_tag(self):
self.get_tag.return_value = {'id': 'TAGID'}
self.context.event_id = "12345"
self.context.session.user_id = "42"
self.context_db.event_id = "12345"
self.context_db.session.user_id = "42"
data = {'revoker_id': '42', 'revoke_event': '12345'}
kojihub.delete_tag('goodtag')
for u in self.updates:

View file

@ -30,9 +30,10 @@ class TestEditHost(unittest.TestCase):
side_effect=self.getUpdate).start()
self.updates = []
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
self.context.session.assertPerm = mock.MagicMock()
self.exports = kojihub.RootExports()
self.get_host = mock.patch('kojihub.get_host').start()
@ -94,8 +95,8 @@ class TestEditHost(unittest.TestCase):
def test_edit_host_valid(self):
kojihub.get_host = mock.MagicMock()
kojihub.get_host.return_value = self.hostinfo
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
r = self.exports.editHost('hostname', arches='x86_64 i386', capacity=12.0,
comment='comment_new', non_existing_kw='bogus')
@ -140,8 +141,8 @@ class TestEditHost(unittest.TestCase):
def test_edit_host_no_change(self):
kojihub.get_host = mock.MagicMock()
kojihub.get_host.return_value = self.hostinfo
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
r = self.exports.editHost('hostname')

View file

@ -36,9 +36,10 @@ class TestEditTag(unittest.TestCase):
self.get_perm_id = mock.patch('kojihub.get_perm_id').start()
self.verify_name_internal = mock.patch('kojihub.verify_name_internal').start()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
def tearDown(self):
mock.patch.stopall()
@ -66,8 +67,8 @@ class TestEditTag(unittest.TestCase):
'exD': 4}}
self._singleValue.return_value = None
self.verify_name_internal.return_value = None
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
# no1 invoke
kwargs = {
'perm': None,

View file

@ -7,6 +7,7 @@ QP = kojihub.QueryProcessor
IP = kojihub.InsertProcessor
UP = kojihub.UpdateProcessor
class TestGrouplist(unittest.TestCase):
def getQuery(self, *args, **kwargs):
query = QP(*args, **kwargs)
@ -40,6 +41,7 @@ class TestGrouplist(unittest.TestCase):
def setUp(self):
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
self.get_tag = mock.patch('kojihub.get_tag').start()
self.lookup_tag = mock.patch('kojihub.lookup_tag').start()
self.lookup_group = mock.patch('kojihub.lookup_group').start()
@ -47,38 +49,40 @@ class TestGrouplist(unittest.TestCase):
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertPerm = mock.MagicMock()
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
self.QueryProcessor = mock.patch('kojihub.QueryProcessor',
side_effect=self.getQuery).start()
side_effect=self.getQuery).start()
self.queries = []
self.InsertProcessor = mock.patch('kojihub.InsertProcessor',
side_effect=self.getInsert).start()
side_effect=self.getInsert).start()
self.inserts = []
self.UpdateProcessor = mock.patch('kojihub.UpdateProcessor',
side_effect=self.getUpdate).start()
side_effect=self.getUpdate).start()
self.updates = []
self.tag = 'tag'
self.group = 'group'
self.taginfo = {'name': self.tag, 'id': 1}
self.groupinfo = {'name': self.group, 'id': 2}
def tearDown(self):
mock.patch.stopall()
def test_grplist_add(self):
tag = 'tag'
group = 'group'
self.get_tag.return_value = {'name': 'tag', 'id': 'tag_id'}
self.lookup_group.return_value = {'name': 'group', 'id': 'group_id'}
self.get_tag.return_value = self.taginfo
self.lookup_group.return_value = self.groupinfo
self.get_tag_groups.return_value = {}
self.context.event_id = 42
self.context.session.user_id = 24
self.context_db.event_id = 42
self.context_db.session.user_id = 24
kojihub.grplist_add(tag, group)
kojihub.grplist_add(self.tag, self.group)
# what was called
self.context.session.assertPerm.assert_called_once_with('tag')
self.get_tag.assert_called_once_with(tag, strict=True)
self.lookup_group.assert_called_once_with(group, create=True)
self.get_tag_groups.assert_called_with('tag_id', inherit=True,
incl_pkgs=False, incl_reqs=False)
self.get_tag.assert_called_once_with(self.tag, strict=True)
self.lookup_group.assert_called_once_with(self.group, create=True)
self.get_tag_groups.assert_called_with(self.taginfo['id'], inherit=True, incl_pkgs=False,
incl_reqs=False)
# db
# revoke
self.assertEqual(len(self.updates), 1)
@ -96,8 +100,8 @@ class TestGrouplist(unittest.TestCase):
'uservisible': True,
'create_event': 42,
'creator_id': 24,
'tag_id': 'tag_id',
'group_id': 'group_id',
'tag_id': self.taginfo['id'],
'group_id': self.groupinfo['id'],
'blocked': False,
}
self.assertEqual(insert.table, 'group_config')
@ -107,37 +111,35 @@ class TestGrouplist(unittest.TestCase):
def test_grplist_add_no_admin(self):
self.context.session.assertPerm.side_effect = koji.GenericError
with self.assertRaises(koji.GenericError):
kojihub.grplist_add('tag', 'group')
self.context.session.assertPerm.assert_called_once_with('tag')
kojihub.grplist_add(self.tag, self.group)
self.context.session.assertPerm.assert_called_once_with(self.tag)
self.assertEqual(len(self.inserts), 0)
self.assertEqual(len(self.updates), 0)
def test_grplist_add_no_tag(self):
self.get_tag.side_effect = koji.GenericError
with self.assertRaises(koji.GenericError):
kojihub.grplist_add('tag', 'group')
self.context.session.assertPerm.assert_called_once_with('tag')
kojihub.grplist_add(self.tag, self.group)
self.context.session.assertPerm.assert_called_once_with(self.tag)
self.assertEqual(len(self.inserts), 0)
self.assertEqual(len(self.updates), 0)
def test_grplist_block(self):
# identical with test_grplist_add except blocked=True
tag = 'tag'
group = 'group'
self.get_tag.return_value = {'name': 'tag', 'id': 'tag_id'}
self.lookup_group.return_value = {'name': 'group', 'id': 'group_id'}
self.get_tag.return_value = self.taginfo
self.lookup_group.return_value = self.groupinfo
self.get_tag_groups.return_value = {}
self.context.event_id = 42
self.context.session.user_id = 24
self.context_db.event_id = 42
self.context_db.session.user_id = 24
kojihub.grplist_block(tag, group)
kojihub.grplist_block(self.tag, self.group)
# what was called
self.context.session.assertPerm.assert_called_once_with('tag')
self.get_tag.assert_called_once_with(tag, strict=True)
self.lookup_group.assert_called_once_with(group, create=True)
self.get_tag_groups.assert_called_with('tag_id', inherit=True,
incl_pkgs=False, incl_reqs=False)
self.get_tag.assert_called_once_with(self.tag, strict=True)
self.lookup_group.assert_called_once_with(self.group, create=True)
self.get_tag_groups.assert_called_with(self.taginfo['id'], inherit=True, incl_pkgs=False,
incl_reqs=False)
# db
# revoke
self.assertEqual(len(self.updates), 1)
@ -155,8 +157,8 @@ class TestGrouplist(unittest.TestCase):
'uservisible': True,
'create_event': 42,
'creator_id': 24,
'tag_id': 'tag_id',
'group_id': 'group_id',
'tag_id': self.taginfo['id'],
'group_id': self.groupinfo['id'],
'blocked': True,
}
self.assertEqual(insert.table, 'group_config')
@ -164,19 +166,17 @@ class TestGrouplist(unittest.TestCase):
self.assertEqual(insert.rawdata, {})
def test_grplist_remove(self):
tag = 'tag'
group = 'group'
self.get_tag.return_value = {'name': 'tag', 'id': 'tag_id'}
self.lookup_group.return_value = {'name': 'group', 'id': 'group_id'}
self.context.event_id = 42
self.context.session.user_id = 24
self.get_tag.return_value = self.taginfo
self.lookup_group.return_value = self.groupinfo
self.context_db.event_id = 42
self.context_db.session.user_id = 24
kojihub.grplist_remove(tag, group)
kojihub.grplist_remove(self.tag, self.group)
# what was called
self.context.session.assertPerm.assert_called_once_with('tag')
self.get_tag.assert_called_once_with(tag, strict=True)
self.lookup_group.assert_called_once_with(group, strict=True)
self.context.session.assertPerm.assert_called_once_with(self.tag)
self.get_tag.assert_called_once_with(self.tag, strict=True)
self.lookup_group.assert_called_once_with(self.group, strict=True)
# db
self.assertEqual(len(self.queries), 1)
@ -197,7 +197,7 @@ class TestGrouplist(unittest.TestCase):
self.reset_db_processors()
with mock.patch('kojihub.QueryProcessor', side_effect=self.getEmptyQuery):
with self.assertRaises(koji.GenericError) as cm:
kojihub.grplist_remove(tag, group)
kojihub.grplist_remove(self.tag, self.group)
self.assertEqual(len(self.queries), 1)
self.assertEqual(len(self.inserts), 0)
@ -209,7 +209,7 @@ class TestGrouplist(unittest.TestCase):
self.reset_db_processors()
with mock.patch('kojihub.QueryProcessor',
side_effect=self.getEmptyQuery):
kojihub.grplist_remove(tag, group, force=True)
kojihub.grplist_remove(self.tag, self.group, force=True)
self.assertEqual(len(self.queries), 0)
self.assertEqual(len(self.inserts), 0)
@ -217,27 +217,23 @@ class TestGrouplist(unittest.TestCase):
def test_grplist_unblock(self):
# identical with test_grplist_add except blocked=True
tag = 'tag'
group = 'group'
self.lookup_tag.return_value = {'name': 'tag', 'id': 'tag_id'}
self.lookup_group.return_value = {'name': 'group', 'id': 'group_id'}
#self.context.event_id = 42
#self.context.session.user_id = 24
self.lookup_tag.return_value = self.taginfo
self.lookup_group.return_value = self.groupinfo
# will fail for non-blocked group
with self.assertRaises(koji.GenericError):
kojihub.grplist_unblock(tag, group)
kojihub.grplist_unblock(self.tag, self.group)
# what was called
self.context.session.assertPerm.assert_called_once_with('tag')
self.lookup_tag.assert_called_once_with(tag, strict=True)
self.lookup_group.assert_called_once_with(group, strict=True)
self.lookup_tag.assert_called_once_with(self.tag, strict=True)
self.lookup_group.assert_called_once_with(self.group, strict=True)
# db
self.assertEqual(len(self.queries), 1)
query = self.queries[0]
self.assertEqual(query.tables, ['group_config'])
self.assertEqual(query.joins,None)
self.assertEqual(query.joins, None)
self.assertEqual(query.clauses,
['active = TRUE', 'group_id=%(grp_id)s', 'tag_id=%(tag_id)s'])
self.assertEqual(len(self.updates), 0)
@ -260,7 +256,7 @@ class TestGrouplist(unittest.TestCase):
}
self.get_tag_groups.return_value = {1: group}
r = kojihub.readTagGroups('tag')
r = kojihub.readTagGroups(self.tag)
self.assertEqual(r, [{'name': 'a', 'packagelist': [], 'grouplist': [], 'blocked': False}])
def test_readTagGroups_blocked(self):
@ -273,10 +269,10 @@ class TestGrouplist(unittest.TestCase):
self.get_tag_groups.return_value = {1: group.copy()}
# without blocked
r = kojihub.readTagGroups('tag')
r = kojihub.readTagGroups(self.tag)
self.assertEqual(r, [])
# with blocked
self.get_tag_groups.return_value = {1: group.copy()}
r = kojihub.readTagGroups('tag', incl_blocked=True)
r = kojihub.readTagGroups(self.tag, incl_blocked=True)
self.assertEqual(r, [{'name': 'a', 'packagelist': [], 'grouplist': [], 'blocked': True}])

View file

@ -22,7 +22,7 @@ class TestImportBuild(unittest.TestCase):
self.check_volume_policy = mock.patch('kojihub.check_volume_policy').start()
self.new_typed_build = mock.patch('kojihub.new_typed_build').start()
self._dml = mock.patch('kojihub._dml').start()
self._dml = mock.patch('koji.db._dml').start()
self._singleValue = mock.patch('kojihub._singleValue').start()
self.get_build = mock.patch('kojihub.get_build').start()
self.add_rpm_sig = mock.patch('kojihub.add_rpm_sig').start()
@ -31,6 +31,7 @@ class TestImportBuild(unittest.TestCase):
self.import_rpm = mock.patch('kojihub.import_rpm').start()
self.QueryProcessor = mock.patch('kojihub.QueryProcessor').start()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
self.new_package = mock.patch('kojihub.new_package').start()
self.get_rpm_header = mock.patch('koji.get_rpm_header').start()
self.pathinfo_work = mock.patch('koji.pathinfo.work').start()

View file

@ -10,20 +10,22 @@ import kojihub
class TestImportImageInternal(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.context_db = mock.patch('koji.db.context').start()
self.Task = mock.patch('kojihub.Task').start()
self.get_build = mock.patch('kojihub.get_build').start()
self.get_archive_type = mock.patch('kojihub.get_archive_type').start()
self.path_work = mock.patch('koji.pathinfo.work').start()
self.import_archive = mock.patch('kojihub.import_archive').start()
self.build = mock.patch('koji.pathinfo.build').start()
self.get_rpm = mock.patch('kojihub.get_rpm').start()
def tearDown(self):
shutil.rmtree(self.tempdir)
@mock.patch('koji.pathinfo.work')
@mock.patch('kojihub.import_archive')
@mock.patch('kojihub.get_archive_type')
@mock.patch('kojihub.get_build')
@mock.patch('kojihub.Task')
@mock.patch('kojihub.context')
def test_basic(self, context, Task, get_build, get_archive_type, import_archive, work):
def test_basic(self):
task = mock.MagicMock()
task.assertHost = mock.MagicMock()
Task.return_value = task
self.Task.return_value = task
imgdata = {
'arch': 'x86_64',
'task_id': 1,
@ -34,32 +36,24 @@ class TestImportImageInternal(unittest.TestCase):
],
}
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
context.session.host_id = 42
get_build.return_value = {
self.context_db.cnx.cursor.return_value = cursor
self.context_db.session.host_id = 42
self.get_build.return_value = {
'id': 2,
'name': 'name',
'version': 'version',
'release': 'release',
}
get_archive_type.return_value = 4
work.return_value = self.tempdir
self.get_archive_type.return_value = 4
self.path_work.return_value = self.tempdir
os.makedirs(self.tempdir + "/tasks/1/1")
kojihub.importImageInternal(task_id=1, build_info=get_build.return_value, imgdata=imgdata)
kojihub.importImageInternal(
task_id=1, build_info=self.get_build.return_value, imgdata=imgdata)
@mock.patch('kojihub.get_rpm')
@mock.patch('koji.pathinfo.build')
@mock.patch('koji.pathinfo.work')
@mock.patch('kojihub.import_archive')
@mock.patch('kojihub.get_archive_type')
@mock.patch('kojihub.get_build')
@mock.patch('kojihub.Task')
@mock.patch('kojihub.context')
def test_with_rpm(self, context, Task, get_build, get_archive_type, import_archive, build,
work, get_rpm):
def test_with_rpm(self):
task = mock.MagicMock()
task.assertHost = mock.MagicMock()
Task.return_value = task
self.Task.return_value = task
rpm = {
# 'location': 'foo',
'id': 6,
@ -87,14 +81,14 @@ class TestImportImageInternal(unittest.TestCase):
'id': 2
}
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
context.session.host_id = 42
get_build.return_value = build_info
get_rpm.return_value = rpm
get_archive_type.return_value = 4
work.return_value = self.tempdir
build.return_value = self.tempdir
import_archive.return_value = {
self.context_db.cnx.cursor.return_value = cursor
self.context_db.session.host_id = 42
self.get_build.return_value = build_info
self.get_rpm.return_value = rpm
self.get_archive_type.return_value = 4
self.path_work.return_value = self.tempdir
self.build.return_value = self.tempdir
self.import_archive.return_value = {
'id': 9,
'filename': self.tempdir + '/foo.archive',
}

View file

@ -22,6 +22,7 @@ class TestImportRPM(unittest.TestCase):
pass
self.context = mock.patch('kojihub.context').start()
self.context.session.assertPerm = mock.MagicMock()
self.context_db = mock.patch('koji.db.context').start()
self.cursor = mock.MagicMock()
self.rpm_header_retval = {
@ -40,7 +41,7 @@ class TestImportRPM(unittest.TestCase):
self.get_build = mock.patch('kojihub.get_build').start()
self.get_rpm_header = mock.patch('koji.get_rpm_header').start()
self.new_typed_build = mock.patch('kojihub.new_typed_build').start()
self._dml = mock.patch('kojihub._dml').start()
self._dml = mock.patch('koji.db._dml').start()
self._singleValue = mock.patch('kojihub._singleValue').start()
self.os_path_exists = mock.patch('os.path.exists').start()
self.os_path_basename = mock.patch('os.path.basename').start()
@ -172,7 +173,7 @@ class TestImportRPM(unittest.TestCase):
def test_non_exist_build(self):
self.cursor.fetchone.return_value = None
self.context.cnx.cursor.return_value = self.cursor
self.context_db.cnx.cursor.return_value = self.cursor
retval = copy.copy(self.rpm_header_retval)
retval.update({
'filename': 'name-version-release.arch.rpm',

View file

@ -16,6 +16,7 @@ class DummyExports(object):
class TestMulticall(unittest.TestCase):
def test_multicall(self):
self.context_db = mock.patch('koji.db.context').start()
kojixmlrpc.kojihub = mock.MagicMock()
kojixmlrpc.context.opts = mock.MagicMock()
kojixmlrpc.context.session = mock.MagicMock()

View file

@ -20,37 +20,43 @@ class TestRemoveHostFromChannel(unittest.TestCase):
side_effect=self.getUpdate).start()
self.updates = []
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
self.context.session.assertPerm = mock.MagicMock()
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
self.context.opts = {'HostPrincipalFormat': '-%s-'}
self.exports = kojihub.RootExports()
self.list_channels = mock.patch('kojihub.list_channels').start()
self.get_channel_id = mock.patch('kojihub.get_channel_id').start()
self.get_host = mock.patch('kojihub.get_host').start()
self.hostname = 'hostname'
self.hostinfo = {'id': 123, 'name': self.hostname}
self.channel_id = 234
self.channelname = 'channelname'
self.list_channels_output = [{'id': self.channel_id, 'name': self.channelname}]
def tearDown(self):
mock.patch.stopall()
@mock.patch('kojihub.list_channels')
@mock.patch('kojihub.get_channel_id')
@mock.patch('kojihub.get_host')
def test_valid(self, get_host, get_channel_id, list_channels):
get_host.return_value = {'id': 123, 'name': 'hostname'}
get_channel_id.return_value = 234
list_channels.return_value = [{'id': 234, 'name': 'channelname'}]
def test_valid(self):
self.get_host.return_value = self.hostinfo
self.get_channel_id.return_value = self.channel_id
self.list_channels.return_value = self.list_channels_output
kojihub.remove_host_from_channel('hostname', 'channelname')
kojihub.remove_host_from_channel(self.hostname, self.channelname)
get_host.assert_called_once_with('hostname')
get_channel_id.assert_called_once_with('channelname')
list_channels.assert_called_once_with(123)
self.get_host.assert_called_once_with(self.hostname)
self.get_channel_id.assert_called_once_with(self.channelname)
self.list_channels.assert_called_once_with(self.hostinfo['id'])
self.assertEqual(len(self.updates), 1)
update = self.updates[0]
values = {
'host_id': 123,
'channel_id': 234,
'host_id': self.hostinfo['id'],
'channel_id': self.channel_id,
}
clauses = [
'host_id = %(host_id)i AND channel_id = %(channel_id)i',
@ -60,45 +66,36 @@ class TestRemoveHostFromChannel(unittest.TestCase):
self.assertEqual(update.values, values)
self.assertEqual(update.clauses, clauses)
@mock.patch('kojihub.list_channels')
@mock.patch('kojihub.get_channel_id')
@mock.patch('kojihub.get_host')
def test_wrong_host(self, get_host, get_channel_id, list_channels):
get_host.return_value = None
def test_wrong_host(self):
self.get_host.return_value = None
with self.assertRaises(koji.GenericError):
kojihub.remove_host_from_channel('hostname', 'channelname')
kojihub.remove_host_from_channel(self.hostname, self.channelname)
get_host.assert_called_once_with('hostname')
self.get_host.assert_called_once_with(self.hostname)
self.assertEqual(len(self.updates), 0)
@mock.patch('kojihub.list_channels')
@mock.patch('kojihub.get_channel_id')
@mock.patch('kojihub.get_host')
def test_wrong_channel(self, get_host, get_channel_id, list_channels):
get_host.return_value = {'id': 123, 'name': 'hostname'}
get_channel_id.return_value = None
list_channels.return_value = [{'id': 234, 'name': 'channelname'}]
def test_wrong_channel(self):
self.get_host.return_value = self.hostinfo
self.get_channel_id.return_value = None
self.list_channels.return_value = self.list_channels_output
with self.assertRaises(koji.GenericError):
kojihub.remove_host_from_channel('hostname', 'channelname')
kojihub.remove_host_from_channel(self.hostname, self.channelname)
get_host.assert_called_once_with('hostname')
get_channel_id.assert_called_once_with('channelname')
self.get_host.assert_called_once_with(self.hostname)
self.get_channel_id.assert_called_once_with(self.channelname)
self.assertEqual(len(self.updates), 0)
@mock.patch('kojihub.list_channels')
@mock.patch('kojihub.get_channel_id')
@mock.patch('kojihub.get_host')
def test_missing_record(self, get_host, get_channel_id, list_channels):
get_host.return_value = {'id': 123, 'name': 'hostname'}
get_channel_id.return_value = 234
list_channels.return_value = []
def test_missing_record(self):
self.get_host.return_value = self.hostinfo
self.get_channel_id.return_value = self.channel_id
self.list_channels.return_value = []
with self.assertRaises(koji.GenericError):
kojihub.remove_host_from_channel('hostname', 'channelname')
kojihub.remove_host_from_channel(self.hostname, self.channelname)
get_host.assert_called_once_with('hostname')
get_channel_id.assert_called_once_with('channelname')
list_channels.assert_called_once_with(123)
self.get_host.assert_called_once_with(self.hostname)
self.get_channel_id.assert_called_once_with(self.channelname)
self.list_channels.assert_called_once_with(self.hostinfo['id'])
self.assertEqual(len(self.updates), 0)

View file

@ -29,9 +29,10 @@ class TestSetHostEnabled(unittest.TestCase):
side_effect=self.getUpdate).start()
self.updates = []
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
self.context.session.assertPerm = mock.MagicMock()
self.exports = kojihub.RootExports()
@ -61,8 +62,8 @@ class TestSetHostEnabled(unittest.TestCase):
'enabled': False,
}
kojihub.get_host.return_value = hostinfo
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
self.exports.enableHost('hostname')
@ -113,8 +114,8 @@ class TestSetHostEnabled(unittest.TestCase):
'enabled': True,
}
kojihub.get_host.return_value = hostinfo
self.context.event_id = 42
self.context.session.user_id = 23
self.context_db.event_id = 42
self.context_db.session.user_id = 23
self.exports.disableHost('hostname')

View file

@ -50,32 +50,37 @@ class TestTagBuild(unittest.TestCase):
self.check_tag_access = mock.patch('kojihub.check_tag_access').start()
self.writeInheritanceData = mock.patch('kojihub.writeInheritanceData').start()
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertPerm = mock.MagicMock()
self.context.session.assertLogin = mock.MagicMock()
def tearDown(self):
mock.patch.stopall()
def test_simple_tag(self):
self.check_tag_access.return_value = (True, False, "")
self.get_build.return_value = {
self.context_db.session.assertLogin = mock.MagicMock()
self.buildinfo = {
'id': 1,
'name': 'name',
'version': 'version',
'release': 'release',
'state': koji.BUILD_STATES['COMPLETE'],
}
self.get_tag.return_value = {
self.taginfo = {
'id': 777,
'name': 'tag',
}
self.get_user.return_value = {
self.userinfo = {
'id': 999,
'name': 'user',
}
self.context.event_id = 42
self.event_id = 42
def tearDown(self):
mock.patch.stopall()
def test_simple_tag(self):
self.check_tag_access.return_value = (True, False, "")
self.get_build.return_value = self.buildinfo
self.get_tag.return_value = self.taginfo
self.get_user.return_value = self.userinfo
self.context_db.event_id = self.event_id
# set return for the already tagged check
self.query_executeOne.return_value = None
@ -91,10 +96,10 @@ class TestTagBuild(unittest.TestCase):
insert = self.inserts[0]
self.assertEqual(insert.table, 'tag_listing')
values = {
'build_id': 1,
'create_event': 42,
'creator_id': 999,
'tag_id': 777
'build_id': self.buildinfo['id'],
'create_event': self.event_id,
'creator_id': self.userinfo['id'],
'tag_id': self.taginfo['id']
}
self.assertEqual(insert.data, values)
self.assertEqual(insert.rawdata, {})
@ -102,30 +107,18 @@ class TestTagBuild(unittest.TestCase):
def test_simple_tag_with_user(self):
self.check_tag_access.return_value = (True, False, "")
self.get_build.return_value = {
'id': 1,
'name': 'name',
'version': 'version',
'release': 'release',
'state': koji.BUILD_STATES['COMPLETE'],
}
self.get_tag.return_value = {
'id': 777,
'name': 'tag',
}
self.get_user.return_value = {
'id': 999,
'name': 'user',
}
self.context.event_id = 42
self.get_build.return_value = self.buildinfo
self.get_tag.return_value = self.taginfo
self.get_user.return_value = self.userinfo
self.context_db.event_id = self.event_id
# set return for the already tagged check
self.query_executeOne.return_value = None
# call it
kojihub._tag_build('sometag', 'name-version-release', user_id=999)
kojihub._tag_build('sometag', 'name-version-release', user_id=self.userinfo['id'])
self.get_tag.called_once_with('sometag', strict=True)
self.get_user.called_one_with(999, strict=True)
self.get_user.called_one_with(self.userinfo['id'], strict=True)
self.get_build.called_once_with('name-version-release', strict=True)
self.context.session.assertPerm.assert_not_called()
@ -134,10 +127,10 @@ class TestTagBuild(unittest.TestCase):
insert = self.inserts[0]
self.assertEqual(insert.table, 'tag_listing')
values = {
'build_id': 1,
'create_event': 42,
'creator_id': 999,
'tag_id': 777
'build_id': self.buildinfo['id'],
'create_event': self.event_id,
'creator_id': self.userinfo['id'],
'tag_id': self.taginfo['id']
}
self.assertEqual(insert.data, values)
self.assertEqual(insert.rawdata, {})
@ -145,22 +138,10 @@ class TestTagBuild(unittest.TestCase):
def test_simple_untag(self):
self.check_tag_access.return_value = (True, False, "")
self.get_build.return_value = {
'id': 1,
'name': 'name',
'version': 'version',
'release': 'release',
'state': koji.BUILD_STATES['COMPLETE'],
}
self.get_tag.return_value = {
'id': 777,
'name': 'tag',
}
self.get_user.return_value = {
'id': 999,
'name': 'user',
}
self.context.event_id = 42
self.get_build.return_value = self.buildinfo
self.get_tag.return_value = self.taginfo
self.get_user.return_value = self.userinfo
self.context_db.event_id = self.event_id
# set return for the already tagged check
self.query_executeOne.return_value = None
@ -177,8 +158,8 @@ class TestTagBuild(unittest.TestCase):
update = self.updates[0]
self.assertEqual(update.table, 'tag_listing')
values = {
'build_id': 1,
'tag_id': 777
'build_id': self.buildinfo['id'],
'tag_id': self.taginfo['id']
}
data = {
'revoke_event': 42,
@ -191,30 +172,18 @@ class TestTagBuild(unittest.TestCase):
def test_simple_untag_with_user(self):
self.check_tag_access.return_value = (True, False, "")
self.get_build.return_value = {
'id': 1,
'name': 'name',
'version': 'version',
'release': 'release',
'state': koji.BUILD_STATES['COMPLETE'],
}
self.get_tag.return_value = {
'id': 777,
'name': 'tag',
}
self.get_user.return_value = {
'id': 999,
'name': 'user',
}
self.context.event_id = 42
self.get_build.return_value = self.buildinfo
self.get_tag.return_value = self.taginfo
self.get_user.return_value = self.userinfo
self.context_db.event_id = self.event_id
# set return for the already tagged check
self.query_executeOne.return_value = None
# call it
kojihub._untag_build('sometag', 'name-version-release', user_id=999)
kojihub._untag_build('sometag', 'name-version-release', user_id=self.userinfo['id'])
self.get_tag.called_once_with('sometag', strict=True)
self.get_user.called_one_with(999, strict=True)
self.get_user.called_one_with(self.userinfo['id'], strict=True)
self.get_build.called_once_with('name-version-release', strict=True)
self.context.session.assertPerm.assert_not_called()
self.assertEqual(len(self.inserts), 0)
@ -224,8 +193,8 @@ class TestTagBuild(unittest.TestCase):
update = self.updates[0]
self.assertEqual(update.table, 'tag_listing')
values = {
'build_id': 1,
'tag_id': 777
'build_id': self.buildinfo['id'],
'tag_id': self.taginfo['id']
}
data = {
'revoke_event': 42,

View file

@ -32,12 +32,13 @@ class TestGrouplist(unittest.TestCase):
def setUp(self):
self.context = mock.patch('kojihub.context').start()
self.context_db = mock.patch('koji.db.context').start()
self.get_user = mock.patch('kojihub.get_user').start()
self.verify_name_internal = mock.patch('kojihub.verify_name_internal').start()
# It seems MagicMock will not automatically handle attributes that
# start with "assert"
self.context.session.assertPerm = mock.MagicMock()
self.context.session.assertLogin = mock.MagicMock()
self.context_db.session.assertLogin = mock.MagicMock()
self.QueryProcessor = mock.patch('kojihub.QueryProcessor',
side_effect=self.getQuery).start()

View file

@ -6,6 +6,12 @@ import kojihub
class TestInsertProcessor(unittest.TestCase):
def setUp(self):
self.context_db = mock.patch('koji.db.context').start()
def tearDown(self):
mock.patch.stopall()
def test_basic_instantiation(self):
proc = kojihub.InsertProcessor('sometable')
actual = str(proc)
@ -18,43 +24,40 @@ class TestInsertProcessor(unittest.TestCase):
expected = 'INSERT INTO sometable (foo) VALUES (%(foo)s)'
self.assertEqual(actual, expected)
@mock.patch('kojihub.context')
def test_simple_execution_with_iterate(self, context):
def test_simple_execution_with_iterate(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
proc = kojihub.InsertProcessor('sometable', data={'foo': 'bar'})
proc.execute()
cursor.execute.assert_called_once_with(
'INSERT INTO sometable (foo) VALUES (%(foo)s)',
{'foo': 'bar'}, log_errors=True)
@mock.patch('kojihub.context')
def test_make_create(self, context):
def test_make_create(self,):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
context.session.assertLogin = mock.MagicMock()
self.context_db.cnx.cursor.return_value = cursor
self.context_db.session.assertLogin = mock.MagicMock()
proc = kojihub.InsertProcessor('sometable', data={'foo': 'bar'})
proc.make_create(event_id=1, user_id=2)
self.assertEqual(proc.data['create_event'], 1)
self.assertEqual(proc.data['creator_id'], 2)
proc.make_create(user_id=2)
self.assertEqual(proc.data['create_event'], context.event_id)
self.assertEqual(proc.data['create_event'], self.context_db.event_id)
self.assertEqual(proc.data['creator_id'], 2)
proc.make_create(event_id=1)
self.assertEqual(proc.data['create_event'], 1)
self.assertEqual(proc.data['creator_id'], context.session.user_id)
self.assertEqual(proc.data['creator_id'], self.context_db.session.user_id)
proc.make_create()
self.assertEqual(proc.data['create_event'], context.event_id)
self.assertEqual(proc.data['creator_id'], context.session.user_id)
self.assertEqual(proc.data['create_event'], self.context_db.event_id)
self.assertEqual(proc.data['creator_id'], self.context_db.session.user_id)
@mock.patch('kojihub.context')
def test_dup_check(self, context):
def test_dup_check(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
context.session.assertLogin = mock.MagicMock()
self.context_db.cnx.cursor.return_value = cursor
self.context_db.session.assertLogin = mock.MagicMock()
proc = kojihub.InsertProcessor('sometable', data={'foo': 'bar'})
proc.dup_check()
@ -76,10 +79,9 @@ class TestInsertProcessor(unittest.TestCase):
result = proc.dup_check()
self.assertEqual(result, None)
@mock.patch('kojihub.context')
def test_raw_data(self, context):
def test_raw_data(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
proc = kojihub.InsertProcessor('sometable', rawdata={'foo': '\'bar\''})
result = proc.dup_check()
self.assertEqual(result, None)
@ -89,6 +91,12 @@ class TestInsertProcessor(unittest.TestCase):
class TestBulkInsertProcessor(unittest.TestCase):
def setUp(self):
    # Patch the db-layer context that the processors now use
    # (moved from kojihub to koji.db); stopped in tearDown.
    self.context_db = mock.patch('koji.db.context').start()
def tearDown(self):
    # Stop all patchers started in setUp.
    mock.patch.stopall()
def test_basic_instantiation(self):
proc = kojihub.BulkInsertProcessor('sometable')
actual = str(proc)
@ -106,10 +114,9 @@ class TestBulkInsertProcessor(unittest.TestCase):
actual = str(proc)
self.assertEqual(actual, expected)
@mock.patch('kojihub.context')
def test_simple_execution(self, context):
def test_simple_execution(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
proc = kojihub.BulkInsertProcessor('sometable', data=[{'foo': 'bar'}])
proc.execute()
cursor.execute.assert_called_once_with(
@ -128,10 +135,9 @@ class TestBulkInsertProcessor(unittest.TestCase):
log_errors=True
)
@mock.patch('kojihub.context')
def test_bulk_execution(self, context):
def test_bulk_execution(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
proc = kojihub.BulkInsertProcessor('sometable', data=[{'foo': 'bar1'}])
proc.add_record(foo='bar2')
@ -166,10 +172,9 @@ class TestBulkInsertProcessor(unittest.TestCase):
str(proc)
self.assertEqual(cm.exception.args[0], 'Missing value foo2 in BulkInsert')
@mock.patch('kojihub.context')
def test_batch_execution(self, context):
def test_batch_execution(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
proc = kojihub.BulkInsertProcessor('sometable', data=[{'foo': 'bar1'}], batch=2)
proc.add_record(foo='bar2')
@ -185,10 +190,9 @@ class TestBulkInsertProcessor(unittest.TestCase):
mock.call('INSERT INTO sometable (foo) VALUES (%(foo0)s)',
{'foo0': 'bar3'}, log_errors=True))
@mock.patch('kojihub.context')
def test_no_batch_execution(self, context):
def test_no_batch_execution(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
proc = kojihub.BulkInsertProcessor('sometable', data=[{'foo': 'bar1'}], batch=0)
proc.add_record(foo='bar2')

View file

@ -29,6 +29,7 @@ class TestQueryProcessor(unittest.TestCase):
)
self.original_chunksize = kojihub.QueryProcessor.iterchunksize
kojihub.QueryProcessor.iterchunksize = 2
self.context_db = mock.patch('koji.db.context').start()
def tearDown(self):
kojihub.QueryProcessor.iterchunksize = self.original_chunksize
@ -71,19 +72,17 @@ class TestQueryProcessor(unittest.TestCase):
" ORDER BY something OFFSET 10 LIMIT 3"
self.assertEqual(actual, expected)
def test_simple_with_execution(self):
    """execute() sends the generated SELECT through the koji.db context cursor."""
    cursor = mock.MagicMock()
    # uses self.context_db (patched koji.db.context) instead of kojihub.context
    self.context_db.cnx.cursor.return_value = cursor
    proc = kojihub.QueryProcessor(**self.simple_arguments)
    proc.execute()
    cursor.execute.assert_called_once_with(
        '\nSELECT something\n FROM awesome\n\n\n \n \n\n \n', {})
@mock.patch('kojihub.context')
def test_simple_count_with_execution(self, context):
def test_simple_count_with_execution(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
cursor.fetchall.return_value = [('some count',)]
args = self.simple_arguments.copy()
args['opts'] = {'countOnly': True}
@ -103,10 +102,9 @@ class TestQueryProcessor(unittest.TestCase):
' FROM awesome\n\n\n GROUP BY id\n \n\n \n) numrows', {})
self.assertEqual(results, 'some count')
@mock.patch('kojihub.context')
def test_simple_execution_with_iterate(self, context):
def test_simple_execution_with_iterate(self):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
self.context_db.cnx.cursor.return_value = cursor
cursor.fetchall.return_value = [
('value number 1',),
('value number 2',),

View file

@ -8,7 +8,8 @@ import kojihub
class TestSavepoint(unittest.TestCase):
def setUp(self):
    """Patch the db helpers at their new home in koji.db.

    _dml and context moved out of kojihub; patching the old
    'kojihub._dml' path would miss the code under test.
    """
    self.dml = mock.patch('koji.db._dml').start()
    self.context_db = mock.patch('koji.db.context').start()
def tearDown(self):
    # Stop the _dml and context patchers started in setUp.
    mock.patch.stopall()

View file

@ -21,10 +21,10 @@ class TestUpdateProcessor(unittest.TestCase):
expected = {'data.foo': 'bar'}
self.assertEqual(actual, expected)
@mock.patch('kojihub.context')
def test_simple_execution_with_iterate(self, context):
@mock.patch('koji.db.context')
def test_simple_execution_with_iterate(self, context_db):
cursor = mock.MagicMock()
context.cnx.cursor.return_value = cursor
context_db.cnx.cursor.return_value = cursor
proc = kojihub.UpdateProcessor('sometable', data={'foo': 'bar'})
proc.execute()
cursor.execute.assert_called_once_with(

View file

@ -1,62 +1,73 @@
#!/usr/bin/python2
#!/usr/bin/python3
import os
import xmlrpc.client
from optparse import OptionParser
from koji.context import context
import koji.db
def clean_sessions(cursor, vacuum, test, age):
    """Delete sessions not updated for more than `age` days.

    :param cursor: open DB cursor (autocommit connection)
    :param bool vacuum: run VACUUM ANALYZE afterwards
    :param bool test: dry-run -- report only, delete nothing
    :param int age: threshold in days
    """
    table = 'sessions'
    clauses = [f"update_time < NOW() - '{int(age)} days'::interval"]
    if options.verbose:
        # countOnly: iterate() delegates to execute() and returns the count
        query = koji.db.QueryProcessor(clauses=clauses, opts={'countOnly': True},
                                       tables=[table], values=locals())
        rows = query.iterate()
        print(f"Deleting {rows} sessions")
    if not test:
        # int(age) guarantees the interpolated value is a plain integer
        cursor.execute(
            f"DELETE FROM sessions WHERE update_time < NOW() - '{int(age)} days'::interval")
    if vacuum:
        cursor.execute("VACUUM ANALYZE sessions")
def clean_reservations(cursor, vacuum, test, age):
    """Delete build reservations created more than `age` days ago.

    :param cursor: open DB cursor (autocommit connection)
    :param bool vacuum: run VACUUM ANALYZE afterwards
    :param bool test: dry-run -- report only, delete nothing
    :param int age: threshold in days
    """
    table = 'build_reservations'
    clauses = [f"created < NOW() - '{int(age)} days'::interval"]
    if options.verbose:
        # countOnly: iterate() delegates to execute() and returns the count
        query = koji.db.QueryProcessor(clauses=clauses, opts={'countOnly': True},
                                       tables=[table], values=locals())
        rows = query.iterate()
        print(f"Deleting {rows} build reservations")
    if not test:
        cursor.execute(
            f"DELETE FROM build_reservations WHERE created < NOW() - '{int(age)} days'::interval")
    if vacuum:
        cursor.execute("VACUUM ANALYZE build_reservations")
def clean_notification_tasks(cursor, vacuum, test, age):
    """Delete tagNotification tasks completed more than `age` days ago.

    :param cursor: open DB cursor (autocommit connection)
    :param bool vacuum: run VACUUM ANALYZE afterwards
    :param bool test: dry-run -- report only, delete nothing
    :param int age: threshold in days
    """
    table = 'task'
    clauses = ["method = 'tagNotification'",
               f"completion_time < NOW() - '{int(age)} days'::interval"]
    if options.verbose:
        query = koji.db.QueryProcessor(clauses=clauses, opts={'countOnly': True},
                                       tables=[table], values=locals())
        rows = query.iterate()
        print(f"Deleting {rows} tagNotification tasks")
    if not test:
        # cascade -- notification tasks have no rows depending on them
        cursor.execute(f"DELETE FROM task WHERE method = 'tagNotification' AND "
                       f"completion_time < NOW() - '{int(age)} days'::interval")
    if vacuum:
        cursor.execute("VACUUM ANALYZE task")
def clean_scratch_tasks(cursor, vacuum, test, age):
    """Delete scratch build tasks (and their children) older than `age` days.

    :param cursor: open DB cursor (autocommit connection)
    :param bool vacuum: run VACUUM ANALYZE afterwards
    :param bool test: dry-run -- report only, delete nothing
    :param int age: threshold in days
    """
    table = 'task'
    clauses = ["method = 'build'",
               f"completion_time < NOW() - '{int(age)} days'::interval",
               # %%%% collapses to %% after placeholder processing; both act as
               # a single LIKE wildcard -- TODO confirm a single %% was intended
               "request LIKE '%%%%<name>scratch</name>%%%%'"]
    if options.verbose:
        query = koji.db.QueryProcessor(clauses=clauses, opts={'countOnly': True},
                                       tables=[table], values=locals())
        rows = query.iterate()
        print(f"Deleting {rows} scratch build tasks")
    if test:
        return
@ -65,14 +76,19 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
ids = []
# temp table is dropped automatically at the end of the script/connection
cursor.execute("CREATE TEMPORARY TABLE temp_scratch_tasks (task_id INTEGER NOT NULL)")
query = koji.db.QueryProcessor(columns=['id', 'request'], clauses=clauses,
                               tables=[table], values=locals())
for row in query.execute():
    # QueryProcessor returns dicts when columns are named -- tuple
    # unpacking (`task_id, request = row`) would yield the key names.
    task_id = row['id']
    request = row['request']
    try:
        params, method = xmlrpc.client.loads(request)
        opts = params[2]
        if opts['scratch']:
            # temp_scratch_tasks has only a task_id column, so pass the
            # value via data= and do NOT call make_create() (it would add
            # nonexistent create_event/creator_id columns).
            insert = koji.db.InsertProcessor('temp_scratch_tasks',
                                             data={'task_id': task_id})
            insert.execute()
            ids.append(task_id)
    except Exception:
        # malformed/undecodable request -- best effort, skip this task
        continue
@ -82,13 +98,21 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
if not parents:
break
children = []
cursor.execute("SELECT id FROM task WHERE parent IN %s", (parents,))
for row in cursor.fetchall():
string_parents = ', '.join(parents)
query = koji.db.QueryProcessor(columns=['id'],
clauses=[f"parent IN ({int(age)})"],
tables=['task'],
values=locals())
rows = query.execute()
for row in rows:
children.append(row[0])
parents = children
if children:
values = ', '.join(["(%d)" % task_id for task_id in children])
cursor.execute("INSERT INTO temp_scratch_tasks VALUES %s" % values)
insert = koji.db.InsertProcessor('temp_scratch_tasks')
insert.set(values)
insert.make_create()
insert.execute()
if not ids:
return
@ -106,17 +130,19 @@ def clean_scratch_tasks(cursor, vacuum, test, age):
def clean_buildroots(cursor, vacuum, test):
    """Delete buildroots with no content generator and no standard_buildroot row.

    :param cursor: open DB cursor (autocommit connection)
    :param bool vacuum: run VACUUM ANALYZE afterwards
    :param bool test: dry-run -- report only, delete nothing
    """
    if options.verbose:
        clauses = ["cg_id IS NULL",
                   "id NOT IN (SELECT buildroot_id FROM standard_buildroot)"]
        # countOnly: iterate() delegates to execute() and returns the count
        query = koji.db.QueryProcessor(clauses=clauses, opts={'countOnly': True},
                                       tables=['buildroot'], values=locals())
        rows = query.iterate()
        print(f"Deleting {rows} buildroots")
    if not test:
        q = " FROM buildroot WHERE cg_id IS NULL AND id NOT IN " \
            "(SELECT buildroot_id FROM standard_buildroot)"
        # remove listing rows first (FK on buildroot_id), then the buildroots
        cursor.execute(f"DELETE FROM buildroot_listing WHERE buildroot_id IN (SELECT id {q})")
        cursor.execute(f"DELETE {q}")
    if vacuum:
        cursor.execute("VACUUM ANALYZE buildroot_listing")
        cursor.execute("VACUUM ANALYZE buildroot")
@ -205,9 +231,9 @@ if __name__ == "__main__":
host=opts.get("DBHost", None),
port=opts.get("DBPort", None))
conn = koji.db.connect()
conn.set_session(autocommit=True)
cursor = conn.cursor()
context.cnx = koji.db.connect()
context.cnx.set_session(autocommit=True)
cursor = context.cnx.cursor()
clean_sessions(cursor, options.vacuum, options.test, options.sessions_age)
clean_reservations(cursor, options.vacuum, options.test, options.reservations_age)