6980 lines
292 KiB
Python
Executable file
6980 lines
292 KiB
Python
Executable file
#!/usr/bin/python
|
|
# coding=utf-8
|
|
|
|
# command line interface for the Koji build system
|
|
# Copyright (c) 2005-2014 Red Hat, Inc.
|
|
#
|
|
# Koji is free software; you can redistribute it and/or
|
|
# modify it under the terms of the GNU Lesser General Public
|
|
# License as published by the Free Software Foundation;
|
|
# version 2.1 of the License.
|
|
#
|
|
# This software is distributed in the hope that it will be useful,
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
# Lesser General Public License for more details.
|
|
#
|
|
# You should have received a copy of the GNU Lesser General Public
|
|
# License along with this software; if not, write to the Free Software
|
|
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
|
#
|
|
# Authors:
|
|
# Dennis Gregorovic <dgregor@redhat.com>
|
|
# Mike McLean <mikem@redhat.com>
|
|
# Mike Bonnet <mikeb@redhat.com>
|
|
# Cristian Balint <cbalint@redhat.com>
|
|
|
|
import sys
|
|
try:
|
|
import krbV
|
|
except ImportError:
|
|
pass
|
|
try:
|
|
import ast
|
|
except ImportError:
|
|
ast = None
|
|
try:
|
|
import json
|
|
except ImportError:
|
|
try:
|
|
import simplejson as json
|
|
except ImportError:
|
|
json = None
|
|
import ConfigParser
|
|
import base64
|
|
import errno
|
|
import koji
|
|
import koji.util
|
|
import fnmatch
|
|
from koji.util import md5_constructor
|
|
import logging
|
|
import os
|
|
import re
|
|
import pprint
|
|
import random
|
|
import socket
|
|
import stat
|
|
import string
|
|
import time
|
|
import traceback
|
|
import urlgrabber.grabber as grabber
|
|
import urlgrabber.progress as progress
|
|
import xmlrpclib
|
|
import yum.comps
|
|
import optparse
|
|
#for import-comps handler (currently disabled)
|
|
#from rhpl.comps import Comps
|
|
|
|
# fix OptionParser for python 2.3 (optparse verion 1.4.1+)
|
|
# code taken from optparse version 1.5a2
|
|
OptionParser = optparse.OptionParser
|
|
if optparse.__version__ == "1.4.1+":
|
|
def _op_error(self, msg):
|
|
self.print_usage(sys.stderr)
|
|
msg = "%s: error: %s\n" % (self._get_prog_name(), msg)
|
|
if msg:
|
|
sys.stderr.write(msg)
|
|
sys.exit(2)
|
|
OptionParser.error = _op_error
|
|
|
|
greetings = ('hello', 'hi', 'yo', "what's up", "g'day", 'back to work',
|
|
'bonjour',
|
|
'hallo',
|
|
'ciao',
|
|
'hola',
|
|
u'olá',
|
|
u'dobrý den',
|
|
u'zdravstvuite',
|
|
u'góðan daginn',
|
|
'hej',
|
|
'tervehdys',
|
|
u'grüezi',
|
|
u'céad míle fáilte',
|
|
u'hylô',
|
|
u'bună ziua',
|
|
u'jó napot',
|
|
'dobre dan',
|
|
u'你好',
|
|
u'こんにちは',
|
|
u'नमस्कार',
|
|
u'안녕하세요')
|
|
|
|
def _(args):
|
|
"""Stub function for translation"""
|
|
return args
|
|
|
|
ARGMAP = {'None': None,
|
|
'True': True,
|
|
'False': False}
|
|
|
|
def arg_filter(arg):
|
|
try:
|
|
return int(arg)
|
|
except ValueError:
|
|
pass
|
|
try:
|
|
return float(arg)
|
|
except ValueError:
|
|
pass
|
|
if arg in ARGMAP:
|
|
return ARGMAP[arg]
|
|
#handle lists/dicts?
|
|
return arg
|
|
|
|
categories = {
|
|
'admin' : 'admin commands',
|
|
'build' : 'build commands',
|
|
'search' : 'search commands',
|
|
'download' : 'download commands',
|
|
'monitor' : 'monitor commands',
|
|
'info' : 'info commands',
|
|
'bind' : 'bind commands',
|
|
'misc' : 'miscellaneous commands',
|
|
}
|
|
|
|
def get_epilog_str(progname=None):
|
|
if progname is None:
|
|
progname = os.path.basename(sys.argv[0]) or 'koji'
|
|
categories_ordered=', '.join(sorted(['all'] + categories.keys()))
|
|
epilog_str = '''
|
|
Try "%(progname)s --help" for help about global options
|
|
Try "%(progname)s help" to get all available commands
|
|
Try "%(progname)s <command> --help" for help about the options of a particular command
|
|
Try "%(progname)s help <category>" to get commands under a particular category
|
|
Available categories are: %(categories)s
|
|
''' % ({'progname': progname, 'categories': categories_ordered})
|
|
return _(epilog_str)
|
|
|
|
def get_options():
|
|
"""process options from command line and config file"""
|
|
|
|
common_commands = ['build', 'help', 'download-build',
|
|
'latest-pkg', 'search', 'list-targets']
|
|
usage = _("%%prog [global-options] command [command-options-and-arguments]"
|
|
"\n\nCommon commands: %s" % ', '.join(sorted(common_commands)))
|
|
parser = OptionParser(usage=usage)
|
|
parser.disable_interspersed_args()
|
|
progname = os.path.basename(sys.argv[0]) or 'koji'
|
|
parser.__dict__['origin_format_help'] = parser.format_help
|
|
parser.__dict__['format_help'] = lambda formatter=None: (
|
|
"%(origin_format_help)s%(epilog)s" % ({
|
|
'origin_format_help': parser.origin_format_help(formatter),
|
|
'epilog': get_epilog_str()}))
|
|
parser.add_option("-c", "--config", dest="configFile",
|
|
help=_("use alternate configuration file"), metavar="FILE")
|
|
parser.add_option("-p", "--profile", default=progname,
|
|
help=_("specify a configuration profile"))
|
|
parser.add_option("--keytab", help=_("specify a Kerberos keytab to use"), metavar="FILE")
|
|
parser.add_option("--principal", help=_("specify a Kerberos principal to use"))
|
|
parser.add_option("--krbservice", help=_("specify the Kerberos service name for the hub"))
|
|
parser.add_option("--runas", help=_("run as the specified user (requires special privileges)"))
|
|
parser.add_option("--user", help=_("specify user"))
|
|
parser.add_option("--password", help=_("specify password"))
|
|
parser.add_option("--noauth", action="store_true", default=False,
|
|
help=_("do not authenticate"))
|
|
parser.add_option("--force-auth", action="store_true", default=False,
|
|
help=_("authenticate even for read-only operations"))
|
|
parser.add_option("--authtype", help=_("force use of a type of authentication, options: noauth, ssl, password, or kerberos"))
|
|
parser.add_option("-d", "--debug", action="store_true", default=False,
|
|
help=_("show debug output"))
|
|
parser.add_option("--debug-xmlrpc", action="store_true", default=False,
|
|
help=_("show xmlrpc debug output"))
|
|
parser.add_option("-q", "--quiet", action="store_true", default=False,
|
|
help=_("run quietly"))
|
|
parser.add_option("--skip-main", action="store_true", default=False,
|
|
help=_("don't actually run main"))
|
|
parser.add_option("-s", "--server", help=_("url of XMLRPC server"))
|
|
parser.add_option("--topdir", help=_("specify topdir"))
|
|
parser.add_option("--weburl", help=_("url of the Koji web interface"))
|
|
parser.add_option("--topurl", help=_("url for Koji file access"))
|
|
parser.add_option("--pkgurl", help=optparse.SUPPRESS_HELP)
|
|
parser.add_option("--help-commands", action="store_true", default=False, help=_("list commands"))
|
|
(options, args) = parser.parse_args()
|
|
|
|
if options.help_commands:
|
|
list_commands()
|
|
sys.exit(0)
|
|
if not args:
|
|
list_commands()
|
|
sys.exit(0)
|
|
|
|
aliases = {
|
|
'cancel-task' : 'cancel',
|
|
'cxl' : 'cancel',
|
|
'list-commands' : 'help',
|
|
'move-pkg': 'move-build',
|
|
'move': 'move-build',
|
|
'latest-pkg': 'latest-build',
|
|
'tag-pkg': 'tag-build',
|
|
'tag': 'tag-build',
|
|
'untag-pkg': 'untag-build',
|
|
'untag': 'untag-build',
|
|
'watch-tasks': 'watch-task',
|
|
}
|
|
cmd = args[0]
|
|
cmd = aliases.get(cmd, cmd)
|
|
if cmd.lower() in greetings:
|
|
cmd = "moshimoshi"
|
|
cmd = cmd.replace('-', '_')
|
|
if globals().has_key('anon_handle_' + cmd):
|
|
if not options.force_auth and '--mine' not in args:
|
|
options.noauth = True
|
|
cmd = 'anon_handle_' + cmd
|
|
elif globals().has_key('handle_' + cmd):
|
|
cmd = 'handle_' + cmd
|
|
else:
|
|
list_commands()
|
|
parser.error('Unknown command: %s' % args[0])
|
|
assert False
|
|
|
|
# load local config
|
|
try:
|
|
result = koji.read_config(options.profile, user_config=options.configFile)
|
|
except koji.ConfigurationError, e:
|
|
parser.error(e.args[0])
|
|
assert False
|
|
|
|
# update options according to local config
|
|
for name, value in result.iteritems():
|
|
if getattr(options, name, None) is None:
|
|
setattr(options, name, value)
|
|
|
|
dir_opts = ('topdir', 'cert', 'serverca')
|
|
for name in dir_opts:
|
|
# expand paths here, so we don't have to worry about it later
|
|
value = os.path.expanduser(getattr(options, name))
|
|
setattr(options, name, value)
|
|
|
|
#honor topdir
|
|
if options.topdir:
|
|
koji.BASEDIR = options.topdir
|
|
koji.pathinfo.topdir = options.topdir
|
|
|
|
#pkgurl is obsolete
|
|
if options.pkgurl:
|
|
if options.topurl:
|
|
warn("Warning: the pkgurl option is obsolete")
|
|
else:
|
|
suggest = re.sub(r'/packages/?$', '', options.pkgurl)
|
|
if suggest != options.pkgurl:
|
|
warn("Warning: the pkgurl option is obsolete, using topurl=%r"
|
|
% suggest)
|
|
options.topurl = suggest
|
|
else:
|
|
warn("Warning: The pkgurl option is obsolete, please use topurl instead")
|
|
|
|
return options, cmd, args[1:]
|
|
|
|
def ensure_connection(session):
|
|
try:
|
|
ret = session.getAPIVersion()
|
|
except xmlrpclib.ProtocolError:
|
|
error(_("Error: Unable to connect to server"))
|
|
if ret != koji.API_VERSION:
|
|
warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
|
|
|
|
def print_task_headers():
|
|
"""Print the column headers"""
|
|
print "ID Pri Owner State Arch Name"
|
|
|
|
def print_task(task,depth=0):
|
|
"""Print a task"""
|
|
task = task.copy()
|
|
task['state'] = koji.TASK_STATES.get(task['state'],'BADSTATE')
|
|
fmt = "%(id)-8s %(priority)-4s %(owner_name)-20s %(state)-8s %(arch)-10s "
|
|
if depth:
|
|
indent = " "*(depth-1) + " +"
|
|
else:
|
|
indent = ''
|
|
label = koji.taskLabel(task)
|
|
print ''.join([fmt % task, indent, label])
|
|
|
|
def print_task_recurse(task,depth=0):
|
|
"""Print a task and its children"""
|
|
print_task(task,depth)
|
|
for child in task.get('children',()):
|
|
print_task_recurse(child,depth+1)
|
|
|
|
|
|
class TaskWatcher(object):
|
|
|
|
def __init__(self,task_id,session,level=0,quiet=False):
|
|
self.id = task_id
|
|
self.session = session
|
|
self.info = None
|
|
self.level = level
|
|
self.quiet = quiet
|
|
|
|
#XXX - a bunch of this stuff needs to adapt to different tasks
|
|
|
|
def str(self):
|
|
if self.info:
|
|
label = koji.taskLabel(self.info)
|
|
return "%s%d %s" % (' ' * self.level, self.id, label)
|
|
else:
|
|
return "%s%d" % (' ' * self.level, self.id)
|
|
|
|
def __str__(self):
|
|
return self.str()
|
|
|
|
def get_failure(self):
|
|
"""Print infomation about task completion"""
|
|
if self.info['state'] != koji.TASK_STATES['FAILED']:
|
|
return ''
|
|
error = None
|
|
try:
|
|
result = self.session.getTaskResult(self.id)
|
|
except (xmlrpclib.Fault,koji.GenericError),e:
|
|
error = e
|
|
if error is None:
|
|
# print "%s: complete" % self.str()
|
|
# We already reported this task as complete in update()
|
|
return ''
|
|
else:
|
|
return '%s: %s' % (error.__class__.__name__, str(error).strip())
|
|
|
|
def update(self):
|
|
"""Update info and log if needed. Returns True on state change."""
|
|
if self.is_done():
|
|
# Already done, nothing else to report
|
|
return False
|
|
last = self.info
|
|
self.info = self.session.getTaskInfo(self.id, request=True)
|
|
if self.info is None:
|
|
if not self.quiet:
|
|
print "No such task id: %i" % self.id
|
|
sys.exit(1)
|
|
state = self.info['state']
|
|
if last:
|
|
#compare and note status changes
|
|
laststate = last['state']
|
|
if laststate != state:
|
|
if not self.quiet:
|
|
print "%s: %s -> %s" % (self.str(), self.display_state(last), self.display_state(self.info))
|
|
return True
|
|
return False
|
|
else:
|
|
# First time we're seeing this task, so just show the current state
|
|
if not self.quiet:
|
|
print "%s: %s" % (self.str(), self.display_state(self.info))
|
|
return False
|
|
|
|
def is_done(self):
|
|
if self.info is None:
|
|
return False
|
|
state = koji.TASK_STATES[self.info['state']]
|
|
return (state in ['CLOSED','CANCELED','FAILED'])
|
|
|
|
def is_success(self):
|
|
if self.info is None:
|
|
return False
|
|
state = koji.TASK_STATES[self.info['state']]
|
|
return (state == 'CLOSED')
|
|
|
|
def display_state(self, info):
|
|
# We can sometimes be passed a task that is not yet open, but
|
|
# not finished either. info would be none.
|
|
if not info:
|
|
return 'unknown'
|
|
if info['state'] == koji.TASK_STATES['OPEN']:
|
|
if info['host_id']:
|
|
host = self.session.getHost(info['host_id'])
|
|
return 'open (%s)' % host['name']
|
|
else:
|
|
return 'open'
|
|
elif info['state'] == koji.TASK_STATES['FAILED']:
|
|
return 'FAILED: %s' % self.get_failure()
|
|
else:
|
|
return koji.TASK_STATES[info['state']].lower()
|
|
|
|
def display_tasklist_status(tasks):
|
|
free = 0
|
|
open = 0
|
|
failed = 0
|
|
done = 0
|
|
for task_id in tasks.keys():
|
|
status = tasks[task_id].info['state']
|
|
if status == koji.TASK_STATES['FAILED']:
|
|
failed += 1
|
|
elif status == koji.TASK_STATES['CLOSED'] or status == koji.TASK_STATES['CANCELED']:
|
|
done += 1
|
|
elif status == koji.TASK_STATES['OPEN'] or status == koji.TASK_STATES['ASSIGNED']:
|
|
open += 1
|
|
elif status == koji.TASK_STATES['FREE']:
|
|
free += 1
|
|
print " %d free %d open %d done %d failed" % (free, open, done, failed)
|
|
|
|
def display_task_results(tasks):
|
|
for task in [task for task in tasks.values() if task.level == 0]:
|
|
state = task.info['state']
|
|
task_label = task.str()
|
|
|
|
if state == koji.TASK_STATES['CLOSED']:
|
|
print '%s completed successfully' % task_label
|
|
elif state == koji.TASK_STATES['FAILED']:
|
|
print '%s failed' % task_label
|
|
elif state == koji.TASK_STATES['CANCELED']:
|
|
print '%s was canceled' % task_label
|
|
else:
|
|
# shouldn't happen
|
|
print '%s has not completed' % task_label
|
|
|
|
def watch_tasks(session,tasklist,quiet=False):
|
|
global options
|
|
if not tasklist:
|
|
return
|
|
if not quiet:
|
|
print "Watching tasks (this may be safely interrupted)..."
|
|
sys.stdout.flush()
|
|
rv = 0
|
|
try:
|
|
tasks = {}
|
|
for task_id in tasklist:
|
|
tasks[task_id] = TaskWatcher(task_id,session,quiet=quiet)
|
|
while True:
|
|
all_done = True
|
|
for task_id,task in tasks.items():
|
|
changed = task.update()
|
|
if not task.is_done():
|
|
all_done = False
|
|
else:
|
|
if changed:
|
|
# task is done and state just changed
|
|
if not quiet:
|
|
display_tasklist_status(tasks)
|
|
if not task.is_success():
|
|
rv = 1
|
|
for child in session.getTaskChildren(task_id):
|
|
child_id = child['id']
|
|
if not child_id in tasks.keys():
|
|
tasks[child_id] = TaskWatcher(child_id, session, task.level + 1, quiet=quiet)
|
|
tasks[child_id].update()
|
|
# If we found new children, go through the list again,
|
|
# in case they have children also
|
|
all_done = False
|
|
if all_done:
|
|
if not quiet:
|
|
print
|
|
display_task_results(tasks)
|
|
break
|
|
|
|
sys.stdout.flush()
|
|
time.sleep(options.poll_interval)
|
|
except (KeyboardInterrupt):
|
|
if tasks and not quiet:
|
|
progname = os.path.basename(sys.argv[0]) or 'koji'
|
|
tlist = ['%s: %s' % (t.str(), t.display_state(t.info))
|
|
for t in tasks.values() if not t.is_done()]
|
|
print \
|
|
"""Tasks still running. You can continue to watch with the '%s watch-task' command.
|
|
Running Tasks:
|
|
%s""" % (progname, '\n'.join(tlist))
|
|
rv = 1
|
|
return rv
|
|
|
|
def watch_logs(session, tasklist, opts):
|
|
global options
|
|
print "Watching logs (this may be safely interrupted)..."
|
|
def _isDone(session, taskId):
|
|
info = session.getTaskInfo(taskId)
|
|
if info is None:
|
|
print "No such task id: %i" % taskId
|
|
sys.exit(1)
|
|
state = koji.TASK_STATES[info['state']]
|
|
return (state in ['CLOSED','CANCELED','FAILED'])
|
|
|
|
try:
|
|
offsets = {}
|
|
for task_id in tasklist:
|
|
offsets[task_id] = {}
|
|
|
|
lastlog = None
|
|
while True:
|
|
for task_id in tasklist[:]:
|
|
if _isDone(session, task_id):
|
|
tasklist.remove(task_id)
|
|
|
|
output = session.listTaskOutput(task_id)
|
|
|
|
if opts.log:
|
|
logs = [filename for filename in output if filename == opts.log]
|
|
else:
|
|
logs = [filename for filename in output if filename.endswith('.log')]
|
|
|
|
taskoffsets = offsets[task_id]
|
|
for log in logs:
|
|
contents = 'placeholder'
|
|
while contents:
|
|
if not taskoffsets.has_key(log):
|
|
taskoffsets[log] = 0
|
|
|
|
contents = session.downloadTaskOutput(task_id, log, taskoffsets[log], 16384)
|
|
taskoffsets[log] += len(contents)
|
|
if contents:
|
|
currlog = "%d:%s:" % (task_id, log)
|
|
if currlog != lastlog:
|
|
if lastlog:
|
|
sys.stdout.write("\n")
|
|
sys.stdout.write("==> %s <==\n" % currlog)
|
|
lastlog = currlog
|
|
sys.stdout.write(contents)
|
|
|
|
if not tasklist:
|
|
break
|
|
|
|
time.sleep(options.poll_interval)
|
|
except (KeyboardInterrupt):
|
|
pass
|
|
|
|
def handle_add_group(options, session, args):
|
|
"[admin] Add a group to a tag"
|
|
usage = _("usage: %prog add-group <tag> <group>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 2:
|
|
parser.error(_("Please specify a tag name and a group name"))
|
|
assert False
|
|
tag = args[0]
|
|
group = args[1]
|
|
|
|
activate_session(session)
|
|
if not session.hasPerm('admin'):
|
|
print "This action requires admin privileges"
|
|
return 1
|
|
|
|
dsttag = session.getTag(tag)
|
|
if not dsttag:
|
|
print "Unknown tag: %s" % tag
|
|
return 1
|
|
|
|
groups = dict([(p['name'], p['group_id']) for p in session.getTagGroups(tag, inherit=False)])
|
|
group_id = groups.get(group, None)
|
|
if group_id is not None:
|
|
print "Group %s already exists for tag %s" % (group, tag)
|
|
return 1
|
|
|
|
session.groupListAdd(tag, group)
|
|
|
|
def handle_add_host(options, session, args):
|
|
"[admin] Add a host"
|
|
usage = _("usage: %prog add-host [options] hostname arch [arch2 ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--krb-principal", help=_("set a non-default kerberos principal for the host"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify a hostname and at least one arch"))
|
|
assert False
|
|
host = args[0]
|
|
activate_session(session)
|
|
id = session.getHost(host)
|
|
if id:
|
|
print "%s is already in the database" % host
|
|
return 1
|
|
else:
|
|
kwargs = {}
|
|
if options.krb_principal is not None:
|
|
kwargs['krb_principal'] = options.krb_principal
|
|
id = session.addHost(host, args[1:], **kwargs)
|
|
if id:
|
|
print "%s added: id %d" % (host, id)
|
|
|
|
def handle_edit_host(options, session, args):
|
|
"[admin] Edit a host"
|
|
usage = _("usage: %prog edit-host hostname ... [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--arches", help=_("Space-separated list of supported architectures"))
|
|
parser.add_option("--capacity", type="float", help=_("Capacity of this host"))
|
|
parser.add_option("--description", metavar="DESC", help=_("Description of this host"))
|
|
parser.add_option("--comment", help=_("A brief comment about this host"))
|
|
(subopts, args) = parser.parse_args(args)
|
|
if not args:
|
|
parser.error(_("Please specify a hostname"))
|
|
|
|
activate_session(session)
|
|
|
|
vals = {}
|
|
for key, val in subopts.__dict__.items():
|
|
if val is not None:
|
|
vals[key] = val
|
|
|
|
session.multicall = True
|
|
for host in args:
|
|
session.getHost(host)
|
|
error = False
|
|
for host, [info] in zip(args, session.multiCall(strict=True)):
|
|
if not info:
|
|
print _("Host %s does not exist") % host
|
|
error = True
|
|
|
|
if error:
|
|
print _("No changes made, please correct the command line")
|
|
return 1
|
|
|
|
session.multicall = True
|
|
for host in args:
|
|
session.editHost(host, **vals)
|
|
for host, [result] in zip(args, session.multiCall(strict=True)):
|
|
if result:
|
|
print _("Edited %s") % host
|
|
else:
|
|
print _("No changes made to %s") % host
|
|
|
|
def handle_add_host_to_channel(options, session, args):
|
|
"[admin] Add a host to a channel"
|
|
usage = _("usage: %prog add-host-to-channel [options] hostname channel")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--list", action="store_true", help=_("List possible channels"))
|
|
parser.add_option("--new", action="store_true", help=_("Create channel if needed"))
|
|
(options, args) = parser.parse_args(args)
|
|
if not options.list and len(args) != 2:
|
|
parser.error(_("Please specify a hostname and a channel"))
|
|
assert False
|
|
activate_session(session)
|
|
if options.list:
|
|
for channel in session.listChannels():
|
|
print channel['name']
|
|
return
|
|
channel = args[1]
|
|
if not options.new:
|
|
channelinfo = session.getChannel(channel)
|
|
if not channelinfo:
|
|
print "No such channel: %s" % channel
|
|
return 1
|
|
host = args[0]
|
|
hostinfo = session.getHost(host)
|
|
if not hostinfo:
|
|
print "No such host: %s" % host
|
|
return 1
|
|
kwargs = {}
|
|
if options.new:
|
|
kwargs['create'] = True
|
|
session.addHostToChannel(host, channel, **kwargs)
|
|
|
|
def handle_remove_host_from_channel(options, session, args):
|
|
"[admin] Remove a host from a channel"
|
|
usage = _("usage: %prog remove-host-from-channel [options] hostname channel")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 2:
|
|
parser.error(_("Please specify a hostname and a channel"))
|
|
assert False
|
|
host = args[0]
|
|
activate_session(session)
|
|
hostinfo = session.getHost(host)
|
|
if not hostinfo:
|
|
print "No such host: %s" % host
|
|
return 1
|
|
hostchannels = [c['name'] for c in session.listChannels(hostinfo['id'])]
|
|
|
|
channel = args[1]
|
|
if channel not in hostchannels:
|
|
print "Host %s is not a member of channel %s" % (host, channel)
|
|
return 1
|
|
|
|
session.removeHostFromChannel(host, channel)
|
|
|
|
def handle_remove_channel(options, session, args):
|
|
"[admin] Remove a channel entirely"
|
|
usage = _("usage: %prog remove-channel [options] channel")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--force", action="store_true", help=_("force removal, if possible"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
cinfo = session.getChannel(args[0])
|
|
if not cinfo:
|
|
print "No such channel: %s" % args[0]
|
|
return 1
|
|
session.removeChannel(args[0], force=options.force)
|
|
|
|
def handle_rename_channel(options, session, args):
|
|
"[admin] Rename a channel"
|
|
usage = _("usage: %prog rename-channel [options] old-name new-name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 2:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
cinfo = session.getChannel(args[0])
|
|
if not cinfo:
|
|
print "No such channel: %s" % args[0]
|
|
return 1
|
|
session.renameChannel(args[0], args[1])
|
|
|
|
def handle_add_pkg(options, session, args):
|
|
"[admin] Add a package to the listing for tag"
|
|
usage = _("usage: %prog add-pkg [options] tag package [package2 ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--force", action='store_true', help=_("Override blocks if necessary"))
|
|
parser.add_option("--owner", help=_("Specify owner"))
|
|
parser.add_option("--extra-arches", help=_("Specify extra arches"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify a tag and at least one package"))
|
|
assert False
|
|
if not options.owner:
|
|
parser.error(_("Please specify an owner for the package(s)"))
|
|
assert False
|
|
if not session.getUser(options.owner):
|
|
print "User %s does not exist" % options.owner
|
|
return 1
|
|
activate_session(session)
|
|
tag = args[0]
|
|
opts = {}
|
|
opts['force'] = options.force
|
|
opts['block'] = False
|
|
# check if list of packages exists for that tag already
|
|
dsttag=session.getTag(tag)
|
|
if dsttag is None:
|
|
print "No such tag: %s" % tag
|
|
sys.exit(1)
|
|
pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'])])
|
|
to_add = []
|
|
for package in args[1:]:
|
|
package_id = pkglist.get(package, None)
|
|
if not package_id is None:
|
|
print "Package %s already exists in tag %s" % (package, tag)
|
|
continue
|
|
to_add.append(package)
|
|
if options.extra_arches:
|
|
opts['extra_arches'] = ' '.join(options.extra_arches.replace(',',' ').split())
|
|
|
|
# add the packages
|
|
print "Adding %i packages to tag %s" % (len(to_add), dsttag['name'])
|
|
session.multicall = True
|
|
for package in to_add:
|
|
session.packageListAdd(tag, package, options.owner, **opts)
|
|
session.multiCall(strict=True)
|
|
|
|
|
|
def handle_block_pkg(options, session, args):
|
|
"[admin] Block a package in the listing for tag"
|
|
usage = _("usage: %prog block-pkg [options] tag package [package2 ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify a tag and at least one package"))
|
|
assert False
|
|
activate_session(session)
|
|
tag = args[0]
|
|
# check if list of packages exists for that tag already
|
|
dsttag=session.getTag(tag)
|
|
pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'], inherited=True)])
|
|
ret = 0
|
|
for package in args[1:]:
|
|
package_id = pkglist.get(package, None)
|
|
if package_id is None:
|
|
print "Package %s doesn't exist in tag %s" % (package, tag)
|
|
ret = 1
|
|
if ret:
|
|
return ret
|
|
for package in args[1:]:
|
|
#really should implement multicall...
|
|
session.packageListBlock(tag,package)
|
|
|
|
def handle_remove_pkg(options, session, args):
|
|
"[admin] Remove a package from the listing for tag"
|
|
usage = _("usage: %prog remove-pkg [options] tag package [package2 ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--force", action='store_true', help=_("Override blocks if necessary"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify a tag and at least one package"))
|
|
assert False
|
|
activate_session(session)
|
|
tag = args[0]
|
|
opts = {}
|
|
opts['force'] = options.force
|
|
# check if list of packages exists for that tag already
|
|
dsttag=session.getTag(tag)
|
|
pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'])])
|
|
ret = 0
|
|
for package in args[1:]:
|
|
package_id = pkglist.get(package, None)
|
|
if package_id is None:
|
|
print "Package %s is not in tag %s" % (package, tag)
|
|
ret = 1
|
|
if ret:
|
|
return ret
|
|
for package in args[1:]:
|
|
#really should implement multicall...
|
|
session.packageListRemove(tag, package, **opts)
|
|
|
|
def _unique_path(prefix):
|
|
"""Create a unique path fragment by appending a path component
|
|
to prefix. The path component will consist of a string of letter and numbers
|
|
that is unlikely to be a duplicate, but is not guaranteed to be unique."""
|
|
# Use time() in the dirname to provide a little more information when
|
|
# browsing the filesystem.
|
|
# For some reason repr(time.time()) includes 4 or 5
|
|
# more digits of precision than str(time.time())
|
|
return '%s/%r.%s' % (prefix, time.time(),
|
|
''.join([random.choice(string.ascii_letters) for i in range(8)]))
|
|
|
|
def _format_size(size):
|
|
if (size / 1073741824 >= 1):
|
|
return "%0.2f GiB" % (size / 1073741824.0)
|
|
if (size / 1048576 >= 1):
|
|
return "%0.2f MiB" % (size / 1048576.0)
|
|
if (size / 1024 >=1):
|
|
return "%0.2f KiB" % (size / 1024.0)
|
|
return "%0.2f B" % (size)
|
|
|
|
def _format_secs(t):
|
|
h = t / 3600
|
|
t = t % 3600
|
|
m = t / 60
|
|
s = t % 60
|
|
return "%02d:%02d:%02d" % (h, m, s)
|
|
|
|
def _progress_callback(uploaded, total, piece, time, total_time):
|
|
percent_done = float(uploaded)/float(total)
|
|
percent_done_str = "%02d%%" % (percent_done * 100)
|
|
data_done = _format_size(uploaded)
|
|
elapsed = _format_secs(total_time)
|
|
|
|
speed = "- B/sec"
|
|
if (time):
|
|
if (uploaded != total):
|
|
speed = _format_size(float(piece)/float(time)) + "/sec"
|
|
else:
|
|
speed = _format_size(float(total)/float(total_time)) + "/sec"
|
|
|
|
# write formated string and flush
|
|
sys.stdout.write("[% -36s] % 4s % 8s % 10s % 14s\r" % ('='*(int(percent_done*36)), percent_done_str, elapsed, data_done, speed))
|
|
sys.stdout.flush()
|
|
|
|
def _running_in_bg():
|
|
try:
|
|
if (not os.isatty(0)) or (os.getpgrp() != os.tcgetpgrp(0)):
|
|
return True
|
|
except OSError, e:
|
|
return True
|
|
return False
|
|
|
|
def handle_build(options, session, args):
|
|
"[build] Build a package from source"
|
|
usage = _("usage: %prog build [options] target <srpm path or scm url>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not attempt to tag package"))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Perform a scratch build"))
|
|
parser.add_option("--wait", action="store_true",
|
|
help=_("Wait on the build, even if running in the background"))
|
|
parser.add_option("--nowait", action="store_false", dest="wait",
|
|
help=_("Don't wait on build"))
|
|
parser.add_option("--quiet", action="store_true",
|
|
help=_("Do not print the task information"), default=options.quiet)
|
|
parser.add_option("--arch-override", help=_("Override build arches"))
|
|
parser.add_option("--repo-id", type="int", help=_("Use a specific repo"))
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the build at a lower priority"))
|
|
(build_opts, args) = parser.parse_args(args)
|
|
if len(args) != 2:
|
|
parser.error(_("Exactly two arguments (a build target and a SCM URL or srpm file) are required"))
|
|
assert False
|
|
if build_opts.arch_override and not build_opts.scratch:
|
|
parser.error(_("--arch_override is only allowed for --scratch builds"))
|
|
activate_session(session)
|
|
target = args[0]
|
|
if target.lower() == "none" and build_opts.repo_id:
|
|
target = None
|
|
build_opts.skip_tag = True
|
|
else:
|
|
build_target = session.getBuildTarget(target)
|
|
if not build_target:
|
|
parser.error(_("Unknown build target: %s" % target))
|
|
dest_tag = session.getTag(build_target['dest_tag'])
|
|
if not dest_tag:
|
|
parser.error(_("Unknown destination tag: %s" % build_target['dest_tag_name']))
|
|
if dest_tag['locked'] and not build_opts.scratch:
|
|
parser.error(_("Destination tag %s is locked" % dest_tag['name']))
|
|
source = args[1]
|
|
opts = {}
|
|
if build_opts.arch_override:
|
|
opts['arch_override'] = ' '.join(build_opts.arch_override.replace(',',' ').split())
|
|
for key in ('skip_tag', 'scratch', 'repo_id'):
|
|
val = getattr(build_opts, key)
|
|
if val is not None:
|
|
opts[key] = val
|
|
priority = None
|
|
if build_opts.background:
|
|
#relative to koji.PRIO_DEFAULT
|
|
priority = 5
|
|
# try to check that source is an SRPM
|
|
if '://' not in source:
|
|
#treat source as an srpm and upload it
|
|
if not build_opts.quiet:
|
|
print "Uploading srpm: %s" % source
|
|
serverdir = _unique_path('cli-build')
|
|
if _running_in_bg() or build_opts.noprogress or build_opts.quiet:
|
|
callback = None
|
|
else:
|
|
callback = _progress_callback
|
|
session.uploadWrapper(source, serverdir, callback=callback)
|
|
print
|
|
source = "%s/%s" % (serverdir, os.path.basename(source))
|
|
task_id = session.build(source, target, opts, priority=priority)
|
|
if not build_opts.quiet:
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if build_opts.wait or (build_opts.wait is None and not _running_in_bg()):
|
|
session.logout()
|
|
return watch_tasks(session, [task_id], quiet=build_opts.quiet)
|
|
else:
|
|
return
|
|
|
|
def handle_chain_build(options, session, args):
|
|
# XXX - replace handle_build with this, once chain-building has gotten testing
|
|
"[build] Build one or more packages from source"
|
|
usage = _("usage: %prog chain-build [options] target URL [URL2 [:] URL3 [:] URL4 ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--nowait", action="store_true",
|
|
help=_("Don't wait on build"))
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the build at a lower priority"))
|
|
(build_opts, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("At least two arguments (a build target and a SCM URL) are required"))
|
|
assert False
|
|
activate_session(session)
|
|
target = args[0]
|
|
build_target = session.getBuildTarget(target)
|
|
if not build_target:
|
|
parser.error(_("Unknown build target: %s" % target))
|
|
dest_tag = session.getTag(build_target['dest_tag'], strict=True)
|
|
if dest_tag['locked']:
|
|
parser.error(_("Destination tag %s is locked" % dest_tag['name']))
|
|
|
|
# check that the destination tag is in the inheritance tree of the build tag
|
|
# otherwise there is no way that a chain-build can work
|
|
ancestors = session.getFullInheritance(build_target['build_tag'])
|
|
if dest_tag['id'] not in [build_target['build_tag']] + [ancestor['parent_id'] for ancestor in ancestors]:
|
|
print _("Packages in destination tag %(dest_tag_name)s are not inherited by build tag %(build_tag_name)s" % build_target)
|
|
print _("Target %s is not usable for a chain-build" % build_target['name'])
|
|
return 1
|
|
|
|
sources = args[1:]
|
|
|
|
src_list = []
|
|
build_level = []
|
|
#src_lists is a list of lists of sources to build.
|
|
# each list is block of builds ("build level") which must all be completed
|
|
# before the next block begins. Blocks are separated on the command line with ':'
|
|
for src in sources:
|
|
if src == ':':
|
|
if build_level:
|
|
src_list.append(build_level)
|
|
build_level = []
|
|
elif '://' in src:
|
|
# quick check that src might be a url
|
|
build_level.append(src)
|
|
elif '/' not in src and not src.endswith('.rpm') and len(src.split('-')) >= 3:
|
|
# quick check that it looks like a N-V-R
|
|
build_level.append(src)
|
|
else:
|
|
print _('"%s" is not a SCM URL or package N-V-R' % src)
|
|
return 1
|
|
if build_level:
|
|
src_list.append(build_level)
|
|
|
|
if len(src_list) < 2:
|
|
parser.error(_('You must specify at least one dependency between builds with : (colon)\nIf there are no dependencies, use the build command instead'))
|
|
|
|
priority = None
|
|
if build_opts.background:
|
|
#relative to koji.PRIO_DEFAULT
|
|
priority = 5
|
|
|
|
task_id = session.chainBuild(src_list, target, priority=priority)
|
|
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if _running_in_bg() or build_opts.nowait:
|
|
return
|
|
else:
|
|
session.logout()
|
|
return watch_tasks(session,[task_id],quiet=options.quiet)
|
|
|
|
def handle_maven_build(options, session, args):
|
|
"[build] Build a Maven package from source"
|
|
usage = _("usage: %prog maven-build [options] target URL")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--patches", action="store", metavar="URL",
|
|
help=_("SCM URL of a directory containing patches to apply to the sources before building"))
|
|
parser.add_option("-G", "--goal", action="append",
|
|
dest="goals", metavar="GOAL", default=[],
|
|
help=_("Additional goal to run before \"deploy\""))
|
|
parser.add_option("-P", "--profile", action="append",
|
|
dest="profiles", metavar="PROFILE", default=[],
|
|
help=_("Enable a profile for the Maven build"))
|
|
parser.add_option("-D", "--property", action="append",
|
|
dest="properties", metavar="NAME=VALUE", default=[],
|
|
help=_("Pass a system property to the Maven build"))
|
|
parser.add_option("-E", "--env", action="append",
|
|
dest="envs", metavar="NAME=VALUE", default=[],
|
|
help=_("Set an environment variable"))
|
|
parser.add_option("-p", "--package", action="append",
|
|
dest="packages", metavar="PACKAGE", default=[],
|
|
help=_("Install an additional package into the buildroot"))
|
|
parser.add_option("-J", "--jvm-option", action="append",
|
|
dest="jvm_options", metavar="OPTION", default=[],
|
|
help=_("Pass a command-line option to the JVM"))
|
|
parser.add_option("-M", "--maven-option", action="append",
|
|
dest="maven_options", metavar="OPTION", default=[],
|
|
help=_("Pass a command-line option to Maven"))
|
|
parser.add_option("--ini", action="append",
|
|
dest="inis", metavar="CONFIG", default=[],
|
|
help=_("Pass build parameters via a .ini file"))
|
|
parser.add_option("-s", "--section",
|
|
help=_("Get build parameters from this section of the .ini"))
|
|
parser.add_option("--debug", action="store_true",
|
|
help=_("Run Maven build in debug mode"))
|
|
parser.add_option("--specfile", action="store", metavar="URL",
|
|
help=_("SCM URL of a spec file fragment to use to generate wrapper RPMs"))
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not attempt to tag package"))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Perform a scratch build"))
|
|
parser.add_option("--nowait", action="store_true",
|
|
help=_("Don't wait on build"))
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the build at a lower priority"))
|
|
(build_opts, args) = parser.parse_args(args)
|
|
if build_opts.inis:
|
|
if len(args)!= 1:
|
|
parser.error(_("Exactly one argument (a build target) is required"))
|
|
else:
|
|
if len(args) != 2:
|
|
parser.error(_("Exactly two arguments (a build target and a SCM URL) are required"))
|
|
activate_session(session)
|
|
target = args[0]
|
|
build_target = session.getBuildTarget(target)
|
|
if not build_target:
|
|
parser.error(_("Unknown build target: %s" % target))
|
|
dest_tag = session.getTag(build_target['dest_tag'])
|
|
if not dest_tag:
|
|
parser.error(_("Unknown destination tag: %s" % build_target['dest_tag_name']))
|
|
if dest_tag['locked'] and not build_opts.scratch:
|
|
parser.error(_("Destination tag %s is locked" % dest_tag['name']))
|
|
if build_opts.inis:
|
|
try:
|
|
params = koji.util.parse_maven_param(build_opts.inis, scratch=build_opts.scratch,
|
|
section=build_opts.section)
|
|
except ValueError, e:
|
|
parser.error(e.args[0])
|
|
opts = params.values()[0]
|
|
if opts.pop('type', 'maven') != 'maven':
|
|
parser.error(_("Section %s does not contain a maven-build config") % params.keys()[0])
|
|
source = opts.pop('scmurl')
|
|
else:
|
|
source = args[1]
|
|
if '://' not in source:
|
|
parser.error(_("Invalid SCM URL: %s" % source))
|
|
opts = koji.util.maven_opts(build_opts, scratch=build_opts.scratch)
|
|
if build_opts.debug:
|
|
opts.setdefault('maven_options', []).append('--debug')
|
|
if build_opts.skip_tag:
|
|
opts['skip_tag'] = True
|
|
priority = None
|
|
if build_opts.background:
|
|
#relative to koji.PRIO_DEFAULT
|
|
priority = 5
|
|
task_id = session.mavenBuild(source, target, opts, priority=priority)
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if _running_in_bg() or build_opts.nowait:
|
|
return
|
|
else:
|
|
session.logout()
|
|
return watch_tasks(session,[task_id],quiet=options.quiet)
|
|
|
|
def handle_wrapper_rpm(options, session, args):
|
|
"""[build] Build wrapper rpms for any archives associated with a build."""
|
|
usage = _("usage: %prog wrapper-rpm [options] target build-id|n-v-r URL")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--create-build", action="store_true", help=_("Create a new build to contain wrapper rpms"))
|
|
parser.add_option("--ini", action="append",
|
|
dest="inis", metavar="CONFIG", default=[],
|
|
help=_("Pass build parameters via a .ini file"))
|
|
parser.add_option("-s", "--section",
|
|
help=_("Get build parameters from this section of the .ini"))
|
|
parser.add_option("--skip-tag", action="store_true", help=_("If creating a new build, don't tag it"))
|
|
parser.add_option("--scratch", action="store_true", help=_("Perform a scratch build"))
|
|
parser.add_option("--nowait", action="store_true", help=_("Don't wait on build"))
|
|
parser.add_option("--background", action="store_true", help=_("Run the build at a lower priority"))
|
|
|
|
(build_opts, args) = parser.parse_args(args)
|
|
if build_opts.inis:
|
|
if len(args)!= 1:
|
|
parser.error(_("Exactly one argument (a build target) is required"))
|
|
else:
|
|
if len(args) < 3:
|
|
parser.error(_("You must provide a build target, a build ID or NVR, and a SCM URL to a specfile fragment"))
|
|
activate_session(session)
|
|
|
|
target = args[0]
|
|
if build_opts.inis:
|
|
try:
|
|
params = koji.util.parse_maven_param(build_opts.inis, scratch=build_opts.scratch,
|
|
section=build_opts.section)
|
|
except ValueError, e:
|
|
parser.error(e.args[0])
|
|
opts = params.values()[0]
|
|
if opts.get('type') != 'wrapper':
|
|
parser.error(_("Section %s does not contain a wrapper-rpm config") % params.keys()[0])
|
|
url = opts['scmurl']
|
|
package = opts['buildrequires'][0]
|
|
target_info = session.getBuildTarget(target, strict=True)
|
|
latest_builds = session.getLatestBuilds(target_info['dest_tag'], package=package)
|
|
if not latest_builds:
|
|
parser.error(_("No build of %s in %s") % (package, target_info['dest_tag_name']))
|
|
build_id = latest_builds[0]['nvr']
|
|
else:
|
|
build_id = args[1]
|
|
if build_id.isdigit():
|
|
build_id = int(build_id)
|
|
url = args[2]
|
|
priority = None
|
|
if build_opts.background:
|
|
priority = 5
|
|
opts = {}
|
|
if build_opts.create_build:
|
|
opts['create_build'] = True
|
|
if build_opts.skip_tag:
|
|
opts['skip_tag'] = True
|
|
if build_opts.scratch:
|
|
opts['scratch'] = True
|
|
task_id = session.wrapperRPM(build_id, url, target, priority, opts=opts)
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if _running_in_bg() or build_opts.nowait:
|
|
return
|
|
else:
|
|
session.logout()
|
|
return watch_tasks(session,[task_id],quiet=options.quiet)
|
|
|
|
def handle_maven_chain(options, session, args):
|
|
"[build] Run a set of Maven builds in dependency order"
|
|
usage = _("usage: %prog maven-chain [options] target config...")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not attempt to tag builds"))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Perform scratch builds"))
|
|
parser.add_option("--debug", action="store_true",
|
|
help=_("Run Maven build in debug mode"))
|
|
parser.add_option("--force", action="store_true",
|
|
help=_("Force rebuilds of all packages"))
|
|
parser.add_option("--nowait", action="store_true",
|
|
help=_("Don't wait on build"))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the build at a lower priority"))
|
|
(build_opts, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Two arguments (a build target and a config file) are required"))
|
|
assert False
|
|
activate_session(session)
|
|
target = args[0]
|
|
build_target = session.getBuildTarget(target)
|
|
if not build_target:
|
|
parser.error(_("Unknown build target: %s") % target)
|
|
dest_tag = session.getTag(build_target['dest_tag'])
|
|
if not dest_tag:
|
|
parser.error(_("Unknown destination tag: %s") % build_target['dest_tag_name'])
|
|
if dest_tag['locked'] and not build_opts.scratch:
|
|
parser.error(_("Destination tag %s is locked") % dest_tag['name'])
|
|
opts = {}
|
|
for key in ('skip_tag', 'scratch', 'debug', 'force'):
|
|
val = getattr(build_opts, key)
|
|
if val:
|
|
opts[key] = val
|
|
try:
|
|
builds = koji.util.parse_maven_chain(args[1:], scratch=opts.get('scratch'))
|
|
except ValueError, e:
|
|
parser.error(e.args[0])
|
|
priority = None
|
|
if build_opts.background:
|
|
priority = 5
|
|
task_id = session.chainMaven(builds, target, opts, priority=priority)
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if _running_in_bg() or build_opts.nowait:
|
|
return
|
|
else:
|
|
session.logout()
|
|
return watch_tasks(session, [task_id], quiet=options.quiet)
|
|
|
|
def handle_resubmit(options, session, args):
|
|
"""[build] Retry a canceled or failed task, using the same parameter as the original task."""
|
|
usage = _("usage: %prog resubmit [options] taskID")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--nowait", action="store_true", help=_("Don't wait on task"))
|
|
parser.add_option("--nowatch", action="store_true", dest="nowait",
|
|
help=_("An alias for --nowait"))
|
|
parser.add_option("--quiet", action="store_true", help=_("Do not print the task information"), default=options.quiet)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("Please specify a single task ID"))
|
|
assert False
|
|
activate_session(session)
|
|
taskID = int(args[0])
|
|
if not options.quiet:
|
|
print "Resubmitting the following task:"
|
|
_printTaskInfo(session, taskID, 0, False, True)
|
|
newID = session.resubmitTask(taskID)
|
|
if not options.quiet:
|
|
print "Resubmitted task %s as new task %s" % (taskID, newID)
|
|
if _running_in_bg() or options.nowait:
|
|
return
|
|
else:
|
|
session.logout()
|
|
return watch_tasks(session, [newID], quiet=options.quiet)
|
|
|
|
def handle_call(options, session, args):
|
|
"Execute an arbitrary XML-RPC call"
|
|
usage = _("usage: %prog call [options] name [arg...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--python", action="store_true", help=_("Use python syntax for values"))
|
|
parser.add_option("--kwargs", help=_("Specify keyword arguments as a dictionary (implies --python)"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Please specify the name of the XML-RPC method"))
|
|
assert False
|
|
if options.kwargs:
|
|
options.python = True
|
|
if options.python and ast is None:
|
|
parser.error(_("The ast module is required to read python syntax"))
|
|
activate_session(session)
|
|
name = args[0]
|
|
non_kw = []
|
|
kw = {}
|
|
if options.python:
|
|
non_kw = [ast.literal_eval(a) for a in args[1:]]
|
|
if options.kwargs:
|
|
kw = ast.literal_eval(options.kwargs)
|
|
else:
|
|
for arg in args[1:]:
|
|
if arg.find('=') != -1:
|
|
key, value = arg.split('=', 1)
|
|
kw[key] = arg_filter(value)
|
|
else:
|
|
non_kw.append(arg_filter(arg))
|
|
pprint.pprint(getattr(session, name).__call__(*non_kw, **kw))
|
|
|
|
def anon_handle_mock_config(options, session, args):
|
|
"[info] Create a mock config"
|
|
usage = _("usage: %prog mock-config [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("-a", "--arch", help=_("Specify the arch"))
|
|
parser.add_option("-n", "--name", help=_("Specify the name for the buildroot"))
|
|
parser.add_option("--tag", help=_("Create a mock config for a tag"))
|
|
parser.add_option("--target", help=_("Create a mock config for a build target"))
|
|
parser.add_option("--task", help=_("Duplicate the mock config of a previous task"))
|
|
parser.add_option("--latest", action="store_true", help=_("use the latest redirect url"))
|
|
parser.add_option("--buildroot", help=_("Duplicate the mock config for the specified buildroot id"))
|
|
parser.add_option("--mockdir", default="/var/lib/mock", metavar="DIR",
|
|
help=_("Specify mockdir"))
|
|
parser.add_option("--topdir", metavar="DIR",
|
|
help=_("Specify topdir"))
|
|
parser.add_option("--topurl", metavar="URL", default=options.topurl,
|
|
help=_("URL under which Koji files are accessible"))
|
|
parser.add_option("--distribution", default="Koji Testing",
|
|
help=_("Change the distribution macro"))
|
|
parser.add_option("--yum-proxy", help=_("Specify a yum proxy"))
|
|
parser.add_option("-o", metavar="FILE", dest="ofile", help=_("Output to a file"))
|
|
(options, args) = parser.parse_args(args)
|
|
activate_session(session)
|
|
if args:
|
|
#for historical reasons, we also accept buildroot name as first arg
|
|
if not options.name:
|
|
options.name = args[0]
|
|
else:
|
|
parser.error(_("Name already specified via option"))
|
|
arch = None
|
|
opts = {}
|
|
for k in ('topdir', 'topurl', 'distribution', 'mockdir', 'yum_proxy'):
|
|
if hasattr(options, k):
|
|
opts[k] = getattr(options, k)
|
|
if options.buildroot:
|
|
try:
|
|
br_id = int(options.buildroot)
|
|
except ValueError:
|
|
parser.error(_("Buildroot id must be an integer"))
|
|
brootinfo = session.getBuildroot(br_id)
|
|
if options.latest:
|
|
opts['repoid'] = 'latest'
|
|
else:
|
|
opts['repoid'] = brootinfo['repo_id']
|
|
opts['tag_name'] = brootinfo['tag_name']
|
|
arch = brootinfo['arch']
|
|
elif options.task:
|
|
try:
|
|
task_id = int(options.task)
|
|
except ValueError:
|
|
parser.error(_("Task id must be an integer"))
|
|
broots = session.listBuildroots(taskID=task_id)
|
|
if not broots:
|
|
print _("No buildroots for task %s (or no such task)") % options.task
|
|
return 1
|
|
if len(broots) > 1:
|
|
print _("Multiple buildroots found: %s" % [br['id'] for br in broots])
|
|
brootinfo = broots[-1]
|
|
if options.latest:
|
|
opts['repoid'] = 'latest'
|
|
else:
|
|
opts['repoid'] = brootinfo['repo_id']
|
|
opts['tag_name'] = brootinfo['tag_name']
|
|
arch = brootinfo['arch']
|
|
def_name = "%s-task_%i" % (opts['tag_name'], task_id)
|
|
elif options.tag:
|
|
if not options.arch:
|
|
print _("Please specify an arch")
|
|
return 1
|
|
tag = session.getTag(options.tag)
|
|
if not tag:
|
|
parser.error(_("Invalid tag: %s" % options.tag))
|
|
arch = options.arch
|
|
config = session.getBuildConfig(tag['id'])
|
|
if not config:
|
|
print _("Could not get config info for tag: %(name)s") % tag
|
|
return 1
|
|
opts['tag_name'] = tag['name']
|
|
if options.latest:
|
|
opts['repoid'] = 'latest'
|
|
else:
|
|
repo = session.getRepo(config['id'])
|
|
if not repo:
|
|
print _("Could not get a repo for tag: %(name)s") % tag
|
|
return 1
|
|
opts['repoid'] = repo['id']
|
|
def_name = "%(tag_name)s-repo_%(repoid)s" % opts
|
|
elif options.target:
|
|
if not options.arch:
|
|
print _("Please specify an arch")
|
|
return 1
|
|
arch = options.arch
|
|
target = session.getBuildTarget(options.target)
|
|
if not target:
|
|
parser.error(_("Invalid target: %s" % options.target))
|
|
opts['tag_name'] = target['build_tag_name']
|
|
if options.latest:
|
|
opts['repoid'] = 'latest'
|
|
else:
|
|
repo = session.getRepo(target['build_tag'])
|
|
if not repo:
|
|
print _("Could not get a repo for tag: %(name)s") % opts['tag_name']
|
|
return 1
|
|
opts['repoid'] = repo['id']
|
|
else:
|
|
parser.error(_("Please specify one of: --tag, --target, --task, --buildroot"))
|
|
assert False
|
|
if options.name:
|
|
name = options.name
|
|
else:
|
|
name = "%(tag_name)s-repo_%(repoid)s" % opts
|
|
output = koji.genMockConfig(name, arch, **opts)
|
|
if options.ofile:
|
|
fo = file(options.ofile, 'w')
|
|
fo.write(output)
|
|
fo.close()
|
|
else:
|
|
print output
|
|
|
|
def handle_disable_host(options, session, args):
|
|
"[admin] Mark one or more hosts as disabled"
|
|
usage = _("usage: %prog disable-host [options] hostname ...")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--comment", help=_("Comment indicating why the host(s) are being disabled"))
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
activate_session(session)
|
|
session.multicall = True
|
|
for host in args:
|
|
session.getHost(host)
|
|
error = False
|
|
for host, [id] in zip(args, session.multiCall(strict=True)):
|
|
if not id:
|
|
print "Host %s does not exist" % host
|
|
error = True
|
|
if error:
|
|
print "No changes made. Please correct the command line."
|
|
return 1
|
|
session.multicall = True
|
|
for host in args:
|
|
session.disableHost(host)
|
|
if options.comment is not None:
|
|
session.editHost(host, comment=options.comment)
|
|
session.multiCall(strict=True)
|
|
|
|
def handle_enable_host(options, session, args):
|
|
"[admin] Mark one or more hosts as enabled"
|
|
usage = _("usage: %prog enable-host [options] hostname ...")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--comment", help=_("Comment indicating why the host(s) are being enabled"))
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
activate_session(session)
|
|
session.multicall = True
|
|
for host in args:
|
|
session.getHost(host)
|
|
error = False
|
|
for host, [id] in zip(args, session.multiCall(strict=True)):
|
|
if not id:
|
|
print "Host %s does not exist" % host
|
|
error = True
|
|
if error:
|
|
print "No changes made. Please correct the command line."
|
|
return 1
|
|
session.multicall = True
|
|
for host in args:
|
|
session.enableHost(host)
|
|
if options.comment is not None:
|
|
session.editHost(host, comment=options.comment)
|
|
session.multiCall(strict=True)
|
|
|
|
|
|
def handle_restart_hosts(options, session, args):
|
|
"[admin] Restart enabled hosts"
|
|
usage = _("usage: %prog restart-hosts [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--wait", action="store_true",
|
|
help=_("Wait on the task, even if running in the background"))
|
|
parser.add_option("--nowait", action="store_false", dest="wait",
|
|
help=_("Don't wait on task"))
|
|
parser.add_option("--quiet", action="store_true",
|
|
help=_("Do not print the task information"), default=options.quiet)
|
|
(my_opts, args) = parser.parse_args(args)
|
|
|
|
activate_session(session)
|
|
task_id = session.restartHosts()
|
|
if my_opts.wait or (my_opts.wait is None and not _running_in_bg()):
|
|
session.logout()
|
|
return watch_tasks(session, [task_id], quiet=my_opts.quiet)
|
|
else:
|
|
return
|
|
|
|
|
|
def linked_upload(localfile, path, name=None):
|
|
"""Link a file into the (locally writable) workdir, bypassing upload"""
|
|
old_umask = os.umask(002)
|
|
try:
|
|
if name is None:
|
|
name = os.path.basename(localfile)
|
|
dest_dir = os.path.join(koji.pathinfo.work(), path)
|
|
dst = os.path.join(dest_dir, name)
|
|
koji.ensuredir(dest_dir)
|
|
# fix uid/gid to keep httpd happy
|
|
st = os.stat(koji.pathinfo.work())
|
|
os.chown(dest_dir, st.st_uid, st.st_gid)
|
|
print "Linking rpm to: %s" % dst
|
|
os.link(localfile, dst)
|
|
finally:
|
|
os.umask(old_umask)
|
|
|
|
|
|
def handle_import(options, session, args):
|
|
"[admin] Import externally built RPMs into the database"
|
|
usage = _("usage: %prog import [options] package [package...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--link", action="store_true", help=_("Attempt to hardlink instead of uploading"))
|
|
parser.add_option("--test", action="store_true", help=_("Don't actually import"))
|
|
parser.add_option("--create-build", action="store_true", help=_("Auto-create builds as needed"))
|
|
parser.add_option("--src-epoch", help=_("When auto-creating builds, use this epoch"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("At least one package must be specified"))
|
|
assert False
|
|
if options.src_epoch in ('None', 'none', '(none)'):
|
|
options.src_epoch = None
|
|
elif options.src_epoch:
|
|
try:
|
|
options.src_epoch = int(options.src_epoch)
|
|
except (ValueError, TypeError):
|
|
parser.error(_("Invalid value for epoch: %s") % options.src_epoch)
|
|
assert False
|
|
activate_session(session)
|
|
to_import = {}
|
|
for path in args:
|
|
data = koji.get_header_fields(path, ('name','version','release','epoch',
|
|
'arch','sigmd5','sourcepackage','sourcerpm'))
|
|
if data['sourcepackage']:
|
|
data['arch'] = 'src'
|
|
nvr = "%(name)s-%(version)s-%(release)s" % data
|
|
else:
|
|
nvr = "%(name)s-%(version)s-%(release)s" % koji.parse_NVRA(data['sourcerpm'])
|
|
to_import.setdefault(nvr,[]).append((path,data))
|
|
builds_missing = False
|
|
nvrs = to_import.keys()
|
|
nvrs.sort()
|
|
for nvr in nvrs:
|
|
to_import[nvr].sort()
|
|
for path, data in to_import[nvr]:
|
|
if data['sourcepackage']:
|
|
break
|
|
else:
|
|
#no srpm included, check for build
|
|
binfo = session.getBuild(nvr)
|
|
if not binfo:
|
|
print _("Missing build or srpm: %s") % nvr
|
|
builds_missing = True
|
|
if builds_missing and not options.create_build:
|
|
print _("Aborting import")
|
|
return
|
|
|
|
#local function to help us out below
|
|
def do_import(path, data):
|
|
rinfo = dict([(k,data[k]) for k in ('name','version','release','arch')])
|
|
prev = session.getRPM(rinfo)
|
|
if prev and not prev.get('external_repo_id', 0):
|
|
if prev['payloadhash'] == koji.hex_string(data['sigmd5']):
|
|
print _("RPM already imported: %s") % path
|
|
else:
|
|
print _("WARNING: md5sum mismatch for %s") % path
|
|
print _("Skipping import")
|
|
return
|
|
if options.test:
|
|
print _("Test mode -- skipping import for %s") % path
|
|
return
|
|
serverdir = _unique_path('cli-import')
|
|
if options.link:
|
|
linked_upload(path, serverdir)
|
|
else:
|
|
print _("uploading %s...") % path,
|
|
sys.stdout.flush()
|
|
session.uploadWrapper(path, serverdir)
|
|
print _("done")
|
|
sys.stdout.flush()
|
|
print _("importing %s...") % path,
|
|
sys.stdout.flush()
|
|
try:
|
|
session.importRPM(serverdir, os.path.basename(path))
|
|
except koji.GenericError, e:
|
|
print _("\nError importing: %s" % str(e).splitlines()[-1])
|
|
sys.stdout.flush()
|
|
else:
|
|
print _("done")
|
|
sys.stdout.flush()
|
|
|
|
for nvr in nvrs:
|
|
# check for existing build
|
|
need_build = True
|
|
binfo = session.getBuild(nvr)
|
|
if binfo:
|
|
b_state = koji.BUILD_STATES[binfo['state']]
|
|
if b_state == 'COMPLETE':
|
|
need_build = False
|
|
elif b_state in ['FAILED', 'CANCELED']:
|
|
if not options.create_build:
|
|
print _("Build %s state is %s. Skipping import") % (nvr, b_state)
|
|
continue
|
|
else:
|
|
print _("Build %s exists with state=%s. Skipping import") % (nvr, b_state)
|
|
continue
|
|
|
|
# import srpms first, if any
|
|
for path, data in to_import[nvr]:
|
|
if data['sourcepackage']:
|
|
if binfo and b_state != 'COMPLETE':
|
|
# need to fix the state
|
|
print _("Creating empty build: %s") % nvr
|
|
b_data = koji.util.dslice(binfo, ['name', 'version', 'release'])
|
|
b_data['epoch'] = data['epoch']
|
|
session.createEmptyBuild(**b_data)
|
|
binfo = session.getBuild(nvr)
|
|
do_import(path, data)
|
|
need_build = False
|
|
|
|
if need_build:
|
|
# if we're doing this here, we weren't given the matching srpm
|
|
if not options.create_build:
|
|
if binfo:
|
|
# should have caught this earlier, but just in case...
|
|
b_state = koji.BUILD_STATES[binfo['state']]
|
|
print _("Build %s state is %s. Skipping import") % (nvr, b_state)
|
|
continue
|
|
else:
|
|
print _("No such build: %s (include matching srpm or use "
|
|
"--create-build option to add it)") % nvr
|
|
continue
|
|
else:
|
|
# let's make a new build
|
|
b_data = koji.parse_NVR(nvr)
|
|
if options.src_epoch:
|
|
b_data['epoch'] = options.src_epoch
|
|
else:
|
|
# pull epoch from first rpm
|
|
data = to_import[nvr][0][1]
|
|
b_data['epoch'] = data['epoch']
|
|
if options.test:
|
|
print _("Test mode -- would have created empty build: %s") % nvr
|
|
else:
|
|
print _("Creating empty build: %s") % nvr
|
|
session.createEmptyBuild(**b_data)
|
|
binfo = session.getBuild(nvr)
|
|
|
|
for path, data in to_import[nvr]:
|
|
if data['sourcepackage']:
|
|
continue
|
|
do_import(path, data)
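
# A condensed sketch of the grouping step above: binary rpms are keyed by
# the NVR of the source build they belong to (taken from their sourcerpm
# header), while src.rpms key themselves.  The paths passed in would be
# local rpm files; nothing here talks to the hub.
def _example_group_by_build(paths):
    by_build = {}
    for path in paths:
        data = koji.get_header_fields(path, ('name', 'version', 'release',
                                             'sourcepackage', 'sourcerpm'))
        if data['sourcepackage']:
            nvr = "%(name)s-%(version)s-%(release)s" % data
        else:
            nvr = "%(name)s-%(version)s-%(release)s" % koji.parse_NVRA(data['sourcerpm'])
        by_build.setdefault(nvr, []).append(path)
    return by_build
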
def handle_import_cg(options, session, args):
|
|
"[admin] Import external builds with rich metadata"
|
|
usage = _("usage: %prog import-cg [options] metadata_file files_dir")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--link", action="store_true", help=_("Attempt to hardlink instead of uploading"))
|
|
parser.add_option("--test", action="store_true", help=_("Don't actually import"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify metadata files directory"))
|
|
assert False
|
|
if json is None:
|
|
parser.error(_("Unable to find json module"))
|
|
assert False
|
|
activate_session(session)
|
|
metadata = json.load(file(args[0], 'r'))
|
|
if 'output' not in metadata:
|
|
print _("Metadata contains no output")
|
|
sys.exit(1)
|
|
localdir = args[1]
|
|
|
|
to_upload = []
|
|
for info in metadata['output']:
|
|
if info.get('metadata_only', False):
|
|
continue
|
|
localpath = os.path.join(localdir, info.get('relpath', ''), info['filename'])
|
|
if not os.path.exists(localpath):
|
|
parser.error(_("No such file: %s") % localpath)
|
|
to_upload.append([localpath, info])
|
|
|
|
if options.test:
|
|
return
|
|
|
|
# get upload path
|
|
# XXX - need a better way
|
|
serverdir = _unique_path('cli-import')
|
|
|
|
for localpath, info in to_upload:
|
|
relpath = os.path.join(serverdir, info.get('relpath', ''))
|
|
if _running_in_bg() or options.noprogress:
|
|
callback = None
|
|
else:
|
|
callback = _progress_callback
|
|
if options.link:
|
|
linked_upload(localpath, relpath)
|
|
else:
|
|
print "Uploading %s" % localpath
|
|
session.uploadWrapper(localpath, relpath, callback=callback)
|
|
if callback:
|
|
print
|
|
|
|
session.CGImport(metadata, serverdir)
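
# The handler above only relies on a few fields of the content generator
# metadata: an 'output' list whose entries carry 'filename', an optional
# 'relpath' and an optional 'metadata_only' flag.  A minimal, hypothetical
# example of just those fields (the real metadata format defines much more):
#
#   {
#       "output": [
#           {"filename": "foo-1.0.tar.gz", "relpath": "files"},
#           {"filename": "metadata.log", "metadata_only": true}
#       ]
#   }
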
def handle_import_comps(options, session, args):
|
|
"Import group/package information from a comps file"
|
|
usage = _("usage: %prog import-comps [options] <file> <tag>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--force", action="store_true", help=_("force import"))
|
|
(local_options, args) = parser.parse_args(args)
|
|
if len(args) != 2:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
comps = yum.comps.Comps()
|
|
comps.add(args[0])
|
|
tag = args[1]
|
|
force = local_options.force
|
|
activate_session(session)
|
|
for group in comps.groups:
|
|
print "Group: %(groupid)s (%(name)s)" % vars(group)
|
|
session.groupListAdd(tag, group.groupid, force=force, display_name=group.name,
|
|
is_default=bool(group.default),
|
|
uservisible=bool(group.user_visible),
|
|
description=group.description,
|
|
langonly=group.langonly)
|
|
#yum.comps does not support the biarchonly field
|
|
for ptype, pdata in [('mandatory', group.mandatory_packages),
|
|
('default', group.default_packages),
|
|
('optional', group.optional_packages),
|
|
('conditional', group.conditional_packages)]:
|
|
for pkg in pdata:
|
|
pkgopts = {'type' : ptype}
|
|
if ptype == 'conditional':
|
|
pkgopts['requires'] = pdata[pkg]
|
|
#yum.comps does not support basearchonly
|
|
print " Package: %s: %r" % (pkg, pkgopts)
|
|
session.groupPackageListAdd(tag, group.groupid, pkg, force=force, **pkgopts)
|
|
#yum.comps does not support group dependencies
|
|
#yum.comps does not support metapkgs
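
# Quick sketch of the objects the comps walk above operates on, assuming a
# local comps.xml (the path is hypothetical): yum.comps exposes each group's
# id, name and per-type package dicts, which map directly onto the
# groupListAdd()/groupPackageListAdd() calls.
#
#   comps = yum.comps.Comps()
#   comps.add('/tmp/comps.xml')
#   for group in comps.groups:
#       print group.groupid, sorted(group.mandatory_packages.keys())
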
def handle_import_sig(options, session, args):
|
|
"[admin] Import signatures into the database"
|
|
usage = _("usage: %prog import-sig [options] package [package...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--with-unsigned", action="store_true",
|
|
help=_("Also import unsigned sig headers"))
|
|
parser.add_option("--test", action="store_true",
|
|
help=_("Test mode -- don't actually import"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("At least one package must be specified"))
|
|
assert False
|
|
for path in args:
|
|
if not os.path.exists(path):
|
|
parser.error(_("No such file: %s") % path)
|
|
activate_session(session)
|
|
for path in args:
|
|
data = koji.get_header_fields(path, ('name','version','release','arch','siggpg','sigpgp','sourcepackage'))
|
|
if data['sourcepackage']:
|
|
data['arch'] = 'src'
|
|
sigkey = data['siggpg']
|
|
if not sigkey:
|
|
sigkey = data['sigpgp']
|
|
if not sigkey:
|
|
sigkey = ""
|
|
if not options.with_unsigned:
|
|
print _("Skipping unsigned package: %s" % path)
|
|
continue
|
|
else:
|
|
sigkey = koji.get_sigpacket_key_id(sigkey)
|
|
del data['siggpg']
|
|
del data['sigpgp']
|
|
rinfo = session.getRPM(data)
|
|
if not rinfo:
|
|
print "No such rpm in system: %(name)s-%(version)s-%(release)s.%(arch)s" % data
|
|
continue
|
|
if rinfo.get('external_repo_id'):
|
|
print "Skipping external rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s" % rinfo
|
|
continue
|
|
sighdr = koji.rip_rpm_sighdr(path)
|
|
previous = session.queryRPMSigs(rpm_id=rinfo['id'], sigkey=sigkey)
|
|
assert len(previous) <= 1
|
|
if previous:
|
|
sighash = md5_constructor(sighdr).hexdigest()
|
|
if previous[0]['sighash'] == sighash:
|
|
print _("Signature already imported: %s") % path
|
|
continue
|
|
else:
|
|
print _("Warning: signature mismatch: %s") % path
|
|
continue
|
|
print _("Importing signature [key %s] from %s...") % (sigkey, path)
|
|
if not options.test:
|
|
session.addRPMSig(rinfo['id'], base64.encodestring(sighdr))
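
# A condensed sketch of the header handling above, assuming `path` is a
# locally readable rpm: the key id comes from the siggpg/sigpgp header entry
# and the detached signature header is what addRPMSig() ultimately stores.
def _example_read_sig(path):
    fields = koji.get_header_fields(path, ('siggpg', 'sigpgp'))
    sigkey = fields['siggpg'] or fields['sigpgp']
    sighdr = koji.rip_rpm_sighdr(path)
    if not sigkey:
        # unsigned package
        return "", sighdr
    return koji.get_sigpacket_key_id(sigkey), sighdr
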
def handle_write_signed_rpm(options, session, args):
|
|
"[admin] Write signed RPMs to disk"
|
|
usage = _("usage: %prog write-signed-rpm [options] <signature-key> n-v-r [n-v-r...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--all", action="store_true", help=_("Write out all RPMs signed with this key"))
|
|
parser.add_option("--buildid", help=_("Specify a build id rather than an n-v-r"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("A signature key must be specified"))
|
|
assert False
|
|
if len(args) < 2 and not (options.all or options.buildid):
|
|
parser.error(_("At least one RPM must be specified"))
|
|
assert False
|
|
key = args.pop(0)
|
|
activate_session(session)
|
|
if options.all:
|
|
rpms = session.queryRPMSigs(sigkey=key)
|
|
count = 1
|
|
for rpm in rpms:
|
|
print "%d/%d" % (count, len(rpms))
|
|
count += 1
|
|
session.writeSignedRPM(rpm['rpm_id'], key)
|
|
elif options.buildid:
|
|
rpms = session.listRPMs(int(options.buildid))
|
|
for rpm in rpms:
|
|
session.writeSignedRPM(rpm['id'], key)
|
|
else:
|
|
for nvr in args:
|
|
build = session.getBuild(nvr)
|
|
rpms = session.listRPMs(buildID=build['id'])
|
|
for rpm in rpms:
|
|
session.writeSignedRPM(rpm['id'], key)
|
|
|
|
def handle_prune_signed_copies(options, session, args):
|
|
"[admin] Prune signed copies"
|
|
usage = _("usage: %prog prune-sigs [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("-n", "--test", action="store_true", help=_("Test mode"))
|
|
parser.add_option("-v", "--verbose", action="store_true", help=_("Be more verbose"))
|
|
parser.add_option("--days", type="int", default=5, help=_("Timeout before clearing"))
|
|
parser.add_option("-p", "--package", "--pkg", help=_("Limit to a single package"))
|
|
parser.add_option("-b", "--build", help=_("Limit to a single build"))
|
|
parser.add_option("-i", "--ignore-tag", action="append", default=[],
|
|
help=_("Ignore these tags when considering whether a build is/was latest"))
|
|
parser.add_option("--ignore-tag-file",
|
|
help=_("File to read tag ignore patterns from"))
|
|
parser.add_option("-r", "--protect-tag", action="append", default=[],
|
|
help=_("Do not prune signed copies from matching tags"))
|
|
parser.add_option("--protect-tag-file",
|
|
help=_("File to read tag protect patterns from"))
|
|
parser.add_option("--trashcan-tag", default="trashcan", help=_("Specify trashcan tag"))
|
|
parser.add_option("--debug", action="store_true", help=_("Show debugging output"))
|
|
(options, args) = parser.parse_args(args)
|
|
# different ideas/modes
|
|
# 1) remove all signed copies of builds that are not latest for some tag
|
|
# 2) remove signed copies when a 'better' signature is available
|
|
# 3) for a specified tag, remove all signed copies that are not latest (w/ inheritance)
|
|
# 4) for a specified tag, remove all signed copies (no inheritance)
|
|
# (but skip builds that are multiply tagged)
|
|
|
|
#for now, we're just implementing mode #1
|
|
#(with the modification that we check to see if the build was latest within
|
|
#the last N days)
|
|
if options.ignore_tag_file:
|
|
fo = file(options.ignore_tag_file)
|
|
options.ignore_tag.extend([line.strip() for line in fo.readlines()])
|
|
fo.close()
|
|
if options.protect_tag_file:
|
|
fo = file(options.protect_tag_file)
|
|
options.protect_tag.extend([line.strip() for line in fo.readlines()])
|
|
fo.close()
|
|
if options.debug:
|
|
options.verbose = True
|
|
cutoff_ts = time.time() - options.days * 24 * 3600
|
|
if options.debug:
|
|
print "Cutoff date: %s" % time.asctime(time.localtime(cutoff_ts))
|
|
if not options.build:
|
|
if options.verbose:
|
|
print "Getting builds..."
|
|
qopts = {'state' : koji.BUILD_STATES['COMPLETE']}
|
|
if options.package:
|
|
pkginfo = session.getPackage(options.package)
|
|
qopts['packageID'] = pkginfo['id']
|
|
builds = [(b['nvr'], b) for b in session.listBuilds(**qopts)]
|
|
if options.verbose:
|
|
print "...got %i builds" % len(builds)
|
|
builds.sort()
|
|
else:
|
|
#single build
|
|
binfo = session.getBuild(options.build)
|
|
if not binfo:
|
|
parser.error('No such build: %s' % options.build)
|
|
assert False
|
|
builds = [("%(name)s-%(version)s-%(release)s" % binfo, binfo)]
|
|
total_files = 0
|
|
total_space = 0
|
|
def _histline(event_id, x):
|
|
if event_id == x['revoke_event']:
|
|
ts = x['revoke_ts']
|
|
fmt = "Untagged %(name)s-%(version)s-%(release)s from %(tag_name)s"
|
|
elif event_id == x['create_event']:
|
|
ts = x['create_ts']
|
|
fmt = "Tagged %(name)s-%(version)s-%(release)s with %(tag_name)s"
|
|
if x['active']:
|
|
fmt += " [still active]"
|
|
else:
|
|
raise koji.GenericError, "unknown event: (%r, %r)" % (event_id, x)
|
|
time_str = time.asctime(time.localtime(ts))
|
|
return "%s: %s" % (time_str, fmt % x)
|
|
for nvr, binfo in builds:
|
|
#listBuilds returns slightly different data than normal
|
|
if not binfo.has_key('id'):
|
|
binfo['id'] = binfo['build_id']
|
|
if not binfo.has_key('name'):
|
|
binfo['name'] = binfo['package_name']
|
|
if options.debug:
|
|
print "DEBUG: %s" % nvr
|
|
#see how recently this build was latest for a tag
|
|
is_latest = False
|
|
is_protected = False
|
|
last_latest = None
|
|
tags = {}
|
|
for entry in session.tagHistory(build=binfo['id']):
|
|
#we used tagHistory rather than listTags so we can consider tags
|
|
#that the build was recently untagged from
|
|
tags.setdefault(entry['tag_name'], 1)
|
|
if options.debug:
|
|
print "Tags: %s" % tags.keys()
|
|
for tag_name in tags:
|
|
if tag_name == options.trashcan_tag:
|
|
if options.debug:
|
|
print "Ignoring trashcan tag for build %s" % nvr
|
|
continue
|
|
ignore_tag = False
|
|
for pattern in options.ignore_tag:
|
|
if fnmatch.fnmatch(tag_name, pattern):
|
|
if options.debug:
|
|
print "Ignoring tag %s for build %s" % (tag_name, nvr)
|
|
ignore_tag = True
|
|
break
|
|
if ignore_tag:
|
|
continue
|
|
#in order to determine how recently this build was latest, we have
|
|
#to look at the tagging history.
|
|
hist = session.tagHistory(tag=tag_name, package=binfo['name'])
|
|
if not hist:
|
|
#really shouldn't happen
|
|
raise koji.GenericError, "No history found for %s in %s" % (nvr, tag_name)
|
|
timeline = []
|
|
for x in hist:
|
|
#note that for revoked entries, we're effectively splitting them into
|
|
#two parts: creation and revocation.
|
|
timeline.append((x['create_event'], 1, x))
|
|
#at the same event, revokes happen first
|
|
if x['revoke_event'] is not None:
|
|
timeline.append((x['revoke_event'], 0, x))
|
|
timeline.sort()
|
|
#find most recent creation entry for our build and crop there
|
|
latest_ts = None
|
|
for i in xrange(len(timeline)-1, -1, -1):
|
|
                #searching in reverse chronological order
|
|
event_id, is_create, entry = timeline[i]
|
|
if entry['build_id'] == binfo['id'] and is_create:
|
|
latest_ts = event_id
|
|
break
|
|
if not latest_ts:
|
|
#really shouldn't happen
|
|
raise koji.GenericError, "No creation event found for %s in %s" % (nvr, tag_name)
|
|
our_entry = entry
|
|
if options.debug:
|
|
print _histline(event_id, our_entry)
|
|
#now go through the events since most recent creation entry
|
|
timeline = timeline[i+1:]
|
|
if not timeline:
|
|
is_latest = True
|
|
if options.debug:
|
|
print "%s is latest in tag %s" % (nvr, tag_name)
|
|
break
|
|
#before we go any further, is this a protected tag?
|
|
protect_tag = False
|
|
for pattern in options.protect_tag:
|
|
if fnmatch.fnmatch(tag_name, pattern):
|
|
protect_tag = True
|
|
break
|
|
if protect_tag:
|
|
# we use the same time limit as for the latest calculation
|
|
# if this build was in this tag within that limit, then we will
|
|
# not prune its signed copies
|
|
if our_entry['revoke_event'] is None:
|
|
#we're still tagged with a protected tag
|
|
if options.debug:
|
|
print "Build %s has protected tag %s" % (nvr, tag_name)
|
|
is_protected = True
|
|
break
|
|
elif our_entry['revoke_ts'] > cutoff_ts:
|
|
#we were still tagged here sometime before the cutoff
|
|
if options.debug:
|
|
print "Build %s had protected tag %s until %s" \
|
|
% (nvr, tag_name, time.asctime(time.localtime(our_entry['revoke_ts'])))
|
|
is_protected = True
|
|
break
|
|
replaced_ts = None
|
|
revoke_ts = None
|
|
others = {}
|
|
for event_id, is_create, entry in timeline:
|
|
#So two things can knock this build from the title of latest:
|
|
# - it could be untagged (entry revoked)
|
|
# - another build could become latest (replaced)
|
|
                #Note however that if the superseding entry is itself revoked, then
|
|
#our build could become latest again
|
|
if options.debug:
|
|
print _histline(event_id, entry)
|
|
if entry['build_id'] == binfo['id']:
|
|
if is_create:
|
|
#shouldn't happen
|
|
raise koji.GenericError, "Duplicate creation event found for %s in %s" \
|
|
% (nvr, tag_name)
|
|
else:
|
|
#we've been revoked
|
|
revoke_ts = entry['revoke_ts']
|
|
break
|
|
else:
|
|
if is_create:
|
|
#this build has become latest
|
|
replaced_ts = entry['create_ts']
|
|
if entry['active']:
|
|
#this entry not revoked yet, so we're done for this tag
|
|
break
|
|
#since this entry is revoked later, our build might eventually be
|
|
#uncovered, so we have to keep looking
|
|
others[entry['build_id']] = 1
|
|
else:
|
|
#other build revoked
|
|
#see if our build has resurfaced
|
|
if others.has_key(entry['build_id']):
|
|
del others[entry['build_id']]
|
|
if replaced_ts is not None and not others:
|
|
#we've become latest again
|
|
#(note: we're not revoked yet because that triggers a break above)
|
|
replaced_ts = None
|
|
latest_ts = entry['revoke_ts']
|
|
if last_latest is None:
|
|
timestamps = []
|
|
else:
|
|
timestamps = [last_latest]
|
|
if revoke_ts is None:
|
|
if replaced_ts is None:
|
|
#turns out we are still latest
|
|
is_latest = True
|
|
if options.debug:
|
|
print "%s is latest (again) in tag %s" % (nvr, tag_name)
|
|
break
|
|
else:
|
|
#replaced (but not revoked)
|
|
timestamps.append(replaced_ts)
|
|
if options.debug:
|
|
print "tag %s: %s not latest (replaced %s)" \
|
|
% (tag_name, nvr, time.asctime(time.localtime(replaced_ts)))
|
|
elif replaced_ts is None:
|
|
#revoked but not replaced
|
|
timestamps.append(revoke_ts)
|
|
if options.debug:
|
|
print "tag %s: %s not latest (revoked %s)" \
|
|
% (tag_name, nvr, time.asctime(time.localtime(revoke_ts)))
|
|
else:
|
|
#revoked AND replaced
|
|
timestamps.append(min(revoke_ts, replaced_ts))
|
|
if options.debug:
|
|
print "tag %s: %s not latest (revoked %s, replaced %s)" \
|
|
% (tag_name, nvr, time.asctime(time.localtime(revoke_ts)),
|
|
time.asctime(time.localtime(replaced_ts)))
|
|
last_latest = max(timestamps)
|
|
if last_latest > cutoff_ts:
|
|
if options.debug:
|
|
print "%s was latest past the cutoff" % nvr
|
|
is_latest = True
|
|
break
|
|
if is_latest:
|
|
continue
|
|
if is_protected:
|
|
continue
|
|
#not latest anywhere since cutoff, so we can remove all signed copies
|
|
rpms = session.listRPMs(buildID=binfo['id'])
|
|
session.multicall = True
|
|
for rpminfo in rpms:
|
|
session.queryRPMSigs(rpm_id=rpminfo['id'])
|
|
by_sig = {}
|
|
#index by sig
|
|
for rpminfo, [sigs] in zip(rpms, session.multiCall()):
|
|
for sig in sigs:
|
|
sigkey = sig['sigkey']
|
|
by_sig.setdefault(sigkey, []).append(rpminfo)
|
|
builddir = koji.pathinfo.build(binfo)
|
|
build_files = 0
|
|
build_space = 0
|
|
if not by_sig and options.debug:
|
|
print "(build has no signatures)"
|
|
for sigkey, rpms in by_sig.iteritems():
|
|
mycount = 0
|
|
archdirs = {}
|
|
sigdirs = {}
|
|
for rpminfo in rpms:
|
|
signedpath = "%s/%s" % (builddir, koji.pathinfo.signed(rpminfo, sigkey))
|
|
try:
|
|
st = os.lstat(signedpath)
|
|
except OSError:
|
|
continue
|
|
if not stat.S_ISREG(st.st_mode):
|
|
#warn about this
|
|
print "Skipping %s. Not a regular file" % signedpath
|
|
continue
|
|
if st.st_mtime > cutoff_ts:
|
|
print "Skipping %s. File newer than cutoff" % signedpath
|
|
continue
|
|
if options.test:
|
|
print "Would have unlinked: %s" % signedpath
|
|
else:
|
|
if options.verbose:
|
|
print "Unlinking: %s" % signedpath
|
|
try:
|
|
os.unlink(signedpath)
|
|
except OSError, e:
|
|
print "Error removing %s: %s" % (signedpath, e)
|
|
print "This script needs write access to %s" % koji.BASEDIR
|
|
continue
|
|
                mycount += 1
|
|
build_files += 1
|
|
build_space += st.st_size
|
|
#XXX - this makes some layout assumptions, but
|
|
# pathinfo doesn't report what we need
|
|
mydir = os.path.dirname(signedpath)
|
|
archdirs[mydir] = 1
|
|
sigdirs[os.path.dirname(mydir)] = 1
|
|
for dir in archdirs:
|
|
if options.test:
|
|
print "Would have removed dir: %s" % dir
|
|
else:
|
|
if options.verbose:
|
|
print "Removing dir: %s" % dir
|
|
try:
|
|
os.rmdir(dir)
|
|
except OSError, e:
|
|
print "Error removing %s: %s" % (signedpath, e)
|
|
if len(sigdirs) == 1:
|
|
dir = sigdirs.keys()[0]
|
|
if options.test:
|
|
print "Would have removed dir: %s" % dir
|
|
else:
|
|
if options.verbose:
|
|
print "Removing dir: %s" % dir
|
|
try:
|
|
os.rmdir(dir)
|
|
except OSError, e:
|
|
print "Error removing %s: %s" % (signedpath, e)
|
|
elif len(sigdirs) > 1:
|
|
print "Warning: more than one signature dir for %s: %r" % (sigkey, sigdirs)
|
|
if build_files:
|
|
total_files += build_files
|
|
total_space += build_space
|
|
if options.verbose:
|
|
print "Build: %s, Removed %i signed copies (%i bytes). Total: %i/%i" \
|
|
% (nvr, build_files, build_space, total_files, total_space)
|
|
elif options.debug and by_sig:
|
|
print "(build has no signed copies)"
|
|
print "--- Grand Totals ---"
|
|
print "Files: %i" % total_files
|
|
print "Bytes: %i" % total_space
def handle_set_build_volume(options, session, args):
|
|
"[admin] Move a build to a different volume"
|
|
usage = _("usage: %prog set-build-volume volume n-v-r [n-v-r ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("-v", "--verbose", action="store_true", help=_("Be verbose"))
|
|
(options, args) = parser.parse_args(args)
|
|
volinfo = session.getVolume(args[0])
|
|
if not volinfo:
|
|
print "No such volume: %s" % args[0]
|
|
return 1
|
|
activate_session(session)
|
|
builds = []
|
|
for nvr in args[1:]:
|
|
binfo = session.getBuild(nvr)
|
|
if not binfo:
|
|
print "No such build: %s" % nvr
|
|
elif binfo['volume_id'] == volinfo['id']:
|
|
print "Build %s already on volume %s" %(nvr, volinfo['name'])
|
|
else:
|
|
builds.append(binfo)
|
|
if not builds:
|
|
print "No builds to move"
|
|
return 1
|
|
for binfo in builds:
|
|
session.changeBuildVolume(binfo['id'], volinfo['id'])
|
|
if options.verbose:
|
|
print "%s: %s -> %s" % (binfo['nvr'], binfo['volume_name'], volinfo['name'])
|
|
|
|
def handle_add_volume(options, session, args):
|
|
"[admin] Add a new storage volume"
|
|
usage = _("usage: %prog add-volume volume-name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
name = args[0]
|
|
volinfo = session.getVolume(name)
|
|
if volinfo:
|
|
print "Volume %s already exists" % name
|
|
return 1
|
|
activate_session(session)
|
|
volinfo = session.addVolume(name)
|
|
print "Added volume %(name)s with id %(id)i" % volinfo
|
|
|
|
def handle_list_volumes(options, session, args):
|
|
"[info] List storage volumes"
|
|
usage = _("usage: %prog list-volumes")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
for volinfo in session.listVolumes():
|
|
print volinfo['name']
|
|
|
|
def handle_list_permissions(options, session, args):
|
|
"[admin] List user permissions"
|
|
usage = _("usage: %prog list-permissions [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--user", help=_("List permissions for the given user"))
|
|
parser.add_option("--mine", action="store_true", help=_("List your permissions"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) > 0:
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
if options.user:
|
|
user = session.getUser(options.user)
|
|
if not user:
|
|
print "User %s does not exist" % options.user
|
|
return 1
|
|
perms = session.getUserPerms(user['id'])
|
|
elif options.mine:
|
|
perms = session.getPerms()
|
|
else:
|
|
perms = [p['name'] for p in session.getAllPerms()]
|
|
for perm in perms:
|
|
print perm
|
|
|
|
def handle_add_user(options, session, args):
|
|
"[admin] Add a user"
|
|
usage = _("usage: %prog add-user username [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--principal", help=_("The Kerberos principal for this user"))
|
|
parser.add_option("--disable", help=_("Prohibit logins by this user"), action="store_true")
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("You must specify the username of the user to add"))
|
|
elif len(args) > 1:
|
|
parser.error(_("This command only accepts one argument (username)"))
|
|
username = args[0]
|
|
if options.disable:
|
|
status = koji.USER_STATUS['BLOCKED']
|
|
else:
|
|
status = koji.USER_STATUS['NORMAL']
|
|
activate_session(session)
|
|
user_id = session.createUser(username, status=status, krb_principal=options.principal)
|
|
print "Added user %s (%i)" % (username, user_id)
|
|
|
|
def handle_enable_user(options, session, args):
|
|
"[admin] Enable logins by a user"
|
|
usage = _("usage: %prog enable-user username")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("You must specify the username of the user to enable"))
|
|
elif len(args) > 1:
|
|
parser.error(_("This command only accepts one argument (username)"))
|
|
username = args[0]
|
|
activate_session(session)
|
|
session.enableUser(username)
|
|
|
|
def handle_disable_user(options, session, args):
|
|
"[admin] Disable logins by a user"
|
|
usage = _("usage: %prog disable-user username")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("You must specify the username of the user to disable"))
|
|
elif len(args) > 1:
|
|
parser.error(_("This command only accepts one argument (username)"))
|
|
username = args[0]
|
|
activate_session(session)
|
|
session.disableUser(username)
|
|
|
|
def handle_list_signed(options, session, args):
|
|
"[admin] List signed copies of rpms"
|
|
usage = _("usage: %prog list-signed [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--debug", action="store_true")
|
|
parser.add_option("--key", help=_("Only list RPMs signed with this key"))
|
|
parser.add_option("--build", help=_("Only list RPMs from this build"))
|
|
parser.add_option("--rpm", help=_("Only list signed copies for this RPM"))
|
|
parser.add_option("--tag", help=_("Only list RPMs within this tag"))
|
|
(options, args) = parser.parse_args(args)
|
|
activate_session(session)
|
|
qopts = {}
|
|
build_idx = {}
|
|
rpm_idx = {}
|
|
if options.key:
|
|
qopts['sigkey'] = options.key
|
|
if options.rpm:
|
|
        rinfo = session.getRPM(options.rpm)
        if rinfo is None:
            parser.error(_("No such RPM: %s") % options.rpm)
        rpm_idx[rinfo['id']] = rinfo
|
|
if rinfo.get('external_repo_id'):
|
|
print "External rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s" % rinfo
|
|
return 1
|
|
qopts['rpm_id'] = rinfo['id']
|
|
if options.build:
|
|
        binfo = session.getBuild(options.build)
        if binfo is None:
            parser.error(_("No such build: %s") % options.build)
        build_idx[binfo['id']] = binfo
|
|
sigs = []
|
|
rpms = session.listRPMs(buildID=binfo['id'])
|
|
for rinfo in rpms:
|
|
rpm_idx[rinfo['id']] = rinfo
|
|
sigs += session.queryRPMSigs(rpm_id=rinfo['id'], **qopts)
|
|
else:
|
|
sigs = session.queryRPMSigs(**qopts)
|
|
if options.tag:
|
|
print "getting tag listing"
|
|
rpms, builds = session.listTaggedRPMS(options.tag, inherit=False, latest=False)
|
|
print "got tag listing"
|
|
tagged = {}
|
|
for binfo in builds:
|
|
build_idx.setdefault(binfo['id'], binfo)
|
|
for rinfo in rpms:
|
|
rpm_idx.setdefault(rinfo['id'], rinfo)
|
|
tagged[rinfo['id']] = 1
|
|
#Now figure out which sig entries actually have live copies
|
|
for sig in sigs:
|
|
rpm_id = sig['rpm_id']
|
|
sigkey = sig['sigkey']
|
|
if options.tag:
|
|
if tagged.get(rpm_id) is None:
|
|
continue
|
|
rinfo = rpm_idx.get(rpm_id)
|
|
if not rinfo:
|
|
rinfo = session.getRPM(rpm_id)
|
|
rpm_idx[rinfo['id']] = rinfo
|
|
binfo = build_idx.get(rinfo['build_id'])
|
|
if not binfo:
|
|
binfo = session.getBuild(rinfo['build_id'])
|
|
build_idx[binfo['id']] = binfo
|
|
binfo['name'] = binfo['package_name']
|
|
builddir = koji.pathinfo.build(binfo)
|
|
signedpath = "%s/%s" % (builddir, koji.pathinfo.signed(rinfo, sigkey))
|
|
if not os.path.exists(signedpath):
|
|
if options.debug:
|
|
print "No copy: %s" % signedpath
|
|
continue
|
|
print signedpath
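
# Sketch of how the on-disk location of a signed copy is derived above,
# assuming koji.pathinfo has been pointed at the configured topdir: the
# build directory plus the per-key signed path of the rpm.
def _example_signed_path(binfo, rinfo, sigkey):
    builddir = koji.pathinfo.build(binfo)
    return "%s/%s" % (builddir, koji.pathinfo.signed(rinfo, sigkey))
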
def handle_import_in_place(options, session, args):
|
|
"[admin] Import RPMs that are already in place"
|
|
usage = _("usage: %prog import-in-place [options] package [package...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("At least one package must be specified"))
|
|
assert False
|
|
activate_session(session)
|
|
for nvr in args:
|
|
data = koji.parse_NVR(nvr)
|
|
print _("importing %s...") % nvr,
|
|
try:
|
|
session.importBuildInPlace(data)
|
|
except koji.GenericError, e:
|
|
print _("\nError importing: %s" % str(e).splitlines()[-1])
|
|
sys.stdout.flush()
|
|
else:
|
|
print _("done")
|
|
sys.stdout.flush()
|
|
|
|
def handle_import_archive(options, session, args):
|
|
"[admin] Import an archive file and associate it with a build"
|
|
usage = _("usage: %prog import-archive build-id|n-v-r /path/to/archive...")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--create-build", action="store_true", help=_("Auto-create builds as needed"))
|
|
parser.add_option("--link", action="store_true", help=_("Attempt to hardlink instead of uploading"))
|
|
parser.add_option("--type", help=_("The type of archive being imported. Currently supported types: maven, win, image"))
|
|
parser.add_option("--type-info", help=_("Type-specific information to associate with the archives. "
|
|
"For Maven archives this should be a local path to a .pom file. "
|
|
"For Windows archives this should be relpath:platforms[:flags])) "
|
|
"Images need an arch"))
|
|
(suboptions, args) = parser.parse_args(args)
|
|
|
|
    if len(args) < 2:
|
|
parser.error(_("You must specify a build ID or N-V-R and an archive to import"))
|
|
assert False
|
|
|
|
activate_session(session)
|
|
|
|
if not suboptions.type:
|
|
parser.error(_("You must specify an archive type"))
|
|
assert False
|
|
if suboptions.type == 'maven':
|
|
if not (session.hasPerm('maven-import') or session.hasPerm('admin')):
|
|
parser.error(_("This action requires the maven-import privilege"))
|
|
assert False
|
|
if not suboptions.type_info:
|
|
parser.error(_("--type-info must point to a .pom file when importing Maven archives"))
|
|
assert False
|
|
pom_info = koji.parse_pom(suboptions.type_info)
|
|
maven_info = koji.pom_to_maven_info(pom_info)
|
|
suboptions.type_info = maven_info
|
|
elif suboptions.type == 'win':
|
|
if not (session.hasPerm('win-import') or session.hasPerm('admin')):
|
|
parser.error(_("This action requires the win-import privilege"))
|
|
assert False
|
|
if not suboptions.type_info:
|
|
parser.error(_("--type-info must be specified"))
|
|
assert False
|
|
type_info = suboptions.type_info.split(':', 2)
|
|
if len(type_info) < 2:
|
|
parser.error(_("--type-info must be in relpath:platforms[:flags] format"))
|
|
win_info = {'relpath': type_info[0], 'platforms': type_info[1].split()}
|
|
if len(type_info) > 2:
|
|
win_info['flags'] = type_info[2].split()
|
|
else:
|
|
win_info['flags'] = []
|
|
suboptions.type_info = win_info
|
|
elif suboptions.type == 'image':
|
|
if not (session.hasPerm('image-import') or session.hasPerm('admin')):
|
|
parser.error(_("This action requires the image-import privilege"))
|
|
assert False
|
|
if not suboptions.type_info:
|
|
parser.error(_("--type-info must be specified"))
|
|
assert False
|
|
image_info = {'arch': suboptions.type_info}
|
|
suboptions.type_info = image_info
|
|
else:
|
|
parser.error(_("Unsupported archive type: %s" % suboptions.type))
|
|
assert False
|
|
|
|
buildinfo = session.getBuild(arg_filter(args[0]))
|
|
if not buildinfo:
|
|
if not suboptions.create_build:
|
|
parser.error(_("No such build: %s") % args[0])
|
|
buildinfo = koji.parse_NVR(args[0])
|
|
if buildinfo['epoch'] == '':
|
|
buildinfo['epoch'] = None
|
|
else:
|
|
buildinfo['epoch'] = int(buildinfo['epoch'])
|
|
if suboptions.type == 'maven':
|
|
# --type-info should point to a local .pom file
|
|
session.createMavenBuild(buildinfo, suboptions.type_info)
|
|
elif suboptions.type == 'win':
|
|
# We're importing, so we don't know what platform the build
|
|
# was run on. Use "import" as a placeholder.
|
|
session.createWinBuild(buildinfo, {'platform': 'import'})
|
|
elif suboptions.type == 'image':
|
|
# --type-info should have an arch of the image
|
|
session.createImageBuild(buildinfo)
|
|
else:
|
|
# should get caught above
|
|
assert False
|
|
|
|
for filepath in args[1:]:
|
|
filename = os.path.basename(filepath)
|
|
print "Uploading archive: %s" % filename
|
|
serverdir = _unique_path('cli-import')
|
|
if _running_in_bg() or suboptions.noprogress:
|
|
callback = None
|
|
else:
|
|
callback = _progress_callback
|
|
if suboptions.link:
|
|
linked_upload(filepath, serverdir)
|
|
else:
|
|
session.uploadWrapper(filepath, serverdir, callback=callback)
|
|
print
|
|
serverpath = "%s/%s" % (serverdir, filename)
|
|
session.importArchive(serverpath, buildinfo, suboptions.type, suboptions.type_info)
|
|
print "Imported: %s" % filename
def handle_grant_permission(options, session, args):
|
|
"[admin] Grant a permission to a user"
|
|
usage = _("usage: %prog grant-permission <permission> <user> [<user> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--new", action="store_true", help=_("Create a new permission"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify a permission and at least one user"))
|
|
assert False
|
|
activate_session(session)
|
|
perm = args[0]
|
|
names = args[1:]
|
|
users = []
|
|
for n in names:
|
|
user = session.getUser(n)
|
|
if user is None:
|
|
parser.error(_("No such user: %s" % n))
|
|
assert False
|
|
users.append(user)
|
|
kwargs = {}
|
|
if options.new:
|
|
kwargs['create'] = True
|
|
for user in users:
|
|
session.grantPermission(user['name'], perm, **kwargs)
|
|
|
|
def handle_revoke_permission(options, session, args):
|
|
"[admin] Revoke a permission from a user"
|
|
usage = _("usage: %prog revoke-permission <permission> <user> [<user> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify a permission and at least one user"))
|
|
assert False
|
|
activate_session(session)
|
|
perm = args[0]
|
|
names = args[1:]
|
|
users = []
|
|
for n in names:
|
|
user = session.getUser(n)
|
|
if user is None:
|
|
parser.error(_("No such user: %s" % n))
|
|
assert False
|
|
users.append(user)
|
|
for user in users:
|
|
session.revokePermission(user['name'], perm)
|
|
|
|
|
|
def handle_grant_cg_access(options, session, args):
|
|
"[admin] Add a user to a content generator"
|
|
usage = _("usage: %prog grant-cg-access <user> <content generator>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--new", action="store_true", help=_("Create a new content generator"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 2:
|
|
parser.error(_("Please specify a user and content generator"))
|
|
assert False
|
|
activate_session(session)
|
|
user = args[0]
|
|
cg = args[1]
|
|
uinfo = session.getUser(user)
|
|
if uinfo is None:
|
|
parser.error(_("No such user: %s" % user))
|
|
assert False
|
|
kwargs = {}
|
|
if options.new:
|
|
kwargs['create'] = True
|
|
session.grantCGAccess(uinfo['name'], cg, **kwargs)
|
|
|
|
|
|
def handle_revoke_cg_access(options, session, args):
|
|
"[admin] Remove a user from a content generator"
|
|
usage = _("usage: %prog revoke-cg-access <user> <content generator>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 2:
|
|
parser.error(_("Please specify a user and content generator"))
|
|
assert False
|
|
activate_session(session)
|
|
user = args[0]
|
|
cg = args[1]
|
|
uinfo = session.getUser(user)
|
|
if uinfo is None:
|
|
parser.error(_("No such user: %s" % user))
|
|
assert False
|
|
session.revokeCGAccess(uinfo['name'], cg)
|
|
|
|
|
|
def anon_handle_latest_build(options, session, args):
|
|
"[info] Print the latest builds for a tag"
|
|
usage = _("usage: %prog latest-build [options] tag package [package...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--arch", help=_("List all of the latest packages for this arch"))
|
|
parser.add_option("--all", action="store_true", help=_("List all of the latest packages for this tag"))
|
|
parser.add_option("--quiet", action="store_true", help=_("Do not print the header information"), default=options.quiet)
|
|
parser.add_option("--paths", action="store_true", help=_("Show the file paths"))
|
|
parser.add_option("--type", help=_("Show builds of the given type only. Currently supported types: maven"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) == 0:
|
|
parser.error(_("A tag name must be specified"))
|
|
assert False
|
|
activate_session(session)
|
|
if options.all:
|
|
if len(args) > 1:
|
|
parser.error(_("A package name may not be combined with --all"))
|
|
assert False
|
|
# Set None as the package argument
|
|
args.append(None)
|
|
else:
|
|
if len(args) < 2:
|
|
parser.error(_("A tag name and package name must be specified"))
|
|
assert False
|
|
pathinfo = koji.PathInfo()
|
|
|
|
for pkg in args[1:]:
|
|
if options.arch:
|
|
rpms, builds = session.getLatestRPMS(args[0], package=pkg, arch=options.arch)
|
|
builds_hash = dict([(x['build_id'], x) for x in builds])
|
|
data = rpms
|
|
if options.paths:
|
|
for x in data:
|
|
z = x.copy()
|
|
x['name'] = builds_hash[x['build_id']]['package_name']
|
|
x['path'] = os.path.join(pathinfo.build(x), pathinfo.rpm(z))
|
|
fmt = "%(path)s"
|
|
else:
|
|
fmt = "%(name)s-%(version)s-%(release)s.%(arch)s"
|
|
else:
|
|
kwargs = {'package': pkg}
|
|
if options.type:
|
|
kwargs['type'] = options.type
|
|
data = session.getLatestBuilds(args[0], **kwargs)
|
|
if options.paths:
|
|
if options.type == 'maven':
|
|
for x in data:
|
|
x['path'] = pathinfo.mavenbuild(x)
|
|
fmt = "%(path)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s"
|
|
else:
|
|
for x in data:
|
|
x['path'] = pathinfo.build(x)
|
|
fmt = "%(path)-40s %(tag_name)-20s %(owner_name)s"
|
|
else:
|
|
if options.type == 'maven':
|
|
fmt = "%(nvr)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s"
|
|
else:
|
|
fmt = "%(nvr)-40s %(tag_name)-20s %(owner_name)s"
|
|
if not options.quiet:
|
|
if options.type == 'maven':
|
|
print "%-40s %-20s %-20s %-20s %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by")
|
|
print "%s %s %s %s %s" % ("-"*40, "-"*20, "-"*20, "-"*20, "-"*16)
|
|
else:
|
|
print "%-40s %-20s %s" % ("Build","Tag","Built by")
|
|
print "%s %s %s" % ("-"*40, "-"*20, "-"*16)
|
|
options.quiet = True
|
|
|
|
output = [ fmt % x for x in data]
|
|
output.sort()
|
|
for line in output:
|
|
print line
|
|
|
|
|
|
def anon_handle_list_api(options, session, args):
|
|
"[info] Print the list of XML-RPC APIs"
|
|
usage = _("usage: %prog list-api [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 0:
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
tmplist = [(x['name'], x) for x in session._listapi()]
|
|
tmplist.sort()
|
|
funcs = [x[1] for x in tmplist]
|
|
for x in funcs:
|
|
if 'argdesc' in x:
|
|
args = x['argdesc']
|
|
elif x['args']:
|
|
# older servers may not provide argdesc
|
|
expanded = []
|
|
for arg in x['args']:
|
|
if type(arg) is str:
|
|
expanded.append(arg)
|
|
else:
|
|
expanded.append('%s=%s' % (arg[0], arg[1]))
|
|
args = "(%s)" % ", ".join(expanded)
|
|
else:
|
|
args = "()"
|
|
print '%s%s' % (x['name'], args)
|
|
if x['doc']:
|
|
print " description: %s" % x['doc']
|
|
|
|
def anon_handle_list_tagged(options, session, args):
|
|
"[info] List the builds or rpms in a tag"
|
|
usage = _("usage: %prog list-tagged [options] tag [package]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--arch", help=_("List rpms for this arch"))
|
|
parser.add_option("--rpms", action="store_true", help=_("Show rpms instead of builds"))
|
|
parser.add_option("--inherit", action="store_true", help=_("Follow inheritance"))
|
|
parser.add_option("--latest", action="store_true", help=_("Only show the latest builds/rpms"))
|
|
parser.add_option("--latest-n", type='int', metavar="N", help=_("Only show the latest N builds/rpms"))
|
|
parser.add_option("--quiet", action="store_true", help=_("Do not print the header information"), default=options.quiet)
|
|
parser.add_option("--paths", action="store_true", help=_("Show the file paths"))
|
|
parser.add_option("--sigs", action="store_true", help=_("Show signatures"))
|
|
parser.add_option("--type", help=_("Show builds of the given type only. Currently supported types: maven, win, image"))
|
|
parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event"))
|
|
parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at timestamp"))
|
|
parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) == 0:
|
|
parser.error(_("A tag name must be specified"))
|
|
assert False
|
|
elif len(args) > 2:
|
|
parser.error(_("Only one package name may be specified"))
|
|
assert False
|
|
activate_session(session)
|
|
pathinfo = koji.PathInfo()
|
|
package = None
|
|
if len(args) > 1:
|
|
package = args[1]
|
|
tag = args[0]
|
|
opts = {}
|
|
for key in ('latest','inherit'):
|
|
opts[key] = getattr(options, key)
|
|
if options.latest_n is not None:
|
|
opts['latest'] = options.latest_n
|
|
if package:
|
|
opts['package'] = package
|
|
if options.arch:
|
|
options.rpms = True
|
|
opts['arch'] = options.arch
|
|
if options.sigs:
|
|
opts['rpmsigs'] = True
|
|
options.rpms = True
|
|
if options.type:
|
|
opts['type'] = options.type
|
|
event = koji.util.eventFromOpts(session, options)
|
|
if event:
|
|
opts['event'] = event['id']
|
|
event['timestr'] = time.asctime(time.localtime(event['ts']))
|
|
print "Querying at event %(id)i (%(timestr)s)" % event
|
|
|
|
if options.rpms:
|
|
rpms, builds = session.listTaggedRPMS(tag, **opts)
|
|
data = rpms
|
|
if options.paths:
|
|
build_idx = dict([(b['id'],b) for b in builds])
|
|
for rinfo in data:
|
|
build = build_idx[rinfo['build_id']]
|
|
builddir = pathinfo.build(build)
|
|
if options.sigs:
|
|
sigkey = rinfo['sigkey']
|
|
signedpath = os.path.join(builddir, pathinfo.signed(rinfo, sigkey))
|
|
if os.path.exists(signedpath):
|
|
rinfo['path'] = signedpath
|
|
else:
|
|
rinfo['path'] = os.path.join(builddir, pathinfo.rpm(rinfo))
|
|
fmt = "%(path)s"
|
|
data = [x for x in data if x.has_key('path')]
|
|
else:
|
|
fmt = "%(name)s-%(version)s-%(release)s.%(arch)s"
|
|
if options.sigs:
|
|
fmt = "%(sigkey)s " + fmt
|
|
else:
|
|
data = session.listTagged(tag, **opts)
|
|
if options.paths:
|
|
if options.type == 'maven':
|
|
for x in data:
|
|
x['path'] = pathinfo.mavenbuild(x)
|
|
fmt = "%(path)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s"
|
|
else:
|
|
for x in data:
|
|
x['path'] = pathinfo.build(x)
|
|
fmt = "%(path)-40s %(tag_name)-20s %(owner_name)s"
|
|
else:
|
|
if options.type == 'maven':
|
|
fmt = "%(nvr)-40s %(tag_name)-20s %(maven_group_id)-20s %(maven_artifact_id)-20s %(owner_name)s"
|
|
else:
|
|
fmt = "%(nvr)-40s %(tag_name)-20s %(owner_name)s"
|
|
if not options.quiet:
|
|
if options.type == 'maven':
|
|
print "%-40s %-20s %-20s %-20s %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by")
|
|
print "%s %s %s %s %s" % ("-"*40, "-"*20, "-"*20, "-"*20, "-"*16)
|
|
else:
|
|
print "%-40s %-20s %s" % ("Build","Tag","Built by")
|
|
print "%s %s %s" % ("-"*40, "-"*20, "-"*16)
|
|
|
|
output = [ fmt % x for x in data]
|
|
output.sort()
|
|
for line in output:
|
|
print line
|
|
|
|
def anon_handle_list_buildroot(options, session, args):
|
|
"[info] List the rpms used in or built in a buildroot"
|
|
usage = _("usage: %prog list-buildroot [options] buildroot-id")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--paths", action="store_true", help=_("Show the file paths"))
|
|
parser.add_option("--built", action="store_true", help=_("Show the built rpms"))
|
|
parser.add_option("--verbose", "-v", action="store_true", help=_("Show more information"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
buildrootID = int(args[0])
|
|
opts = {}
|
|
if options.built:
|
|
opts['buildrootID'] = buildrootID
|
|
else:
|
|
opts['componentBuildrootID'] = buildrootID
|
|
data = session.listRPMs(**opts)
|
|
|
|
fmt = "%(nvr)s.%(arch)s"
|
|
order = [(fmt % x, x) for x in data]
|
|
order.sort()
|
|
for nvra, rinfo in order:
|
|
if options.verbose and rinfo.get('is_update'):
|
|
print nvra, "[update]"
|
|
else:
|
|
print nvra
|
|
|
|
def anon_handle_list_untagged(options, session, args):
|
|
"[info] List untagged builds"
|
|
usage = _("usage: %prog list-untagged [options] [package]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--paths", action="store_true", help=_("Show the file paths"))
|
|
parser.add_option("--show-references", action="store_true", help=_("Show build references"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) > 1:
|
|
parser.error(_("Only one package name may be specified"))
|
|
assert False
|
|
activate_session(session)
|
|
package = None
|
|
if len(args) > 0:
|
|
package = args[0]
|
|
opts = {}
|
|
if package:
|
|
opts['name'] = package
|
|
pathinfo = koji.PathInfo()
|
|
|
|
data = session.untaggedBuilds(**opts)
|
|
if options.show_references:
|
|
print "(Showing build references)"
|
|
refs = {}
|
|
refs2 = {} #reverse map
|
|
for x in session.buildMap():
|
|
refs.setdefault(x['used'], {}).setdefault(x['built'], 1)
|
|
refs2.setdefault(x['built'], {}).setdefault(x['used'], 1)
|
|
#XXX - need to ignore refs to unreferenced builds
|
|
for x in data:
|
|
builds = refs.get(x['id'])
|
|
if builds:
|
|
x['refs'] = "%s" % builds
|
|
else:
|
|
x['refs'] = ''
|
|
#data = [x for x in data if not refs.has_key(x['id'])]
|
|
if options.paths:
|
|
for x in data:
|
|
x['path'] = pathinfo.build(x)
|
|
fmt = "%(path)s"
|
|
else:
|
|
fmt = "%(name)s-%(version)s-%(release)s"
|
|
if options.show_references:
|
|
fmt = fmt + " %(refs)s"
|
|
|
|
output = [ fmt % x for x in data]
|
|
output.sort()
|
|
for line in output:
|
|
print line
|
|
|
|
def print_group_list_req_group(group):
|
|
print " @%(name)s [%(tag_name)s]" % group
|
|
|
|
def print_group_list_req_package(pkg):
|
|
print " %(package)s: %(basearchonly)s, %(type)s [%(tag_name)s]" % pkg
|
|
|
|
def anon_handle_list_groups(options, session, args):
|
|
"[info] Print the group listings"
|
|
usage = _("usage: %prog list-groups [options] <tag> [group]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event"))
|
|
parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at timestamp"))
|
|
parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1 or len(args) > 2:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
opts = {}
|
|
activate_session(session)
|
|
event = koji.util.eventFromOpts(session, options)
|
|
if event:
|
|
opts['event'] = event['id']
|
|
event['timestr'] = time.asctime(time.localtime(event['ts']))
|
|
print "Querying at event %(id)i (%(timestr)s)" % event
|
|
tags = dict([(x['id'], x['name']) for x in session.listTags()])
|
|
tmp_list = [(x['name'], x) for x in session.getTagGroups(args[0], **opts)]
|
|
tmp_list.sort()
|
|
groups = [x[1] for x in tmp_list]
|
|
for group in groups:
|
|
if len(args) > 1 and group['name'] != args[1]:
|
|
continue
|
|
print "%s [%s]" % (group['name'], tags.get(group['tag_id'], group['tag_id']))
|
|
groups = [(x['name'], x) for x in group['grouplist']]
|
|
groups.sort()
|
|
for x in [x[1] for x in groups]:
|
|
x['tag_name'] = tags.get(x['tag_id'], x['tag_id'])
|
|
print_group_list_req_group(x)
|
|
pkgs = [(x['package'], x) for x in group['packagelist']]
|
|
pkgs.sort()
|
|
for x in [x[1] for x in pkgs]:
|
|
x['tag_name'] = tags.get(x['tag_id'], x['tag_id'])
|
|
print_group_list_req_package(x)
|
|
|
|
def handle_add_group_pkg(options, session, args):
|
|
"[admin] Add a package to a group's package listing"
|
|
usage = _("usage: %prog add-group-pkg [options] <tag> <group> <pkg> [<pkg>...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 3:
|
|
parser.error(_("You must specify a tag name, group name, and one or more package names"))
|
|
assert False
|
|
tag = args[0]
|
|
group = args[1]
|
|
activate_session(session)
|
|
for pkg in args[2:]:
|
|
session.groupPackageListAdd(tag, group, pkg)
|
|
|
|
def handle_block_group_pkg(options, session, args):
|
|
"[admin] Block a package from a group's package listing"
|
|
usage = _("usage: %prog block-group-pkg [options] <tag> <group> <pkg> [<pkg>...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 3:
|
|
parser.error(_("You must specify a tag name, group name, and one or more package names"))
|
|
assert False
|
|
tag = args[0]
|
|
group = args[1]
|
|
activate_session(session)
|
|
for pkg in args[2:]:
|
|
session.groupPackageListBlock(tag, group, pkg)
|
|
|
|
def handle_unblock_group_pkg(options, session, args):
|
|
"[admin] Unblock a package from a group's package listing"
|
|
usage = _("usage: %prog unblock-group-pkg [options] <tag> <group> <pkg> [<pkg>...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 3:
|
|
parser.error(_("You must specify a tag name, group name, and one or more package names"))
|
|
assert False
|
|
tag = args[0]
|
|
group = args[1]
|
|
activate_session(session)
|
|
for pkg in args[2:]:
|
|
session.groupPackageListUnblock(tag, group, pkg)
|
|
|
|
def handle_add_group_req(options, session, args):
|
|
"[admin] Add a group to a group's required list"
|
|
usage = _("usage: %prog add-group-req [options] <tag> <target group> <required group>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 3:
|
|
parser.error(_("You must specify a tag name and two group names"))
|
|
assert False
|
|
tag = args[0]
|
|
group = args[1]
|
|
req = args[2]
|
|
activate_session(session)
|
|
session.groupReqListAdd(tag, group, req)
|
|
|
|
def handle_block_group_req(options, session, args):
|
|
"[admin] Block a group's requirement listing"
|
|
usage = _("usage: %prog block-group-req [options] <tag> <group> <blocked req>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 3:
|
|
parser.error(_("You must specify a tag name and two group names"))
|
|
assert False
|
|
tag = args[0]
|
|
group = args[1]
|
|
req = args[2]
|
|
activate_session(session)
|
|
session.groupReqListBlock(tag, group, req)
|
|
|
|
def handle_unblock_group_req(options, session, args):
|
|
"[admin] Unblock a group's requirement listing"
|
|
usage = _("usage: %prog unblock-group-req [options] <tag> <group> <requirement>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 3:
|
|
parser.error(_("You must specify a tag name and two group names"))
|
|
assert False
|
|
tag = args[0]
|
|
group = args[1]
|
|
req = args[2]
|
|
activate_session(session)
|
|
session.groupReqListUnblock(tag, group, req)
|
|
|
|
def anon_handle_list_hosts(options, session, args):
|
|
"[info] Print the host listing"
|
|
usage = _("usage: %prog list-hosts [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--arch", action="append", default=[], help=_("Specify an architecture"))
|
|
parser.add_option("--channel", help=_("Specify a channel"))
|
|
parser.add_option("--ready", action="store_true", help=_("Limit to ready hosts"))
|
|
parser.add_option("--not-ready", action="store_false", dest="ready", help=_("Limit to not ready hosts"))
|
|
parser.add_option("--enabled", action="store_true", help=_("Limit to enabled hosts"))
|
|
parser.add_option("--not-enabled", action="store_false", dest="enabled", help=_("Limit to not enabled hosts"))
|
|
parser.add_option("--quiet", action="store_true", help=_("Do not print header information"), default=options.quiet)
|
|
(options, args) = parser.parse_args(args)
|
|
opts = {}
|
|
activate_session(session)
|
|
if options.arch:
|
|
opts['arches'] = options.arch
|
|
if options.channel:
|
|
channel = session.getChannel(options.channel)
|
|
if not channel:
|
|
parser.error(_('Unknown channel: %s' % options.channel))
|
|
assert False
|
|
opts['channelID'] = channel['id']
|
|
if options.ready is not None:
|
|
opts['ready'] = options.ready
|
|
if options.enabled is not None:
|
|
opts['enabled'] = options.enabled
|
|
tmp_list = [(x['name'], x) for x in session.listHosts(**opts)]
|
|
tmp_list.sort()
|
|
hosts = [x[1] for x in tmp_list]
|
|
|
|
def yesno(x):
|
|
if x: return 'Y'
|
|
else: return 'N'
|
|
|
|
# pull in the last update using multicall to speed it up a bit
|
|
session.multicall = True
|
|
for host in hosts:
|
|
session.getLastHostUpdate(host['id'])
|
|
updateList = session.multiCall()
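# multiCall() returns one result per queued call, in order; successful
# results come back wrapped in a single-element list (hence the
# "[update]" unpacking below), so each host lines up with its last
# update timestamp.  The timestamp is split on '.' to drop fractional
# seconds before display.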
|
|
|
|
for host, [update] in zip(hosts, updateList):
|
|
if update is None:
|
|
host['update'] = '-'
|
|
else:
|
|
host['update'] = update.split('.')[0]
|
|
host['enabled'] = yesno(host['enabled'])
|
|
host['ready'] = yesno(host['ready'])
|
|
host['arches'] = ','.join(host['arches'].split())
|
|
|
|
if not options.quiet:
|
|
print "Hostname Enb Rdy Load/Cap Arches Last Update"
|
|
for host in hosts:
|
|
print "%(name)-28s %(enabled)-3s %(ready)-3s %(task_load)4.1f/%(capacity)-3.1f %(arches)-16s %(update)s" % host
|
|
|
|
def anon_handle_list_pkgs(options, session, args):
|
|
"[info] Print the package listing for tag or for owner"
|
|
usage = _("usage: %prog list-pkgs [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--owner", help=_("Specify owner"))
|
|
parser.add_option("--tag", help=_("Specify tag"))
|
|
parser.add_option("--package", help=_("Specify package"))
|
|
parser.add_option("--quiet", action="store_true", help=_("Do not print header information"), default=options.quiet)
|
|
parser.add_option("--noinherit", action="store_true", help=_("Don't follow inheritance"))
|
|
parser.add_option("--show-blocked", action="store_true", help=_("Show blocked packages"))
|
|
parser.add_option("--show-dups", action="store_true", help=_("Show superseded owners"))
|
|
parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event"))
|
|
parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at timestamp"))
|
|
parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 0:
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
opts = {}
|
|
if options.owner:
|
|
user = session.getUser(options.owner)
|
|
if user is None:
|
|
parser.error(_("Invalid user"))
|
|
assert False
|
|
opts['userID'] = user['id']
|
|
if options.tag:
|
|
tag = session.getTag(options.tag)
|
|
if tag is None:
|
|
parser.error(_("Invalid tag"))
|
|
assert False
|
|
opts['tagID'] = tag['id']
|
|
if options.package:
|
|
opts['pkgID'] = options.package
|
|
allpkgs = False
|
|
if not opts:
|
|
# no limiting clauses were specified
|
|
allpkgs = True
|
|
opts['inherited'] = not options.noinherit
|
|
#hiding dups only makes sense if we're querying a tag
|
|
if options.tag:
|
|
opts['with_dups'] = options.show_dups
|
|
else:
|
|
opts['with_dups'] = True
|
|
event = koji.util.eventFromOpts(session, options)
|
|
if event:
|
|
opts['event'] = event['id']
|
|
event['timestr'] = time.asctime(time.localtime(event['ts']))
|
|
print "Querying at event %(id)i (%(timestr)s)" % event
|
|
data = session.listPackages(**opts)
|
|
if not data:
|
|
print "(no matching packages)"
|
|
return 1
|
|
if not options.quiet:
|
|
if allpkgs:
|
|
print "Package"
|
|
print '-'*23
|
|
else:
|
|
print "%-23s %-23s %-16s %-15s" % ('Package','Tag','Extra Arches','Owner')
|
|
print "%s %s %s %s" % ('-'*23,'-'*23,'-'*16,'-'*15)
|
|
for pkg in data:
|
|
if allpkgs:
|
|
print pkg['package_name']
|
|
else:
|
|
if not options.show_blocked and pkg.get('blocked',False):
|
|
continue
|
|
if pkg.has_key('tag_id'):
|
|
if pkg['extra_arches'] is None:
|
|
pkg['extra_arches'] = ""
|
|
fmt = "%(package_name)-23s %(tag_name)-23s %(extra_arches)-16s %(owner_name)-15s"
|
|
if pkg.get('blocked',False):
|
|
fmt += " [BLOCKED]"
|
|
else:
|
|
fmt = "%(package_name)s"
|
|
print fmt % pkg
|
|
|
|
def anon_handle_rpminfo(options, session, args):
|
|
"[info] Print basic information about an RPM"
|
|
usage = _("usage: %prog rpminfo [options] <n-v-r.a> [<n-v-r.a> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--buildroots", action="store_true", help=_("show buildroots the rpm was used in"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Please specify an RPM"))
|
|
assert False
|
|
activate_session(session)
|
|
for rpm in args:
|
|
info = session.getRPM(rpm)
|
|
if info is None:
|
|
print "No such rpm: %s\n" % rpm
|
|
continue
|
|
if info['epoch'] is None:
|
|
info['epoch'] = ""
|
|
else:
|
|
info['epoch'] = str(info['epoch']) + ":"
|
|
if not info.get('external_repo_id', 0):
|
|
buildinfo = session.getBuild(info['build_id'])
|
|
buildinfo['name'] = buildinfo['package_name']
|
|
buildinfo['arch'] = 'src'
|
|
if buildinfo['epoch'] is None:
|
|
buildinfo['epoch'] = ""
|
|
else:
|
|
buildinfo['epoch'] = str(buildinfo['epoch']) + ":"
|
|
print "RPM: %(epoch)s%(name)s-%(version)s-%(release)s.%(arch)s [%(id)d]" %info
|
|
if info.get('external_repo_id'):
|
|
repo = session.getExternalRepo(info['external_repo_id'])
|
|
print "External Repository: %(name)s [%(id)i]" % repo
|
|
print "External Repository url: %(url)s" % repo
|
|
else:
|
|
print "RPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(info))
|
|
print "SRPM: %(epoch)s%(name)s-%(version)s-%(release)s [%(id)d]" % buildinfo
|
|
print "SRPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(buildinfo))
|
|
print "Built: %s" % time.strftime('%a, %d %b %Y %H:%M:%S %Z', time.localtime(info['buildtime']))
|
|
print "Payload: %(payloadhash)s" %info
|
|
print "Size: %(size)s" %info
|
|
if not info.get('external_repo_id', 0):
|
|
print "Build ID: %(build_id)s" %info
|
|
if info['buildroot_id'] is None:
|
|
print "No buildroot data available"
|
|
else:
|
|
br_info = session.getBuildroot(info['buildroot_id'])
|
|
if br_info['br_type'] == koji.BR_TYPES['STANDARD']:
|
|
print "Buildroot: %(id)i (tag %(tag_name)s, arch %(arch)s, repo %(repo_id)i)" % br_info
|
|
print "Build Host: %(host_name)s" % br_info
|
|
print "Build Task: %(task_id)i" % br_info
|
|
else:
|
|
print "Content generator: %(cg_name)s" % br_info
|
|
print "Buildroot: %(id)i" % br_info
|
|
print "Build Host OS: %(host_os)s (%(host_arch)s)" % br_info
|
|
if info.get('extra'):
|
|
print "Extra: %(extra)r" % info
|
|
if options.buildroots:
|
|
br_list = session.listBuildroots(rpmID=info['id'], queryOpts={'order':'buildroot.id'})
|
|
print "Used in %i buildroots:" % len(br_list)
|
|
if len(br_list):
|
|
print " %8s %-28s %-8s %-29s" % ('id','build tag','arch','build host')
|
|
print " %s %s %s %s" % ('-'*8, '-'*28, '-'*8, '-'*29)
|
|
for br_info in br_list:
|
|
print " %(id)8i %(tag_name)-28s %(arch)-8s %(host_name)-29s" % br_info
|
|
|
|
|
|
def anon_handle_buildinfo(options, session, args):
|
|
"[info] Print basic information about a build"
|
|
usage = _("usage: %prog buildinfo [options] <n-v-r> [<n-v-r> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--changelog", action="store_true", help=_("Show the changelog for the build"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Please specify a build"))
|
|
assert False
|
|
activate_session(session)
|
|
for build in args:
|
|
if build.isdigit():
|
|
build = int(build)
|
|
info = session.getBuild(build)
|
|
if info is None:
|
|
print "No such build: %s\n" % build
|
|
continue
|
|
task = None
|
|
if info['task_id']:
|
|
task = session.getTaskInfo(info['task_id'], request=True)
|
|
taglist = []
|
|
for tag in session.listTags(build):
|
|
taglist.append(tag['name'])
|
|
info['arch'] = 'src'
|
|
info['state'] = koji.BUILD_STATES[info['state']]
|
|
print "BUILD: %(name)s-%(version)s-%(release)s [%(id)d]" % info
|
|
print "State: %(state)s" % info
|
|
print "Built by: %(owner_name)s" % info
|
|
if 'volume_name' in info:
|
|
print "Volume: %(volume_name)s" % info
|
|
if task:
|
|
print "Task: %s %s" % (task['id'], koji.taskLabel(task))
|
|
else:
|
|
print "Task: none"
|
|
print "Finished: %s" % koji.formatTimeLong(info['completion_time'])
|
|
maven_info = session.getMavenBuild(info['id'])
|
|
if maven_info:
|
|
print "Maven groupId: %s" % maven_info['group_id']
|
|
print "Maven artifactId: %s" % maven_info['artifact_id']
|
|
print "Maven version: %s" % maven_info['version']
|
|
win_info = session.getWinBuild(info['id'])
|
|
if win_info:
|
|
print "Windows build platform: %s" % win_info['platform']
|
|
print "Tags: %s" % ' '.join(taglist)
|
|
if info.get('extra'):
|
|
print "Extra: %(extra)r" % info
|
|
maven_archives = session.listArchives(buildID=info['id'], type='maven')
|
|
if maven_archives:
|
|
print "Maven archives:"
|
|
for archive in maven_archives:
|
|
print os.path.join(koji.pathinfo.mavenbuild(info), koji.pathinfo.mavenfile(archive))
|
|
win_archives = session.listArchives(buildID=info['id'], type='win')
|
|
if win_archives:
|
|
print "Windows archives:"
|
|
for archive in win_archives:
|
|
print os.path.join(koji.pathinfo.winbuild(info), koji.pathinfo.winfile(archive))
|
|
rpms = session.listRPMs(buildID=info['id'])
|
|
image_info = session.getImageBuild(info['id'])
|
|
img_archives = session.listArchives(buildID=info['id'], type='image')
|
|
if img_archives:
|
|
print 'Image archives:'
|
|
for archive in img_archives:
|
|
print os.path.join(koji.pathinfo.imagebuild(info), archive['filename'])
|
|
if rpms:
|
|
print "RPMs:"
|
|
for rpm in rpms:
|
|
print os.path.join(koji.pathinfo.build(info), koji.pathinfo.rpm(rpm))
|
|
if options.changelog:
|
|
changelog = session.getChangelogEntries(info['id'])
|
|
if changelog:
|
|
print "Changelog:"
|
|
print koji.util.formatChangelog(changelog)
|
|
|
|
def handle_clone_tag(options, session, args):
|
|
"[admin] Duplicate the contents of one tag onto another tag"
|
|
usage = _("usage: %prog clone-tag [options] <src-tag> <dst-tag>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("-v","--verbose", action="store_true", help=_("show changes"),)
|
|
parser.add_option("-f","--force", action="store_true", help=_("override tag locks if necessary"),)
|
|
parser.add_option("-n","--test", action="store_true", help=_("test mode"))
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
if len(args) != 2:
|
|
parser.error(_("This command takes two argument: <src-tag> <dst-tag>"))
|
|
assert False
|
|
activate_session(session)
|
|
|
|
if not session.hasPerm('admin') and not options.test:
|
|
print "This action requires admin privileges"
|
|
return
|
|
|
|
if args[0] == args[1]:
|
|
sys.stdout.write('Source and destination tags must be different.\n')
|
|
return
|
|
# store tags.
|
|
srctag = session.getTag(args[0])
|
|
dsttag = session.getTag(args[1])
|
|
if not srctag:
|
|
sys.stdout.write("Unknown src-tag: %s\n" % args[0])
|
|
return
|
|
if (srctag['locked'] and not options.force) or (dsttag and dsttag['locked'] and not options.force):
|
|
print _("Error: You are attempting to clone from or to a tag which is locked.")
|
|
print _("Please use --force if this is what you really want to do.")
|
|
return
|
|
|
|
# init debug lists.
|
|
chgpkglist=[]
|
|
chgbldlist=[]
|
|
chggrplist=[]
|
|
# case of brand new dst-tag.
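# Create the destination with the same header (arches, perm, lock),
# copy the full inherited package list, tag in the latest builds, and
# copy the comps group data.  Every action is also recorded in the
# chg*list structures so --verbose / --test can report it.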
|
|
if not dsttag:
|
|
# create a new tag, copy srctag header.
|
|
if not options.test:
|
|
session.createTag(args[1], parent=None, arches=srctag['arches'], perm=srctag['perm_id'], locked=srctag['locked'])
|
|
newtag = session.getTag(args[1]) # store the new tag, need its assigned id.
|
|
# get pkglist of src-tag, including inherited packages.
|
|
srcpkgs = session.listPackages(tagID=srctag['id'],inherited=True)
|
|
srcpkgs.sort(lambda x, y: cmp(x['package_name'],y['package_name']))
|
|
for pkgs in srcpkgs:
|
|
# for each package add one entry in the new tag.
|
|
chgpkglist.append(('[new]',pkgs['package_name'],pkgs['blocked'],pkgs['owner_name'],pkgs['tag_name']))
|
|
if not options.test:
|
|
# add packages.
|
|
session.packageListAdd(newtag['name'],pkgs['package_name'],
|
|
owner=pkgs['owner_name'],block=pkgs['blocked'],
|
|
extra_arches=pkgs['extra_arches'])
|
|
# get --all latest builds from src tag
|
|
builds=session.getLatestBuilds(srctag['name'])
|
|
for build in builds:
|
|
build['name']=build['package_name'] # add missing 'name' field.
|
|
chgbldlist.append(('[new]',build['package_name'],
|
|
build['nvr'],koji.BUILD_STATES[build['state']],
|
|
build['owner_name'],build['tag_name']))
|
|
# copy latest builds into new tag
|
|
if not options.test:
|
|
session.tagBuildBypass(newtag['name'], build, force=options.force)
|
|
# Copy the group data
|
|
srcgroups = session.getTagGroups(srctag['name'])
|
|
for group in srcgroups:
|
|
if not options.test:
|
|
session.groupListAdd(newtag['name'], group['name'])
|
|
for pkg in group['packagelist']:
|
|
if not options.test:
|
|
session.groupPackageListAdd(newtag['name'], group['name'], pkg['package'], block=pkg['blocked'])
|
|
chggrplist.append(('[new]', pkg['package'], group['name']))
|
|
# case of existing dst-tag.
|
|
if dsttag:
|
|
# get fresh list of packages & builds into maps.
|
|
srcpkgs = {}
|
|
for pkg in session.listPackages(tagID=srctag['id'],inherited=True):
|
|
srcpkgs[pkg['package_name']] = pkg
|
|
dstpkgs = {}
|
|
for pkg in session.listPackages(tagID=dsttag['id'],inherited=True):
|
|
dstpkgs[pkg['package_name']] = pkg
|
|
srclblds = {}
|
|
for build in session.getLatestBuilds(srctag['name']):
|
|
srclblds[build['nvr']] = build
|
|
dstlblds = {}
|
|
for build in session.getLatestBuilds(dsttag['name']):
|
|
dstlblds[build['nvr']] = build
|
|
srcgroups = {}
|
|
for group in session.getTagGroups(srctag['name']):
|
|
srcgroups[group['name']] = group
|
|
dstgroups = {}
|
|
for group in session.getTagGroups(dsttag['name']):
|
|
dstgroups[group['name']] = group
|
|
#construct to-do lists.
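# The sync is computed as a set difference between the two tags:
# entries only in src are queued for addition to dst, entries only in
# dst are queued for removal (or for blocking, when they are inherited
# and cannot simply be deleted).  In test mode (-n) the lists are still
# built so --verbose can report what would change.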
|
|
paddlist=[] # list containing new packages to be added from src tag
|
|
for (package_name,pkg) in srcpkgs.iteritems():
|
|
if not dstpkgs.has_key(package_name):
|
|
paddlist.append(pkg)
|
|
paddlist.sort(lambda x, y: cmp(x['package_name'],y['package_name']))
|
|
pdellist=[] # list containing packages no more present in dst tag
|
|
for (package_name,pkg) in dstpkgs.iteritems():
|
|
if not srcpkgs.has_key(package_name):
|
|
pdellist.append(pkg)
|
|
pdellist.sort(lambda x, y: cmp(x['package_name'],y['package_name']))
|
|
baddlist=[] # list containing new builds to be added from src tag
|
|
for (nvr,lbld) in srclblds.iteritems():
|
|
if not dstlblds.has_key(nvr):
|
|
baddlist.append(lbld)
|
|
baddlist.sort(lambda x, y: cmp(x['package_name'],y['package_name']))
|
|
bdellist=[] # list containing new builds to be removed from src tag
|
|
for (nvr,lbld) in dstlblds.iteritems():
|
|
if not srclblds.has_key(nvr):
|
|
bdellist.append(lbld)
|
|
bdellist.sort(lambda x, y: cmp(x['package_name'],y['package_name']))
|
|
gaddlist=[] # list containing new groups to be added from src tag
|
|
for (grpname, group) in srcgroups.iteritems():
|
|
if not dstgroups.has_key(grpname):
|
|
gaddlist.append(group)
|
|
gdellist=[] # list containing groups to be removed from src tag
|
|
for (grpname, group) in dstgroups.iteritems():
|
|
if not srcgroups.has_key(grpname):
|
|
gdellist.append(group)
|
|
grpchanges={} # dict of changes to make in shared groups
|
|
for (grpname, group) in srcgroups.iteritems():
|
|
if dstgroups.has_key(grpname):
|
|
grpchanges[grpname] = {'adds':[], 'dels':[]}
|
|
# Store whether group is inherited or not
|
|
grpchanges[grpname]['inherited'] = False
|
|
if group['tag_id'] != dsttag['id']:
|
|
grpchanges[grpname]['inherited'] = True
|
|
srcgrppkglist=[]
|
|
dstgrppkglist=[]
|
|
for pkg in group['packagelist']:
|
|
srcgrppkglist.append(pkg['package'])
|
|
for pkg in dstgroups[grpname]['packagelist']:
|
|
dstgrppkglist.append(pkg['package'])
|
|
for pkg in srcgrppkglist:
|
|
if not pkg in dstgrppkglist:
|
|
grpchanges[grpname]['adds'].append(pkg)
|
|
for pkg in dstgrppkglist:
|
|
if not pkg in srcgrppkglist:
|
|
grpchanges[grpname]['dels'].append(pkg)
|
|
# ADD new packages.
|
|
for pkg in paddlist:
|
|
chgpkglist.append(('[add]',pkg['package_name'],
|
|
pkg['blocked'],pkg['owner_name'],
|
|
pkg['tag_name']))
|
|
if not options.test:
|
|
session.packageListAdd(dsttag['name'],pkg['package_name'],
|
|
owner=pkg['owner_name'],
|
|
block=pkg['blocked'],
|
|
extra_arches=pkg['extra_arches'])
|
|
# ADD builds.
|
|
for build in baddlist:
|
|
build['name']=build['package_name'] # add missing 'name' field.
|
|
chgbldlist.append(('[add]',build['package_name'],build['nvr'],
|
|
koji.BUILD_STATES[build['state']],
|
|
build['owner_name'],build['tag_name']))
|
|
# copy latest builds into new tag.
|
|
if not options.test:
|
|
session.tagBuildBypass(dsttag['name'], build, force=options.force)
|
|
# ADD groups.
|
|
for group in gaddlist:
|
|
if not options.test:
|
|
session.groupListAdd(dsttag['name'], group['name'], force=options.force)
|
|
for pkg in group['packagelist']:
|
|
if not options.test:
|
|
session.groupPackageListAdd(dsttag['name'], group['name'], pkg['package'], force=options.force)
|
|
chggrplist.append(('[new]', pkg['package'], group['name']))
|
|
# ADD group pkgs.
|
|
for group in grpchanges:
|
|
for pkg in grpchanges[group]['adds']:
|
|
chggrplist.append(('[new]', pkg, group))
|
|
if not options.test:
|
|
session.groupPackageListAdd(dsttag['name'], group, pkg, force=options.force)
|
|
# DEL builds.
|
|
for build in bdellist:
|
|
# don't delete an inherited build.
|
|
if build['tag_name'] == dsttag['name']:
|
|
build['name']=build['package_name'] # add missing 'name' field.
|
|
chgbldlist.append(('[del]',build['package_name'],build['nvr'],
|
|
koji.BUILD_STATES[build['state']],
|
|
build['owner_name'],build['tag_name']))
|
|
# go on del builds from new tag.
|
|
if not options.test:
|
|
session.untagBuildBypass(dsttag['name'], build, force=options.force)
|
|
# DEL packages.
|
|
for pkg in pdellist:
|
|
# delete only non-inherited packages.
|
|
if pkg['tag_name'] == dsttag['name']:
|
|
# check whether the package has builds tagged directly in dst.
|
|
builds=session.listTagged(dsttag['name'],package=pkg['package_name'],inherit=False)
|
|
#remove all its builds first if there are any.
|
|
for build in builds:
|
|
build['name']=build['package_name'] #add missing 'name' field.
|
|
chgbldlist.append(('[del]',build['package_name'],build['nvr'],
|
|
koji.BUILD_STATES[build['state']],
|
|
build['owner_name'],build['tag_name']))
|
|
# so delete latest build(s) from new tag.
|
|
if not options.test:
|
|
session.untagBuildBypass(dsttag['name'], build, force=options.force)
|
|
# now safe to remove the package itself, since we resolved its builds.
|
|
chgpkglist.append(('[del]',pkg['package_name'],pkg['blocked'],
|
|
pkg['owner_name'],pkg['tag_name']))
|
|
if not options.test:
|
|
session.packageListRemove(dsttag['name'],pkg['package_name'],force=False)
|
|
# mark as blocked inherited packages.
|
|
if pkg['tag_name'] != dsttag['name']:
|
|
chgpkglist.append(('[blk]',pkg['package_name'],pkg['blocked'],pkg['owner_name'],pkg['tag_name']))
|
|
if not options.test:
|
|
session.packageListBlock(dsttag['name'],pkg['package_name'])
|
|
# DEL groups.
|
|
for group in gdellist:
|
|
# Only delete a group that isn't inherited
|
|
if group['tag_id'] == dsttag['id']:
|
|
if not options.test:
|
|
session.groupListRemove(dsttag['name'], group['name'], force=options.force)
|
|
for pkg in group['packagelist']:
|
|
chggrplist.append(('[del]', pkg['package'], group['name']))
|
|
# mark as blocked inherited groups.
|
|
else:
|
|
if not options.test:
|
|
session.groupListBlock(dsttag['name'], group['name'])
|
|
for pkg in group['packagelist']:
|
|
chggrplist.append(('[blk]', pkg['package'], group['name']))
|
|
# DEL group pkgs.
|
|
for group in grpchanges:
|
|
for pkg in grpchanges[group]['dels']:
|
|
# Only delete a group that isn't inherited
|
|
if not grpchanges[group]['inherited']:
|
|
chggrplist.append(('[del]', pkg, group))
|
|
if not options.test:
|
|
session.groupPackageListRemove(dsttag['name'], group, pkg, force=options.force)
|
|
else:
|
|
chggrplist.append(('[blk]', pkg, group))
|
|
if not options.test:
|
|
session.groupPackageListBlock(dsttag['name'], group, pkg)
|
|
# print final list of actions.
|
|
if options.verbose:
|
|
pfmt=' %-7s %-28s %-10s %-10s %-10s\n'
|
|
bfmt=' %-7s %-28s %-40s %-10s %-10s %-10s\n'
|
|
gfmt=' %-7s %-28s %-28s\n'
|
|
sys.stdout.write('\nList of changes:\n\n')
|
|
sys.stdout.write(pfmt % ('Action','Package','Blocked','Owner','From Tag'))
|
|
sys.stdout.write(pfmt % ('-'*7,'-'*28,'-'*10,'-'*10,'-'*10))
|
|
for changes in chgpkglist:
|
|
sys.stdout.write(pfmt % changes)
|
|
sys.stdout.write('\n')
|
|
sys.stdout.write(bfmt % ('Action','From/To Package','Latest Build(s)','State','Owner','From Tag'))
|
|
sys.stdout.write(bfmt % ('-'*7,'-'*28,'-'*40,'-'*10,'-'*10,'-'*10))
|
|
for changes in chgbldlist:
|
|
sys.stdout.write(bfmt % changes)
|
|
sys.stdout.write('\n')
|
|
sys.stdout.write(gfmt % ('Action','Package','Group'))
|
|
sys.stdout.write(gfmt % ('-'*7,'-'*28,'-'*28))
|
|
for changes in chggrplist:
|
|
sys.stdout.write(gfmt % changes)
|
|
|
|
|
|
def handle_add_target(options, session, args):
|
|
"[admin] Create a new build target"
|
|
usage = _("usage: %prog add-target name build-tag <dest-tag>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 2:
|
|
parser.error(_("Please specify a target name, a build tag, and destination tag"))
|
|
assert False
|
|
elif len(args) > 3:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
name = args[0]
|
|
build_tag = args[1]
|
|
if len(args) > 2:
|
|
dest_tag = args[2]
|
|
else:
|
|
#most targets have the same name as their destination
|
|
dest_tag = name
|
|
activate_session(session)
|
|
if not session.hasPerm('admin'):
|
|
print "This action requires admin privileges"
|
|
return 1
|
|
|
|
chkbuildtag = session.getTag(build_tag)
|
|
chkdesttag = session.getTag(dest_tag)
|
|
if not chkbuildtag:
|
|
print "Build tag does not exist: %s" % build_tag
|
|
return 1
|
|
if not chkbuildtag.get("arches", None):
|
|
print "Build tag has no arches: %s" % build_tag
|
|
return 1
|
|
if not chkdesttag:
|
|
print "Destination tag does not exist: %s" % dest_tag
|
|
return 1
|
|
|
|
session.createBuildTarget(name, build_tag, dest_tag)
|
|
|
|
def handle_edit_target(options, session, args):
|
|
"[admin] Set the name, build_tag, and/or dest_tag of an existing build target to new values"
|
|
usage = _("usage: %prog edit-target [options] name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--rename", help=_("Specify new name for target"))
|
|
parser.add_option("--build-tag", help=_("Specify a different build tag"))
|
|
parser.add_option("--dest-tag", help=_("Specify a different destination tag"))
|
|
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
if len(args) != 1:
|
|
parser.error(_("Please specify a build target"))
|
|
assert False
|
|
activate_session(session)
|
|
|
|
if not session.hasPerm('admin'):
|
|
print "This action requires admin privileges"
|
|
return
|
|
|
|
targetInfo = session.getBuildTarget(args[0])
|
|
if targetInfo == None:
|
|
raise koji.GenericError("No build target with the name or id '%s'" % args[0])
|
|
|
|
targetInfo['orig_name'] = targetInfo['name']
|
|
|
|
if options.rename:
|
|
targetInfo['name'] = options.rename
|
|
if options.build_tag:
|
|
targetInfo['build_tag_name'] = options.build_tag
|
|
chkbuildtag = session.getTag(options.build_tag)
|
|
if not chkbuildtag:
|
|
print "Build tag does not exist: %s" % options.build_tag
|
|
return 1
|
|
if not chkbuildtag.get("arches", None):
|
|
print "Build tag has no arches: %s" % options.build_tag
|
|
return 1
|
|
if options.dest_tag:
|
|
chkdesttag = session.getTag(options.dest_tag)
|
|
if not chkdesttag:
|
|
print "Destination tag does not exist: %s" % options.dest_tag
|
|
return 1
|
|
targetInfo['dest_tag_name'] = options.dest_tag
|
|
|
|
session.editBuildTarget(targetInfo['orig_name'], targetInfo['name'], targetInfo['build_tag_name'], targetInfo['dest_tag_name'])
|
|
|
|
def handle_remove_target(options, session, args):
|
|
"[admin] Remove a build target"
|
|
usage = _("usage: %prog remove-target [options] name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
if len(args) != 1:
|
|
parser.error(_("Please specify a build target to remove"))
|
|
assert False
|
|
activate_session(session)
|
|
|
|
if not session.hasPerm('admin'):
|
|
print "This action requires admin privileges"
|
|
return
|
|
|
|
target = args[0]
|
|
target_info = session.getBuildTarget(target)
|
|
if not target_info:
|
|
print "Build target %s does not exist" % target
|
|
return 1
|
|
|
|
session.deleteBuildTarget(target_info['id'])
|
|
|
|
def handle_remove_tag(options, session, args):
|
|
"[admin] Remove a tag"
|
|
usage = _("usage: %prog remove-tag [options] name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
if len(args) != 1:
|
|
parser.error(_("Please specify a tag to remove"))
|
|
assert False
|
|
activate_session(session)
|
|
|
|
if not session.hasPerm('admin'):
|
|
print "This action requires admin privileges"
|
|
return
|
|
|
|
tag = args[0]
|
|
tag_info = session.getTag(tag)
|
|
if not tag_info:
|
|
print "Tag %s does not exist" % tag
|
|
return 1
|
|
|
|
session.deleteTag(tag_info['id'])
|
|
|
|
def anon_handle_list_targets(options, session, args):
|
|
"[info] List the build targets"
|
|
usage = _("usage: %prog list-targets [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--name", help=_("Specify the build target name"))
|
|
parser.add_option("--quiet", action="store_true", help=_("Do not print the header information"), default=options.quiet)
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 0:
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
|
|
fmt = "%(name)-30s %(build_tag_name)-30s %(dest_tag_name)-30s"
|
|
if not options.quiet:
|
|
print "%-30s %-30s %-30s" % ('Name','Buildroot','Destination')
|
|
print "-" * 93
|
|
tmp_list = [(x['name'], x) for x in session.getBuildTargets(options.name)]
|
|
tmp_list.sort()
|
|
targets = [x[1] for x in tmp_list]
|
|
for target in targets:
|
|
print fmt % target
|
|
#pprint.pprint(session.getBuildTargets())
|
|
|
|
def _printInheritance(tags, sibdepths=None, reverse=False):
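# Renders one tag per call and recurses over the rest, drawing the tree
# with box-drawing characters.  'sibdepths' records the depths at which
# siblings are still pending, so a vertical bar is drawn in those
# columns on later lines.  Example output shape (hypothetical tag names):
#   fedora-build (123)
#     ├─fedora-override (124)
#     └─fedora (125)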
|
|
if len(tags) == 0:
|
|
return
|
|
if sibdepths == None:
|
|
sibdepths = []
|
|
currtag = tags[0]
|
|
tags = tags[1:]
|
|
if reverse:
|
|
siblings = len([tag for tag in tags if tag['parent_id'] == currtag['parent_id']])
|
|
else:
|
|
siblings = len([tag for tag in tags if tag['child_id'] == currtag['child_id']])
|
|
|
|
outdepth = 0
|
|
for depth in sibdepths:
|
|
if depth < currtag['currdepth']:
|
|
outspacing = depth - outdepth
|
|
sys.stdout.write(' ' * (outspacing * 3 - 1))
|
|
sys.stdout.write(u'\u2502'.encode('UTF-8'))
|
|
outdepth = depth
|
|
|
|
sys.stdout.write(' ' * ((currtag['currdepth'] - outdepth) * 3 - 1))
|
|
if siblings:
|
|
sys.stdout.write(u'\u251c'.encode('UTF-8'))
|
|
else:
|
|
sys.stdout.write(u'\u2514'.encode('UTF-8'))
|
|
sys.stdout.write(u'\u2500'.encode('UTF-8'))
|
|
if reverse:
|
|
sys.stdout.write('%(name)s (%(tag_id)i)\n' % currtag)
|
|
else:
|
|
sys.stdout.write('%(name)s (%(parent_id)i)\n' % currtag)
|
|
|
|
if siblings:
|
|
if len(sibdepths) == 0 or sibdepths[-1] != currtag['currdepth']:
|
|
sibdepths.append(currtag['currdepth'])
|
|
else:
|
|
if len(sibdepths) > 0 and sibdepths[-1] == currtag['currdepth']:
|
|
sibdepths.pop()
|
|
|
|
_printInheritance(tags, sibdepths, reverse)
|
|
|
|
def anon_handle_list_tag_inheritance(options, session, args):
|
|
"[info] Print the inheritance information for a tag"
|
|
usage = _("usage: %prog list-tag-inheritance [options] <tag>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--reverse", action="store_true", help=_("Process tag's children instead of its parents"))
|
|
parser.add_option("--stop", help=_("Stop processing inheritance at this tag"))
|
|
parser.add_option("--jump", help=_("Jump from one tag to another when processing inheritance"))
|
|
parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event"))
|
|
parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at timestamp"))
|
|
parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("This command takes exctly one argument: a tag name or ID"))
|
|
assert False
|
|
activate_session(session)
|
|
event = koji.util.eventFromOpts(session, options)
|
|
if event:
|
|
event['timestr'] = time.asctime(time.localtime(event['ts']))
|
|
print "Querying at event %(id)i (%(timestr)s)" % event
|
|
if event:
|
|
tag = session.getTag(args[0], event=event['id'])
|
|
else:
|
|
tag = session.getTag(args[0])
|
|
if not tag:
|
|
parser.error(_("Unknown tag: %s" % args[0]))
|
|
|
|
opts = {}
|
|
opts['reverse'] = options.reverse or False
|
|
opts['stops'] = {}
|
|
opts['jumps'] = {}
|
|
if event:
|
|
opts['event'] = event['id']
|
|
|
|
if options.jump:
|
|
match = re.match(r'^(.*)/(.*)$', options.jump)
|
|
if match:
|
|
tag1 = session.getTagID(match.group(1))
|
|
if not tag1:
|
|
parser.error(_("Unknown tag: %s" % match.group(1)))
|
|
tag2 = session.getTagID(match.group(2))
|
|
if not tag2:
|
|
parser.error(_("Unknown tag: %s" % match.group(2)))
|
|
opts['jumps'][str(tag1)] = tag2
|
|
|
|
if options.stop:
|
|
tag1 = session.getTagID(options.stop)
|
|
if not tag1:
|
|
parser.error(_("Unknown tag: %s" % options.stop))
|
|
opts['stops'] = {str(tag1): 1}
|
|
|
|
sys.stdout.write('%s (%i)\n' % (tag['name'], tag['id']))
|
|
data = session.getFullInheritance(tag['id'], **opts)
|
|
_printInheritance(data, None, opts['reverse'])
|
|
|
|
def anon_handle_list_tags(options, session, args):
|
|
"[info] Print the list of tags"
|
|
usage = _("usage: %prog list-tags [options] [pattern]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--show-id", action="store_true", help=_("Show tag ids"))
|
|
parser.add_option("--verbose", action="store_true", help=_("Show more information"))
|
|
parser.add_option("--unlocked", action="store_true", help=_("Only show unlocked tags"))
|
|
parser.add_option("--build", help=_("Show tags associated with a build"))
|
|
parser.add_option("--package", help=_("Show tags associated with a package"))
|
|
(options, args) = parser.parse_args(args)
|
|
activate_session(session)
|
|
|
|
pkginfo = {}
|
|
buildinfo = {}
|
|
|
|
if options.package:
|
|
pkginfo = session.getPackage(options.package)
|
|
if not pkginfo:
|
|
parser.error(_("Invalid package %s" % options.package))
|
|
assert False
|
|
|
|
if options.build:
|
|
buildinfo = session.getBuild(options.build)
|
|
if not buildinfo:
|
|
parser.error(_("Invalid build %s" % options.build))
|
|
assert False
|
|
|
|
tags = session.listTags(buildinfo.get('id',None), pkginfo.get('id',None))
|
|
tags.sort(lambda a,b: cmp(a['name'],b['name']))
|
|
#if options.verbose:
|
|
# fmt = "%(name)s [%(id)i] %(perm)s %(locked)s %(arches)s"
|
|
if options.show_id:
|
|
fmt = "%(name)s [%(id)i]"
|
|
else:
|
|
fmt = "%(name)s"
|
|
for tag in tags:
|
|
if args:
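# for/else idiom: the else clause runs only when no pattern matched
# (the loop ended without a break), in which case this tag is skipped.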
|
|
for pattern in args:
|
|
if fnmatch.fnmatch(tag['name'], pattern):
|
|
break
|
|
else:
|
|
continue
|
|
if options.unlocked:
|
|
if tag['locked'] or tag['perm']:
|
|
continue
|
|
if not options.verbose:
|
|
print fmt % tag
|
|
else:
|
|
print fmt % tag,
|
|
if tag['locked']:
|
|
print ' [LOCKED]',
|
|
if tag['perm']:
|
|
print ' [%(perm)s perm required]' % tag,
|
|
print ''
|
|
|
|
def anon_handle_list_tag_history(options, session, args):
|
|
"[info] Print a history of tag operations"
|
|
usage = _("usage: %prog list-tag-history [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--debug", action="store_true")
|
|
parser.add_option("--build", help=_("Only show data for a specific build"))
|
|
parser.add_option("--package", help=_("Only show data for a specific package"))
|
|
parser.add_option("--tag", help=_("Only show data for a specific tag"))
|
|
parser.add_option("--all", action="store_true", help=_("Allows listing the entire global history"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 0:
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
kwargs = {}
|
|
limited = False
|
|
if options.package:
|
|
kwargs['package'] = options.package
|
|
limited = True
|
|
if options.tag:
|
|
kwargs['tag'] = options.tag
|
|
limited = True
|
|
if options.build:
|
|
kwargs['build'] = options.build
|
|
limited = True
|
|
if not limited and not options.all:
|
|
parser.error(_("Please specify an option to limit the query"))
|
|
|
|
activate_session(session)
|
|
|
|
hist = session.tagHistory(**kwargs)
|
|
timeline = []
|
|
for x in hist:
|
|
event_id = x['revoke_event']
|
|
if event_id is not None:
|
|
timeline.append((event_id, x))
|
|
event_id = x['create_event']
|
|
timeline.append((event_id, x))
|
|
timeline.sort()
|
|
def _histline(event_id, x):
|
|
if event_id == x['revoke_event']:
|
|
ts = x['revoke_ts']
|
|
fmt = "%(name)s-%(version)s-%(release)s untagged from %(tag_name)s"
|
|
if x.has_key('revoker_name'):
|
|
fmt += " by %(revoker_name)s"
|
|
elif event_id == x['create_event']:
|
|
ts = x['create_ts']
|
|
fmt = "%(name)s-%(version)s-%(release)s tagged into %(tag_name)s"
|
|
if x.has_key('creator_name'):
|
|
fmt += " by %(creator_name)s"
|
|
if x['active']:
|
|
fmt += " [still active]"
|
|
else:
|
|
raise koji.GenericError, "unknown event: (%r, %r)" % (event_id, x)
|
|
time_str = time.asctime(time.localtime(ts))
|
|
return "%s: %s" % (time_str, fmt % x)
|
|
for event_id, x in timeline:
|
|
if options.debug:
|
|
print "%r" % x
|
|
print _histline(event_id, x)
|
|
|
|
def _print_histline(entry, **kwargs):
|
|
options = kwargs['options']
|
|
event_id, table, create, x = entry
|
|
who = None
|
|
edit = x.get('.related')
|
|
if edit:
|
|
del x['.related']
|
|
bad_edit = None
|
|
if len(edit) != 1:
|
|
bad_edit = "%i elements" % len(edit)+1
|
|
other = edit[0]
|
|
#check edit for sanity
|
|
if create or not other[2]:
|
|
bad_edit = "out of order"
|
|
if event_id != other[0]:
|
|
bad_edit = "non-matching"
|
|
if bad_edit:
|
|
print "Warning: unusual edit at event %i in table %s (%s)" % (event_id, table, bad_edit)
|
|
#we'll simply treat them as separate events
|
|
pprint.pprint(entry)
|
|
pprint.pprint(edit)
|
|
_print_histline(entry, **kwargs)
|
|
for data in edit:
|
|
_print_histline(data, **kwargs)
|
|
return
|
|
if create:
|
|
ts = x['create_ts']
|
|
if x.has_key('creator_name'):
|
|
who = "by %(creator_name)s"
|
|
else:
|
|
ts = x['revoke_ts']
|
|
if x.has_key('revoker_name'):
|
|
who = "by %(revoker_name)s"
|
|
if table == 'tag_listing':
|
|
if edit:
|
|
fmt = "%(name)s-%(version)s-%(release)s re-tagged into %(tag.name)s"
|
|
elif create:
|
|
fmt = "%(name)s-%(version)s-%(release)s tagged into %(tag.name)s"
|
|
else:
|
|
fmt = "%(name)s-%(version)s-%(release)s untagged from %(tag.name)s"
|
|
elif table == 'user_perms':
|
|
if edit:
|
|
fmt = "permission %(permission.name)s re-granted to %(user.name)s"
|
|
elif create:
|
|
fmt = "permission %(permission.name)s granted to %(user.name)s"
|
|
else:
|
|
fmt = "permission %(permission.name)s revoked for %(user.name)s"
|
|
elif table == 'user_groups':
|
|
if edit:
|
|
fmt = "user %(user.name)s re-added to group %(group.name)s"
|
|
elif create:
|
|
fmt = "user %(user.name)s added to group %(group.name)s"
|
|
else:
|
|
fmt = "user %(user.name)s removed from group %(group.name)s"
|
|
elif table == 'cg_users':
|
|
if edit:
|
|
fmt = "user %(user.name)s re-added to content generator %(content_generator.name)s"
|
|
elif create:
|
|
fmt = "user %(user.name)s added to content generator %(content_generator.name)s"
|
|
else:
|
|
fmt = "user %(user.name)s removed from content generator %(content_generator.name)s"
|
|
elif table == 'tag_packages':
|
|
if edit:
|
|
fmt = "package list entry for %(package.name)s in %(tag.name)s updated"
|
|
elif create:
|
|
fmt = "package list entry created: %(package.name)s in %(tag.name)s"
|
|
else:
|
|
fmt = "package list entry revoked: %(package.name)s in %(tag.name)s"
|
|
elif table == 'tag_inheritance':
|
|
if edit:
|
|
fmt = "inheritance line %(tag.name)s->%(parent.name)s updated"
|
|
elif create:
|
|
fmt = "inheritance line %(tag.name)s->%(parent.name)s added"
|
|
else:
|
|
fmt = "inheritance line %(tag.name)s->%(parent.name)s removed"
|
|
elif table == 'tag_config':
|
|
if edit:
|
|
fmt = "tag configuration for %(tag.name)s altered"
|
|
elif create:
|
|
fmt = "new tag: %(tag.name)s"
|
|
else:
|
|
fmt = "tag deleted: %(tag.name)s"
|
|
elif table == 'tag_extra':
|
|
if edit:
|
|
fmt = "tag option %(key)s for tag %(tag.name)s altered"
|
|
elif create:
|
|
fmt = "added tag option %(key)s for tag %(tag.name)s"
|
|
else:
|
|
fmt = "tag option %(key)s removed for %(tag.name)s"
|
|
elif table == 'build_target_config':
|
|
if edit:
|
|
fmt = "build target configuration for %(build_target.name)s updated"
|
|
elif create:
|
|
fmt = "new build target: %(build_target.name)s"
|
|
else:
|
|
fmt = "build target deleted: %(build_target.name)s"
|
|
elif table == 'external_repo_config':
|
|
if edit:
|
|
fmt = "external repo configuration for %(external_repo.name)s altered"
|
|
elif create:
|
|
fmt = "new external repo: %(external_repo.name)s"
|
|
else:
|
|
fmt = "external repo deleted: %(external_repo.name)s"
|
|
elif table == 'tag_external_repos':
|
|
if edit:
|
|
fmt = "external repo entry for %(external_repo.name)s in tag %(tag.name)s updated"
|
|
elif create:
|
|
fmt = "external repo entry for %(external_repo.name)s added to tag %(tag.name)s"
|
|
else:
|
|
fmt = "external repo entry for %(external_repo.name)s removed from tag %(tag.name)s"
|
|
elif table == 'group_config':
|
|
if edit:
|
|
fmt = "group %(group.name)s configuration for tag %(tag.name)s updated"
|
|
elif create:
|
|
fmt = "group %(group.name)s added to tag %(tag.name)s"
|
|
else:
|
|
fmt = "group %(group.name)s removed from tag %(tag.name)s"
|
|
elif table == 'group_req_listing':
|
|
if edit:
|
|
fmt = "group dependency %(group.name)s->%(req.name)s updated in tag %(tag.name)s"
|
|
elif create:
|
|
fmt = "group dependency %(group.name)s->%(req.name)s added in tag %(tag.name)s"
|
|
else:
|
|
fmt = "group dependency %(group.name)s->%(req.name)s dropped from tag %(tag.name)s"
|
|
elif table == 'group_package_listing':
|
|
if edit:
|
|
fmt = "package entry %(package)s in group %(group.name)s, tag %(tag.name)s updated"
|
|
elif create:
|
|
fmt = "package %(package)s added to group %(group.name)s in tag %(tag.name)s"
|
|
else:
|
|
fmt = "package %(package)s removed from group %(group.name)s in tag %(tag.name)s"
|
|
else:
|
|
if edit:
|
|
fmt = "%s entry updated" % table
|
|
elif create:
|
|
fmt = "%s entry created" % table
|
|
else:
|
|
fmt = "%s entry revoked" % table
|
|
time_str = time.asctime(time.localtime(ts))
|
|
parts = [time_str, fmt % x]
|
|
if options.events or options.verbose:
|
|
parts.insert(1, "(eid %i)" % event_id)
|
|
if who:
|
|
parts.append(who % x)
|
|
if create and x['active']:
|
|
parts.append("[still active]")
|
|
print ' '.join(parts)
|
|
hidden_fields = ['active', 'create_event', 'revoke_event', 'creator_id', 'revoker_id',
|
|
'creator_name', 'revoker_name', 'create_ts', 'revoke_ts']
|
|
def get_nkey(key):
|
|
if key == 'perm_id':
|
|
return 'permission.name'
|
|
elif key.endswith('_id'):
|
|
return '%s.name' % key[:-3]
|
|
else:
|
|
return '%s.name' % key
|
|
if edit:
|
|
keys = x.keys()
|
|
keys.sort()
|
|
y = other[-1]
|
|
for key in keys:
|
|
if key in hidden_fields:
|
|
continue
|
|
if x[key] == y[key]:
|
|
continue
|
|
if key[0] == '_':
|
|
continue
|
|
nkey = get_nkey(key)
|
|
if nkey in x and nkey in y:
|
|
continue
|
|
print " %s: %s -> %s" % (key, x[key], y[key])
|
|
elif create and options.verbose and table != 'tag_listing':
|
|
keys = x.keys()
|
|
keys.sort()
|
|
# the table keys have already been represented in the base format string
|
|
also_hidden = list(_table_keys[table])
|
|
also_hidden.extend([get_nkey(k) for k in also_hidden])
|
|
for key in keys:
|
|
if key in hidden_fields or key in also_hidden:
|
|
continue
|
|
nkey = get_nkey(key)
|
|
if nkey in x:
|
|
continue
|
|
if key[0] == '_':
|
|
continue
|
|
if x.get('blocked') and key != 'blocked':
|
|
continue
|
|
if key.endswith('.name'):
|
|
dkey = key[:-5]
|
|
else:
|
|
dkey = key
|
|
print " %s: %s" % (dkey, x[key])
|
|
|
|
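# Identifying key columns for each history table.  _print_histline uses
# these to avoid repeating fields already shown in the message, and the
# edit-grouping pass in anon_handle_list_history uses them to pair a
# revoked row with its re-created counterpart at the same event.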
_table_keys = {
|
|
'user_perms' : ['user_id', 'perm_id'],
|
|
'user_groups' : ['user_id', 'group_id'],
|
|
'cg_users' : ['user_id', 'cg_id'],
|
|
'tag_inheritance' : ['tag_id', 'parent_id'],
|
|
'tag_config' : ['tag_id'],
|
|
'tag_extra' : ['tag_id', 'key'],
|
|
'build_target_config' : ['build_target_id'],
|
|
'external_repo_config' : ['external_repo_id'],
|
|
'tag_external_repos' : ['tag_id', 'external_repo_id'],
|
|
'tag_listing' : ['build_id', 'tag_id'],
|
|
'tag_packages' : ['package_id', 'tag_id'],
|
|
'group_config' : ['group_id', 'tag_id'],
|
|
'group_req_listing' : ['group_id', 'tag_id', 'req_id'],
|
|
'group_package_listing' : ['group_id', 'tag_id', 'package'],
|
|
}
|
|
|
|
def anon_handle_list_history(options, session, args):
|
|
"[info] Display historical data"
|
|
usage = _("usage: %prog list-history [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--debug", action="store_true")
|
|
parser.add_option("--build", help=_("Only show data for a specific build"))
|
|
parser.add_option("--package", help=_("Only show data for a specific package"))
|
|
parser.add_option("--tag", help=_("Only show data for a specific tag"))
|
|
parser.add_option("--editor", "--by", metavar="USER", help=_("Only show entries modified by user"))
|
|
parser.add_option("--user", help=_("Only show entries affecting a user"))
|
|
parser.add_option("--permission", help=_("Only show entries relating to a given permission"))
|
|
parser.add_option("--external-repo", "--erepo", help=_("Only show entries relating to a given external repo"))
|
|
parser.add_option("--build-target", "--target", help=_("Only show entries relating to a given build target"))
|
|
parser.add_option("--group", help=_("Only show entries relating to a given group"))
|
|
parser.add_option("--before", metavar="TIMESTAMP", help=_("Only show entries before timestamp"))
|
|
parser.add_option("--after", metavar="TIMESTAMP", help=_("Only show entries after timestamp"))
|
|
parser.add_option("--before-event", metavar="EVENT_ID", type='int', help=_("Only show entries before event"))
|
|
parser.add_option("--after-event", metavar="EVENT_ID", type='int', help=_("Only show entries after event"))
|
|
parser.add_option("--watch", action="store_true", help=_("Monitor history data"))
|
|
parser.add_option("--active", action='store_true', help=_("Only show entries that are currently active"))
|
|
parser.add_option("--revoked", action='store_false', dest='active',
|
|
help=_("Only show entries that are currently revoked"))
|
|
parser.add_option("--context", action="store_true", help=_("Show related entries"))
|
|
parser.add_option("-s", "--show", action="append", help=_("Show data from selected tables"))
|
|
parser.add_option("-v", "--verbose", action="store_true", help=_("Show more detail"))
|
|
parser.add_option("-e", "--events", action="store_true", help=_("Show event ids"))
|
|
parser.add_option("--all", action="store_true", help=_("Allows listing the entire global history"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 0:
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
kwargs = {}
|
|
limited = False
|
|
for opt in 'package', 'tag', 'build', 'editor', 'user', 'permission', 'external_repo', \
|
|
'build_target', 'group', 'before', 'after':
|
|
val = getattr(options, opt)
|
|
if val:
|
|
kwargs[opt] = val
|
|
limited = True
|
|
if options.before_event:
|
|
kwargs['beforeEvent'] = options.before_event
|
|
if options.after_event:
|
|
kwargs['afterEvent'] = options.after_event
|
|
if options.active is not None:
|
|
kwargs['active'] = options.active
|
|
tables = None
|
|
if options.show:
|
|
tables = []
|
|
for arg in options.show:
|
|
tables.extend(arg.split(','))
|
|
if not limited and not options.all:
|
|
parser.error(_("Please specify an option to limit the query"))
|
|
|
|
activate_session(session)
|
|
|
|
if options.watch:
|
|
if not kwargs.get('afterEvent') and not kwargs.get('after'):
|
|
kwargs['afterEvent'] = session.getLastEvent()['id']
|
|
|
|
while True:
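# With --watch this loop never breaks; it re-queries every 5 seconds,
# bumping 'afterEvent' past the last event already printed so only new
# history shows up on each pass.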
|
|
histdata = session.queryHistory(tables=tables, **kwargs)
|
|
timeline = []
|
|
def distinguish_match(x, name):
|
|
"""determine if create or revoke event matched"""
|
|
if options.context:
|
|
return True
|
|
name = "_" + name
|
|
ret = True
|
|
for key in x:
|
|
if key.startswith(name):
|
|
ret = ret and x[key]
|
|
return ret
|
|
for table in histdata:
|
|
hist = histdata[table]
|
|
for x in hist:
|
|
if x['revoke_event'] is not None:
|
|
if distinguish_match(x, 'revoked'):
|
|
timeline.append((x['revoke_event'], table, 0, x.copy()))
|
|
#pprint.pprint(timeline[-1])
|
|
if distinguish_match(x, 'created'):
|
|
timeline.append((x['create_event'], table, 1, x))
|
|
timeline.sort()
|
|
#group edits together
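# A revocation and a re-creation sharing the same event id and the same
# key columns (see _table_keys) are really a single edit: the create
# entry is attached to the revoke entry under '.related' and dropped
# from the timeline, so _print_histline can report one "updated" line
# instead of a del/add pair.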
|
|
new_timeline = []
|
|
last_event = None
|
|
edit_index = {}
|
|
for entry in timeline:
|
|
event_id, table, create, x = entry
|
|
if event_id != last_event:
|
|
edit_index = {}
|
|
last_event = event_id
|
|
key = tuple([x[k] for k in _table_keys[table]])
|
|
prev = edit_index.get((table, event_id), {}).get(key)
|
|
if prev:
|
|
prev[-1].setdefault('.related', []).append(entry)
|
|
else:
|
|
edit_index.setdefault((table, event_id), {})[key] = entry
|
|
new_timeline.append(entry)
|
|
for entry in new_timeline:
|
|
if options.debug:
|
|
print "%r" % list(entry)
|
|
_print_histline(entry, options=options)
|
|
if not options.watch:
|
|
break
|
|
else:
|
|
time.sleep(5)
|
|
# repeat query for later events
|
|
if last_event:
|
|
kwargs['afterEvent'] = last_event
|
|
|
|
def _handleMap(lines, data, prefix=''):
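# Task request 'opts' dicts may carry a '__starstar' marker (added when
# keyword arguments are encoded for the hub); it is bookkeeping rather
# than a real option, so it is skipped here.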
|
|
for key, val in data.items():
|
|
if key != '__starstar':
|
|
lines.append(' %s%s: %s' % (prefix, key, val))
|
|
|
|
def _handleOpts(lines, opts, prefix=''):
|
|
if opts:
|
|
lines.append("%sOptions:" % prefix)
|
|
_handleMap(lines, opts, prefix)
|
|
|
|
|
|
def _parseTaskParams(session, method, task_id):
|
|
try:
|
|
return _do_parseTaskParams(session, method, task_id)
|
|
except Exception:
|
|
if logger.isEnabledFor(logging.DEBUG):
|
|
tb_str = ''.join(traceback.format_exception(*sys.exc_info()))
|
|
logger.debug(tb_str)
|
|
return ['Unable to parse task parameters']
|
|
|
|
|
|
def _do_parseTaskParams(session, method, task_id):
|
|
"""Parse the return of getTaskRequest()"""
|
|
params = session.getTaskRequest(task_id)
|
|
|
|
lines = []
|
|
|
|
if method == 'buildSRPMFromCVS':
|
|
lines.append("CVS URL: %s" % params[0])
|
|
elif method == 'buildSRPMFromSCM':
|
|
lines.append("SCM URL: %s" % params[0])
|
|
elif method == 'buildArch':
|
|
lines.append("SRPM: %s/work/%s" % (options.topdir, params[0]))
|
|
lines.append("Build Tag: %s" % session.getTag(params[1])['name'])
|
|
lines.append("Build Arch: %s" % params[2])
|
|
lines.append("SRPM Kept: %r" % params[3])
|
|
if len(params) > 4:
|
|
_handleOpts(lines, params[4])
|
|
elif method == 'tagBuild':
|
|
build = session.getBuild(params[1])
|
|
lines.append("Destination Tag: %s" % session.getTag(params[0])['name'])
|
|
lines.append("Build: %s" % koji.buildLabel(build))
|
|
elif method == 'buildNotification':
|
|
build = params[1]
|
|
buildTarget = params[2]
|
|
lines.append("Recipients: %s" % (", ".join(params[0])))
|
|
lines.append("Build: %s" % koji.buildLabel(build))
|
|
lines.append("Build Target: %s" % buildTarget['name'])
|
|
lines.append("Web URL: %s" % params[3])
|
|
elif method == 'build':
|
|
lines.append("Source: %s" % params[0])
|
|
lines.append("Build Target: %s" % params[1])
|
|
if len(params) > 2:
|
|
_handleOpts(lines, params[2])
|
|
elif method == 'maven':
|
|
lines.append("SCM URL: %s" % params[0])
|
|
lines.append("Build Target: %s" % params[1])
|
|
if len(params) > 2:
|
|
_handleOpts(lines, params[2])
|
|
elif method == 'buildMaven':
|
|
lines.append("SCM URL: %s" % params[0])
|
|
lines.append("Build Tag: %s" % params[1]['name'])
|
|
if len(params) > 2:
|
|
_handleOpts(lines, params[2])
|
|
elif method == 'wrapperRPM':
|
|
lines.append("Spec File URL: %s" % params[0])
|
|
lines.append("Build Tag: %s" % params[1]['name'])
|
|
if params[2]:
|
|
lines.append("Build: %s" % koji.buildLabel(params[2]))
|
|
if params[3]:
|
|
lines.append("Task: %s %s" % (params[3]['id'], koji.taskLabel(params[3])))
|
|
if len(params) > 4:
|
|
_handleOpts(lines, params[4])
|
|
elif method == 'chainmaven':
|
|
lines.append("Builds:")
|
|
for package, opts in params[0].items():
|
|
lines.append(" " + package)
|
|
_handleMap(lines, opts, prefix=" ")
|
|
lines.append("Build Target: %s" % params[1])
|
|
if len(params) > 2:
|
|
_handleOpts(lines, params[2])
|
|
elif method == 'winbuild':
|
|
lines.append("VM: %s" % params[0])
|
|
lines.append("SCM URL: %s" % params[1])
|
|
lines.append("Build Target: %s" % params[2])
|
|
if len(params) > 3:
|
|
_handleOpts(lines, params[3])
|
|
elif method == 'vmExec':
|
|
lines.append("VM: %s" % params[0])
|
|
lines.append("Exec Params:")
|
|
for info in params[1]:
|
|
if isinstance(info, dict):
|
|
_handleMap(lines, info, prefix=' ')
|
|
else:
|
|
lines.append(" %s" % info)
|
|
if len(params) > 2:
|
|
_handleOpts(lines, params[2])
|
|
elif method in ('createLiveCD', 'createAppliance'):
|
|
lines.append("Arch: %s" % params[3])
|
|
lines.append("Kickstart File: %s" % params[7])
|
|
if len(params) > 8:
|
|
_handleOpts(lines, params[8])
|
|
elif method == 'newRepo':
|
|
tag = session.getTag(params[0])
|
|
lines.append("Tag: %s" % tag['name'])
|
|
elif method == 'prepRepo':
|
|
lines.append("Tag: %s" % params[0]['name'])
|
|
elif method == 'createrepo':
|
|
lines.append("Repo ID: %i" % params[0])
|
|
lines.append("Arch: %s" % params[1])
|
|
oldrepo = params[2]
|
|
if oldrepo:
|
|
lines.append("Old Repo ID: %i" % oldrepo['id'])
|
|
lines.append("Old Repo Creation: %s" % koji.formatTimeLong(oldrepo['creation_time']))
|
|
if len(params) > 3:
|
|
lines.append("External Repos: %s" % ', '.join([ext['external_repo_name'] for ext in params[3]]))
|
|
elif method == 'tagNotification':
|
|
destTag = session.getTag(params[2])
|
|
srcTag = None
|
|
if params[3]:
|
|
srcTag = session.getTag(params[3])
|
|
build = session.getBuild(params[4])
|
|
user = session.getUser(params[5])
|
|
|
|
lines.append("Recipients: %s" % ", ".join(params[0]))
|
|
lines.append("Successful?: %s" % (params[1] and 'yes' or 'no'))
|
|
lines.append("Tagged Into: %s" % destTag['name'])
|
|
if srcTag:
|
|
lines.append("Moved From: %s" % srcTag['name'])
|
|
lines.append("Build: %s" % koji.buildLabel(build))
|
|
lines.append("Tagged By: %s" % user['name'])
|
|
lines.append("Ignore Success?: %s" % (params[6] and 'yes' or 'no'))
|
|
if params[7]:
|
|
lines.append("Failure Message: %s" % params[7])
|
|
elif method == 'dependantTask':
|
|
lines.append("Dependant Tasks: %s" % ", ".join([str(depID) for depID in params[0]]))
|
|
lines.append("Subtasks:")
|
|
for subtask in params[1]:
|
|
lines.append(" Method: %s" % subtask[0])
|
|
lines.append(" Parameters: %s" % ", ".join([str(subparam) for subparam in subtask[1]]))
|
|
if len(subtask) > 2 and subtask[2]:
|
|
subopts = subtask[2]
|
|
_handleOpts(lines, subopts, prefix=' ')
|
|
lines.append("")
|
|
elif method == 'chainbuild':
|
|
lines.append("Build Groups:")
|
|
group_num = 0
|
|
for group_list in params[0]:
|
|
group_num += 1
|
|
lines.append(" %i: %s" % (group_num, ', '.join(group_list)))
|
|
lines.append("Build Target: %s" % params[1])
|
|
if len(params) > 2:
|
|
_handleOpts(lines, params[2])
|
|
elif method == 'waitrepo':
|
|
lines.append("Build Target: %s" % params[0])
|
|
if params[1]:
|
|
lines.append("Newer Than: %s" % params[1])
|
|
if params[2]:
|
|
lines.append("NVRs: %s" % ', '.join(params[2]))
|
|
|
|
return lines
|
|
|
|
def _printTaskInfo(session, task_id, level=0, recurse=True, verbose=True):
|
|
"""Recursive function to print information about a task
|
|
and its children."""
|
|
|
|
BUILDDIR = '/var/lib/mock'
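# Conventional mock buildroot location on the builders; used only to
# construct human-readable paths for display, not queried from the hub.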
|
|
indent = " "*2*level
|
|
|
|
info = session.getTaskInfo(task_id)
|
|
if info['host_id']:
|
|
host_info = session.getHost(info['host_id'])
|
|
else:
|
|
host_info = None
|
|
buildroot_infos = session.listBuildroots(taskID=task_id)
|
|
build_info = session.listBuilds(taskID=task_id)
|
|
|
|
files = session.listTaskOutput(task_id)
|
|
logs = [filename for filename in files if filename.endswith('.log')]
|
|
output = [filename for filename in files if not filename.endswith('.log')]
|
|
files_dir = '%s/%s' % (koji.pathinfo.work(), koji.pathinfo.taskrelpath(task_id))
|
|
|
|
owner = session.getUser(info['owner'])['name']
|
|
|
|
print "%sTask: %d" % (indent, task_id)
|
|
print "%sType: %s" % (indent, info['method'])
|
|
if verbose:
|
|
print "%sRequest Parameters:" % indent
|
|
for line in _parseTaskParams(session, info['method'], task_id):
|
|
print "%s %s" % (indent, line)
|
|
print "%sOwner: %s" % (indent, owner)
|
|
print "%sState: %s" % (indent, koji.TASK_STATES[info['state']].lower())
|
|
print "%sCreated: %s" % (indent, time.asctime(time.localtime(info['create_ts'])))
|
|
if info.get('start_ts'):
|
|
print "%sStarted: %s" % (indent, time.asctime(time.localtime(info['start_ts'])))
|
|
if info.get('completion_ts'):
|
|
print "%sFinished: %s" % (indent, time.asctime(time.localtime(info['completion_ts'])))
|
|
if host_info:
|
|
print "%sHost: %s" % (indent, host_info['name'])
|
|
if build_info:
|
|
print "%sBuild: %s (%d)" % (indent, build_info[0]['nvr'], build_info[0]['build_id'])
|
|
if buildroot_infos:
|
|
print "%sBuildroots:" % indent
|
|
for root in buildroot_infos:
|
|
print "%s %s/%s-%d-%d/" % (indent, BUILDDIR, root['tag_name'], root['id'], root['repo_id'])
|
|
if logs:
|
|
print "%sLog Files:" % indent
|
|
for log in logs:
|
|
print "%s %s/%s" % (indent, files_dir, log)
|
|
if output:
|
|
print "%sOutput:" % indent
|
|
for filename in output:
|
|
print "%s %s/%s" % (indent, files_dir, filename)
|
|
|
|
# white space
|
|
print
|
|
|
|
if recurse:
|
|
level += 1
|
|
children = session.getTaskChildren(task_id, request=True)
|
|
children.sort(cmp=lambda a, b: cmp(a['id'], b['id']))
|
|
for child in children:
|
|
_printTaskInfo(session, child['id'], level, verbose=verbose)
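
# Illustrative note: each buildroot line printed above follows the pattern
# BUILDDIR/<tag_name>-<buildroot_id>-<repo_id>/, so a hypothetical buildroot
# with id 1234 for tag f24-build and repo 5678 renders as
# /var/lib/mock/f24-build-1234-5678/ (prefixed by the current indent).
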
def anon_handle_taskinfo(options, session, args):
    """[info] Show information about a task"""
    usage = _("usage: %prog taskinfo [options] taskID [taskID...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("-r", "--recurse", action="store_true", help=_("Show children of this task as well"))
    parser.add_option("-v", "--verbose", action="store_true", help=_("Be verbose"))
    (options, args) = parser.parse_args(args)
    if len(args) < 1:
        parser.error(_("You must specify at least one task ID"))
        assert False

    activate_session(session)

    for arg in args:
        task_id = int(arg)
        _printTaskInfo(session, task_id, 0, options.recurse, options.verbose)
|
|
|
|
def anon_handle_taginfo(options, session, args):
|
|
"[info] Print basic information about a tag"
|
|
usage = _("usage: %prog taginfo [options] <tag> [<tag> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--event", type='int', metavar="EVENT#", help=_("query at event"))
|
|
parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("query at timestamp"))
|
|
parser.add_option("--repo", type='int', metavar="REPO#", help=_("query at event for a repo"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Please specify a tag"))
|
|
assert False
|
|
activate_session(session)
|
|
event = koji.util.eventFromOpts(session, options)
|
|
event_opts = {}
|
|
if event:
|
|
event['timestr'] = time.asctime(time.localtime(event['ts']))
|
|
print "Querying at event %(id)i (%(timestr)s)" % event
|
|
event_opts['event'] = event['id']
|
|
perms = dict([(p['id'], p['name']) for p in session.getAllPerms()])
|
|
|
|
tags = []
|
|
for tag in args:
|
|
info = session.getTag(tag, **event_opts)
|
|
if info is None:
|
|
print "No such tag: %s" % tag
|
|
sys.exit(1)
|
|
tags.append(info)
|
|
|
|
for n, info in enumerate(tags):
|
|
if n > 0:
|
|
print
|
|
print "Tag: %(name)s [%(id)d]" %info
|
|
print "Arches: %(arches)s" %info
|
|
group_list = [x['name'] for x in session.getTagGroups(info['id'], **event_opts)]
|
|
group_list.sort()
|
|
print "Groups: " + ', '.join(group_list)
|
|
if info.get('locked'):
|
|
print 'LOCKED'
|
|
if info.get('perm_id') is not None:
|
|
perm_id = info['perm_id']
|
|
print "Required permission: %r" % perms.get(perm_id, perm_id)
|
|
if session.mavenEnabled():
|
|
print "Maven support?: %s" % (info['maven_support'] and 'yes' or 'no')
|
|
print "Include all Maven archives?: %s" % (info['maven_include_all'] and 'yes' or 'no')
|
|
if 'extra' in info:
|
|
print "Tag options:"
|
|
keys = info['extra'].keys()
|
|
keys.sort()
|
|
for key in keys:
|
|
print " %s : %s" % (key, pprint.pformat(info['extra'][key]))
|
|
dest_targets = session.getBuildTargets(destTagID=info['id'], **event_opts)
|
|
build_targets = session.getBuildTargets(buildTagID=info['id'], **event_opts)
|
|
repos = {}
|
|
if not event:
|
|
for target in dest_targets + build_targets:
|
|
if not repos.has_key(target['build_tag']):
|
|
repo = session.getRepo(target['build_tag'])
|
|
if repo is None:
|
|
repos[target['build_tag']] = "no active repo"
|
|
else:
|
|
repos[target['build_tag']] = "repo#%(id)i: %(creation_time)s" % repo
|
|
if dest_targets:
|
|
print "Targets that build into this tag:"
|
|
for target in dest_targets:
|
|
if event:
|
|
print " %s (%s)" % (target['name'], target['build_tag_name'])
|
|
else:
|
|
print " %s (%s, %s)" % (target['name'], target['build_tag_name'], repos[target['build_tag']])
|
|
if build_targets:
|
|
print "This tag is a buildroot for one or more targets"
|
|
if not event:
|
|
print "Current repo: %s" % repos[info['id']]
|
|
print "Targets that build from this tag:"
|
|
for target in build_targets:
|
|
print " %s" % target['name']
|
|
external_repos = session.getTagExternalRepos(tag_info=info['id'], **event_opts)
|
|
if external_repos:
|
|
print "External repos:"
|
|
for rinfo in external_repos:
|
|
print " %(priority)3i %(external_repo_name)s (%(url)s)" % rinfo
|
|
print "Inheritance:"
|
|
for parent in session.getInheritanceData(tag, **event_opts):
|
|
flags = ''
|
|
for code,expr in (
|
|
('M',parent['maxdepth'] is not None),
|
|
('F',parent['pkg_filter']),
|
|
('I',parent['intransitive']),
|
|
('N',parent['noconfig']),):
|
|
if expr:
|
|
flags += code
|
|
else:
|
|
flags += '.'
|
|
parent['flags'] = flags
|
|
print " %(priority)-4d %(flags)s %(name)s [%(parent_id)s]" % parent
|
|
if parent['maxdepth'] is not None:
|
|
print " maxdepth: %(maxdepth)s" % parent
|
|
if parent['pkg_filter']:
|
|
print " package filter: %(pkg_filter)s" % parent
|
|
|
|
|
|
def handle_add_tag(options, session, args):
|
|
"[admin] Add a new tag to the database"
|
|
usage = _("usage: %prog add-tag [options] name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--parent", help=_("Specify parent"))
|
|
parser.add_option("--arches", help=_("Specify arches"))
|
|
parser.add_option("--maven-support", action="store_true", help=_("Enable creation of Maven repos for this tag"))
|
|
parser.add_option("--include-all", action="store_true", help=_("Include all packages in this tag when generating Maven repos"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("Please specify a name for the tag"))
|
|
assert False
|
|
activate_session(session)
|
|
if not session.hasPerm('admin'):
|
|
print "This action requires admin privileges"
|
|
return
|
|
opts = {}
|
|
if options.parent:
|
|
opts['parent'] = options.parent
|
|
if options.arches:
|
|
opts['arches'] = ' '.join(options.arches.replace(',',' ').split())
|
|
if options.maven_support:
|
|
opts['maven_support'] = True
|
|
if options.include_all:
|
|
opts['maven_include_all'] = True
|
|
session.createTag(args[0],**opts)
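
# Illustrative sketch (not called anywhere): the opts dict that a hypothetical
# "add-tag --parent f24 --arches i386,x86_64 f24-candidate" invocation would
# pass to createTag, showing the arches normalization performed above.
def _example_add_tag_opts():
    arches = 'i386,x86_64'
    opts = {'parent': 'f24',
            'arches': ' '.join(arches.replace(',', ' ').split())}
    assert opts['arches'] == 'i386 x86_64'
    return opts
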
|
|
|
|
def handle_edit_tag(options, session, args):
|
|
"[admin] Alter tag information"
|
|
usage = _("usage: %prog edit-tag [options] name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--arches", help=_("Specify arches"))
|
|
parser.add_option("--perm", help=_("Specify permission requirement"))
|
|
parser.add_option("--no-perm", action="store_true", help=_("Remove permission requirement"))
|
|
parser.add_option("--lock", action="store_true", help=_("Lock the tag"))
|
|
parser.add_option("--unlock", action="store_true", help=_("Unlock the tag"))
|
|
parser.add_option("--rename", help=_("Rename the tag"))
|
|
parser.add_option("--maven-support", action="store_true", help=_("Enable creation of Maven repos for this tag"))
|
|
parser.add_option("--no-maven-support", action="store_true", help=_("Disable creation of Maven repos for this tag"))
|
|
parser.add_option("--include-all", action="store_true", help=_("Include all packages in this tag when generating Maven repos"))
|
|
parser.add_option("--no-include-all", action="store_true", help=_("Do not include all packages in this tag when generating Maven repos"))
|
|
parser.add_option("-x", "--extra", action="append", default=[], metavar="key=value",
|
|
help=_("Set tag extra option"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("Please specify a name for the tag"))
|
|
assert False
|
|
activate_session(session)
|
|
tag = args[0]
|
|
opts = {}
|
|
if options.arches:
|
|
opts['arches'] = ' '.join(options.arches.replace(',',' ').split())
|
|
if options.no_perm:
|
|
opts['perm_id'] = None
|
|
elif options.perm:
|
|
opts['perm'] = options.perm
|
|
if options.unlock:
|
|
opts['locked'] = False
|
|
if options.lock:
|
|
opts['locked'] = True
|
|
if options.rename:
|
|
opts['name'] = options.rename
|
|
if options.maven_support:
|
|
opts['maven_support'] = True
|
|
if options.no_maven_support:
|
|
opts['maven_support'] = False
|
|
if options.include_all:
|
|
opts['maven_include_all'] = True
|
|
if options.no_include_all:
|
|
opts['maven_include_all'] = False
|
|
if options.extra:
|
|
extra = {}
|
|
for xopt in options.extra:
|
|
key, value = xopt.split('=')
|
|
value = arg_filter(value)
|
|
extra[key] = value
|
|
opts['extra'] = extra
|
|
#XXX change callname
|
|
session.editTag2(tag,**opts)
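
# Illustrative sketch (not called anywhere): how repeated -x/--extra options
# are folded into the 'extra' dict passed to editTag2 above.  The keys and
# values here are hypothetical.
def _example_edit_tag_extra():
    xopts = ['mock.new_chroot=0', 'mock.package_manager=dnf']
    extra = {}
    for xopt in xopts:
        key, value = xopt.split('=')
        extra[key] = arg_filter(value)
    assert extra == {'mock.new_chroot': 0, 'mock.package_manager': 'dnf'}
    return extra
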
|
|
|
|
def handle_lock_tag(options, session, args):
|
|
"[admin] Lock a tag"
|
|
usage = _("usage: %prog lock-tag [options] <tag> [<tag> ...] ")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--perm", help=_("Specify permission requirement"))
|
|
parser.add_option("--glob", action="store_true", help=_("Treat args as glob patterns"))
|
|
parser.add_option("--master", action="store_true", help=_("Lock the master lock"))
|
|
parser.add_option("-n", "--test", action="store_true", help=_("Test mode"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Please specify a tag"))
|
|
assert False
|
|
activate_session(session)
|
|
pdata = session.getAllPerms()
|
|
perm_ids = dict([(p['name'], p['id']) for p in pdata])
|
|
perm = options.perm
|
|
if perm is None:
|
|
perm = 'admin'
|
|
perm_id = perm_ids[perm]
|
|
if options.glob:
|
|
selected = []
|
|
for tag in session.listTags():
|
|
for pattern in args:
|
|
if fnmatch.fnmatch(tag['name'], pattern):
|
|
selected.append(tag)
|
|
break
|
|
if not selected:
|
|
print _("No tags matched")
|
|
else:
|
|
selected = [session.getTag(name) for name in args]
|
|
for tag in selected:
|
|
if options.master:
|
|
#set the master lock
|
|
if tag['locked']:
|
|
print _("Tag %s: master lock already set") % tag['name']
|
|
continue
|
|
elif options.test:
|
|
print _("Would have set master lock for: %s") % tag['name']
|
|
continue
|
|
session.editTag2(tag['id'], locked=True)
|
|
else:
|
|
if tag['perm_id'] == perm_id:
|
|
print _("Tag %s: %s permission already required") % (tag['name'], perm)
|
|
continue
|
|
elif options.test:
|
|
print _("Would have set permission requirement %s for tag %s") % (perm, tag['name'])
|
|
continue
|
|
session.editTag2(tag['id'], perm=perm_id)
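
# Note: without --master, lock-tag does not touch the tag's 'locked' flag at
# all; it only sets a permission requirement via editTag2(perm=...), using the
# 'admin' permission by default when --perm is not given.
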
|
|
|
|
def handle_unlock_tag(options, session, args):
|
|
"[admin] Unlock a tag"
|
|
usage = _("usage: %prog unlock-tag [options] <tag> [<tag> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--glob", action="store_true", help=_("Treat args as glob patterns"))
|
|
parser.add_option("-n", "--test", action="store_true", help=_("Test mode"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Please specify a tag"))
|
|
assert False
|
|
activate_session(session)
|
|
if options.glob:
|
|
selected = []
|
|
for tag in session.listTags():
|
|
for pattern in args:
|
|
if fnmatch.fnmatch(tag['name'], pattern):
|
|
selected.append(tag)
|
|
break
|
|
if not selected:
|
|
print _("No tags matched")
|
|
else:
|
|
selected = []
|
|
for name in args:
|
|
tag = session.getTag(name)
|
|
if tag is None:
|
|
parser.error(_("No such tag: %s") % name)
|
|
assert False
|
|
selected.append(tag)
|
|
|
|
for tag in selected:
|
|
opts = {}
|
|
if tag['locked']:
|
|
opts['locked'] = False
|
|
if tag['perm_id']:
|
|
opts['perm'] = None
|
|
if not opts:
|
|
print "Tag %(name)s: not locked" % tag
|
|
continue
|
|
if options.test:
|
|
print "Tag %s: skipping changes: %r" % (tag['name'], opts)
|
|
else:
|
|
session.editTag2(tag['id'], locked=False, perm_id=None)
|
|
|
|
def handle_add_tag_inheritance(options, session, args):
|
|
"""[admin] Add to a tag's inheritance"""
|
|
usage = _("usage: %prog add-tag-inheritance [options] tag parent-tag")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--priority", help=_("Specify priority"))
|
|
parser.add_option("--maxdepth", help=_("Specify max depth"))
|
|
parser.add_option("--intransitive", action="store_true", help=_("Set intransitive"))
|
|
parser.add_option("--noconfig", action="store_true", help=_("Set to packages only"))
|
|
parser.add_option("--pkg-filter", help=_("Specify the package filter"))
|
|
parser.add_option("--force", help=_("Force adding a parent to a tag that already has that parent tag"))
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
if len(args) != 2:
|
|
parser.error(_("This command takes exctly two argument: a tag name or ID and that tag's new parent name or ID"))
|
|
assert False
|
|
|
|
activate_session(session)
|
|
|
|
tag = session.getTag(args[0])
|
|
if not tag:
|
|
parser.error(_("Invalid tag: %s" % args[0]))
|
|
|
|
parent = session.getTag(args[1])
|
|
if not parent:
|
|
parser.error(_("Invalid tag: %s" % args[1]))
|
|
|
|
inheritanceData = session.getInheritanceData(tag['id'])
|
|
priority = options.priority and int(options.priority) or 0
|
|
sameParents = [datum for datum in inheritanceData if datum['parent_id'] == parent['id']]
|
|
samePriority = [datum for datum in inheritanceData if datum['priority'] == priority]
|
|
|
|
if sameParents and not options.force:
|
|
print _("Error: You are attempting to add %s as %s's parent even though it already is %s's parent."
|
|
% (parent['name'], tag['name'], tag['name']))
|
|
print _("Please use --force if this is what you really want to do.")
|
|
return
|
|
if samePriority:
|
|
print _("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority." % tag['name'])
|
|
return
|
|
|
|
new_data = {}
|
|
new_data['parent_id'] = parent['id']
|
|
new_data['priority'] = priority
|
|
if options.maxdepth and options.maxdepth.isdigit():
|
|
new_data['maxdepth'] = int(options.maxdepth)
|
|
else:
|
|
new_data['maxdepth'] = None
|
|
new_data['intransitive'] = options.intransitive or False
|
|
new_data['noconfig'] = options.noconfig or False
|
|
new_data['pkg_filter'] = options.pkg_filter or ''
|
|
|
|
inheritanceData.append(new_data)
|
|
session.setInheritanceData(tag['id'], inheritanceData)
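
# Illustrative sketch (not called anywhere): the shape of the inheritance
# entry appended above, for a hypothetical parent tag with id 123 at
# priority 10 and no extra options.
def _example_inheritance_entry():
    return {'parent_id': 123, 'priority': 10, 'maxdepth': None,
            'intransitive': False, 'noconfig': False, 'pkg_filter': ''}
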
|
|
|
|
|
|
def handle_edit_tag_inheritance(options, session, args):
|
|
"""[admin] Edit tag inheritance"""
|
|
usage = _("usage: %prog edit-tag-inheritance [options] tag <parent> <priority>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--priority", help=_("Specify a new priority"))
|
|
parser.add_option("--maxdepth", help=_("Specify max depth"))
|
|
parser.add_option("--intransitive", action="store_true", help=_("Set intransitive"))
|
|
parser.add_option("--noconfig", action="store_true", help=_("Set to packages only"))
|
|
parser.add_option("--pkg-filter", help=_("Specify the package filter"))
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
if len(args) < 1:
|
|
parser.error(_("This command takes at lease one argument: a tag name or ID"))
|
|
assert False
|
|
|
|
if len(args) > 3:
|
|
parser.error(_("This command takes at most three argument: a tag name or ID, a parent tag name or ID, and a priority"))
|
|
assert False
|
|
|
|
activate_session(session)
|
|
|
|
tag = session.getTag(args[0])
|
|
if not tag:
|
|
parser.error(_("Invalid tag: %s" % args[0]))
|
|
|
|
parent = None
|
|
priority = None
|
|
if len(args) > 1:
|
|
parent = session.getTag(args[1])
|
|
if not parent:
|
|
parser.error(_("Invalid tag: %s" % args[1]))
|
|
if len(args) > 2:
|
|
priority = args[2]
|
|
|
|
data = session.getInheritanceData(tag['id'])
|
|
if parent and data:
|
|
data = [datum for datum in data if datum['parent_id'] == parent['id']]
|
|
if priority and data:
|
|
data = [datum for datum in data if datum['priority'] == priority]
|
|
|
|
if len(data) == 0:
|
|
print _("No inheritance link found to remove. Please check your arguments")
|
|
return 1
|
|
elif len(data) > 1:
|
|
print _("Multiple matches for tag.")
|
|
if not parent:
|
|
print _("Please specify a parent on the command line.")
|
|
return 1
|
|
if not priority:
|
|
print _("Please specify a priority on the command line.")
|
|
return 1
|
|
print _("Error: Key constraints may be broken. Exiting.")
|
|
return 1
|
|
|
|
# len(data) == 1
|
|
data = data[0]
|
|
|
|
inheritanceData = session.getInheritanceData(tag['id'])
|
|
samePriority = [datum for datum in inheritanceData if datum['priority'] == options.priority]
|
|
if samePriority:
|
|
print _("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority.") % tag['name']
|
|
return 1
|
|
|
|
new_data = data.copy()
|
|
if options.priority is not None and options.priority.isdigit():
|
|
new_data['priority'] = int(options.priority)
|
|
if options.maxdepth is not None:
|
|
if options.maxdepth.isdigit():
|
|
new_data['maxdepth'] = int(options.maxdepth)
|
|
elif options.maxdepth.lower() == "none":
|
|
new_data['maxdepth'] = None
|
|
else:
|
|
print _("Invalid maxdepth: %s") % options.maxdepth
|
|
return 1
|
|
if options.intransitive:
|
|
new_data['intransitive'] = options.intransitive
|
|
if options.noconfig:
|
|
new_data['noconfig'] = options.noconfig
|
|
if options.pkg_filter:
|
|
new_data['pkg_filter'] = options.pkg_filter
|
|
|
|
# find the data we want to edit and replace it
|
|
index = inheritanceData.index(data)
|
|
inheritanceData[index] = new_data
|
|
session.setInheritanceData(tag['id'], inheritanceData)
|
|
|
|
def handle_remove_tag_inheritance(options, session, args):
|
|
"""[admin] Remove a tag inheritance link"""
|
|
usage = _("usage: %prog remove-tag-inheritance tag <parent> <priority>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
|
|
if len(args) < 1:
|
|
parser.error(_("This command takes at lease one argument: a tag name or ID"))
|
|
assert False
|
|
|
|
if len(args) > 3:
|
|
parser.error(_("This command takes at most three argument: a tag name or ID, a parent tag name or ID, and a priority"))
|
|
assert False
|
|
|
|
activate_session(session)
|
|
|
|
tag = session.getTag(args[0])
|
|
if not tag:
|
|
parser.error(_("Invalid tag: %s" % args[0]))
|
|
|
|
parent = None
|
|
priority = None
|
|
if len(args) > 1:
|
|
parent = session.getTag(args[1])
|
|
if not parent:
|
|
parser.error(_("Invalid tag: %s" % args[1]))
|
|
if len(args) > 2:
|
|
priority = args[2]
|
|
|
|
data = session.getInheritanceData(tag['id'])
|
|
if parent and data:
|
|
data = [datum for datum in data if datum['parent_id'] == parent['id']]
|
|
if priority and data:
|
|
data = [datum for datum in data if datum['priority'] == priority]
|
|
|
|
if len(data) == 0:
|
|
print _("No inheritance link found to remove. Please check your arguments")
|
|
return
|
|
elif len(data) > 1:
|
|
print _("Multiple matches for tag.")
|
|
if not parent:
|
|
print _("Please specify a parent on the command line.")
|
|
return
|
|
if not priority:
|
|
print _("Please specify a priority on the command line.")
|
|
return
|
|
print _("Error: Key constrainsts may be broken. Exiting.")
|
|
return
|
|
|
|
# len(data) == 1
|
|
data = data[0]
|
|
|
|
inheritanceData = session.getInheritanceData(tag['id'])
|
|
|
|
new_data = data.copy()
|
|
new_data['delete link'] = True
|
|
|
|
# find the data we want to edit and replace it
|
|
index = inheritanceData.index(data)
|
|
inheritanceData[index] = new_data
|
|
session.setInheritanceData(tag['id'], inheritanceData)
|
|
|
|
def anon_handle_show_groups(options, session, args):
|
|
"[info] Show groups data for a tag"
|
|
usage = _("usage: %prog show-groups [options] <tag>")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--comps", action="store_true", help=_("Print in comps format"))
|
|
parser.add_option("-x", "--expand", action="store_true", default=False,
|
|
help=_("Expand groups in comps format"))
|
|
parser.add_option("--spec", action="store_true", help=_("Print build spec"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
tag = args[0]
|
|
groups = session.getTagGroups(tag)
|
|
if options.comps:
|
|
print koji.generate_comps(groups, expand_groups=options.expand)
|
|
elif options.spec:
|
|
print koji.make_groups_spec(groups,name='buildgroups',buildgroup='build')
|
|
else:
|
|
pprint.pprint(groups)
|
|
|
|
def anon_handle_list_external_repos(options, session, args):
|
|
"[info] List external repos"
|
|
usage = _("usage: %prog list-external-repos [options]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--url", help=_("Select by url"))
|
|
parser.add_option("--name", help=_("Select by name"))
|
|
parser.add_option("--id", type="int", help=_("Select by id"))
|
|
parser.add_option("--tag", help=_("Select by tag"))
|
|
parser.add_option("--used", action='store_true', help=_("List which tags use the repo(s)"))
|
|
parser.add_option("--inherit", action='store_true', help=_("Follow tag inheritance when selecting by tag"))
|
|
parser.add_option("--event", type='int', metavar="EVENT#", help=_("Query at event"))
|
|
parser.add_option("--ts", type='int', metavar="TIMESTAMP", help=_("Query at timestamp"))
|
|
parser.add_option("--repo", type='int', metavar="REPO#",
|
|
help=_("Query at event corresponding to (nonexternal) repo"))
|
|
parser.add_option("--quiet", action="store_true", default=options.quiet,
|
|
help=_("Do not display the column headers"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) > 0:
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
opts = {}
|
|
event = koji.util.eventFromOpts(session, options)
|
|
if event:
|
|
opts['event'] = event['id']
|
|
event['timestr'] = time.asctime(time.localtime(event['ts']))
|
|
print "Querying at event %(id)i (%(timestr)s)" % event
|
|
if options.tag:
|
|
format = "tag"
|
|
opts['tag_info'] = options.tag
|
|
opts['repo_info'] = options.id or options.name or None
|
|
if opts['repo_info']:
|
|
if options.inherit:
|
|
parser.error(_("Can't select by repo when using --inherit"))
|
|
assert False
|
|
if options.inherit:
|
|
del opts['repo_info']
|
|
data = session.getExternalRepoList(**opts)
|
|
format = "multitag"
|
|
else:
|
|
data = session.getTagExternalRepos(**opts)
|
|
elif options.used:
|
|
format = "multitag"
|
|
opts['repo_info'] = options.id or options.name or None
|
|
data = session.getTagExternalRepos(**opts)
|
|
else:
|
|
format = "basic"
|
|
opts['info'] = options.id or options.name or None
|
|
opts['url'] = options.url or None
|
|
data = session.listExternalRepos(**opts)
|
|
|
|
# There are three different output formats
|
|
# 1) Listing just repo data (name, url)
|
|
# 2) Listing repo data for a tag (priority, name, url)
|
|
# 3) Listing repo data for multiple tags (tag, priority, name, url)
|
|
if format == "basic":
|
|
format = "%(name)-25s %(url)s"
|
|
header1 = "%-25s %s" % ("External repo name", "URL")
|
|
header2 = "%s %s" % ("-"*25, "-"*40)
|
|
elif format == "tag":
|
|
format = "%(priority)-3i %(external_repo_name)-25s %(url)s"
|
|
header1 = "%-3s %-25s %s" % ("Pri", "External repo name", "URL")
|
|
header2 = "%s %s %s" % ("-"*3, "-"*25, "-"*40)
|
|
elif format == "multitag":
|
|
format = "%(tag_name)-20s %(priority)-3i %(external_repo_name)s"
|
|
header1 = "%-20s %-3s %s" % ("Tag", "Pri", "External repo name")
|
|
header2 = "%s %s %s" % ("-"*20, "-"*3, "-"*25)
|
|
if not options.quiet:
|
|
print header1
|
|
print header2
|
|
for rinfo in data:
|
|
print format % rinfo
|
|
|
|
def _pick_external_repo_priority(session, tag):
    """pick priority after current ones, leaving space for later insertions"""
    repolist = session.getTagExternalRepos(tag_info=tag)
    #ordered by priority
    if not repolist:
        priority = 5
    else:
        priority = (repolist[-1]['priority'] + 7) / 5 * 5
        #at least 3 higher than current max and a multiple of 5
    return priority
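
# Illustrative sketch (not called anywhere): the Python 2 integer arithmetic
# used above always lands on a multiple of 5 at least 3 above the current
# maximum priority.
def _example_pick_priority():
    for current_max, expected in ((5, 10), (11, 15), (12, 15), (13, 20)):
        assert (current_max + 7) / 5 * 5 == expected
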
|
|
|
|
def _parse_tagpri(tagpri):
    parts = tagpri.rsplit('::', 1)
    tag = parts[0]
    if len(parts) == 1:
        return tag, None
    elif parts[1] in ('auto', '-1'):
        return tag, None
    else:
        try:
            pri = int(parts[1])
        except ValueError:
            raise koji.GenericError, "Invalid priority: %s" % parts[1]
        return tag, pri
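
# Illustrative sketch (not called anywhere): how the tag::priority syntax
# accepted by --tag is split by _parse_tagpri.  The tag names are
# hypothetical.
def _example_parse_tagpri():
    assert _parse_tagpri('dist-foo') == ('dist-foo', None)
    assert _parse_tagpri('dist-foo::auto') == ('dist-foo', None)
    assert _parse_tagpri('dist-foo::15') == ('dist-foo', 15)
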
|
|
|
|
def handle_add_external_repo(options, session, args):
|
|
"[admin] Create an external repo and/or add one to a tag"
|
|
usage = _("usage: %prog add-external-repo [options] name [url]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("-t", "--tag", action="append", metavar="TAG",
|
|
help=_("Also add repo to tag. Use tag::N to set priority"))
|
|
parser.add_option("-p", "--priority", type='int',
|
|
help=_("Set priority (when adding to tag)"))
|
|
(options, args) = parser.parse_args(args)
|
|
activate_session(session)
|
|
if len(args) == 1:
|
|
name = args[0]
|
|
rinfo = session.getExternalRepo(name, strict=True)
|
|
if not options.tag:
|
|
parser.error(_("A url is required to create an external repo entry"))
|
|
elif len(args) == 2:
|
|
name, url = args
|
|
rinfo = session.createExternalRepo(name, url)
|
|
print "Created external repo %(id)i" % rinfo
|
|
else:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
if options.tag:
|
|
for tagpri in options.tag:
|
|
tag, priority = _parse_tagpri(tagpri)
|
|
if priority is None:
|
|
if options.priority is not None:
|
|
priority = options.priority
|
|
else:
|
|
priority = _pick_external_repo_priority(session, tag)
|
|
session.addExternalRepoToTag(tag, rinfo['name'], priority)
|
|
print "Added external repo %s to tag %s (priority %i)" \
|
|
% (rinfo['name'], tag, priority)
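
# Illustrative usage (hypothetical repo name, tag, and URL): create a repo and
# attach it to a tag with an explicit priority in one call:
#   add-external-repo -t dist-foo::20 external-foo http://example.com/repo/$arch/
# With no ::N suffix and no --priority, _pick_external_repo_priority above
# chooses the priority slot.
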
|
|
|
|
def handle_edit_external_repo(options, session, args):
|
|
"[admin] Edit data for an external repo"
|
|
usage = _("usage: %prog edit-external-repo name")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--url", help=_("Change the url"))
|
|
parser.add_option("--name", help=_("Change the name"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) != 1:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
parser.error(_("This command takes no arguments"))
|
|
assert False
|
|
opts = {}
|
|
if options.url:
|
|
opts['url'] = options.url
|
|
if options.name:
|
|
opts['name'] = options.name
|
|
if not opts:
|
|
parser.error(_("No changes specified"))
|
|
activate_session(session)
|
|
session.editExternalRepo(args[0], **opts)
|
|
|
|
def handle_remove_external_repo(options, session, args):
|
|
"[admin] Remove an external repo from a tag or tags, or remove entirely"
|
|
usage = _("usage: %prog remove-external-repo repo [tag ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--alltags", action="store_true", help=_("Remove from all tags"))
|
|
parser.add_option("--force", action='store_true', help=_("Force action"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Incorrect number of arguments"))
|
|
assert False
|
|
activate_session(session)
|
|
repo = args[0]
|
|
tags = args[1:]
|
|
delete = not bool(tags)
|
|
data = session.getTagExternalRepos(repo_info=repo)
|
|
current_tags = [d['tag_name'] for d in data]
|
|
if options.alltags:
|
|
delete = False
|
|
if tags:
|
|
parser.error(_("Do not specify tags when using --alltags"))
|
|
assert False
|
|
if not current_tags:
|
|
print _("External repo %s not associated with any tags") % repo
|
|
return 0
|
|
tags = current_tags
|
|
if delete:
|
|
#removing entirely
|
|
if current_tags and not options.force:
|
|
print _("Error: external repo %s used by tag(s): %s") % (repo, ', '.join(current_tags))
|
|
print _("Use --force to remove anyway")
|
|
return 1
|
|
session.deleteExternalRepo(args[0])
|
|
else:
|
|
for tag in tags:
|
|
if not tag in current_tags:
|
|
print _("External repo %s not associated with tag %s") % (repo, tag)
|
|
continue
|
|
session.removeExternalRepoFromTag(tag, repo)
|
|
|
|
# This handler is for spinning livecd images
|
|
#
|
|
def handle_spin_livecd(options, session, args):
|
|
"""[build] Create a live CD image given a kickstart file"""
|
|
|
|
# Usage & option parsing.
|
|
usage = _("usage: %prog spin-livecd [options] <name> <version> <target>" +
|
|
" <arch> <kickstart-file>")
|
|
usage += _("\n(Specify the --help global option for a list of other " +
|
|
"help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--wait", action="store_true",
|
|
help=_("Wait on the livecd creation, even if running in the background"))
|
|
parser.add_option("--nowait", action="store_false", dest="wait",
|
|
help=_("Don't wait on livecd creation"))
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the livecd creation task at a lower priority"))
|
|
parser.add_option("--ksurl", metavar="SCMURL",
|
|
help=_("The URL to the SCM containing the kickstart file"))
|
|
parser.add_option("--ksversion", metavar="VERSION",
|
|
help=_("The syntax version used in the kickstart file"))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Create a scratch LiveCD image"))
|
|
parser.add_option("--repo", action="append",
|
|
help=_("Specify a repo that will override the repo used to install " +
|
|
"RPMs in the LiveCD. May be used multiple times. The " +
|
|
"build tag repo associated with the target is the default."))
|
|
parser.add_option("--release", help=_("Forcibly set the release field"))
|
|
parser.add_option("--specfile", metavar="URL",
|
|
help=_("SCM URL to spec file fragment to use to generate wrapper RPMs"))
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not attempt to tag package"))
|
|
(task_options, args) = parser.parse_args(args)
|
|
|
|
# Make sure the target and kickstart are specified.
|
|
if len(args) != 5:
|
|
parser.error(_("Five arguments are required: a name, a version, an" +
|
|
" architecture, a build target, and a relative path to" +
|
|
" a kickstart file."))
|
|
assert False
|
|
_build_image(options, task_options, session, args, 'livecd')
|
|
|
|
# This handler is for spinning appliance images
|
|
#
|
|
def handle_spin_appliance(options, session, args):
|
|
"""[build] Create an appliance given a kickstart file"""
|
|
|
|
# Usage & option parsing
|
|
usage = _("usage: %prog spin-appliance [options] <name> <version> " +
|
|
"<target> <arch> <kickstart-file>")
|
|
usage += _("\n(Specify the --help global option for a list of other " +
|
|
"help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--wait", action="store_true",
|
|
help=_("Wait on the appliance creation, even if running in the background"))
|
|
parser.add_option("--nowait", action="store_false", dest="wait",
|
|
help=_("Don't wait on appliance creation"))
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the appliance creation task at a lower priority"))
|
|
parser.add_option("--ksurl", metavar="SCMURL",
|
|
help=_("The URL to the SCM containing the kickstart file"))
|
|
parser.add_option("--ksversion", metavar="VERSION",
|
|
help=_("The syntax version used in the kickstart file"))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Create a scratch appliance"))
|
|
parser.add_option("--repo", action="append",
|
|
help=_("Specify a repo that will override the repo used to install " +
|
|
"RPMs in the appliance. May be used multiple times. The " +
|
|
"build tag repo associated with the target is the default."))
|
|
parser.add_option("--release", help=_("Forcibly set the release field"))
|
|
parser.add_option("--specfile", metavar="URL",
|
|
help=_("SCM URL to spec file fragment to use to generate wrapper RPMs"))
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not attempt to tag package"))
|
|
parser.add_option("--vmem", metavar="VMEM", default=None,
|
|
help=_("Set the amount of virtual memory in the appliance in MB, " +
|
|
"default is 512"))
|
|
parser.add_option("--vcpu", metavar="VCPU", default=None,
|
|
help=_("Set the number of virtual cpus in the appliance, " +
|
|
"default is 1"))
|
|
parser.add_option("--format", metavar="DISK_FORMAT", default='raw',
|
|
help=_("Disk format, default is raw. Other options are qcow, " +
|
|
"qcow2, and vmx."))
|
|
|
|
(task_options, args) = parser.parse_args(args)
|
|
|
|
# Make sure the target and kickstart are specified.
|
|
print 'spin-appliance is deprecated and will be replaced with image-build'
|
|
if len(args) != 5:
|
|
parser.error(_("Five arguments are required: a name, a version, " +
|
|
"an architecture, a build target, and a relative path" +
|
|
" to a kickstart file."))
|
|
assert False
|
|
_build_image(options, task_options, session, args, 'appliance')
|
|
|
|
def handle_image_build_indirection(options, session, args):
|
|
"""[build] Create a disk image using other disk images via the Indirection plugin"""
|
|
usage = _("usage: %prog image-build-indirection [base_image] " +
|
|
"[utility_image] [indirection_build_template]")
|
|
usage += _("\n %prog image-build --config FILE")
|
|
usage += _("\n\n(Specify the --help global option for a list of other " +
|
|
"help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--config",
|
|
help=_("Use a configuration file to define image-build options " +
|
|
"instead of command line options (they will be ignored)."))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the image creation task at a lower priority"))
|
|
parser.add_option("--name",
|
|
help=_("Name of the output image"))
|
|
parser.add_option("--version",
|
|
help=_("Version of the output image"))
|
|
parser.add_option("--release",
|
|
help=_("Release of the output image"))
|
|
parser.add_option("--arch",
|
|
help=_("Architecture of the output image and input images"))
|
|
parser.add_option("--target",
|
|
help=_("Build target to use for the indirection build"))
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not tag the resulting build"))
|
|
parser.add_option("--base-image-task",
|
|
help=_("ID of the createImage task of the base image to be used"))
|
|
parser.add_option("--base-image-build",
|
|
help=_("NVR or build ID of the base image to be used"))
|
|
parser.add_option("--utility-image-task",
|
|
help=_("ID of the createImage task of the utility image to be used"))
|
|
parser.add_option("--utility-image-build",
|
|
help=_("NVR or build ID of the utility image to be used"))
|
|
parser.add_option("--indirection-template",
|
|
help=_("Name of the local file, or SCM file containing the template used to drive the indirection plugin"))
|
|
parser.add_option("--indirection-template-url",
|
|
help=_("SCM URL containing the template used to drive the indirection plugin"))
|
|
parser.add_option("--results-loc",
|
|
help=_("Relative path inside the working space image where the results should be extracted from"))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Create a scratch image"))
|
|
parser.add_option("--wait", action="store_true",
|
|
help=_("Wait on the image creation, even if running in the background"))
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
|
|
|
|
(task_options, args) = parser.parse_args(args)
|
|
_build_image_indirection(options, task_options, session, args)
|
|
|
|
|
|
def _build_image_indirection(options, task_opts, session, args):
|
|
"""
|
|
A private helper function for builds using the indirection plugin of ImageFactory
|
|
"""
|
|
|
|
# Do some sanity checks before even attempting to create the session
|
|
if not (bool(task_opts.utility_image_task) !=
|
|
bool(task_opts.utility_image_build)):
|
|
raise koji.GenericError, _("You must specify either a utility-image task or build ID/NVR")
|
|
|
|
if not (bool(task_opts.base_image_task) !=
|
|
bool(task_opts.base_image_build)):
|
|
raise koji.GenericError, _("You must specify either a base-image task or build ID/NVR")
|
|
|
|
required_opts = [ 'name', 'version', 'arch', 'target', 'indirection_template', 'results_loc' ]
|
|
optional_opts = [ 'indirection_template_url', 'scratch', 'utility_image_task', 'utility_image_build',
|
|
'base_image_task', 'base_image_build', 'release', 'skip_tag' ]
|
|
|
|
missing = [ ]
|
|
for opt in required_opts:
|
|
if not getattr(task_opts, opt, None):
|
|
missing.append(opt)
|
|
|
|
if len(missing) > 0:
|
|
print "Missing the following required options:" ,
|
|
for opt in missing:
|
|
print "--" + opt.replace('_','-') ,
|
|
print
|
|
raise koji.GenericError, _("Missing required options specified above")
|
|
|
|
activate_session(session)
|
|
|
|
# Set the task's priority. Users can only lower it with --background.
|
|
priority = None
|
|
if task_opts.background:
|
|
# relative to koji.PRIO_DEFAULT; higher means a "lower" priority.
|
|
priority = 5
|
|
if _running_in_bg() or task_opts.noprogress:
|
|
callback = None
|
|
else:
|
|
callback = _progress_callback
|
|
|
|
# We do some early sanity checking of the given target.
|
|
# Kojid gets these values again later on, but we check now as a convenience
|
|
# for the user.
|
|
|
|
tmp_target = session.getBuildTarget(task_opts.target)
|
|
if not tmp_target:
|
|
raise koji.GenericError, _("Unknown build target: %s" % tmp_target)
|
|
dest_tag = session.getTag(tmp_target['dest_tag'])
|
|
if not dest_tag:
|
|
raise koji.GenericError, _("Unknown destination tag: %s" %
|
|
tmp_target['dest_tag_name'])
|
|
|
|
# Set the architecture
|
|
task_opts.arch = koji.canonArch(task_opts.arch)
|
|
|
|
|
|
# Upload the indirection template file to the staging area.
|
|
# If it's a URL, it's kojid's job to go get it when it does the checkout.
|
|
if not task_opts.indirection_template_url:
|
|
if not task_opts.scratch:
|
|
# only scratch builds can omit indirection_template_url
|
|
raise koji.GenericError, _("Non-scratch builds must provide a URL for the indirection template")
|
|
templatefile = task_opts.indirection_template
|
|
serverdir = _unique_path('cli-image-indirection')
|
|
session.uploadWrapper(templatefile, serverdir, callback=callback)
|
|
task_opts.indirection_template = os.path.join('work', serverdir,
|
|
os.path.basename(templatefile))
|
|
print
|
|
|
|
hub_opts = { }
|
|
# Just pass everything in as opts. No positional arguments at all. Why not?
|
|
for opt in required_opts + optional_opts:
|
|
val = getattr(task_opts, opt, None)
|
|
# We pass these through even if they are None
|
|
# The builder code can then check if they are set without using getattr
|
|
hub_opts[opt] = val
|
|
|
|
# finally, create the task.
|
|
task_id = session.buildImageIndirection(opts=hub_opts,
|
|
priority=priority)
|
|
|
|
if not options.quiet:
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
#if task_opts.wait or (task_opts.wait is None and not _running_in_bg()):
|
|
# session.logout()
|
|
# return watch_tasks(session, [task_id], quiet=options.quiet)
|
|
#else:
|
|
# return
|
|
|
|
|
|
def handle_image_build(options, session, args):
|
|
"""[build] Create a disk image given an install tree"""
|
|
formats = ('vmdk', 'qcow', 'qcow2', 'vdi', 'vpc', 'rhevm-ova',
|
|
'vsphere-ova', 'vagrant-virtualbox', 'vagrant-libvirt',
|
|
'vagrant-vmware-fusion', 'docker', 'raw-xz',
|
|
'liveimg-squashfs', 'tar-gz')
|
|
usage = _("usage: %prog image-build [options] <name> <version> " +
|
|
"<target> <install-tree-url> <arch> [<arch>...]")
|
|
usage += _("\n %prog image-build --config FILE")
|
|
usage += _("\n\n(Specify the --help global option for a list of other " +
|
|
"help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the image creation task at a lower priority"))
|
|
parser.add_option("--config",
|
|
help=_("Use a configuration file to define image-build options " +
|
|
"instead of command line options (they will be ignored)."))
|
|
parser.add_option("--disk-size", default=10,
|
|
help=_("Set the disk device size in gigabytes"))
|
|
parser.add_option("--distro",
|
|
help=_("specify the RPM based distribution the image will be based " +
|
|
"on with the format RHEL-X.Y, CentOS-X.Y, SL-X.Y, or Fedora-NN. " +
|
|
"The packages for the Distro you choose must have been built " +
|
|
"in this system."))
|
|
parser.add_option("--format", default=[], action="append",
|
|
help=_("Convert results to one or more formats " +
|
|
"(%s), this option may be used " % ', '.join(formats) +
|
|
"multiple times. By default, specifying this option will " +
|
|
"omit the raw disk image (which is 10G in size) from the " +
|
|
"build results. If you really want it included with converted " +
|
|
"images, pass in 'raw' as an option."))
|
|
parser.add_option("--kickstart", help=_("Path to a local kickstart file"))
|
|
parser.add_option("--ksurl", metavar="SCMURL",
|
|
help=_("The URL to the SCM containing the kickstart file"))
|
|
parser.add_option("--ksversion", metavar="VERSION",
|
|
help=_("The syntax version used in the kickstart file"))
|
|
parser.add_option("--noprogress", action="store_true",
|
|
help=_("Do not display progress of the upload"))
|
|
parser.add_option("--nowait", action="store_false", dest="wait",
|
|
help=_("Don't wait on image creation"))
|
|
parser.add_option("--ova-option", action="append",
|
|
help=_("Override a value in the OVA description XML. Provide a value " +
|
|
"in a name=value format, such as 'ovf_memory_mb=6144'"))
|
|
parser.add_option("--factory-parameter", nargs=2, action="append",
|
|
help=_("Pass a parameter to Image Factory. The results are highly specific " +
|
|
"to the image format being created. This is a two argument parameter " +
|
|
"that can be specified an arbitrary number of times. For example: "
|
|
"--factory-parameter docker_cmd '[ \"/bin/echo Hello World\" ]'"))
|
|
parser.add_option("--release", help=_("Forcibly set the release field"))
|
|
parser.add_option("--repo", action="append",
|
|
help=_("Specify a repo that will override the repo used to install " +
|
|
"RPMs in the image. May be used multiple times. The " +
|
|
"build tag repo associated with the target is the default."))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Create a scratch image"))
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not attempt to tag package"))
|
|
parser.add_option("--specfile", metavar="URL",
|
|
help=_("SCM URL to spec file fragment to use to generate wrapper RPMs"))
|
|
parser.add_option("--wait", action="store_true",
|
|
help=_("Wait on the image creation, even if running in the background"))
|
|
|
|
(task_options, args) = parser.parse_args(args)
|
|
|
|
if task_options.config:
|
|
if not os.path.exists(task_options.config):
|
|
parser.error(_("%s not found!" % task_options.config))
|
|
section = 'image-build'
|
|
config = ConfigParser.ConfigParser()
|
|
conf_fd = open(task_options.config)
|
|
config.readfp(conf_fd)
|
|
conf_fd.close()
|
|
if not config.has_section(section):
|
|
parser.error(_("single section called [%s] is required" % section))
|
|
# pluck out the positional arguments first
|
|
args = []
|
|
for arg in ('name', 'version', 'target', 'install_tree'):
|
|
args.append(config.get(section, arg))
|
|
config.remove_option(section, arg)
|
|
args.extend(config.get(section, 'arches').split(','))
|
|
config.remove_option(section, 'arches')
|
|
# turn comma-separated options into lists
|
|
for arg in ('repo', 'format'):
|
|
if config.has_option(section, arg):
|
|
setattr(task_options, arg, config.get(section, arg).split(','))
|
|
config.remove_option(section, arg)
|
|
# handle everything else
|
|
for k, v in config.items(section):
|
|
setattr(task_options, k, v)
|
|
|
|
# ova-options belong in their own section
|
|
section = 'ova-options'
|
|
if config.has_section(section):
|
|
task_options.ova_option = []
|
|
for k, v in config.items(section):
|
|
task_options.ova_option.append('%s=%s' % (k, v))
|
|
|
|
# as do factory-parameters
|
|
section = 'factory-parameters'
|
|
if config.has_section(section):
|
|
task_options.factory_parameter = [ ]
|
|
for k, v in config.items(section):
|
|
# We do this, rather than a dict, to match the (key, value) tuples produced
# by the command-line option parsing for --factory-parameter
|
|
task_options.factory_parameter.append( (k, v) )
|
|
|
|
else:
|
|
if len(args) < 5:
|
|
parser.error(_("At least five arguments are required: a name, " +
|
|
"a version, a build target, a URL to an " +
|
|
"install tree, and 1 or more architectures."))
|
|
if not task_options.ksurl and not task_options.kickstart:
|
|
parser.error(_('You must specify --kickstart'))
|
|
if not task_options.distro:
|
|
parser.error(
|
|
_("You must specify --distro. Examples: Fedora-16, RHEL-6.4, " +
|
|
"SL-6.4 or CentOS-6.4"))
|
|
_build_image_oz(options, task_options, session, args)
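
# Illustrative example (hypothetical values) of the --config file parsed
# above: the positional arguments come from the [image-build] section, the
# comma lists (arches, repo, format) are split, and the extra sections map to
# --ova-option and --factory-parameter values.
#
#   [image-build]
#   name = fedora-cloud
#   version = 24
#   target = f24-candidate
#   install_tree = http://example.com/f24/x86_64/os/
#   arches = x86_64,i386
#   format = qcow2,raw-xz
#   distro = Fedora-24
#   ksurl = git://example.com/kickstarts.git#HEAD
#   kickstart = fedora-cloud.ks
#
#   [factory-parameters]
#   docker_cmd = [ "/bin/echo Hello World" ]
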
|
|
|
|
def _build_image(options, task_opts, session, args, img_type):
|
|
"""
|
|
A private helper function that houses common CLI code for building
|
|
images with chroot-based tools.
|
|
"""
|
|
|
|
if img_type not in ('livecd', 'appliance'):
|
|
raise koji.GenericError, 'Unrecognized image type: %s' % img_type
|
|
activate_session(session)
|
|
|
|
# Set the task's priority. Users can only lower it with --background.
|
|
priority = None
|
|
if task_opts.background:
|
|
# relative to koji.PRIO_DEFAULT; higher means a "lower" priority.
|
|
priority = 5
|
|
if _running_in_bg() or task_opts.noprogress:
|
|
callback = None
|
|
else:
|
|
callback = _progress_callback
|
|
|
|
# We do some early sanity checking of the given target.
|
|
# Kojid gets these values again later on, but we check now as a convenience
|
|
# for the user.
|
|
target = args[2]
|
|
tmp_target = session.getBuildTarget(target)
|
|
if not tmp_target:
|
|
raise koji.GenericError, _("Unknown build target: %s" % target)
|
|
dest_tag = session.getTag(tmp_target['dest_tag'])
|
|
if not dest_tag:
|
|
raise koji.GenericError, _("Unknown destination tag: %s" %
|
|
tmp_target['dest_tag_name'])
|
|
|
|
# Set the architecture
|
|
arch = koji.canonArch(args[3])
|
|
|
|
# Upload the KS file to the staging area.
|
|
# If it's a URL, it's kojid's job to go get it when it does the checkout.
|
|
ksfile = args[4]
|
|
|
|
if not task_opts.ksurl:
|
|
serverdir = _unique_path('cli-' + img_type)
|
|
session.uploadWrapper(ksfile, serverdir, callback=callback)
|
|
ksfile = os.path.join(serverdir, os.path.basename(ksfile))
|
|
print
|
|
|
|
hub_opts = {}
|
|
for opt in ('isoname', 'ksurl', 'ksversion', 'scratch', 'repo',
|
|
'release', 'skip_tag', 'vmem', 'vcpu', 'format', 'specfile'):
|
|
val = getattr(task_opts, opt, None)
|
|
if val is not None:
|
|
hub_opts[opt] = val
|
|
|
|
# finally, create the task.
|
|
task_id = session.buildImage(args[0], args[1], arch, target, ksfile,
|
|
img_type, opts=hub_opts, priority=priority)
|
|
|
|
if not options.quiet:
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if task_opts.wait or (task_opts.wait is None and not _running_in_bg()):
|
|
session.logout()
|
|
return watch_tasks(session, [task_id], quiet=options.quiet)
|
|
else:
|
|
return
|
|
|
|
def _build_image_oz(options, task_opts, session, args):
|
|
"""
|
|
A private helper function that houses common CLI code for building
|
|
images with Oz and ImageFactory
|
|
"""
|
|
activate_session(session)
|
|
|
|
# Set the task's priority. Users can only lower it with --background.
|
|
priority = None
|
|
if task_opts.background:
|
|
# relative to koji.PRIO_DEFAULT; higher means a "lower" priority.
|
|
priority = 5
|
|
if _running_in_bg() or task_opts.noprogress:
|
|
callback = None
|
|
else:
|
|
callback = _progress_callback
|
|
|
|
# We do some early sanity checking of the given target.
|
|
# Kojid gets these values again later on, but we check now as a convenience
|
|
# for the user.
|
|
target = args[2]
|
|
tmp_target = session.getBuildTarget(target)
|
|
if not tmp_target:
|
|
raise koji.GenericError, _("Unknown build target: %s" % target)
|
|
dest_tag = session.getTag(tmp_target['dest_tag'])
|
|
if not dest_tag:
|
|
raise koji.GenericError, _("Unknown destination tag: %s" %
|
|
tmp_target['dest_tag_name'])
|
|
|
|
# Set the architectures
|
|
arches = []
|
|
for arch in args[4:]:
|
|
arches.append(koji.canonArch(arch))
|
|
|
|
# Upload the KS file to the staging area.
|
|
# If it's a URL, it's kojid's job to go get it when it does the checkout.
|
|
if not task_opts.ksurl:
|
|
if not task_opts.scratch:
|
|
# only scratch builds can omit ksurl
|
|
raise koji.GenericError, _("Non-scratch builds must provide ksurl")
|
|
ksfile = task_opts.kickstart
|
|
serverdir = _unique_path('cli-image')
|
|
session.uploadWrapper(ksfile, serverdir, callback=callback)
|
|
task_opts.kickstart = os.path.join('work', serverdir,
|
|
os.path.basename(ksfile))
|
|
print
|
|
|
|
hub_opts = {}
|
|
for opt in ('ksurl', 'ksversion', 'kickstart', 'scratch', 'repo',
|
|
'release', 'skip_tag', 'specfile', 'distro', 'format',
|
|
'disk_size', 'ova_option', 'factory_parameter'):
|
|
val = getattr(task_opts, opt, None)
|
|
if val is not None:
|
|
hub_opts[opt] = val
|
|
|
|
# finally, create the task.
|
|
task_id = session.buildImageOz(args[0], args[1], arches, target, args[3],
|
|
opts=hub_opts, priority=priority)
|
|
|
|
if not options.quiet:
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if task_opts.wait or (task_opts.wait is None and not _running_in_bg()):
|
|
session.logout()
|
|
return watch_tasks(session, [task_id], quiet=options.quiet)
|
|
else:
|
|
return
|
|
|
|
def handle_win_build(options, session, args):
|
|
"""[build] Build a Windows package from source"""
|
|
# Usage & option parsing
|
|
usage = _("usage: %prog win-build [options] target URL VM")
|
|
usage += _("\n(Specify the --help global option for a list of other " +
|
|
"help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--winspec", metavar="URL",
|
|
help=_("SCM URL to retrieve the build descriptor from. " + \
|
|
"If not specified, the winspec must be in the root directory " + \
|
|
"of the source repository."))
|
|
parser.add_option("--patches", metavar="URL",
|
|
help=_("SCM URL of a directory containing patches to apply " + \
|
|
"to the sources before building"))
|
|
parser.add_option("--cpus", type="int",
|
|
help=_("Number of cpus to allocate to the build VM " + \
|
|
"(requires admin access)"))
|
|
parser.add_option("--mem", type="int",
|
|
help=_("Amount of memory (in megabytes) to allocate to the build VM " + \
|
|
"(requires admin access)"))
|
|
parser.add_option("--static-mac", action="store_true",
|
|
help=_("Retain the original MAC address when cloning the VM"))
|
|
parser.add_option("--specfile", metavar="URL",
|
|
help=_("SCM URL of a spec file fragment to use to generate wrapper RPMs"))
|
|
parser.add_option("--scratch", action="store_true",
|
|
help=_("Perform a scratch build"))
|
|
parser.add_option("--repo-id", type="int", help=_("Use a specific repo"))
|
|
parser.add_option("--skip-tag", action="store_true",
|
|
help=_("Do not attempt to tag package"))
|
|
parser.add_option("--background", action="store_true",
|
|
help=_("Run the build at a lower priority"))
|
|
parser.add_option("--wait", action="store_true",
|
|
help=_("Wait on the build, even if running in the background"))
|
|
parser.add_option("--nowait", action="store_false", dest="wait",
|
|
help=_("Don't wait on build"))
|
|
parser.add_option("--quiet", action="store_true",
|
|
help=_("Do not print the task information"), default=options.quiet)
|
|
(build_opts, args) = parser.parse_args(args)
|
|
if len(args) != 3:
|
|
parser.error(_("Exactly three arguments (a build target, a SCM URL, and a VM name) are required"))
|
|
assert False
|
|
activate_session(session)
|
|
target = args[0]
|
|
if target.lower() == "none" and build_opts.repo_id:
|
|
target = None
|
|
build_opts.skip_tag = True
|
|
else:
|
|
build_target = session.getBuildTarget(target)
|
|
if not build_target:
|
|
parser.error(_("Unknown build target: %s" % target))
|
|
dest_tag = session.getTag(build_target['dest_tag'])
|
|
if not dest_tag:
|
|
parser.error(_("Unknown destination tag: %s" % build_target['dest_tag_name']))
|
|
if dest_tag['locked'] and not build_opts.scratch:
|
|
parser.error(_("Destination tag %s is locked" % dest_tag['name']))
|
|
scmurl = args[1]
|
|
vm_name = args[2]
|
|
opts = {}
|
|
for key in ('winspec', 'patches', 'cpus', 'mem', 'static_mac',
|
|
'specfile', 'scratch', 'repo_id', 'skip_tag'):
|
|
val = getattr(build_opts, key)
|
|
if val is not None:
|
|
opts[key] = val
|
|
priority = None
|
|
if build_opts.background:
|
|
#relative to koji.PRIO_DEFAULT
|
|
priority = 5
|
|
task_id = session.winBuild(vm_name, scmurl, target, opts, priority=priority)
|
|
if not build_opts.quiet:
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if build_opts.wait or (build_opts.wait is None and not _running_in_bg()):
|
|
session.logout()
|
|
return watch_tasks(session, [task_id], quiet=build_opts.quiet)
|
|
else:
|
|
return
|
|
|
|
def handle_free_task(options, session, args):
|
|
"[admin] Free a task"
|
|
usage = _("usage: %prog free-task [options] <task-id> [<task-id> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
(options, args) = parser.parse_args(args)
|
|
activate_session(session)
|
|
tlist = []
|
|
for task_id in args:
|
|
try:
|
|
tlist.append(int(task_id))
|
|
except ValueError:
|
|
parser.error(_("task-id must be an integer"))
|
|
assert False
|
|
for task_id in tlist:
|
|
session.freeTask(task_id)
|
|
|
|
def handle_cancel(options, session, args):
|
|
"[build] Cancel tasks and/or builds"
|
|
usage = _("usage: %prog cancel [options] <task-id|build> [<task-id|build> ...]")
|
|
usage += _("\n(Specify the --help global option for a list of other help options)")
|
|
parser = OptionParser(usage=usage)
|
|
parser.add_option("--justone", action="store_true", help=_("Do not cancel subtasks"))
|
|
parser.add_option("--full", action="store_true", help=_("Full cancellation (admin only)"))
|
|
parser.add_option("--force", action="store_true", help=_("Allow subtasks with --full"))
|
|
(options, args) = parser.parse_args(args)
|
|
if len(args) == 0:
|
|
parser.error(_("You must specify at least one task id or build"))
|
|
assert False
|
|
activate_session(session)
|
|
tlist = []
|
|
blist = []
|
|
for arg in args:
|
|
try:
|
|
tlist.append(int(arg))
|
|
except ValueError:
|
|
try:
|
|
koji.parse_NVR(arg)
|
|
blist.append(arg)
|
|
except koji.GenericError:
|
|
parser.error(_("please specify only task ids (integer) or builds (n-v-r)"))
|
|
assert False
|
|
if tlist:
|
|
opts = {}
|
|
remote_fn = session.cancelTask
|
|
if options.justone:
|
|
opts['recurse'] = False
|
|
elif options.full:
|
|
remote_fn = session.cancelTaskFull
|
|
if options.force:
|
|
opts['strict'] = False
|
|
for task_id in tlist:
|
|
remote_fn(task_id, **opts)
|
|
for build in blist:
|
|
session.cancelBuild(build)
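
# Note: each argument above is treated as a task id if it parses as an
# integer; otherwise it must parse as an n-v-r (koji.parse_NVR) and is
# cancelled as a build.  For example, "1234" cancels a task, while a
# hypothetical "foo-1.0-1.fc24" cancels a build.
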
|
|
|
|
def handle_set_task_priority(options, session, args):
    "[admin] Set task priority"
    usage = _("usage: %prog set-task-priority [options] --priority=<priority> <task-id> [task-id]...")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--priority", type="int", help=_("New priority"))
    parser.add_option("--recurse", action="store_true", default=False, help=_("Change priority of child tasks as well"))
    (options, args) = parser.parse_args(args)
    if len(args) == 0:
        parser.error(_("You must specify at least one task id"))
        assert False

    if options.priority is None:
        parser.error(_("You must specify --priority"))
        assert False
    try:
        tasks = [int(a) for a in args]
    except ValueError:
        parser.error(_("Task numbers must be integers"))

    activate_session(session)

    for task_id in tasks:
        session.setTaskPriority(task_id, options.priority, options.recurse)

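# Usage sketch (hypothetical task ID): adjust a queued task and its children;
# a lower priority number is generally scheduled ahead of a higher one:
#   koji set-task-priority --priority=2 --recurse 10001
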
def _list_tasks(options, session):
    "Retrieve a list of tasks"

    callopts = {
        'state' : [koji.TASK_STATES[s] for s in ('FREE', 'OPEN', 'ASSIGNED')],
        'decode' : True,
    }

    if options.mine:
        user = session.getLoggedInUser()
        if not user:
            print "Unable to determine user"
            sys.exit(1)
        callopts['owner'] = user['id']
    if options.user:
        user = session.getUser(options.user)
        if not user:
            print "No such user: %s" % options.user
            sys.exit(1)
        callopts['owner'] = user['id']
    if options.arch:
        arches = options.arch.replace(',',' ').split()
        callopts['arch'] = arches
    if options.method:
        callopts['method'] = options.method
    if options.channel:
        chan = session.getChannel(options.channel)
        if not chan:
            print "No such channel: %s" % options.channel
            sys.exit(1)
        callopts['channel_id'] = chan['id']
    if options.host:
        host = session.getHost(options.host)
        if not host:
            print "No such host: %s" % options.host
            sys.exit(1)
        callopts['host_id'] = host['id']

    qopts = {'order' : 'priority,create_time'}
    tasklist = session.listTasks(callopts, qopts)
    tasks = dict([(x['id'], x) for x in tasklist])

    #thread the tasks
    for t in tasklist:
        if t['parent'] is not None:
            parent = tasks.get(t['parent'])
            if parent:
                parent.setdefault('children',[])
                parent['children'].append(t)
                t['sub'] = True

    return tasklist


def handle_list_tasks(options, session, args):
    "[info] Print the list of tasks"
    usage = _("usage: %prog list-tasks [options]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--mine", action="store_true", help=_("Just print your tasks"))
    parser.add_option("--user", help=_("Only tasks for this user"))
    parser.add_option("--arch", help=_("Only tasks for this architecture"))
    parser.add_option("--method", help=_("Only tasks of this method"))
    parser.add_option("--channel", help=_("Only tasks in this channel"))
    parser.add_option("--host", help=_("Only tasks for this host"))
    parser.add_option("--quiet", action="store_true", help=_("Do not display the column headers"), default=options.quiet)
    (options, args) = parser.parse_args(args)
    if len(args) != 0:
        parser.error(_("This command takes no arguments"))
        assert False

    activate_session(session)
    tasklist = _list_tasks(options, session)
    if not tasklist:
        print "(no tasks)"
        return
    if not options.quiet:
        print_task_headers()
    for t in tasklist:
        if t.get('sub'):
            # this subtask will appear under another task
            continue
        print_task_recurse(t)

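# Usage sketch (hypothetical names): these filters map directly onto the
# callopts built in _list_tasks() above:
#   koji list-tasks --mine
#   koji list-tasks --method=buildArch --arch=x86_64 --channel=default
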
def handle_set_pkg_arches(options, session, args):
    "[admin] Set the list of extra arches for a package"
    usage = _("usage: %prog set-pkg-arches [options] arches tag package [package2 ...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--force", action='store_true', help=_("Force operation"))
    (options, args) = parser.parse_args(args)
    if len(args) < 3:
        parser.error(_("Please specify an archlist, a tag, and at least one package"))
        assert False
    activate_session(session)
    arches = ' '.join(args[0].replace(',',' ').split())
    tag = args[1]
    for package in args[2:]:
        #really should implement multicall...
        session.packageListSetArches(tag,package,arches,force=options.force)

def handle_set_pkg_owner(options, session, args):
    "[admin] Set the owner for a package"
    usage = _("usage: %prog set-pkg-owner [options] owner tag package [package2 ...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--force", action='store_true', help=_("Force operation"))
    (options, args) = parser.parse_args(args)
    if len(args) < 3:
        parser.error(_("Please specify an owner, a tag, and at least one package"))
        assert False
    activate_session(session)
    owner = args[0]
    tag = args[1]
    for package in args[2:]:
        #really should implement multicall...
        session.packageListSetOwner(tag,package,owner,force=options.force)

def handle_set_pkg_owner_global(options, session, args):
    "[admin] Set the owner for a package globally"
    usage = _("usage: %prog set-pkg-owner-global [options] owner package [package2 ...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--verbose", action='store_true', help=_("List changes"))
    parser.add_option("--test", action='store_true', help=_("Test mode"))
    parser.add_option("--old-user", "--from", action="store", help=_("Only change ownership for packages belonging to this user"))
    (options, args) = parser.parse_args(args)
    if options.old_user:
        if len(args) < 1:
            parser.error(_("Please specify an owner"))
            assert False
    elif len(args) < 2:
        parser.error(_("Please specify an owner and at least one package"))
        assert False
    activate_session(session)
    owner = args[0]
    packages = args[1:]
    user = session.getUser(owner)
    if not user:
        print "No such user: %s" % owner
        return 1
    opts = {'with_dups' : True}
    old_user = None
    if options.old_user:
        old_user = session.getUser(options.old_user)
        if not old_user:
            print "No such user: %s" % options.old_user
            return 1
        opts['userID'] = old_user['id']
    to_change = []
    for package in packages:
        entries = session.listPackages(pkgID=package, **opts)
        if not entries:
            print "No data for package %s" % package
            continue
        to_change.extend(entries)
    if not packages and options.old_user:
        entries = session.listPackages(**opts)
        if not entries:
            print "No data for user %s" % old_user['name']
            return 1
        to_change.extend(entries)
    for entry in to_change:
        if user['id'] == entry['owner_id']:
            if options.verbose:
print "Preserving owner=%s for package %s in tag %s" \
|
|
% (user['name'], package, entry['tag_name'] )
|
|
        else:
            if options.test:
                print "Would have changed owner for %s in tag %s: %s -> %s" \
                        % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name'])
                continue
            if options.verbose:
                print "Changing owner for %s in tag %s: %s -> %s" \
                        % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name'])
            session.packageListSetOwner(entry['tag_id'], entry['package_name'], user['id'])

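# Usage sketch (hypothetical user and package names): preview a global
# ownership change with --test, then apply it; with --from and no package
# arguments, every package owned by that user is considered:
#   koji set-pkg-owner-global --test --from=olduser newuser
#   koji set-pkg-owner-global --verbose newuser foo bar
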
def anon_handle_watch_task(options, session, args):
    "[monitor] Track progress of particular tasks"
    usage = _("usage: %prog watch-task [options] <task id> [<task id>...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--quiet", action="store_true",
                      help=_("Do not print the task information"), default=options.quiet)
    parser.add_option("--mine", action="store_true", help=_("Just watch your tasks"))
    parser.add_option("--user", help=_("Only tasks for this user"))
    parser.add_option("--arch", help=_("Only tasks for this architecture"))
    parser.add_option("--method", help=_("Only tasks of this method"))
    parser.add_option("--channel", help=_("Only tasks in this channel"))
    parser.add_option("--host", help=_("Only tasks for this host"))
    (options, args) = parser.parse_args(args)
    selection = (options.mine or
                 options.user or
                 options.arch or
                 options.method or
                 options.channel or
                 options.host)
    if args and selection:
        parser.error(_("Selection options cannot be combined with a task list"))

    activate_session(session)
    if selection:
        tasks = [task['id'] for task in _list_tasks(options, session)]
        if not tasks:
            print "(no tasks)"
            return
    else:
        tasks = []
        for task in args:
            try:
                tasks.append(int(task))
            except ValueError:
                parser.error(_("task id must be an integer"))
        if not tasks:
            parser.error(_("at least one task id must be specified"))

    return watch_tasks(session, tasks, quiet=options.quiet)

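# Usage sketch (hypothetical task ID and user name): watch explicit tasks, or
# let the selection options feed _list_tasks() and watch whatever matches:
#   koji watch-task 10001
#   koji watch-task --user=someuser --method=build
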
def anon_handle_watch_logs(options, session, args):
    "[monitor] Watch logs in realtime"
    usage = _("usage: %prog watch-logs [options] <task id> [<task id>...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--log", help=_("Watch only a specific log"))
    (options, args) = parser.parse_args(args)
    activate_session(session)

    tasks = []
    for task in args:
        try:
            tasks.append(int(task))
        except ValueError:
            parser.error(_("task id must be an integer"))
    if not tasks:
        parser.error(_("at least one task id must be specified"))

    watch_logs(session, tasks, options)

def handle_make_task(opts, session, args):
    "[admin] Create an arbitrary task"
    usage = _("usage: %prog make-task [options] <arg1> [<arg2>...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--channel", help=_("set channel"))
    parser.add_option("--priority", help=_("set priority"))
    parser.add_option("--watch", action="store_true", help=_("watch the task"))
    parser.add_option("--arch", help=_("set arch"))
    (options, args) = parser.parse_args(args)
    activate_session(session)

    taskopts = {}
    for key in ('channel','priority','arch'):
        value = getattr(options,key,None)
        if value is not None:
            taskopts[key] = value
    task_id = session.makeTask(method=args[0],
                               arglist=map(arg_filter,args[1:]),
                               **taskopts)
    print "Created task id %d" % task_id
    if _running_in_bg() or not options.watch:
        return
    else:
        session.logout()
        return watch_tasks(session, [task_id], quiet=opts.quiet)

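# Usage sketch (hypothetical method and arguments): positional arguments pass
# through arg_filter(), so "42" arrives as an int and "True"/"None" arrive as
# the corresponding Python values:
#   koji make-task --channel=default --watch someMethod somearg 42 True
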
def handle_tag_build(opts, session, args):
    "[bind] Apply a tag to one or more builds"
    usage = _("usage: %prog tag-build [options] <tag> <pkg> [<pkg>...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--force", action="store_true", help=_("force operation"))
    parser.add_option("--nowait", action="store_true", help=_("Do not wait on task"))
    (options, args) = parser.parse_args(args)
    if len(args) < 2:
        parser.error(_("This command takes at least two arguments: a tag name/ID and one or more package n-v-r's"))
        assert False
    activate_session(session)
    tasks = []
    for pkg in args[1:]:
        task_id = session.tagBuild(args[0], pkg, force=options.force)
        #XXX - wait on task
        tasks.append(task_id)
        print "Created task %s" % task_id
    if _running_in_bg() or options.nowait:
        return
    else:
        session.logout()
        return watch_tasks(session,tasks,quiet=opts.quiet)

def handle_move_build(opts, session, args):
    "[bind] 'Move' one or more builds between tags"
    usage = _("usage: %prog move-build [options] <tag1> <tag2> <pkg> [<pkg>...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--force", action="store_true", help=_("force operation"))
    parser.add_option("--nowait", action="store_true", help=_("do not wait on tasks"))
    parser.add_option("--all", action="store_true", help=_("move all instances of a package, <pkg>'s are package names"))
    (options, args) = parser.parse_args(args)
    if len(args) < 3:
        if options.all:
            parser.error(_("This command, with --all, takes at least three arguments: two tags and one or more package names"))
        else:
            parser.error(_("This command takes at least three arguments: two tags and one or more package n-v-r's"))
        assert False
    activate_session(session)
    tasks = []
    builds = []

    if options.all:
        for arg in args[2:]:
            pkg = session.getPackage(arg)
            if not pkg:
                print _("Invalid package name %s, skipping." % arg)
                continue
            tasklist = session.moveAllBuilds(args[0], args[1], arg, options.force)
            tasks.extend(tasklist)
    else:
        for arg in args[2:]:
            build = session.getBuild(arg)
            if not build:
                print _("Invalid build %s, skipping." % arg)
                continue
            if not build in builds:
                builds.append(build)

        for build in builds:
            task_id = session.moveBuild(args[0], args[1], build['id'], options.force)
            tasks.append(task_id)
            print "Created task %s, moving %s" % (task_id, koji.buildLabel(build))
    if _running_in_bg() or options.nowait:
        return
    else:
        session.logout()
        return watch_tasks(session, tasks, quiet=opts.quiet)

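# Usage sketch (hypothetical tags and builds): move a single n-v-r, or every
# tagged build of a package with --all:
#   koji move-build f21-candidate f21-updates foo-1.0-1.fc21
#   koji move-build --all f21-candidate f21-updates foo
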
def handle_untag_build(options, session, args):
    "[bind] Remove a tag from one or more builds"
    usage = _("usage: %prog untag-build [options] <tag> <pkg> [<pkg>...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--all", action="store_true", help=_("untag all versions of the package in this tag"))
    parser.add_option("--non-latest", action="store_true", help=_("untag all versions of the package in this tag except the latest"))
    parser.add_option("-n", "--test", action="store_true", help=_("test mode"))
    parser.add_option("-v", "--verbose", action="store_true", help=_("print details"))
    parser.add_option("--force", action="store_true", help=_("force operation"))
    (options, args) = parser.parse_args(args)
    if options.non_latest and options.force:
        if len(args) < 1:
            parser.error(_("Please specify a tag"))
            assert False
    elif len(args) < 2:
        parser.error(_("This command takes at least two arguments: a tag name/ID and one or more package n-v-r's"))
        assert False
    activate_session(session)
    tag = session.getTag(args[0])
    if not tag:
        parser.error(_("Invalid tag: %s" % args[0]))
    if options.all:
        builds = []
        for pkg in args[1:]:
            builds.extend(session.listTagged(args[0], package=pkg))
    elif options.non_latest:
        if options.force and len(args) == 1:
            tagged = session.listTagged(args[0])
        else:
            tagged = []
            for pkg in args[1:]:
                tagged.extend(session.listTagged(args[0], package=pkg))
        # listTagged orders entries latest first
        seen_pkg = {}
        builds = []
        for binfo in tagged:
            if not seen_pkg.has_key(binfo['name']):
                #latest for this package
                if options.verbose:
                    print _("Leaving latest build for package %(name)s: %(nvr)s") % binfo
            else:
                builds.append(binfo)
            seen_pkg[binfo['name']] = 1
    else:
        tagged = session.listTagged(args[0])
        idx = dict([(b['nvr'], b) for b in tagged])
        builds = []
        for nvr in args[1:]:
            binfo = idx.get(nvr)
            if binfo:
                builds.append(binfo)
            else:
                # not in tag, see if it even exists
                binfo = session.getBuild(nvr)
                if not binfo:
                    print _("No such build: %s") % nvr
                else:
                    print _("Build %s not in tag %s") % (nvr, tag['name'])
                if not options.force:
                    return 1
    builds.reverse()
    for binfo in builds:
        if options.test:
            print _("would have untagged %(nvr)s") % binfo
        else:
            if options.verbose:
                print _("untagging %(nvr)s") % binfo
            session.untagBuild(tag['name'], binfo['nvr'], force=options.force)

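# Usage sketch (hypothetical tag and package): -n previews without untagging,
# and --non-latest keeps only the newest tagged build of each package:
#   koji untag-build f21-updates foo-1.0-1.fc21
#   koji untag-build -n --non-latest f21-updates foo
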
def handle_unblock_pkg(options, session, args):
    "[admin] Unblock a package in the listing for tag"
    usage = _("usage: %prog unblock-pkg [options] tag package [package2 ...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    (options, args) = parser.parse_args(args)
    if len(args) < 2:
        parser.error(_("Please specify a tag and at least one package"))
        assert False
    activate_session(session)
    tag = args[0]
    for package in args[1:]:
        #really should implement multicall...
        session.packageListUnblock(tag,package)

def anon_handle_download_build(options, session, args):
    "[download] Download a built package"
    usage = _("usage: %prog download-build [options] <n-v-r | build_id | package>")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--arch", "-a", dest="arches", metavar="ARCH", action="append", default=[],
                      help=_("Only download packages for this arch (may be used multiple times)"))
    parser.add_option("--type", help=_("Download archives of the given type, rather than rpms (maven, win, or image)"))
    parser.add_option("--latestfrom", dest="latestfrom", help=_("Download the latest build from this tag"))
    parser.add_option("--debuginfo", action="store_true", help=_("Also download -debuginfo rpms"))
parser.add_option("--task-id", action="store_true", help=_("Interperet id as a task id"))
|
|
parser.add_option("--rpm", action="store_true", help=_("Download the given rpm"))
|
|
parser.add_option("--key", help=_("Download rpms signed with the given key"))
|
|
parser.add_option("--topurl", metavar="URL", default=options.topurl,
|
|
help=_("URL under which Koji files are accessible"))
|
|
parser.add_option("-q", "--quiet", action="store_true", help=_("Do not display progress meter"),
|
|
default=options.quiet)
|
|
(suboptions, args) = parser.parse_args(args)
|
|
if len(args) < 1:
|
|
parser.error(_("Please specify a package N-V-R or build ID"))
|
|
assert False
|
|
elif len(args) > 1:
|
|
parser.error(_("Only a single package N-V-R or build ID may be specified"))
|
|
assert False
|
|
|
|
activate_session(session)
|
|
build = args[0]
|
|
|
|
if build.isdigit():
|
|
if suboptions.latestfrom:
|
|
print "--latestfrom not compatible with build IDs, specify a package name."
|
|
return 1
|
|
build = int(build)
|
|
if suboptions.task_id:
|
|
builds = session.listBuilds(taskID=build)
|
|
if not builds:
|
|
print "No associated builds for task %s" % build
|
|
return 1
|
|
build = builds[0]['build_id']
|
|
|
|
if suboptions.latestfrom:
|
|
# We want the latest build, not a specific build
|
|
try:
|
|
builds = session.listTagged(suboptions.latestfrom, latest=True, package=build, type=suboptions.type)
|
|
except koji.GenericError, data:
|
|
print "Error finding latest build: %s" % data
|
|
return 1
|
|
if not builds:
|
|
print "%s has no builds of %s" % (suboptions.latestfrom, build)
|
|
return 1
|
|
info = builds[0]
|
|
elif suboptions.rpm:
|
|
rpminfo = session.getRPM(build)
|
|
if rpminfo is None:
|
|
print "No such rpm: %s" % build
|
|
return 1
|
|
info = session.getBuild(rpminfo['build_id'])
|
|
else:
|
|
# if we're given an rpm name without --rpm, download the containing build
|
|
try:
|
|
nvra = koji.parse_NVRA(build)
|
|
rpminfo = session.getRPM(build)
|
|
build = rpminfo['build_id']
|
|
except Exception:
|
|
pass
|
|
info = session.getBuild(build)
|
|
|
|
if info is None:
|
|
print "No such build: %s" % build
|
|
return 1
|
|
|
|
if not suboptions.topurl:
|
|
print "You must specify --topurl to download files"
|
|
return 1
|
|
pathinfo = koji.PathInfo(topdir=suboptions.topurl)
|
|
|
|
urls = []
|
|
if suboptions.type:
|
|
archives = session.listArchives(buildID=info['id'], type=suboptions.type)
|
|
if not archives:
|
|
print "No %s archives available for %s" % (suboptions.type, koji.buildLabel(info))
|
|
return 1
|
|
if suboptions.type == 'maven':
|
|
for archive in archives:
|
|
url = pathinfo.mavenbuild(info) + '/' + pathinfo.mavenfile(archive)
|
|
urls.append((url, pathinfo.mavenfile(archive)))
|
|
elif suboptions.type == 'win':
|
|
for archive in archives:
|
|
url = pathinfo.winbuild(info) + '/' + pathinfo.winfile(archive)
|
|
urls.append((url, pathinfo.winfile(archive)))
|
|
elif suboptions.type == 'image':
|
|
if not suboptions.topurl:
|
|
print "You must specify --topurl to download images"
|
|
return 1
|
|
pi = koji.PathInfo(topdir=suboptions.topurl)
|
|
for archive in archives:
|
|
url = '%s/%s' % (pi.imagebuild(info), archive['filename'])
|
|
urls.append((url, archive['filename']))
|
|
else:
|
|
# can't happen
|
|
assert False
|
|
else:
|
|
arches = suboptions.arches
|
|
if len(arches) == 0:
|
|
arches = None
|
|
if suboptions.rpm:
|
|
rpms = [rpminfo]
|
|
else:
|
|
rpms = session.listRPMs(buildID=info['id'], arches=arches)
|
|
if not rpms:
|
|
if arches:
|
|
print "No %s packages available for %s" % (" or ".join(arches), koji.buildLabel(info))
|
|
else:
|
|
print "No packages available for %s" % koji.buildLabel(info)
|
|
return 1
|
|
for rpm in rpms:
|
|
if not suboptions.debuginfo and koji.is_debuginfo(rpm['name']):
|
|
continue
|
|
if suboptions.key:
|
|
fname = pathinfo.signed(rpm, suboptions.key)
|
|
else:
|
|
fname = pathinfo.rpm(rpm)
|
|
url = pathinfo.build(info) + '/' + fname
|
|
urls.append((url, os.path.basename(fname)))
|
|
|
|
if suboptions.quiet:
|
|
pg = None
|
|
else:
|
|
pg = progress.TextMeter()
|
|
|
|
for url, relpath in urls:
|
|
if '/' in relpath:
|
|
koji.ensuredir(os.path.dirname(relpath))
|
|
grabber.urlgrab(url, filename=relpath, progress_obj=pg, text=relpath)
|
|
|
|
|
|
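# Usage sketch (hypothetical values): --topurl must point at the public koji
# file tree (normally taken from the client configuration):
#   koji download-build --arch=x86_64 foo-1.0-1.fc21
#   koji download-build --topurl=https://koji.example.com/kojifiles --type=maven 12345
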
def anon_handle_download_logs(options, session, args):
    "[download] Download logs for a package"

    FAIL_LOG = "task_failed.log"
    usage = _("usage: %prog download-logs [options] <task-id> [<task-id> ...]")
    usage += _("\n %prog download-logs [options] --nvr <n-v-r> [<n-v-r> ...]")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    usage += _("\nCreates special log with name %s if task failed." % FAIL_LOG)
    parser = OptionParser(usage=usage)
    parser.add_option("-r", "--recurse", action="store_true",
                      help=_("Process children of this task as well"))
    parser.add_option("--nvr", action="store_true",
                      help=_("Get logs from n-v-r"))
    parser.add_option("-m", "--match", action="append", metavar="PATTERN",
                      help=_("Get only log matching PATTERN. May be used multiple times."))
    parser.add_option("-c", "--continue", action="store_true", dest="cont",
                      help=_("Continue previous download"))
    parser.add_option("-d", "--dir", metavar="DIRECTORY", default='kojilogs',
                      help=_("Write logs to DIRECTORY"))
    (suboptions, args) = parser.parse_args(args)

    if len(args) < 1:
        parser.error(_("Please specify at least one task id or n-v-r"))

    def write_fail_log(task_log_dir, task_id):
        """Gets output only from failed tasks"""
        try:
            result = session.getTaskResult(task_id)
            # with current code, failed task results should always be faults,
            # but that could change in the future
            content = pprint.pformat(result)
        except koji.GenericError:
            etype, e = sys.exc_info()[:2]
            content = ''.join(traceback.format_exception_only(etype, e))
        full_filename = os.path.normpath(os.path.join(task_log_dir, FAIL_LOG))
        koji.ensuredir(os.path.dirname(full_filename))
        sys.stdout.write("Writing: %s\n" % full_filename)
        file(full_filename, 'w').write(content)

    def download_log(task_log_dir, task_id, filename, blocksize=102400):
        #Create directories only if there is any log file to write to
        full_filename = os.path.normpath(os.path.join(task_log_dir, filename))
        koji.ensuredir(os.path.dirname(full_filename))
        contents = 'IGNORE ME!'
        if suboptions.cont and os.path.exists(full_filename):
            sys.stdout.write("Continuing: %s\n" % full_filename)
            fd = file(full_filename, 'ab')
            offset = fd.tell()
        else:
            sys.stdout.write("Downloading: %s\n" % full_filename)
            fd = file(full_filename, 'wb')
            offset = 0
        try:
            while contents:
                contents = session.downloadTaskOutput(task_id, filename, offset, blocksize)
                offset += len(contents)
                if contents:
                    fd.write(contents)
        finally:
            fd.close()

    def save_logs(task_id, match, parent_dir='.', recurse=True):
        assert task_id == int(task_id), "Task id must be number: %r" % task_id
        task_info = session.getTaskInfo(task_id)
        if task_info is None:
            error(_("No such task id: %i" % task_id))
        files = session.listTaskOutput(task_id)
        logs = []
        for filename in files:
            if not filename.endswith(".log"):
                continue
            if match and not koji.util.multi_fnmatch(filename, match):
                continue
            logs.append(filename)

        task_log_dir = os.path.join(parent_dir,
                                    "%s-%s" % (task_info["arch"], task_id))

        count = 0
        state = koji.TASK_STATES[task_info['state']]
        if state == 'FAILED':
            if not match or koji.util.multi_fnmatch(FAIL_LOG, match):
                write_fail_log(task_log_dir, task_id)
                count += 1
        elif state not in ['CLOSED', 'CANCELED']:
            sys.stderr.write(_("Warning: task %s is %s\n") % (task_id, state))

        for log_filename in logs:
            download_log(task_log_dir, task_id, log_filename)
            count += 1

        if count == 0 and not recurse:
            sys.stderr.write(_("No logs found for task %i. Perhaps try --recurse?\n") % task_id)

        if recurse:
            child_tasks = session.getTaskChildren(task_id)
            for child_task in child_tasks:
                save_logs(child_task['id'], match, task_log_dir, recurse)

    for arg in args:
        if suboptions.nvr:
            suboptions.recurse = True
            binfo = session.getBuild(arg)
            if binfo is None:
                error(_("There is no build with n-v-r: %s" % arg))
            assert binfo['task_id'], binfo
            task_id = binfo['task_id']
            sys.stdout.write("Using task ID: %s\n" % task_id)
        else:
            try:
                task_id = int(arg)
            except ValueError:
                error(_("Task id must be a number: %r") % arg)
                continue
        save_logs(task_id, suboptions.match, suboptions.dir, suboptions.recurse)


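# Usage sketch (hypothetical task ID and n-v-r): logs are written under --dir
# (default "kojilogs"), one subdirectory per <arch>-<task id>:
#   koji download-logs -r 10001
#   koji download-logs --nvr foo-1.0-1.fc21 -m build.log
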
def anon_handle_download_task(options, session, args):
    "[download] Download the output of a build task"
    usage = _("usage: %prog download-task <task_id>")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--arch", dest="arches", metavar="ARCH", action="append", default=[],
                      help=_("Only download packages for this arch (may be used multiple times)"))
    parser.add_option("--logs", dest="logs", action="store_true", default=False, help=_("Also download build logs"))

    (suboptions, args) = parser.parse_args(args)
    if len(args) == 0:
        parser.error(_("Please specify a task ID"))
    elif len(args) > 1:
        parser.error(_("Only one task ID may be specified"))

    base_task_id = int(args.pop())
    if len(suboptions.arches) > 0:
        suboptions.arches = ",".join(suboptions.arches).split(",")

    # get downloadable tasks

    base_task = session.getTaskInfo(base_task_id)

    check_downloadable = lambda task: task["method"] == "buildArch"
    downloadable_tasks = []

    if check_downloadable(base_task):
        downloadable_tasks.append(base_task)
    else:
        subtasks = session.getTaskChildren(base_task_id)
        downloadable_tasks.extend(filter(check_downloadable, subtasks))

    # get files for download

    downloads = []

    for task in downloadable_tasks:
        files = session.listTaskOutput(task["id"])
        for filename in files:
            if filename.endswith(".log") and suboptions.logs:
                # rename logs, they would conflict
                new_filename = "%s.%s.log" % (filename[:-len(".log")], task["arch"])
                downloads.append((task, filename, new_filename))
                continue

            if filename.endswith(".rpm"):
                filearch = filename.split(".")[-2]
                if len(suboptions.arches) == 0 or filearch in suboptions.arches:
                    downloads.append((task, filename, filename))
                continue

    if len(downloads) == 0:
        error(_("No files for download found."))

    required_tasks = {}
    for (task, nop, nop) in downloads:
        if task["id"] not in required_tasks:
            required_tasks[task["id"]] = task

    for task_id in required_tasks:
        if required_tasks[task_id]["state"] != koji.TASK_STATES.get("CLOSED"):
            if task_id == base_task_id:
                error(_("Task %d has not finished yet.") % task_id)
            else:
                error(_("Child task %d has not finished yet.") % task_id)

    # perform the download

    number = 0
    for (task, filename, new_filename) in downloads:
        number += 1
        print _("Downloading [%d/%d]: %s") % (number, len(downloads), new_filename)
        output_file = open(new_filename, "wb")
        output_file.write(session.downloadTaskOutput(task["id"], filename))
        output_file.close()

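# Usage sketch (hypothetical task ID): only buildArch output is considered, so
# pointing this at a parent build task downloads the rpms from its children:
#   koji download-task --arch=x86_64 --logs 10001
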
def anon_handle_wait_repo(options, session, args):
    "[monitor] Wait for a repo to be regenerated"
    usage = _("usage: %prog wait-repo [options] <tag>")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--build", metavar="NVR", dest="builds", action="append", default=[],
                      help=_("Check that the given build is in the newly-generated repo (may be used multiple times)"))
    parser.add_option("--target", action="store_true", help=_("Interpret the argument as a build target name"))
    parser.add_option("--timeout", type="int", help=_("Amount of time to wait (in minutes) before giving up (default: 120)"), default=120)
    parser.add_option("--quiet", action="store_true", help=_("Suppress output, success or failure will be indicated by the return value only"), default=options.quiet)
    (suboptions, args) = parser.parse_args(args)

    start = time.time()

    builds = [koji.parse_NVR(build) for build in suboptions.builds]
    if len(args) < 1:
        parser.error(_("Please specify a tag name"))
    elif len(args) > 1:
        parser.error(_("Only one tag may be specified"))

    tag = args[0]

    if suboptions.target:
        target_info = session.getBuildTarget(tag)
        if not target_info:
            parser.error("Invalid build target: %s" % tag)
        tag = target_info['build_tag_name']
        tag_id = target_info['build_tag']
    else:
        tag_info = session.getTag(tag)
        if not tag_info:
            parser.error("Invalid tag: %s" % tag)
        targets = session.getBuildTargets(buildTagID=tag_info['id'])
        if not targets:
            print "%(name)s is not a build tag for any target" % tag_info
            targets = session.getBuildTargets(destTagID=tag_info['id'])
            if targets:
                maybe = {}.fromkeys([t['build_tag_name'] for t in targets])
                maybe = maybe.keys()
                maybe.sort()
                print "Suggested tags: %s" % ', '.join(maybe)
            return 1
        tag_id = tag_info['id']


    for nvr in builds:
        data = session.getLatestBuilds(tag_id, package=nvr["name"])
        if len(data) == 0:
            print "Warning: package %s is not in tag %s" % (nvr["name"], tag)
        else:
            present_nvr = [x["nvr"] for x in data][0]
            if present_nvr != "%s-%s-%s" % (nvr["name"], nvr["version"], nvr["release"]):
                print "Warning: nvr %s-%s-%s is not current in tag %s\n latest build in %s is %s" % (nvr["name"], nvr["version"], nvr["release"], tag, tag, present_nvr)

    last_repo = None
    repo = session.getRepo(tag_id)

    while True:
        if builds and repo and repo != last_repo:
            if koji.util.checkForBuilds(session, tag_id, builds, repo['create_event'], latest=True):
                if not suboptions.quiet:
                    print "Successfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag)
                return

        if (time.time() - start) >= (suboptions.timeout * 60.0):
            if not suboptions.quiet:
                if builds:
                    print "Unsuccessfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag)
                else:
                    print "Unsuccessfully waited %s for a new %s repo" % (koji.util.duration(start), tag)
            return 1

        time.sleep(60)
        last_repo = repo
        repo = session.getRepo(tag_id)

        if not builds:
            if repo != last_repo:
                if not suboptions.quiet:
                    print "Successfully waited %s for a new %s repo" % (koji.util.duration(start), tag)
                return

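# Usage sketch (hypothetical target and build): poll once a minute until a new
# repo appears, or until the given build shows up in it:
#   koji wait-repo --target f21-candidate --build=foo-1.0-1.fc21 --timeout=60
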
_search_types = ('package', 'build', 'tag', 'target', 'user', 'host', 'rpm', 'maven', 'win')

def handle_regen_repo(options, session, args):
    "[admin] Force a repo to be regenerated"
    usage = _("usage: %prog regen-repo [options] <tag>")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--target", action="store_true", help=_("Interpret the argument as a build target name"))
parser.add_option("--nowait", action="store_true", help=_("Don't wait on for regen to finish"))
|
|
parser.add_option("--debuginfo", action="store_true", help=_("Include debuginfo rpms in repo"))
|
|
parser.add_option("--source", "--src", action="store_true", help=_("Include source rpms in the repo"))
|
|
(suboptions, args) = parser.parse_args(args)
|
|
if len(args) == 0:
|
|
parser.error(_("A tag name must be specified"))
|
|
assert False
|
|
elif len(args) > 1:
|
|
if suboptions.target:
|
|
parser.error(_("Only a single target may be specified"))
|
|
else:
|
|
parser.error(_("Only a single tag name may be specified"))
|
|
assert False
|
|
activate_session(session)
|
|
tag = args[0]
|
|
repo_opts = {}
|
|
if suboptions.target:
|
|
info = session.getBuildTarget(tag)
|
|
if not info:
|
|
parser.error(_("No matching build target: " + tag))
|
|
assert False
|
|
tag = info['build_tag_name']
|
|
info = session.getTag(tag)
|
|
else:
|
|
info = session.getTag(tag)
|
|
if not info:
|
|
parser.error(_("No matching tag: " + tag))
|
|
assert False
|
|
tag = info['name']
|
|
targets = session.getBuildTargets(buildTagID=info['id'])
|
|
if not targets:
|
|
print "Warning: %s is not a build tag" % tag
|
|
if not info['arches']:
|
|
print "Warning: tag %s has an empty arch list" % info['name']
|
|
if suboptions.debuginfo:
|
|
repo_opts['debuginfo'] = True
|
|
if suboptions.source:
|
|
repo_opts['src'] = True
|
|
task_id = session.newRepo(tag, **repo_opts)
|
|
print "Regenerating repo for tag: %s" % tag
|
|
print "Created task:", task_id
|
|
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
|
|
if _running_in_bg() or suboptions.nowait:
|
|
return
|
|
else:
|
|
session.logout()
|
|
return watch_tasks(session, [task_id], quiet=options.quiet)
|
|
|
|
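# Usage sketch (hypothetical tag name): regenerate the buildroot repo for a
# build tag, optionally including debuginfo and source rpms:
#   koji regen-repo f21-build
#   koji regen-repo --debuginfo --source f21-build
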
def anon_handle_search(options, session, args):
    "[search] Search the system"
    usage = _("usage: %prog search [options] search_type pattern")
    usage += _('\nAvailable search types: %s') % ', '.join(_search_types)
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("-r", "--regex", action="store_true", help=_("treat pattern as regex"))
    parser.add_option("--exact", action="store_true", help=_("exact matches only"))
    (options, args) = parser.parse_args(args)
    if not args:
        parser.print_help()
        return
    type = args[0]
    if type not in _search_types:
        parser.error(_("Unknown search type: %s") % type)
    pattern = args[1]
    matchType = 'glob'
    if options.regex:
        matchType = 'regexp'
    elif options.exact:
        matchType = 'exact'
    data = session.search(pattern, type, matchType)
    for row in data:
        print row['name']

def handle_moshimoshi(options, session, args):
    "[misc] Introduce yourself"
    usage = _("usage: %prog moshimoshi [options]")
    parser = OptionParser(usage=usage)
    (opts, args) = parser.parse_args(args)
    if len(args) != 0:
        parser.error(_("This command takes no arguments"))
        assert False
    activate_session(session)
    u = session.getLoggedInUser()
    if not u:
        print "Not authenticated"
        u = {'name' : 'anonymous user'}
    print "%s, %s!" % (random.choice(greetings), u["name"],)
    print ""
    print "You are using the hub at %s" % (session.baseurl,)
    authtype = u.get('authtype', getattr(session, 'authtype', None))
    if authtype == koji.AUTHTYPE_NORMAL:
        print "Authenticated via password"
    elif authtype == koji.AUTHTYPE_KERB:
        print "Authenticated via Kerberos principal %s" % u["krb_principal"]
    elif authtype == koji.AUTHTYPE_SSL:
        print "Authenticated via client certificate %s" % options.cert

def handle_runroot(options, session, args):
    "[admin] Run a command in a buildroot"
    usage = _("usage: %prog runroot [options] <tag> <arch> <command>")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.disable_interspersed_args()
    parser.add_option("-p", "--package", action="append", default=[], help=_("make sure this package is in the chroot"))
    parser.add_option("-m", "--mount", action="append", default=[], help=_("mount this directory read-write in the chroot"))
    parser.add_option("--skip-setarch", action="store_true", default=False,
                      help=_("bypass normal setarch in the chroot"))
    parser.add_option("-w", "--weight", type='int', help=_("set task weight"))
    parser.add_option("--channel-override", help=_("use a non-standard channel"))
    parser.add_option("--task-id", action="store_true", default=False,
                      help=_("Print the ID of the runroot task"))
    parser.add_option("--use-shell", action="store_true", default=False,
                      help=_("Run command through a shell, otherwise uses exec"))
    parser.add_option("--repo-id", type="int", help=_("ID of the repo to use"))

    (opts, args) = parser.parse_args(args)

    if len(args) < 3:
        parser.error(_("Incorrect number of arguments"))
    activate_session(session)
    tag = args[0]
    arch = args[1]
    if opts.use_shell:
        # everything must be correctly quoted
        command = ' '.join(args[2:])
    else:
        command = args[2:]
    try:
        task_id = session.runroot(tag, arch, command,
                                  channel=opts.channel_override,
                                  packages=opts.package, mounts=opts.mount,
                                  repo_id=opts.repo_id,
                                  skip_setarch=opts.skip_setarch,
                                  weight=opts.weight)
    except koji.GenericError, e:
        if 'Invalid method' in str(e):
            print "* The runroot plugin appears to not be installed on the",
            print "koji hub. Please contact the administrator."
        raise
    if opts.task_id:
        print task_id

    try:
        while True:
            # wait for the task to finish
            if session.taskFinished(task_id):
                break
            time.sleep(options.poll_interval)
    except KeyboardInterrupt:
        # this is probably the right thing to do here
        print "User interrupt: canceling runroot task"
        session.cancelTask(task_id)
        return
    output = None
    if "runroot.log" in session.listTaskOutput(task_id):
        output = session.downloadTaskOutput(task_id, "runroot.log")
    if output:
        sys.stdout.write(output)
    info = session.getTaskInfo(task_id)
    if info is None:
        sys.exit(1)
    state = koji.TASK_STATES[info['state']]
    if state in ('FAILED', 'CANCELED'):
        sys.exit(1)
    return


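# Usage sketch (hypothetical tag, arch, and paths): requires the runroot plugin
# on the hub; with --use-shell the command is passed to a shell, so quote it:
#   koji runroot --package=rpm-build f21-build x86_64 ls /builddir
#   koji runroot --use-shell f21-build x86_64 'rpm -qa | sort'
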
def handle_help(options, session, args):
    "[info] List available commands"
    usage = _("usage: %prog help <category> ...")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    # the --admin opt is for backwards compatibility. It is equivalent to: koji help admin
    parser.add_option("--admin", action="store_true", help=optparse.SUPPRESS_HELP)

    (options, args) = parser.parse_args(args)

    chosen = set(args)
    if options.admin:
        chosen.add('admin')
    avail = set(categories.keys() + ['all'])
    unavail = chosen - avail
    for arg in unavail:
        print "No such help category: %s" % arg

    if not chosen:
        list_commands()
    else:
        list_commands(chosen)


def list_commands(categories_chosen=None):
    if categories_chosen is None or "all" in categories_chosen:
        categories_chosen = categories.keys()
    else:
        # copy list since we're about to modify it
        categories_chosen = list(categories_chosen)
    categories_chosen.sort()
    handlers = []
    for name,value in globals().items():
        if name.startswith('handle_'):
            alias = name.replace('handle_','')
            alias = alias.replace('_','-')
            handlers.append((alias,value))
        elif name.startswith('anon_handle_'):
            alias = name.replace('anon_handle_','')
            alias = alias.replace('_','-')
            handlers.append((alias,value))
    handlers.sort()
    print _("Available commands:")
    for category in categories_chosen:
        print _("\n%s:" % categories[category])
        for alias,handler in handlers:
            desc = handler.__doc__
            if desc.startswith('[%s] ' % category):
                desc = desc[len('[%s] ' % category):]
            elif category != 'misc' or desc.startswith('['):
                continue
            print " %-25s %s" % (alias, desc)

    print("%s" % get_epilog_str().rstrip("\n"))

def error(msg=None, code=1):
    if msg:
        sys.stderr.write(msg + "\n")
        sys.stderr.flush()
    sys.exit(code)

def warn(msg):
    sys.stderr.write(msg + "\n")
    sys.stderr.flush()

def has_krb_creds():
    if not sys.modules.has_key('krbV'):
        return False
    try:
        ctx = krbV.default_context()
        ccache = ctx.default_ccache()
        princ = ccache.principal()
        return True
    except krbV.Krb5Error:
        return False

def activate_session(session):
    """Test the session and log in if applicable"""
    global options
    if options.authtype == "noauth" or options.noauth:
        #skip authentication
        pass
    elif options.authtype == "ssl" or os.path.isfile(options.cert) and options.authtype is None:
        # authenticate using SSL client cert
        session.ssl_login(options.cert, None, options.serverca, proxyuser=options.runas)
    elif options.authtype == "password" or options.user and options.authtype is None:
        # authenticate using user/password
        session.login()
    elif options.authtype == "kerberos" or has_krb_creds() and options.authtype is None:
        try:
            if options.keytab and options.principal:
                session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)
            else:
                session.krb_login(proxyuser=options.runas)
        except krbV.Krb5Error, e:
            error(_("Kerberos authentication failed: %s (%s)") % (e.args[1], e.args[0]))
        except socket.error, e:
            warn(_("Could not connect to Kerberos authentication service: %s") % e.args[1])
    if not options.noauth and options.authtype != "noauth" and not session.logged_in:
        error(_("Unable to log in, no authentication methods available"))
    ensure_connection(session)
    if options.debug:
        print "successfully connected to hub"

if __name__ == "__main__":
|
|
global options
|
|
options, command, args = get_options()
|
|
|
|
logger = logging.getLogger("koji")
|
|
handler = logging.StreamHandler(sys.stderr)
|
|
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s'))
|
|
handler.setLevel(logging.DEBUG)
|
|
logger.addHandler(handler)
|
|
if options.debug:
|
|
logger.setLevel(logging.DEBUG)
|
|
elif options.quiet:
|
|
logger.setLevel(logging.ERROR)
|
|
else:
|
|
logger.setLevel(logging.WARN)
|
|
|
|
session_opts = {}
|
|
for k in ('user', 'password', 'krbservice', 'debug_xmlrpc', 'debug', 'max_retries',
|
|
'retry_interval', 'offline_retry', 'offline_retry_interval',
|
|
'anon_retry', 'keepalive', 'timeout', 'use_fast_upload',
|
|
'upload_blocksize'):
|
|
value = getattr(options,k)
|
|
if value is not None:
|
|
session_opts[k] = value
|
|
session = koji.ClientSession(options.server,session_opts)
|
|
rv = 0
|
|
try:
|
|
rv = locals()[command].__call__(options, session, args)
|
|
if not rv:
|
|
rv = 0
|
|
except KeyboardInterrupt:
|
|
pass
|
|
except SystemExit:
|
|
rv = 1
|
|
except:
|
|
if options.debug:
|
|
raise
|
|
else:
|
|
exctype, value = sys.exc_info()[:2]
|
|
rv = 1
|
|
print "%s: %s" % (exctype.__name__, value)
|
|
try:
|
|
session.logout()
|
|
except:
|
|
pass
|
|
sys.exit(rv)
|