Merge branch 'vm'

Conflicts:
	cli/koji
This commit is contained in:
Mike McLean 2010-10-20 17:36:24 -04:00
commit 4979c16937
36 changed files with 4689 additions and 2216 deletions

View file

@ -1,6 +1,6 @@
NAME=koji
SPECFILE = $(firstword $(wildcard *.spec))
SUBDIRS = hub builder koji cli docs util www plugins
SUBDIRS = hub builder koji cli docs util www plugins vm
ifdef DIST
DIST_DEFINES := --define "dist $(DIST)"

View file

@ -1,15 +1,12 @@
SUBDIRS = lib
BINFILES = kojid
LIBEXECFILES = mergerepos
PYFILES = $(wildcard *.py)
_default:
@echo "nothing to make. try make install"
clean:
rm -f *.o *.so *.pyc *~
for d in $(SUBDIRS); do make -s -C $$d clean; done
install:
@ -35,6 +32,4 @@ install:
mkdir -p $(DESTDIR)/etc/kojid
install -p -m 644 kojid.conf $(DESTDIR)/etc/kojid/kojid.conf
for d in $(SUBDIRS); do make DESTDIR=`cd $(DESTDIR); pwd` \
-C $$d install; [ $$? = 0 ] || exit 1; done

File diff suppressed because it is too large Load diff

View file

@ -1,20 +0,0 @@
# Install-only Makefile for the koji-builder python library modules.
# (Indentation/tabs not visible in this diff rendering — recipes must be
# tab-indented in the real file.)
PYTHON=python
# Installation root for the builder's private module directory.
SHAREDIR = $(DESTDIR)/usr/share/koji-builder
MODDIR = $(SHAREDIR)/lib
# All python sources in this directory are installed.
PYFILES = $(wildcard *.py)
# First 3 chars of sys.version, e.g. "2.4" (python 2 print syntax).
PYVER := $(shell $(PYTHON) -c 'import sys; print "%.3s" %(sys.version)')
_default:
@echo "nothing to make. try make install"
clean:
rm -f *.o *.so *.pyc *~
install:
mkdir -p $(MODDIR)
for p in $(PYFILES) ; do \
install -p -m 644 $$p $(MODDIR)/$$p; \
done
# Byte-compile the installed modules in place.
# NOTE(review): $(PYDIR) is not defined in this Makefile — presumably
# inherited/empty; confirm the compileall ddir argument is intentional.
$(PYTHON) -c "import compileall; compileall.compile_dir('$(MODDIR)', 1, '$(PYDIR)', 1)"

View file

@ -1,224 +0,0 @@
# Python module
# tasks handlers for the koji build daemon
# Copyright (c) 2008 Red Hat
#
# Koji is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 2.1 of the License.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors:
# Mike McLean <mikem@redhat.com>
import koji
import logging
import os
import signal
import urllib2
class BaseTaskHandler(object):
"""The base class for task handlers

Each task handler is a class, a new instance of which is created
to handle each task. Subclasses list the task method names they
implement in Methods and override handler().
"""
# list of methods the class can handle
Methods = []
# Options:
# Foreground handlers run inside the task manager process itself
# (see setManager) rather than in a forked subprocess.
Foreground = False
def __init__(self, id, method, params, session, options, workdir=None):
# id: task id; method: task method name (must appear in Methods);
# params: raw task request (may carry named params encoded via
# koji.encode_args); session: hub session; options: daemon options.
self.id = id #task id
if method not in self.Methods:
raise koji.GenericError, 'method "%s" is not supported' % method
self.method = method
# handle named parameters
self.params,self.opts = koji.decode_args(*params)
self.session = session
self.options = options
if workdir is None:
# default workdir: daemon workdir + per-task relative path
workdir = "%s/%s" % (options.workdir, koji.pathinfo.taskrelpath(id))
self.workdir = workdir
self.logger = logging.getLogger("koji.build.BaseTaskHandler")
def setManager(self,manager):
"""Set the manager attribute

This is only used for foreground tasks to give them access
to their task manager. No-op for background (forked) tasks.
"""
if not self.Foreground:
return
self.manager = manager
def handler(self):
"""(abstract) the handler for the task."""
raise NotImplementedError
def run(self):
"""Execute the task

Creates a fresh workdir, invokes handler() with the decoded
positional and named parameters, and always removes the workdir.
"""
self.createWorkdir()
try:
return self.handler(*self.params,**self.opts)
finally:
self.removeWorkdir()
# default task weight; subclasses may override (see weight())
_taskWeight = 1.0
def weight(self):
"""Return the weight of the task.
This is run by the taskmanager before the task is run to determine
the weight of the task. The weight is an abstract measure of the
total load the task places on the system while running.
A task may set _taskWeight for a constant weight different from 1, or
override this function for more complicated situations.
Note that task weight is partially ignored while the task is sleeping.
"""
return getattr(self,'_taskWeight',1.0)
def createWorkdir(self):
# Recreate the workdir from scratch (remove any stale contents first).
if self.workdir is None:
return
self.removeWorkdir()
os.makedirs(self.workdir)
def removeWorkdir(self):
if self.workdir is None:
return
# safe_rmtree stays on one filesystem (-xdev); strict=True raises on failure
safe_rmtree(self.workdir, unmount=False, strict=True)
#os.spawnvp(os.P_WAIT, 'rm', ['rm', '-rf', self.workdir])
def wait(self, subtasks=None, all=False, failany=False):
"""Wait on subtasks

subtasks is a list of integers (or an integer). If more than one subtask
is specified, then the default behavior is to return when any of those
tasks complete. However, if all is set to True, then it waits for all of
them to complete. If all and failany are both set to True, then each
finished task will be checked for failure, and a failure will cause all
of the unfinished tasks to be cancelled.
special values:
subtasks = None specify all subtasks
Implementation notes:
The build daemon forks all tasks as separate processes. This function
uses signal.pause to sleep. The main process watches subtasks in
the database and will send the subprocess corresponding to the
subtask a SIGUSR2 to wake it up when subtasks complete.
"""
if isinstance(subtasks,int):
# allow single integer w/o enclosing list
subtasks = [subtasks]
self.session.host.taskSetWait(self.id,subtasks)
self.logger.debug("Waiting on %r" % subtasks)
while True:
finished, unfinished = self.session.host.taskWait(self.id)
if len(unfinished) == 0:
#all done
break
elif len(finished) > 0:
if all:
if failany:
failed = False
for task in finished:
try:
result = self.session.getTaskResult(task)
# NOTE(review): Fault is presumably xmlrpclib.Fault, imported
# elsewhere in this module (not visible here) — confirm.
except (koji.GenericError, Fault), task_error:
self.logger.info("task %s failed or was canceled" % task)
failed = True
break
if failed:
self.logger.info("at least one task failed or was canceled, cancelling unfinished tasks")
self.session.cancelTaskChildren(self.id)
# reraise the original error now, rather than waiting for
# an error in taskWaitResults()
raise task_error
else:
# at least one done
break
# signal handler set by TaskManager.forkTask
self.logger.debug("Pausing...")
signal.pause()
# main process will wake us up with SIGUSR2
self.logger.debug("...waking up")
self.logger.debug("Finished waiting")
return dict(self.session.host.taskWaitResults(self.id,subtasks))
def getUploadDir(self):
# Hub-side upload directory for this task's output files.
return koji.pathinfo.taskrelpath(self.id)
def uploadFile(self, filename, remoteName=None):
"""Upload the file with the given name to the task output directory
on the hub."""
# Only upload files with content
if os.path.isfile(filename) and os.stat(filename).st_size > 0:
self.session.uploadWrapper(filename, self.getUploadDir(), remoteName)
def localPath(self, relpath):
"""Return a local path to a remote file.

If the file is on an nfs mount, use that, otherwise download a copy"""
if self.options.topurl:
self.logger.debug("Downloading %s", relpath)
url = "%s/%s" % (self.options.topurl, relpath)
fsrc = urllib2.urlopen(url)
# cache the download under <workdir>/local/<relpath>
fn = "%s/local/%s" % (self.workdir, relpath)
os.makedirs(os.path.dirname(fn))
fdst = file(fn, 'w')
# NOTE(review): shutil is used here but not among the visible imports
# at the top of this file — confirm it is imported elsewhere.
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
else:
# topdir is an nfs mount of the hub's storage; use the file in place
fn = "%s/%s" % (self.options.topdir, relpath)
return fn
#XXX - not the right place for this
#XXX - not as safe as we want
def safe_rmtree(path, unmount=False, strict=True):
"""Recursively remove path without crossing filesystem boundaries.

Uses find -xdev via os.system: first deletes non-directories, then
removes directories bottom-up. If unmount is True, umount_all() is
called on path first. On failure: raises koji.GenericError when
strict is True, otherwise returns the nonzero os.system() status.
Returns 0 (or the last status) on success; returns None if path
does not exist.

NOTE(review): path is interpolated into a shell command inside single
quotes without escaping — a path containing a single quote would break
the command; callers should only pass trusted paths.
"""
logger = logging.getLogger("koji.build")
#safe remove: with -xdev the find cmd will not cross filesystems
# (though it will cross bind mounts from the same filesystem)
if not os.path.exists(path):
logger.debug("No such path: %s" % path)
return
if unmount:
# umount_all is defined elsewhere in this module (not visible here)
umount_all(path)
#first rm -f non-directories
logger.debug('Scrubbing files in %s' % path)
rv = os.system("find '%s' -xdev \\! -type d -print0 |xargs -0 rm -f" % path)
msg = 'file removal failed (code %r) for %s' % (rv,path)
if rv != 0:
logger.warn(msg)
if strict:
raise koji.GenericError, msg
else:
return rv
#then rmdir directories
#with -depth, we start at the bottom and work up
logger.debug('Scrubbing directories in %s' % path)
rv = os.system("find '%s' -xdev -depth -type d -print0 |xargs -0 rmdir" % path)
msg = 'dir removal failed (code %r) for %s' % (rv,path)
if rv != 0:
logger.warn(msg)
if strict:
raise koji.GenericError, msg
return rv

194
cli/koji
View file

@ -2724,26 +2724,46 @@ def anon_handle_buildinfo(options, session, args):
if info is None:
print "No such build: %s\n" % build
continue
task = None
if info['task_id']:
task = session.getTaskInfo(info['task_id'], request=True)
taglist = []
for tag in session.listTags(build):
taglist.append(tag['name'])
if info['epoch'] is None:
info['epoch'] = ""
else:
info['epoch'] = str(info['epoch']) + ":"
info['name'] = info['package_name']
info['arch'] = 'src'
info['state'] = koji.BUILD_STATES[info['state']]
rpms = session.listRPMs(buildID=info['id'])
print "BUILD: %(name)s-%(version)s-%(release)s [%(id)d]" % info
print "State: %(state)s" % info
print "Built by: %(owner_name)s" % info
print "Task: %(task_id)s" % info
if task:
print "Task: %s %s" % (task['id'], koji.taskLabel(task))
else:
print "Task: none"
print "Finished: %s" % koji.formatTimeLong(info['completion_time'])
maven_info = session.getMavenBuild(info['id'])
if maven_info:
print "Maven groupId: %s" % maven_info['group_id']
print "Maven artifactId: %s" % maven_info['artifact_id']
print "Maven version: %s" % maven_info['version']
win_info = session.getWinBuild(info['id'])
if win_info:
print "Windows build platform: %s" % win_info['platform']
print "Tags: %s" % ' '.join(taglist)
print "RPMs:"
for rpm in rpms:
print os.path.join(koji.pathinfo.build(info), koji.pathinfo.rpm(rpm))
maven_archives = session.listArchives(buildID=info['id'], type='maven')
if maven_archives:
print "Maven archives:"
for archive in maven_archives:
print os.path.join(koji.pathinfo.mavenbuild(info, maven_info), archive['filename'])
win_archives = session.listArchives(buildID=info['id'], type='win')
if win_archives:
print "Windows archives:"
for archive in win_archives:
print os.path.join(koji.pathinfo.winbuild(info), koji.pathinfo.winfile(archive))
rpms = session.listRPMs(buildID=info['id'])
if rpms:
print "RPMs:"
for rpm in rpms:
print os.path.join(koji.pathinfo.build(info), koji.pathinfo.rpm(rpm))
if options.changelog:
changelog = session.getChangelogEntries(info['id'])
if changelog:
@ -3651,6 +3671,16 @@ def anon_handle_list_history(options, session, args):
print "%r" % list(entry)
_print_histline(entry, options=options)
def _handleMap(lines, data, prefix=''):
for key, val in data.items():
if key != '__starstar':
lines.append(' %s%s: %s' % (prefix, key, val))
def _handleOpts(lines, opts, prefix=''):
    """Append an "Options:" header plus one line per option to lines.

    Does nothing when opts is empty or None.
    """
    if not opts:
        return
    lines.append("%sOptions:" % prefix)
    _handleMap(lines, opts, prefix)
def _parseTaskParams(session, method, task_id):
"""Parse the return of getTaskRequest()"""
params = session.getTaskRequest(task_id)
@ -3667,9 +3697,7 @@ def _parseTaskParams(session, method, task_id):
lines.append("Build Arch: %s" % params[2])
lines.append("SRPM Kept: %r" % params[3])
if len(params) > 4:
for key in params[4].keys():
if not key == '__starstar':
lines.append("%s: %s" % (key, params[4][key]))
_handleOpts(lines, params[4])
elif method == 'tagBuild':
build = session.getBuild(params[1])
lines.append("Destination Tag: %s" % session.getTag(params[0])['name'])
@ -3684,9 +3712,49 @@ def _parseTaskParams(session, method, task_id):
elif method == 'build':
lines.append("Source: %s" % params[0])
lines.append("Build Target: %s" % params[1])
for key in params[2].keys():
if not key == '__starstar':
lines.append("%s: %s" % (key, params[2][key]))
if len(params) > 2:
_handleOpts(lines, params[2])
elif method == 'maven':
lines.append("SCM URL: %s" % params[0])
lines.append("Build Target: %s" % params[1])
if len(params) > 2:
_handleOpts(lines, params[2])
elif method == 'buildMaven':
lines.append("SCM URL: %s" % params[0])
lines.append("Build Tag: %s" % params[1]['name'])
if len(params) > 2:
_handleOpts(lines, params[2])
elif method == 'wrapperRPM':
lines.append("Spec File URL: %s" % params[0])
lines.append("Build Tag: %s" % params[1]['name'])
if params[2]:
lines.append("Build: %s" % koji.buildLabel(params[2]))
if params[3]:
lines.append("Task: %s %s" % (params[3]['id'], koji.taskLabel(params[3])))
if len(params) > 4:
_handleOpts(lines, params[4])
elif method == 'winbuild':
lines.append("VM: %s" % params[0])
lines.append("SCM URL: %s" % params[1])
lines.append("Build Target: %s" % params[2])
if len(params) > 3:
_handleOpts(lines, params[3])
elif method == 'vmExec':
lines.append("VM: %s" % params[0])
lines.append("Exec Params:")
for info in params[1]:
if isinstance(info, dict):
_handleMap(lines, info, prefix=' ')
else:
lines.append(" %s" % info)
if len(params) > 2:
_handleOpts(lines, params[2])
elif method in ('createLiveCD', 'createAppliance'):
lines.append("Arch: %s" % params[0])
lines.append("Build Target: %s" % params[1])
lines.append("Kickstart File: %s" % params[2])
if len(params) > 3:
_handleOpts(lines, params[3])
elif method == 'newRepo':
tag = session.getTag(params[0])
lines.append("Tag: %s" % tag['name'])
@ -3726,11 +3794,8 @@ def _parseTaskParams(session, method, task_id):
lines.append(" Method: %s" % subtask[0])
lines.append(" Parameters: %s" % ", ".join([str(subparam) for subparam in subtask[1]]))
if len(subtask) > 2 and subtask[2]:
lines.append(" Options:")
subopts = subtask[2]
for key in subopts:
if not key == '__starstar':
lines.append(" %s: %s" % (key, subopts[key]))
_handleOpts(lines, subopts, prefix=' ')
lines.append("")
elif method == 'chainbuild':
lines.append("Build Groups:")
@ -3739,9 +3804,8 @@ def _parseTaskParams(session, method, task_id):
group_num += 1
lines.append(" %i: %s" % (group_num, ', '.join(group_list)))
lines.append("Build Target: %s" % params[1])
for key in params[2].keys():
if not key == '__starstar':
lines.append("%s: %s" % (key, params[2][key]))
if len(params) > 2:
_handleOpts(lines, params[2])
elif method == 'waitrepo':
lines.append("Build Target: %s" % params[0])
if params[1]:
@ -3801,14 +3865,15 @@ def _printTaskInfo(session, task_id, level=0, recurse=True, verbose=True):
if output:
print "%sOutput:" % indent
for filename in output:
print "%s %s/%s" % (indent, files_dir, filename)
print "%s %s/%s" % (indent, files_dir, filename)
# white space
sys.stdout.write("\n")
print
if recurse:
level += 1
children = session.getTaskChildren(task_id)
children = session.getTaskChildren(task_id, request=True)
children.sort(cmp=lambda a, b: cmp(a['id'], b['id']))
for child in children:
_printTaskInfo(session, child['id'], level, verbose=verbose)
@ -3817,7 +3882,7 @@ def anon_handle_taskinfo(options, session, args):
usage = _("usage: %prog taskinfo [options] taskID [taskID...]")
usage += _("\n(Specify the --help global option for a list of other help options)")
parser = OptionParser(usage=usage)
parser.add_option("--recurse", action="store_true", help=_("Show children of this task as well"))
parser.add_option("-r", "--recurse", action="store_true", help=_("Show children of this task as well"))
parser.add_option("-v", "--verbose", action="store_true", help=_("Be verbose"))
(options, args) = parser.parse_args(args)
if len(args) < 1:
@ -4646,6 +4711,81 @@ def _build_image(options, task_opts, session, args, img_type):
else:
return
def handle_win_build(options, session, args):
"""Build a Windows package from source

CLI handler for "koji win-build target URL VM". Parses options,
validates the build target and destination tag, then submits a
winBuild task via the hub session. Returns the result of
watch_tasks() when waiting, otherwise None.
"""
# Usage & option parsing
usage = _("usage: %prog win-build [options] target URL VM")
usage += _("\n(Specify the --help global option for a list of other " +
"help options)")
parser = OptionParser(usage=usage)
parser.add_option("--winspec", metavar="URL",
help=_("SCM URL to retrieve the build descriptor from. " + \
"If not specified, the winspec must be in the root directory " + \
"of the source repository."))
parser.add_option("--patches", metavar="URL",
help=_("SCM URL of a directory containing patches to apply " + \
"to the sources before building"))
parser.add_option("--cpus", type="int",
help=_("Number of cpus to allocate to the build VM " + \
"(requires admin access)"))
parser.add_option("--mem", type="int",
help=_("Amount of memory (in megabytes) to allocate to the build VM " + \
"(requires admin access)"))
parser.add_option("--specfile", metavar="URL",
help=_("SCM URL of a spec file fragment to use to generate wrapper RPMs"))
parser.add_option("--scratch", action="store_true",
help=_("Perform a scratch build"))
parser.add_option("--repo-id", type="int", help=_("Use a specific repo"))
parser.add_option("--skip-tag", action="store_true",
help=_("Do not attempt to tag package"))
parser.add_option("--background", action="store_true",
help=_("Run the build at a lower priority"))
parser.add_option("--wait", action="store_true",
help=_("Wait on the build, even if running in the background"))
parser.add_option("--nowait", action="store_false", dest="wait",
help=_("Don't wait on build"))
parser.add_option("--quiet", action="store_true",
help=_("Do not print the task information"), default=options.quiet)
(build_opts, args) = parser.parse_args(args)
if len(args) != 3:
parser.error(_("Exactly three arguments (a build target, a SCM URL, and a VM name) are required"))
# parser.error() exits, so this is an unreachable defensive guard
assert False
activate_session(session)
target = args[0]
# target "none" with an explicit repo means build without a target;
# force skip_tag since there is no destination tag to apply
if target.lower() == "none" and build_opts.repo_id:
target = None
build_opts.skip_tag = True
else:
build_target = session.getBuildTarget(target)
if not build_target:
parser.error(_("Unknown build target: %s" % target))
dest_tag = session.getTag(build_target['dest_tag'])
if not dest_tag:
parser.error(_("Unknown destination tag: %s" % build_target['dest_tag_name']))
if dest_tag['locked'] and not build_opts.scratch:
parser.error(_("Destination tag %s is locked" % dest_tag['name']))
scmurl = args[1]
vm_name = args[2]
# Pass through only the options the user actually set
opts = {}
for key in ('winspec', 'patches', 'cpus', 'mem',
'specfile', 'scratch', 'repo_id', 'skip_tag'):
val = getattr(build_opts, key)
if val is not None:
opts[key] = val
priority = None
if build_opts.background:
#relative to koji.PRIO_DEFAULT
priority = 5
task_id = session.winBuild(vm_name, scmurl, target, opts, priority=priority)
if not build_opts.quiet:
print "Created task:", task_id
print "Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)
# wait defaults to None: wait when running in the foreground
if build_opts.wait or (build_opts.wait is None and not _running_in_bg()):
session.logout()
return watch_tasks(session, [task_id], quiet=build_opts.quiet)
else:
return
def handle_free_task(options, session, args):
"[admin] Free a task"
usage = _("usage: %prog free-task [options] <task-id> [<task-id> ...]")

8
docs/TODO.txt Normal file
View file

@ -0,0 +1,8 @@
vm/kojikamid:
- add vmplatform to the top-level dict
- capture build output itself into a build.log
- remove debug key for each file, we'll use flags to capture that
- fix heinous copy and paste job
cli/koji
- implement win-build directive

View file

@ -36,11 +36,11 @@ CREATE TABLE archivetypes (
extensions TEXT NOT NULL
) WITHOUT OIDS;
insert into archivetypes (name, description, extensions) values ('jar', 'Jar files', 'jar war rar ear');
insert into archivetypes (name, description, extensions) values ('zip', 'Zip archives', 'zip');
insert into archivetypes (name, description, extensions) values ('pom', 'Maven Project Object Management files', 'pom');
insert into archivetypes (name, description, extensions) values ('tar', 'Tar files', 'tar tar.gz tar.bz2');
insert into archivetypes (name, description, extensions) values ('xml', 'XML files', 'xml');
insert into archivetypes (name, description, extensions) values ('jar', 'Jar file', 'jar war rar ear');
insert into archivetypes (name, description, extensions) values ('zip', 'Zip archive', 'zip');
insert into archivetypes (name, description, extensions) values ('pom', 'Maven Project Object Management file', 'pom');
insert into archivetypes (name, description, extensions) values ('tar', 'Tar file', 'tar tar.gz tar.bz2');
insert into archivetypes (name, description, extensions) values ('xml', 'XML file', 'xml');
CREATE TABLE archiveinfo (
id SERIAL NOT NULL PRIMARY KEY,

View file

@ -0,0 +1,36 @@
-- upgrade script to migrate the Koji database schema
-- from version 1.4 to 1.5
-- Run the whole migration atomically.
BEGIN;
-- New permissions and channel for Windows (VM-based) builds.
INSERT INTO permissions (name) VALUES ('win-import');
INSERT INTO permissions (name) VALUES ('win-admin');
INSERT INTO channels (name) VALUES ('vm');
-- New archive types produced by Windows builds.
insert into archivetypes (name, description, extensions) values ('spec', 'RPM spec file', 'spec');
insert into archivetypes (name, description, extensions) values ('exe', 'Windows executable', 'exe');
insert into archivetypes (name, description, extensions) values ('dll', 'Windows dynamic link library', 'dll');
insert into archivetypes (name, description, extensions) values ('lib', 'Windows import library', 'lib');
insert into archivetypes (name, description, extensions) values ('sys', 'Windows device driver', 'sys');
insert into archivetypes (name, description, extensions) values ('inf', 'Windows driver information file', 'inf');
insert into archivetypes (name, description, extensions) values ('cat', 'Windows catalog file', 'cat');
insert into archivetypes (name, description, extensions) values ('msi', 'Windows Installer package', 'msi');
insert into archivetypes (name, description, extensions) values ('pdb', 'Windows debug information', 'pdb');
insert into archivetypes (name, description, extensions) values ('oem', 'Windows driver oem file', 'oem');
-- flag to indicate that a build is a Windows build
CREATE TABLE win_builds (
build_id INTEGER NOT NULL PRIMARY KEY REFERENCES build(id),
platform TEXT NOT NULL
) WITHOUT OIDS;
-- Extended information about files built in Windows VMs
CREATE TABLE win_archives (
archive_id INTEGER NOT NULL PRIMARY KEY REFERENCES archiveinfo(id),
relpath TEXT NOT NULL,
-- space-separated lists (see list_archives in the hub code)
platforms TEXT NOT NULL,
flags TEXT
) WITHOUT OIDS;
COMMIT WORK;

View file

@ -101,6 +101,8 @@ INSERT INTO permissions (name) VALUES ('build');
INSERT INTO permissions (name) VALUES ('repo');
INSERT INTO permissions (name) VALUES ('livecd');
INSERT INTO permissions (name) VALUES ('maven-import');
INSERT INTO permissions (name) VALUES ('win-import');
-- fix: statement was missing its terminating semicolon, which would
-- make it swallow the following INSERT and fail the schema load
INSERT INTO permissions (name) VALUES ('win-admin');
INSERT INTO permissions (name) VALUES ('appliance');
CREATE TABLE user_perms (
@ -179,6 +181,7 @@ INSERT INTO channels (name) VALUES ('createrepo');
INSERT INTO channels (name) VALUES ('maven');
INSERT INTO channels (name) VALUES ('livecd');
INSERT INTO channels (name) VALUES ('appliance');
INSERT INTO channels (name) VALUES ('vm');
-- Here we track the build machines
-- each host has an entry in the users table also
@ -668,6 +671,12 @@ CREATE TABLE maven_builds (
version TEXT NOT NULL
) WITHOUT OIDS;
-- Windows-specific build information
CREATE TABLE win_builds (
build_id INTEGER NOT NULL PRIMARY KEY REFERENCES build(id),
platform TEXT NOT NULL
) WITHOUT OIDS;
-- Even though we call this archiveinfo, we can probably use it for
-- any filetype output by a build process. In general they will be
-- archives (.zip, .jar, .tar.gz) but could also be installer executables (.exe)
@ -678,11 +687,21 @@ CREATE TABLE archivetypes (
extensions TEXT NOT NULL
) WITHOUT OIDS;
insert into archivetypes (name, description, extensions) values ('jar', 'Jar files', 'jar war rar ear');
insert into archivetypes (name, description, extensions) values ('zip', 'Zip archives', 'zip');
insert into archivetypes (name, description, extensions) values ('pom', 'Maven Project Object Management files', 'pom');
insert into archivetypes (name, description, extensions) values ('tar', 'Tar files', 'tar tar.gz tar.bz2');
insert into archivetypes (name, description, extensions) values ('xml', 'XML files', 'xml');
insert into archivetypes (name, description, extensions) values ('jar', 'Jar file', 'jar war rar ear');
insert into archivetypes (name, description, extensions) values ('zip', 'Zip archive', 'zip');
insert into archivetypes (name, description, extensions) values ('pom', 'Maven Project Object Management file', 'pom');
insert into archivetypes (name, description, extensions) values ('tar', 'Tar file', 'tar tar.gz tar.bz2');
insert into archivetypes (name, description, extensions) values ('xml', 'XML file', 'xml');
insert into archivetypes (name, description, extensions) values ('spec', 'RPM spec file', 'spec');
insert into archivetypes (name, description, extensions) values ('exe', 'Windows executable', 'exe');
insert into archivetypes (name, description, extensions) values ('dll', 'Windows dynamic link library', 'dll');
insert into archivetypes (name, description, extensions) values ('lib', 'Windows import library', 'lib');
insert into archivetypes (name, description, extensions) values ('sys', 'Windows device driver', 'sys');
insert into archivetypes (name, description, extensions) values ('inf', 'Windows driver information file', 'inf');
insert into archivetypes (name, description, extensions) values ('cat', 'Windows catalog file', 'cat');
insert into archivetypes (name, description, extensions) values ('msi', 'Windows Installer package', 'msi');
insert into archivetypes (name, description, extensions) values ('pdb', 'Windows debug information', 'pdb');
insert into archivetypes (name, description, extensions) values ('oem', 'Windows driver oem file', 'oem');
-- Do we want to enforce a constraint that a build can only generate one
-- archive with a given name?
@ -715,4 +734,12 @@ CREATE TABLE buildroot_archives (
) WITHOUT OIDS;
CREATE INDEX buildroot_archives_archive_idx ON buildroot_archives (archive_id);
-- Extended information about files built in Windows VMs
CREATE TABLE win_archives (
archive_id INTEGER NOT NULL PRIMARY KEY REFERENCES archiveinfo(id),
relpath TEXT NOT NULL,
platforms TEXT NOT NULL,
flags TEXT
) WITHOUT OIDS;
COMMIT WORK;

View file

@ -49,6 +49,12 @@ NotifyOnSuccess = True
## Disables all notifications
# DisableNotifications = False
## Extended features
## Support Maven builds
# EnableMaven = False
## Support Windows builds
# EnableWin = False
## Koji hub plugins
## The path where plugins are found
# PluginPath = /usr/lib/koji-hub-plugins

View file

@ -1051,8 +1051,8 @@ def readTaggedBuilds(tag,event=None,inherit=False,latest=False,package=None,owne
set event to query at a time in the past
set latest=True to get only the latest build per package
If type is not None, restrict the list to builds of the given type. Currently the only
supported type is 'maven'.
If type is not None, restrict the list to builds of the given type. Currently the supported
types are 'maven' and 'win'.
"""
# build - id pkg_id version release epoch
# tag_listing - id build_id tag_id
@ -1077,14 +1077,17 @@ def readTaggedBuilds(tag,event=None,inherit=False,latest=False,package=None,owne
('users.id', 'owner_id'), ('users.name', 'owner_name')]
st_complete = koji.BUILD_STATES['COMPLETE']
maven_join = ''
type_join = ''
if type is None:
pass
elif type == 'maven':
maven_join = 'JOIN maven_builds on maven_builds.build_id = tag_listing.build_id'
type_join = 'JOIN maven_builds ON maven_builds.build_id = tag_listing.build_id'
fields.extend([('maven_builds.group_id', 'maven_group_id'),
('maven_builds.artifact_id', 'maven_artifact_id'),
('maven_builds.version', 'maven_version')])
elif type == 'win':
type_join = 'JOIN win_builds ON win_builds.build_id = tag_listing.build_id'
fields.append(('win_builds.platform', 'platform'))
else:
raise koji.GenericError, 'unsupported build type: %s' % type
@ -1098,7 +1101,7 @@ def readTaggedBuilds(tag,event=None,inherit=False,latest=False,package=None,owne
JOIN package ON package.id = build.pkg_id
WHERE %s AND tag_id=%%(tagid)s
AND build.state=%%(st_complete)i
""" % (', '.join([pair[0] for pair in fields]), maven_join, eventCondition(event, 'tag_listing'))
""" % (', '.join([pair[0] for pair in fields]), type_join, eventCondition(event, 'tag_listing'))
if package:
q += """AND package.name = %(package)s
"""
@ -1138,8 +1141,8 @@ def readTaggedRPMS(tag, package=None, arch=None, event=None,inherit=False,latest
set event to query at a time in the past
set latest=False to get all tagged RPMS (not just from the latest builds)
If type is not None, restrict the list to rpms from builds of the given type. Currently the only
supported type is 'maven'.
If type is not None, restrict the list to rpms from builds of the given type. Currently the
supported types are 'maven' and 'win'.
"""
taglist = [tag]
if inherit:
@ -1227,7 +1230,7 @@ def readTaggedArchives(tag, package=None, event=None, inherit=False, latest=True
set latest=False to get all tagged archives (not just from the latest builds)
If type is not None, restrict the listing to archives of the given type. Currently
the only supported type is 'maven'.
the supported types are 'maven' and 'win'.
"""
taglist = [tag]
if inherit:
@ -1263,6 +1266,11 @@ def readTaggedArchives(tag, package=None, event=None, inherit=False, latest=True
fields.extend([('maven_archives.group_id', 'maven_group_id'),
('maven_archives.artifact_id', 'maven_artifact_id'),
('maven_archives.version', 'maven_version')])
elif type == 'win':
joins.append('win_archives ON archiveinfo.id = win_archives.archive_id')
fields.extend([('win_archives.relpath', 'relpath'),
('win_archives.platforms', 'platforms'),
('win_archives.flags', 'flags')])
else:
raise koji.GenericError, 'unsupported archive type: %s' % type
@ -3280,6 +3288,32 @@ def get_maven_build(buildInfo, strict=False):
WHERE build_id = %%(build_id)i""" % ', '.join(fields)
return _singleRow(query, locals(), fields, strict)
def get_win_build(buildInfo, strict=False):
"""
Retrieve Windows-specific information about a build.

buildInfo can be either a string (n-v-r) or an integer
(build ID).

Returns a map containing the following keys:
build_id: id of the build (integer)
platform: the platform the build was performed on (string)

Returns None if the build does not exist or has no Windows info
and strict is False; raises koji.GenericError when strict is True.
"""
fields = ('build_id', 'platform')
build_id = find_build_id(buildInfo)
if not build_id:
if strict:
raise koji.GenericError, 'No matching build found: %s' % buildInfo
else:
return None
# win_builds has at most one row per build (build_id is the primary key)
query = QueryProcessor(tables=('win_builds',), columns=fields,
clauses=('build_id = %(build_id)i',),
values={'build_id': build_id})
result = query.executeOne()
if strict and not result:
raise koji.GenericError, 'no such Windows build: %s' % buildInfo
return result
def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hostID=None, type=None,
filename=None, size=None, md5sum=None, typeInfo=None, queryOpts=None):
"""
@ -3312,7 +3346,7 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
those associated with additional metadata of the given type.
Currently supported types are:
maven
maven, win
If 'maven' is specified as a type, each returned map will contain
these additional keys:
@ -3321,6 +3355,13 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
artifact_id: Maven artifactId (string)
version: Maven version (string)
if 'win' is specified as a type, each returned map will contain
these additional keys:
relpath: the relative path where the file is located (string)
platforms: space-separated list of platforms the file is suitable for use on (string)
flags: space-separated list of flags used when building the file (fre, chk) (string)
typeInfo is a dict that can be used to filter the output by type-specific info.
For the 'maven' type, this dict may contain one or more of group_id, artifact_id, or version,
and the output will be restricted to archives with matching attributes.
@ -3379,6 +3420,22 @@ def list_archives(buildID=None, buildrootID=None, componentBuildrootID=None, hos
if typeInfo.has_key(key):
clauses.append('maven_archives.%s = %%(%s)s' % (key, key))
values[key] = typeInfo[key]
elif type == 'win':
joins.append('win_archives ON archiveinfo.id = win_archives.archive_id')
columns.extend(['win_archives.relpath', 'win_archives.platforms', 'win_archives.flags'])
aliases.extend(['relpath', 'platforms', 'flags'])
if typeInfo:
if 'relpath' in typeInfo:
clauses.append('win_archives.relpath = %(relpath)s')
values['relpath'] = typeInfo['relpath']
for key in ('platforms', 'flags'):
if key in typeInfo:
val = typeInfo[key]
if not isinstance(val, (list, tuple)):
val = [val]
for v in val:
clauses.append(r"""%s ~ E'\\m%s\\M'""" % (key, v))
else:
raise koji.GenericError, 'unsupported archive type: %s' % type
@ -3397,13 +3454,34 @@ def get_archive(archive_id, strict=False):
filename: name of the archive (string)
size: size of the archive (integer)
md5sum: md5sum of the archive (string)
If the archive is part of a Maven build, the following keys will be included:
group_id
artifact_id
version
If the archive is part of a Windows build, the following keys will be included:
relpath
platforms
flags
"""
fields = ('id', 'type_id', 'build_id', 'buildroot_id', 'filename', 'size', 'md5sum')
select = """SELECT %s FROM archiveinfo
WHERE id = %%(archive_id)i""" % ', '.join(fields)
return _singleRow(select, locals(), fields, strict=strict)
archive = _singleRow(select, locals(), fields, strict=strict)
if not archive:
# strict is taken care of by _singleRow()
return None
maven_info = get_maven_archive(archive_id)
if maven_info:
del maven_info['archive_id']
archive.update(maven_info)
win_info = get_win_archive(archive_id)
if win_info:
del win_info['archive_id']
archive.update(win_info)
return archive
def get_maven_archive(archive_id):
def get_maven_archive(archive_id, strict=False):
"""
Retrieve Maven-specific information about an archive.
Returns a map containing the following keys:
@ -3416,7 +3494,22 @@ def get_maven_archive(archive_id):
fields = ('archive_id', 'group_id', 'artifact_id', 'version')
select = """SELECT %s FROM maven_archives
WHERE archive_id = %%(archive_id)i""" % ', '.join(fields)
return _singleRow(select, locals(), fields)
return _singleRow(select, locals(), fields, strict=strict)
def get_win_archive(archive_id, strict=False):
    """
    Retrieve Windows-specific information about an archive.
    Returns a map containing the following keys:

    archive_id: id of the build (integer)
    relpath: the relative path where the file is located (string)
    platforms: space-separated list of platforms the file is suitable for use on (string)
    flags: space-separated list of flags used when building the file (fre, chk) (string)

    If strict is true and no win_archives row exists, an error is raised
    by _singleRow; otherwise None is returned in that case.
    """
    # locals() is passed as the value map so %(archive_id)i in the query
    # binds to the archive_id argument
    fields = ('archive_id', 'relpath', 'platforms', 'flags')
    select = """SELECT %s FROM win_archives
    WHERE archive_id = %%(archive_id)i""" % ', '.join(fields)
    return _singleRow(select, locals(), fields, strict=strict)
def _get_zipfile_list(archive_id, zippath):
"""
@ -3483,12 +3576,18 @@ def list_archive_files(archive_id, queryOpts=None):
archive_type = get_archive_type(type_id=archive_info['type_id'], strict=True)
build_info = get_build(archive_info['build_id'], strict=True)
maven_info = get_maven_build(build_info['id'])
if not maven_info:
# XXX support other archive types, when they exist
return _applyQueryOpts([], queryOpts)
win_info = get_win_build(build_info['id'])
file_path = os.path.join(koji.pathinfo.mavenbuild(build_info, maven_info),
archive_info['filename'])
if maven_info:
file_path = os.path.join(koji.pathinfo.mavenbuild(build_info, maven_info),
archive_info['filename'])
elif win_info:
win_archive = get_win_archive(archive_info['id'], strict=True)
archive_info.update(win_archive)
file_path = os.path.join(koji.pathinfo.winbuild(build_info),
koji.pathinfo.winfile(archive_info))
else:
return _applyQueryOpts([], queryOpts)
if archive_type['name'] in ('zip', 'jar'):
return _applyQueryOpts(_get_zipfile_list(archive_id, file_path), queryOpts)
@ -4274,7 +4373,26 @@ def new_maven_build(build, maven_info):
VALUES (%(build_id)i, %(group_id)s, %(artifact_id)s, %(version)s)"""
_dml(insert, maven_info)
def import_archive(filepath, buildinfo, type, typeInfo, buildroot_id=None):
def new_win_build(build_info, win_info):
    """
    Add Windows metadata to an existing build.
    win_info must contain a 'platform' key.

    If a win_builds row already exists for this build, its platform is
    updated in place when it differs; otherwise a new row is inserted.
    """
    build_id = build_info['id']
    current = get_win_build(build_id, strict=False)
    if current:
        if current['platform'] != win_info['platform']:
            # metadata exists but the platform changed; update in place
            update = UpdateProcessor('win_builds', clauses=['build_id=%(build_id)i'],
                                     values={'build_id': build_id})
            update.set(platform=win_info['platform'])
            update.execute()
    else:
        # first Windows metadata for this build
        insert = InsertProcessor('win_builds')
        insert.set(build_id=build_id)
        insert.set(platform=win_info['platform'])
        insert.execute()
def import_archive(filepath, buildinfo, type, typeInfo, buildroot_id=None, destpath=None):
"""
Import an archive file and associate it with a build. The archive can
be any non-rpm filetype supported by Koji.
@ -4284,6 +4402,7 @@ def import_archive(filepath, buildinfo, type, typeInfo, buildroot_id=None):
type: type of the archive being imported. Currently supported archive types: maven
typeInfo: dict of type-specific information
buildroot_id: the id of the buildroot the archive was built in (may be null)
destpath: the path relative to the destination directory that the file should be moved to (may be null)
"""
if not os.path.exists(filepath):
raise koji.GenericError, 'no such file: %s' % filepath
@ -4327,6 +4446,19 @@ def import_archive(filepath, buildinfo, type, typeInfo, buildroot_id=None):
# move the file to it's final destination
_import_archive_file(filepath, mavendir)
_generate_maven_metadata(maveninfo, mavendir)
elif type == 'win':
wininfo = get_win_build(buildinfo, strict=True)
insert = InsertProcessor('win_archives')
insert.set(archive_id=archive_id)
insert.set(relpath=destpath)
insert.set(platforms=' '.join(typeInfo['platforms']))
if typeInfo['flags']:
insert.set(flags=' '.join(typeInfo['flags']))
insert.execute()
destdir = koji.pathinfo.winbuild(buildinfo)
if destpath:
destdir = os.path.join(destdir, destpath)
_import_archive_file(filepath, destdir)
else:
raise koji.BuildError, 'unsupported archive type: %s' % type
@ -5117,6 +5249,7 @@ def reset_build(build):
#nothing to do
return
minfo = get_maven_build(binfo)
winfo = get_win_build(binfo)
koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=binfo['state'], new=koji.BUILD_STATES['CANCELED'], info=binfo)
q = """SELECT id FROM rpminfo WHERE build_id=%(id)i"""
ids = _fetchMulti(q, binfo)
@ -5132,12 +5265,16 @@ def reset_build(build):
for (archive_id,) in ids:
delete = """DELETE FROM maven_archives WHERE archive_id=%(archive_id)i"""
_dml(delete, locals())
delete = """DELETE FROM win_archives WHERE archive_id=%(archive_id)i"""
_dml(delete, locals())
delete = """DELETE FROM buildroot_archives WHERE archive_id=%(archive_id)i"""
_dml(delete, locals())
delete = """DELETE FROM archiveinfo WHERE build_id=%(id)i"""
_dml(delete, binfo)
delete = """DELETE FROM maven_builds WHERE build_id = %(id)i"""
_dml(delete, binfo)
delete = """DELETE FROM win_builds WHERE build_id = %(id)i"""
_dml(delete, binfo)
binfo['state'] = koji.BUILD_STATES['CANCELED']
update = """UPDATE build SET state=%(state)i, task_id=NULL WHERE id=%(id)i"""
_dml(update, binfo)
@ -5146,6 +5283,7 @@ def reset_build(build):
builddir = koji.pathinfo.build(binfo)
if os.path.exists(builddir):
dirs_to_clear.append(builddir)
# Windows files exist under the builddir, and will be removed with the rpms
if minfo:
mavendir = koji.pathinfo.mavenbuild(binfo, minfo)
if os.path.exists(mavendir):
@ -5197,13 +5335,17 @@ def _get_build_target(task_id):
# XXX Should we be storing a reference to the build target
# in the build table for reproducibility?
task = Task(task_id)
request = task.getRequest()
# request is (path-to-srpm, build-target-name, map-of-other-options)
if request[1]:
ret = get_build_targets(request[1])
return ret[0]
else:
return None
info = task.getInfo(request=True)
request = info['request']
if info['method'] in ('build', 'maven'):
# request is (source-url, build-target, map-of-other-options)
if request[1]:
return get_build_target(request[1])
elif info['method'] == 'winbuild':
# request is (vm-name, source-url, build-target, map-of-other-options)
if request[2]:
return get_build_target(request[2])
return None
def get_notification_recipients(build, tag_id, state):
"""
@ -6302,6 +6444,38 @@ class RootExports(object):
return make_task('wrapperRPM', [url, build_tag, build, None, opts], **taskOpts)
    def winBuild(self, vm, url, target, opts=None, priority=None, channel='vm'):
        """
        Create a Windows build task

        vm: the name of the VM to run the build in
        url: The url to checkout the source from.  May be a CVS, SVN, or GIT repository.
        opts: task options
        target: the build target
        priority: the amount to increase (or decrease) the task priority, relative
                  to the default priority; higher values mean lower priority; only
                  admins have the right to specify a negative priority here
        channel: the channel to allocate the task to (defaults to the "vm" channel)

        Returns the task ID
        """
        if not context.opts.get('EnableWin'):
            raise koji.GenericError, "Windows support not enabled"
        if not opts:
            opts = {}
        # overriding VM resources (cpus/mem) requires the win-admin permission
        if 'cpus' in opts or 'mem' in opts:
            context.session.assertPerm('win-admin')
        taskOpts = {}
        if priority:
            if priority < 0:
                # a negative offset raises the task's priority; admin-only
                if not context.session.hasPerm('admin'):
                    raise koji.ActionNotAllowed, 'only admins may create high-priority tasks'
            taskOpts['priority'] = koji.PRIO_DEFAULT + priority
        if channel:
            taskOpts['channel'] = channel

        return make_task('winbuild', [vm, url, target, opts], **taskOpts)
# Create the image task. Called from _build_image in the client.
#
def buildImage (self, arch, target, ksfile, img_type, opts=None, priority=None):
@ -6398,6 +6572,9 @@ class RootExports(object):
def mavenEnabled(self):
return bool(context.opts.get('EnableMaven'))
    def winEnabled(self):
        """Return True if Windows build support is enabled on this hub."""
        return bool(context.opts.get('EnableWin'))
def showSession(self):
return "%s" % context.session
@ -6663,6 +6840,12 @@ class RootExports(object):
buildinfo = get_build(buildinfo, strict=True)
fullpath = '%s/%s' % (koji.pathinfo.work(), filepath)
import_archive(fullpath, buildinfo, type, typeInfo)
elif type == 'win':
context.session.assertPerm('win-import')
buildinfo = get_build(buildinfo, strict=True)
fullpath = '%s/%s' % (koji.pathinfo.work(), filepath)
import_archive(fullpath, buildinfo, type, typeInfo,
destpath=os.path.dirname(filepath))
else:
koji.GenericError, 'unsupported archive type: %s' % type
@ -6944,11 +7127,13 @@ class RootExports(object):
getBuild = staticmethod(get_build)
getMavenBuild = staticmethod(get_maven_build)
getWinBuild = staticmethod(get_win_build)
getArchiveTypes = staticmethod(get_archive_types)
getArchiveType = staticmethod(get_archive_type)
listArchives = staticmethod(list_archives)
getArchive = staticmethod(get_archive)
getMavenArchive = staticmethod(get_maven_archive)
getWinArchive = staticmethod(get_win_archive)
listArchiveFiles = staticmethod(list_archive_files)
getArchiveFile = staticmethod(get_archive_file)
@ -7142,7 +7327,7 @@ class RootExports(object):
completion_time is before and/or after the given time.
The time may be specified as a floating point value indicating seconds since the Epoch (as
returned by time.time()) or as a string in ISO format ('YYYY-MM-DD HH24:MI:SS').
If type is not None, only list builds of the associated type. Currently the only supported type is 'maven'.
If type is not None, only list builds of the associated type. Currently the supported types are 'maven' and 'win'.
if typeInfo is not None, only list builds with matching type-specific info. Must be used in conjunction with
the type parameter.
Currently the only supported type is 'maven', and typeInfo is a dict containing
@ -7237,6 +7422,12 @@ class RootExports(object):
if typeInfo.has_key('version'):
clauses.append('maven_builds.version = %(version)s')
version = typeInfo['version']
elif type == 'win':
joins.append('win_builds ON build.id = win_builds.build_id')
fields.append(('win_builds.platform', 'platform'))
if typeInfo:
clauses.append('win_builds.platform = %(platform)s')
platform = typeInfo['platform']
else:
raise koji.GenericError, 'unsupported build type: %s' % type
@ -7802,11 +7993,11 @@ class RootExports(object):
else:
return ret
def getTaskChildren(self, task_id):
def getTaskChildren(self, task_id, request=False):
"""Return a list of the children
of the Task with the given ID."""
task = Task(task_id)
return task.getChildren()
return task.getChildren(request=request)
def getTaskDescendents(self, task_id, request=False):
"""Get all descendents of the task with the given ID.
@ -8332,13 +8523,14 @@ class RootExports(object):
'user': 'users',
'host': 'host',
'rpm': 'rpminfo',
'maven': 'archiveinfo'}
'maven': 'archiveinfo',
'win': 'archiveinfo'}
def search(self, terms, type, matchType, queryOpts=None):
"""Search for an item in the database matching "terms".
"type" specifies what object type to search for, and must be
one of "package", "build", "tag", "target", "user", "host",
or "rpm". "matchType" specifies the type of search to
"rpm", "maven", or "win". "matchType" specifies the type of search to
perform, and must be one of "glob" or "regexp". All searches
are case-insensitive. A list of maps containing "id" and
"name" will be returned. If no matches are found, an empty
@ -8382,6 +8574,11 @@ class RootExports(object):
joins.append('maven_archives ON archiveinfo.id = maven_archives.archive_id')
clause = "archiveinfo.filename %s %%(terms)s or maven_archives.group_id || '-' || " \
"maven_archives.artifact_id || '-' || maven_archives.version %s %%(terms)s" % (oper, oper)
elif type == 'win':
cols = ('id', "trim(leading '/' from win_archives.relpath || '/' || archiveinfo.filename)")
joins.append('win_archives ON archiveinfo.id = win_archives.archive_id')
clause = "archiveinfo.filename %s %%(terms)s or win_archives.relpath || '/' || " \
"archiveinfo.filename %s %%(terms)s" % (oper, oper)
else:
clause = 'name %s %%(terms)s' % oper
@ -8924,6 +9121,65 @@ class HostExports(object):
os.rename(fn,dest)
os.symlink(dest,fn)
    def moveMavenBuildToScratch(self, task_id, results, rpm_results):
        "Move a completed Maven scratch build into place (not imported)"
        if not context.opts.get('EnableMaven'):
            raise koji.GenericError, 'Maven support not enabled'
        host = Host()
        host.verify()
        task = Task(task_id)
        task.assertHost(host.id)
        scratchdir = koji.pathinfo.scratch()
        username = get_user(task.getOwner())['name']
        destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
        # move each output file (and top-level logs) into the scratch area,
        # leaving a symlink behind in the task directory
        for reldir, files in results['files'].items() + [('', results['logs'])]:
            for filename in files:
                if reldir:
                    relpath = os.path.join(reldir, filename)
                else:
                    relpath = filename
                src = os.path.join(koji.pathinfo.task(results['task_id']), relpath)
                dest = os.path.join(destdir, relpath)
                koji.ensuredir(os.path.dirname(dest))
                os.rename(src, dest)
                os.symlink(dest, src)
        if rpm_results:
            # wrapper-rpm outputs go into an rpms/ subdirectory of the scratch dir
            for relpath in [rpm_results['srpm']] + rpm_results['rpms'] + \
                    rpm_results['logs']:
                src = os.path.join(koji.pathinfo.task(rpm_results['task_id']),
                                   relpath)
                dest = os.path.join(destdir, 'rpms', relpath)
                koji.ensuredir(os.path.dirname(dest))
                os.rename(src, dest)
                os.symlink(dest, src)
    def moveWinBuildToScratch(self, task_id, results, rpm_results):
        "Move a completed Windows scratch build into place (not imported)"
        if not context.opts.get('EnableWin'):
            raise koji.GenericError, 'Windows support not enabled'
        host = Host()
        host.verify()
        task = Task(task_id)
        task.assertHost(host.id)
        scratchdir = koji.pathinfo.scratch()
        username = get_user(task.getOwner())['name']
        destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)
        # move outputs and logs into the scratch area, leaving symlinks
        # behind in the task directory
        for relpath in results['output'].keys() + results['logs']:
            filename = os.path.join(koji.pathinfo.task(results['task_id']), relpath)
            dest = os.path.join(destdir, relpath)
            koji.ensuredir(os.path.dirname(dest))
            os.rename(filename, dest)
            os.symlink(dest, filename)
        if rpm_results:
            # wrapper-rpm outputs go into an rpms/ subdirectory
            for relpath in [rpm_results['srpm']] + rpm_results['rpms'] + \
                    rpm_results['logs']:
                filename = os.path.join(koji.pathinfo.task(rpm_results['task_id']),
                                        relpath)
                dest = os.path.join(destdir, 'rpms', relpath)
                koji.ensuredir(os.path.dirname(dest))
                os.rename(filename, dest)
                os.symlink(dest, filename)
def initBuild(self,data):
"""Create a stub build entry.
@ -8992,10 +9248,10 @@ class HostExports(object):
data['state'] = koji.BUILD_STATES['BUILDING']
data['completion_time'] = None
build_id = new_build(data)
build_info['id'] = build_id
new_maven_build(build_info, maven_info)
data['id'] = build_id
new_maven_build(data, maven_info)
return build_id, build_info
return data
def completeMavenBuild(self, task_id, build_id, maven_results, rpm_results):
"""Complete the Maven build."""
@ -9075,6 +9331,74 @@ class HostExports(object):
_import_wrapper(task.id, build_info, rpm_results)
    def initWinBuild(self, task_id, build_info, win_info):
        """
        Create a new in-progress Windows build.

        build_info must contain name, version, and release; win_info must
        contain a 'platform' key (see new_win_build).  Returns the build
        data dict including the newly assigned 'id'.
        """
        if not context.opts.get('EnableWin'):
            raise koji.GenericError, 'Windows support not enabled'
        host = Host()
        host.verify()
        #sanity checks
        task = Task(task_id)
        task.assertHost(host.id)
        # build_info must contain name, version, and release
        data = build_info.copy()
        data['task_id'] = task_id
        data['owner'] = task.getOwner()
        data['state'] = koji.BUILD_STATES['BUILDING']
        data['completion_time'] = None
        build_id = new_build(data)
        data['id'] = build_id
        new_win_build(data, win_info)
        return data
    def completeWinBuild(self, task_id, build_id, results, rpm_results):
        """Complete a Windows build

        Imports the build output listed in results['output'], moves logs
        into place, imports any wrapper-rpm results, marks the build
        COMPLETE, and sends a build notification.
        """
        if not context.opts.get('EnableWin'):
            raise koji.GenericError, 'Windows support not enabled'
        host = Host()
        host.verify()
        task = Task(task_id)
        task.assertHost(host.id)

        build_info = get_build(build_id, strict=True)
        # strict fetch ensures the build really has Windows metadata
        win_info = get_win_build(build_id, strict=True)

        task_dir = koji.pathinfo.task(results['task_id'])
        # import the build output
        for relpath, metadata in results['output'].iteritems():
            archivetype = get_archive_type(relpath)
            if not archivetype:
                # Unknown archive type, skip it
                continue
            filepath = os.path.join(task_dir, relpath)
            import_archive(filepath, build_info, 'win', metadata,
                           destpath=os.path.dirname(relpath))

        # move the logs to their final destination
        for relpath in results['logs']:
            subdir = 'win'
            reldir = os.path.dirname(relpath)
            if reldir:
                subdir = os.path.join(subdir, reldir)
            import_build_log(os.path.join(task_dir, relpath),
                             build_info, subdir=subdir)

        if rpm_results:
            _import_wrapper(rpm_results['task_id'], build_info, rpm_results)

        # update build state
        st_complete = koji.BUILD_STATES['COMPLETE']
        update = UpdateProcessor('build', clauses=['id=%(build_id)i'],
                                 values={'build_id': build_id})
        update.set(id=build_id, state=st_complete)
        update.rawset(completion_time='now()')
        update.execute()

        # send email
        build_notification(task_id, build_id)
def failBuild(self, task_id, build_id):
"""Mark the build as failed. If the current state is not
'BUILDING', or the current competion_time is not null, a

View file

@ -392,6 +392,7 @@ def load_config(req):
['MissingPolicyOk', 'boolean', True],
['EnableMaven', 'boolean', False],
['EnableWin', 'boolean', False],
['LockOut', 'boolean', False],
['ServerOffline', 'boolean', False],

View file

@ -81,6 +81,24 @@ Requires: createrepo >= 0.9.2
koji-builder is the daemon that runs on build machines and executes
tasks that come through the Koji system.
%package vm
Summary: Koji virtual machine management daemon
Group: Applications/System
License: LGPLv2
Requires: %{name} = %{version}-%{release}
Requires(post): /sbin/chkconfig
Requires(post): /sbin/service
Requires(preun): /sbin/chkconfig
Requires(preun): /sbin/service
Requires: libvirt-python
Requires: libxml2-python
Requires: python-virtinst
Requires: qemu-img
%description vm
koji-vm contains a supplemental build daemon that executes certain tasks in a
virtual machine. This package is not required for most installations.
%package utils
Summary: Koji Utilities
Group: Applications/Internet
@ -170,7 +188,6 @@ rm -rf $RPM_BUILD_ROOT
%config(noreplace) %{_sysconfdir}/sysconfig/kojid
%dir %{_sysconfdir}/kojid
%config(noreplace) %{_sysconfdir}/kojid/kojid.conf
%{_datadir}/koji-builder
%attr(-,kojibuilder,kojibuilder) %{_sysconfdir}/mock/koji
%pre builder
@ -186,6 +203,26 @@ if [ $1 = 0 ]; then
/sbin/chkconfig --del kojid
fi
%files vm
%defattr(-,root,root)
%{_sbindir}/kojivmd
#dir %{_datadir}/kojivmd
%{_datadir}/kojivmd/kojikamid
%{_initrddir}/kojivmd
%config(noreplace) %{_sysconfdir}/sysconfig/kojivmd
%dir %{_sysconfdir}/kojivmd
%config(noreplace) %{_sysconfdir}/kojivmd/kojivmd.conf
%post vm
/sbin/chkconfig --add kojivmd
/sbin/service kojivmd condrestart &> /dev/null || :
%preun vm
if [ $1 = 0 ]; then
/sbin/service kojivmd stop &> /dev/null
/sbin/chkconfig --del kojivmd
fi
%post utils
/sbin/chkconfig --add kojira
/sbin/service kojira condrestart &> /dev/null || :

View file

@ -211,6 +211,8 @@ BASEDIR = '/mnt/koji'
# default task priority
PRIO_DEFAULT = 20
## BEGIN kojikamid dup
#Exceptions
class GenericError(Exception):
"""Base class for our custom exceptions"""
@ -224,6 +226,7 @@ class GenericError(Exception):
return str(self.args[0])
except:
return str(self.__dict__)
## END kojikamid dup
class LockConflictError(GenericError):
"""Raised when there is a lock conflict"""
@ -241,9 +244,12 @@ class ActionNotAllowed(GenericError):
"""Raised when the session does not have permission to take some action"""
faultCode = 1004
## BEGIN kojikamid dup
class BuildError(GenericError):
"""Raised when a build fails"""
faultCode = 1005
## END kojikamid dup
class AuthLockError(AuthError):
"""Raised when a lock prevents authentication"""
@ -304,6 +310,7 @@ class MultiCallInProgress(object):
"""
pass
#A function to get create an exception from a fault
def convertFault(fault):
"""Convert a fault to the corresponding Exception type, if possible"""
@ -378,12 +385,15 @@ def decode_args2(args, names, strict=True):
ret.update(opts)
return ret
## BEGIN kojikamid dup
def encode_int(n):
"""If n is too large for a 32bit signed, convert it to a string"""
if n <= 2147483647:
return n
#else
return str(n)
## END kojikamid dup
def decode_int(n):
"""If n is not an integer, attempt to convert it"""
@ -401,6 +411,8 @@ def safe_xmlrpc_loads(s):
except Fault, f:
return f
## BEGIN kojikamid dup
def ensuredir(directory):
"""Create directory, if necessary."""
if os.path.isdir(directory):
@ -412,6 +424,7 @@ def ensuredir(directory):
if not os.path.isdir(directory):
#something else must have gone wrong
raise
## END kojikamid dup
def daemonize():
"""Detach and run in background"""
@ -1395,13 +1408,33 @@ class PathInfo(object):
release = build['release']
return self.topdir + ("/maven2/%(group_path)s/%(artifact_id)s/%(version)s/%(release)s" % locals())
def winbuild(self, build):
"""Return the directory where the Windows build exists"""
return self.build(build) + '/win'
def winfile(self, wininfo):
"""Return the relative path from the winbuild directory where the
file identified by wininfo is located."""
filepath = wininfo['filename']
if wininfo['relpath']:
filepath = wininfo['relpath'] + '/' + filepath
return filepath
def mavenfile(self, maveninfo):
"""Return the relative path the file exists in the per-tag Maven repo"""
group_path = maveninfo['group_id'].replace('.', '/')
artifact_id = maveninfo['artifact_id']
version = maveninfo['version']
filename = maveninfo['filename']
return "%(group_path)s/%(artifact_id)s/%(version)s/%(filename)s" % locals()
def mavenrepo(self, build, maveninfo):
"""Return the directory where the Maven artifact exists in the per-tag Maven repo
(/mnt/koji/repos/tag-name/repo-id/maven2/)"""
group_path = maveninfo['group_id'].replace('.', '/')
artifact_id = maveninfo['artifact_id']
version = maveninfo['version']
return self.topdir + ("/maven2/%(group_path)s/%(artifact_id)s/%(version)s" % locals())
return self.topdir + "/maven2/" + os.path.dirname(self.mavenfile(maveninfo))
def rpm(self,rpminfo):
"""Return the path (relative to build_dir) where an rpm belongs"""
@ -1939,13 +1972,17 @@ def formatTimeLong(value):
def buildLabel(buildInfo, showEpoch=False):
"""Format buildInfo (dict) into a descriptive label."""
epoch = buildInfo['epoch']
epoch = buildInfo.get('epoch')
if showEpoch and epoch != None:
epochStr = '%i:' % epoch
else:
epochStr = ''
return '%s%s-%s-%s' % (epochStr, buildInfo['package_name'],
buildInfo['version'], buildInfo['release'])
name = buildInfo.get('package_name')
if not name:
name = buildInfo.get('name')
return '%s%s-%s-%s' % (epochStr, name,
buildInfo.get('version'),
buildInfo.get('release'))
def _module_info(url):
module_info = ''
@ -1999,6 +2036,16 @@ def taskLabel(taskInfo):
extra = '%s, %s' % (build_tag['name'], buildLabel(build))
else:
extra = build_tag['name']
elif method == 'winbuild':
if taskInfo.has_key('request'):
vm = taskInfo['request'][0]
url = taskInfo['request'][1]
target = taskInfo['request'][2]
module_info = _module_info(url)
extra = '%s, %s' % (target, module_info)
elif method == 'vmExec':
if taskInfo.has_key('request'):
extra = taskInfo['request'][0]
elif method == 'buildNotification':
if taskInfo.has_key('request'):
build = taskInfo['request'][1]

1132
koji/daemon.py Normal file

File diff suppressed because it is too large Load diff

457
koji/tasks.py Normal file
View file

@ -0,0 +1,457 @@
# Task definitions used by various Koji daemons
# Copyright (c) 2010 Red Hat, Inc.
#
# Koji is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 2.1 of the License.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# Authors:
# Mike McLean <mikem@redhat.com>
import koji
import os
import logging
import xmlrpclib
import signal
import urllib2
import shutil
import random
import time
import pprint
def scan_mounts(topdir):
    """Locate all mount points at or below topdir.

    Reads /proc/mounts and returns the matching mount paths, ordered so
    that deeper directories come first (convenient for unmounting).
    """
    topdir = os.path.normpath(topdir)
    fo = open('/proc/mounts', 'r')
    matches = [line.split()[1] for line in fo.readlines()
               if line.split()[1].startswith(topdir)]
    fo.close()
    # deepest paths first
    return sorted(matches, reverse=True)
def umount_all(topdir):
"Unmount every mount under topdir"
logger = logging.getLogger("koji.build")
for path in scan_mounts(topdir):
logger.debug('Unmounting %s' % path)
cmd = ['umount', '-l', path]
rv = os.spawnvp(os.P_WAIT,cmd[0],cmd)
if rv != 0:
raise koji.GenericError, 'umount failed (exit code %r) for %s' % (rv,path)
#check mounts again
remain = scan_mounts(topdir)
if remain:
raise koji.GenericError, "Unmounting incomplete: %r" % remain
def safe_rmtree(path, unmount=False, strict=True):
logger = logging.getLogger("koji.build")
#safe remove: with -xdev the find cmd will not cross filesystems
# (though it will cross bind mounts from the same filesystem)
if not os.path.exists(path):
logger.debug("No such path: %s" % path)
return
if unmount:
umount_all(path)
#first rm -f non-directories
logger.debug('Scrubbing files in %s' % path)
rv = os.system("find '%s' -xdev \\! -type d -print0 |xargs -0 rm -f" % path)
msg = 'file removal failed (code %r) for %s' % (rv,path)
if rv != 0:
logger.warn(msg)
if strict:
raise koji.GenericError, msg
else:
return rv
#them rmdir directories
#with -depth, we start at the bottom and work up
logger.debug('Scrubbing directories in %s' % path)
rv = os.system("find '%s' -xdev -depth -type d -print0 |xargs -0 rmdir" % path)
msg = 'dir removal failed (code %r) for %s' % (rv,path)
if rv != 0:
logger.warn(msg)
if strict:
raise koji.GenericError, msg
return rv
class ServerExit(Exception):
    """Raised to request that the server shut down"""
    pass
class BaseTaskHandler(object):
"""The base class for task handlers
Each task handler is a class, a new instance of which is created
to handle each task.
"""
# list of methods the class can handle
Methods = []
# Options:
Foreground = False
    def __init__(self, id, method, params, session, options, workdir=None):
        """Set up a handler instance for a single task.

        id: the task id
        method: the task method name; must appear in self.Methods
        params: raw task parameters; decoded into positional params and
                named opts via koji.decode_args
        session: session used to talk to the hub
        options: daemon configuration options (must have a workdir attribute)
        workdir: working directory; defaults to the standard per-task path
        """
        self.id = id   #task id
        if method not in self.Methods:
            raise koji.GenericError, 'method "%s" is not supported' % method
        self.method = method
        # handle named parameters
        self.params,self.opts = koji.decode_args(*params)
        self.session = session
        self.options = options
        if workdir is None:
            workdir = "%s/%s" % (self.options.workdir, koji.pathinfo.taskrelpath(id))
        self.workdir = workdir
        self.logger = logging.getLogger("koji.build.BaseTaskHandler")
        # only set (via setManager) for foreground tasks
        self.manager = None
def setManager(self,manager):
"""Set the manager attribute
This is only used for foreground tasks to give them access
to their task manager.
"""
if not self.Foreground:
return
self.manager = manager
    def handler(self):
        """(abstract) the handler for the task.

        Subclasses must override this.  It is called with the decoded task
        parameters (see run) and its return value becomes the task result.
        """
        raise NotImplementedError
    def run(self):
        """Execute the task

        Creates a fresh work directory, invokes the handler with the
        decoded parameters, and removes the workdir afterwards even if
        the handler raises.
        """
        self.createWorkdir()
        try:
            return self.handler(*self.params,**self.opts)
        finally:
            self.removeWorkdir()
    _taskWeight = 1.0   # default task weight; subclasses may override

    def weight(self):
        """Return the weight of the task.

        This is run by the taskmanager before the task is run to determine
        the weight of the task. The weight is an abstract measure of the
        total load the task places on the system while running.

        A task may set _taskWeight for a constant weight different from 1, or
        override this function for more complicated situations.

        Note that task weight is partially ignored while the task is sleeping.
        """
        return getattr(self,'_taskWeight',1.0)
    def createWorkdir(self):
        """Create a fresh, empty working directory (removing any leftover one first)."""
        if self.workdir is None:
            return
        self.removeWorkdir()
        os.makedirs(self.workdir)
    def removeWorkdir(self):
        """Remove the working directory tree, if one is configured."""
        if self.workdir is None:
            return
        safe_rmtree(self.workdir, unmount=False, strict=True)
        #os.spawnvp(os.P_WAIT, 'rm', ['rm', '-rf', self.workdir])
    def wait(self, subtasks=None, all=False, failany=False):
        """Wait on subtasks

        subtasks is a list of integers (or an integer). If more than one subtask
        is specified, then the default behavior is to return when any of those
        tasks complete. However, if all is set to True, then it waits for all of
        them to complete.  If all and failany are both set to True, then each
        finished task will be checked for failure, and a failure will cause all
        of the unfinished tasks to be cancelled.

        special values:
            subtasks = None     specify all subtasks

        Implementation notes:
            The build daemon forks all tasks as separate processes. This function
            uses signal.pause to sleep. The main process watches subtasks in
            the database and will send the subprocess corresponding to the
            subtask a SIGUSR2 to wake it up when subtasks complete.
        """
        if isinstance(subtasks,int):
            # allow single integer w/o enclosing list
            subtasks = [subtasks]
        self.session.host.taskSetWait(self.id,subtasks)
        self.logger.debug("Waiting on %r" % subtasks)
        while True:
            finished, unfinished = self.session.host.taskWait(self.id)
            if len(unfinished) == 0:
                #all done
                break
            elif len(finished) > 0:
                if all:
                    if failany:
                        failed = False
                        for task in finished:
                            try:
                                # getTaskResult raises on a failed/canceled task;
                                # the result value itself is not used here
                                result = self.session.getTaskResult(task)
                            except (koji.GenericError, xmlrpclib.Fault), task_error:
                                self.logger.info("task %s failed or was canceled" % task)
                                failed = True
                                break
                        if failed:
                            self.logger.info("at least one task failed or was canceled, cancelling unfinished tasks")
                            self.session.cancelTaskChildren(self.id)
                            # reraise the original error now, rather than waiting for
                            # an error in taskWaitResults()
                            raise task_error
                else:
                    # at least one done
                    break
            # signal handler set by TaskManager.forkTask
            self.logger.debug("Pausing...")
            signal.pause()
            # main process will wake us up with SIGUSR2
            self.logger.debug("...waking up")
        self.logger.debug("Finished waiting")
        return dict(self.session.host.taskWaitResults(self.id,subtasks))
    def getUploadDir(self):
        """Return the relative upload path for this task's output on the hub."""
        return koji.pathinfo.taskrelpath(self.id)
def uploadFile(self, filename, relPath=None, remoteName=None):
"""Upload the file with the given name to the task output directory
on the hub."""
uploadPath = self.getUploadDir()
if relPath:
relPath = relPath.strip('/')
uploadPath += '/' + relPath
# Only upload files with content
if os.path.isfile(filename) and os.stat(filename).st_size > 0:
self.session.uploadWrapper(filename, uploadPath, remoteName)
def uploadTree(self, dirpath, flatten=False):
"""Upload the directory tree at dirpath to the task directory on the
hub, preserving the directory structure"""
dirpath = dirpath.rstrip('/')
for path, dirs, files in os.walk(dirpath):
if flatten:
relpath = None
else:
relpath = path[len(dirpath) + 1:]
for filename in files:
self.uploadFile(os.path.join(path, filename), relpath)
def localPath(self, relpath):
"""Return a local path to a remote file.
If the file is on an nfs mount, use that, otherwise download a copy"""
if self.options.topurl:
fn = "%s/local/%s" % (self.workdir, relpath)
if os.path.exists(fn):
# We've already downloaded this file,
# just return the existing local path
return fn
self.logger.debug("Downloading %s", relpath)
url = "%s/%s" % (self.options.topurl, relpath)
fsrc = urllib2.urlopen(url)
if not os.path.exists(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
fdst = file(fn, 'w')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
else:
fn = "%s/%s" % (self.options.topdir, relpath)
return fn
    def subtask(self, method, arglist, **opts):
        """Create a subtask of this task on the hub (positional-arglist form)."""
        return self.session.host.subtask(method, arglist, self.id, **opts)
    def subtask2(self, __taskopts, __method, *args, **kwargs):
        """Create a subtask of this task on the hub (explicit task-options form).

        Double-underscore parameter names keep them out of the way of
        arbitrary **kwargs forwarded to the subtask's method.
        """
        return self.session.host.subtask2(self.id, __taskopts, __method, *args, **kwargs)
def find_arch(self, arch, host, tag):
"""
For noarch tasks, find a canonical arch that is supported by both the host and tag.
If the arch is anything other than noarch, return it unmodified.
"""
if arch != "noarch":
return arch
# We need a concrete arch. Pick one that:
# a) this host can handle
# b) the build tag can support
# c) is canonical
host_arches = host['arches']
if not host_arches:
raise koji.BuildError, "No arch list for this host: %s" % host['name']
tag_arches = tag['arches']
if not tag_arches:
raise koji.BuildError, "No arch list for tag: %s" % tag['name']
# index canonical host arches
host_arches = set([koji.canonArch(a) for a in host_arches.split()])
# index canonical tag arches
tag_arches = set([koji.canonArch(a) for a in tag_arches.split()])
# find the intersection of host and tag arches
common_arches = list(host_arches & tag_arches)
if common_arches:
# pick one of the common arches randomly
# need to re-seed the prng or we'll get the same arch every time,
# because we just forked from a common parent
random.seed()
arch = random.choice(common_arches)
self.logger.info('Valid arches: %s, using: %s' % (' '.join(common_arches), arch))
return arch
else:
# no overlap
raise koji.BuildError, "host %s (%s) does not support any arches of tag %s (%s)" % \
(host['name'], ', '.join(host_arches), tag['name'], ', '.join(tag_arches))
def getRepo(self, tag):
"""
Get the active repo for the given tag. If there is no repo available,
wait for a repo to be created.
"""
repo_info = self.session.getRepo(tag)
if not repo_info:
#wait for it
task_id = self.session.host.subtask(method='waitrepo',
arglist=[tag, None, None],
parent=self.id)
repo_info = self.wait(task_id)[task_id]
return repo_info
class FakeTask(BaseTaskHandler):
    """Testing aid: a foreground task that logs its args and returns 42."""
    Methods = ['someMethod']
    Foreground = True
    def handler(self, *args):
        self.logger.info("This is a fake task. Args: " + str(args))
        return 42
class SleepTask(BaseTaskHandler):
    """Testing aid: sleep for n seconds, logging start and finish."""
    Methods = ['sleep']
    # lightweight task -- fractional weight so many can run at once
    _taskWeight = 0.25
    def handler(self, n):
        self.logger.info("Sleeping for %s seconds" % n)
        time.sleep(n)
        self.logger.info("Finished sleeping")
class ForkTask(BaseTaskHandler):
    """Testing aid: spawn n detached 'sleep m' child processes."""
    Methods = ['fork']
    def handler(self, n=5, m=37):
        for i in xrange(n):
            # P_NOWAIT: fire and forget, we do not reap these children here
            os.spawnvp(os.P_NOWAIT, 'sleep', ['sleep',str(m)])
class WaitTestTask(BaseTaskHandler):
    """Testing aid: spawn `count` sleep subtasks and wait for all of them."""
    Methods = ['waittest']
    _taskWeight = 0.1
    def handler(self,count,seconds=10):
        # spawn count sleep subtasks, labeled 0..count-1
        tasks = []
        for i in xrange(count):
            task_id = self.session.host.subtask(method='sleep',
                                                arglist=[seconds],
                                                label=str(i),
                                                parent=self.id)
            tasks.append(task_id)
        # block until every child finishes, then log the collected results
        results = self.wait(all=True)
        self.logger.info(pprint.pformat(results))
class SubtaskTask(BaseTaskHandler):
    """Testing aid: recursively spawn a chain of n subtasks ending in a sleep."""
    Methods = ['subtask']
    _taskWeight = 0.1
    def handler(self,n=4):
        if n > 0:
            # recurse: spawn another 'subtask' task with a decremented counter
            task_id = self.session.host.subtask(method='subtask',
                                                arglist=[n-1],
                                                label='foo',
                                                parent=self.id)
            self.wait(task_id)
        else:
            # bottom of the chain: finish with a short sleep task
            task_id = self.session.host.subtask(method='sleep',
                                                arglist=[15],
                                                label='bar',
                                                parent=self.id)
            self.wait(task_id)
class DefaultTask(BaseTaskHandler):
    """Used when no matching method is found"""
    Methods = ['default']
    _taskWeight = 0.1
    def handler(self,*args,**opts):
        # always an error: the hub requested a method this builder lacks
        raise koji.GenericError, "Invalid method: %s" % self.method
class ShutdownTask(BaseTaskHandler):
    """Foreground task that shuts the daemon down by raising ServerExit."""
    Methods = ['shutdown']
    _taskWeight = 0.0
    Foreground = True
    def handler(self):
        #note: this is a foreground task, so the exception propagates to
        #the main daemon process rather than a forked child
        raise ServerExit
class DependantTask(BaseTaskHandler):
Methods = ['dependantTask']
#mostly just waiting on other tasks
_taskWeight = 0.2
def handler(self, wait_list, task_list):
for task in wait_list:
if not isinstance(task, int) or not self.session.getTaskInfo(task):
self.logger.debug("invalid task id %s, removing from wait_list" % task)
wait_list.remove(task)
# note, tasks in wait_list are not children of this task so we can't
# just use self.wait()
while wait_list:
for task in wait_list[:]:
if self.session.taskFinished(task):
info = self.session.getTaskInfo(task)
if info and koji.TASK_STATES[info['state']] in ['CANCELED','FAILED']:
raise koji.GenericError, "Dependency %s failed to complete." % info['id']
wait_list.remove(task)
# let the system rest before polling again
time.sleep(1)
subtasks = []
for task in task_list:
# **((len(task)>2 and task[2]) or {}) expands task[2] into opts if it exists, allows for things like 'priority=15'
task_id = self.session.host.subtask(method=task[0], arglist=task[1], parent=self.id, **((len(task)>2 and task[2]) or {}))
if task_id:
subtasks.append(task_id)
if subtasks:
self.wait(subtasks, all=True)
class MultiPlatformTask(BaseTaskHandler):
    """Base class for task handlers that build across multiple platforms."""
    def buildWrapperRPM(self, spec_url, build_task_id, build_tag, build, repo_id, **opts):
        """Spawn a noarch wrapperRPM subtask for the given build and wait for it.

        Returns the subtask's result dict, augmented with the subtask id
        under the 'task_id' key.
        """
        task = self.session.getTaskInfo(build_task_id)
        arglist = [spec_url, build_tag, build, task, {'repo_id': repo_id}]
        rpm_task_id = self.session.host.subtask(method='wrapperRPM',
                                                arglist=arglist,
                                                label='rpm',
                                                parent=self.id,
                                                arch='noarch',
                                                **opts)
        results = self.wait(rpm_task_id)[rpm_task_id]
        results['task_id'] = rpm_task_id
        return results

View file

@ -1,4 +1,4 @@
# Copyright (c) 2005-2007 Red Hat
# Copyright (c) 2005-2010 Red Hat
#
# Koji is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
@ -18,6 +18,7 @@ import calendar
import re
import time
import koji
import os
try:
from hashlib import md5 as md5_constructor
@ -147,3 +148,21 @@ def filedigestAlgo(hdr):
digest_algo_id = None
digest_algo = koji.RPM_FILEDIGESTALGO_IDS.get(digest_algo_id, 'unknown')
return digest_algo.lower()
def parseStatus(rv, prefix):
if isinstance(prefix, list) or isinstance(prefix, tuple):
prefix = ' '.join(prefix)
if os.WIFSIGNALED(rv):
return '%s was killed by signal %i' % (prefix, os.WTERMSIG(rv))
elif os.WIFEXITED(rv):
return '%s exited with status %i' % (prefix, os.WEXITSTATUS(rv))
else:
return '%s terminated for unknown reasons' % prefix
def isSuccess(rv):
"""Return True if rv indicates successful completion
(exited with status 0), False otherwise."""
if os.WIFEXITED(rv) and os.WEXITSTATUS(rv) == 0:
return True
else:
return False

View file

@ -1,7 +1,7 @@
#!/usr/bin/python
# Koji Repository Administrator (kojira)
# Copyright (c) 2005-2007 Red Hat
# Copyright (c) 2005-2010 Red Hat
#
# Koji is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
@ -27,6 +27,7 @@ except ImportError:
import sys
import os
import koji
from koji.tasks import safe_rmtree
from optparse import OptionParser
from ConfigParser import ConfigParser
import fnmatch
@ -38,38 +39,11 @@ import time
import traceback
def safe_rmtree(path, strict=True):
logger = logging.getLogger("koji.repo")
#safe remove: with -xdev the find cmd will not cross filesystems
# (though it will cross bind mounts from the same filesystem)
if not os.path.exists(path):
logger.debug("No such path: %s" % path)
return
#first rm -f non-directories
logger.debug('Removing files under %s' % path)
rv = os.system("find '%s' -xdev \\! -type d -print0 |xargs -0 rm -f" % path)
msg = 'file removal failed (code %r) for %s' % (rv,path)
if rv != 0:
logger.warn(msg)
if strict:
raise koji.GenericError, msg
else:
return rv
#them rmdir directories
#with -depth, we start at the bottom and work up
logger.debug('Removing directories under %s' % path)
rv = os.system("find '%s' -xdev -depth -type d -print0 |xargs -0 rmdir" % path)
msg = 'dir removal failed (code %r) for %s' % (rv,path)
if rv != 0:
logger.warn(msg)
if strict:
raise koji.GenericError, msg
return rv
class ManagedRepo(object):
def __init__(self, data):
def __init__(self, session, options, data):
self.session = session
self.options = options
self.logger = logging.getLogger("koji.repo")
self.current = True
self.repo_id = data['id']
@ -78,7 +52,7 @@ class ManagedRepo(object):
self.tag_id = data['tag_id']
self.state = data['state']
self.first_seen = time.time()
order = session.getFullInheritance(self.tag_id, event=self.event_id)
order = self.session.getFullInheritance(self.tag_id, event=self.event_id)
#order may contain same tag more than once
tags = {self.tag_id : 1}
for x in order:
@ -90,9 +64,9 @@ class ManagedRepo(object):
if self.state == koji.REPO_EXPIRED:
return
elif self.state == koji.REPO_DELETED:
raise GenericError, "Repo already deleted"
raise koji.GenericError, "Repo already deleted"
self.logger.info("Expiring repo %s.." % self.repo_id)
session.repoExpire(self.repo_id)
self.session.repoExpire(self.repo_id)
self.state = koji.REPO_EXPIRED
def expired(self):
@ -124,7 +98,7 @@ class ManagedRepo(object):
def tryDelete(self):
"""Remove the repo from disk, if possible"""
tag_info = session.getTag(self.tag_id)
tag_info = self.session.getTag(self.tag_id)
if not tag_info:
self.logger.warn('Could not get info for tag %i, skipping delete of repo %i' %
(self.tag_id, self.repo_id))
@ -146,13 +120,13 @@ class ManagedRepo(object):
return False
else:
age = time.time() - max(self.event_ts, mtime)
if age < options.deleted_repo_lifetime:
if age < self.options.deleted_repo_lifetime:
#XXX should really be called expired_repo_lifetime
return False
self.logger.debug("Attempting to delete repo %s.." % self.repo_id)
if self.state != koji.REPO_EXPIRED:
raise GenericError, "Repo not expired"
if session.repoDelete(self.repo_id) > 0:
raise koji.GenericError, "Repo not expired"
if self.session.repoDelete(self.repo_id) > 0:
#cannot delete, we are referenced by a buildroot
self.logger.debug("Cannot delete repo %s, still referenced" % self.repo_id)
return False
@ -178,7 +152,7 @@ class ManagedRepo(object):
#also no point in checking
return False
self.logger.debug("Checking for changes: %r" % self.taglist)
if session.tagChangedSinceEvent(self.event_id,self.taglist):
if self.session.tagChangedSinceEvent(self.event_id,self.taglist):
self.logger.debug("Tag data has changed since event %r" % self.event_id)
self.current = False
else:
@ -188,7 +162,9 @@ class ManagedRepo(object):
class RepoManager(object):
def __init__(self):
def __init__(self, options, session):
self.options = options
self.session = session
self.repos = {}
self.tasks = {}
self.tag_use_stats = {}
@ -203,7 +179,7 @@ class RepoManager(object):
def readCurrentRepos(self):
self.logger.debug("Reading current repo data")
repodata = session.getActiveRepos()
repodata = self.session.getActiveRepos()
self.logger.debug("Repo data: %r" % repodata)
for data in repodata:
repo_id = data['id']
@ -217,7 +193,7 @@ class RepoManager(object):
else:
self.logger.info('Found repo %s, state=%s'
%(repo_id, koji.REPO_STATES[data['state']]))
self.repos[repo_id] = ManagedRepo(data)
self.repos[repo_id] = ManagedRepo(self.session, self.options, data)
def pruneLocalRepos(self):
"""Scan filesystem for repos and remove any deleted ones
@ -230,12 +206,12 @@ class RepoManager(object):
tagdir = "%s/%s" % (topdir, tag)
if not os.path.isdir(tagdir):
continue
taginfo = session.getTag(tag)
taginfo = self.session.getTag(tag)
if taginfo is None:
self.logger.warn("Unexpected directory (no such tag): %s" % tagdir)
continue
for repo_id in os.listdir(tagdir):
if count >= options.prune_batch_size:
if count >= self.options.prune_batch_size:
#this keeps us from spending too much time on this at one time
return
repodir = "%s/%s" % (tagdir, repo_id)
@ -253,11 +229,11 @@ class RepoManager(object):
except OSError:
#just in case something deletes the repo out from under us
continue
rinfo = session.repoInfo(repo_id)
rinfo = self.session.repoInfo(repo_id)
if rinfo is None:
if not options.ignore_stray_repos:
if not self.options.ignore_stray_repos:
age = time.time() - dir_ts
if age > options.deleted_repo_lifetime:
if age > self.options.deleted_repo_lifetime:
count += 1
self.logger.info("Removing unexpected directory (no such repo): %s" % repodir)
safe_rmtree(repodir, strict=False)
@ -267,7 +243,7 @@ class RepoManager(object):
continue
if rinfo['state'] in (koji.REPO_DELETED, koji.REPO_PROBLEM):
age = time.time() - max(rinfo['create_ts'], dir_ts)
if age > options.deleted_repo_lifetime:
if age > self.options.deleted_repo_lifetime:
#XXX should really be called expired_repo_lifetime
count += 1
logger.info("Removing stray repo (state=%s): %s" % (koji.REPO_STATES[rinfo['state']], repodir))
@ -280,8 +256,8 @@ class RepoManager(object):
if stats and now - stats['ts'] < 3600:
#use the cache
return stats
data = session.listBuildroots(tagID=tag_id,
queryOpts={'order': '-create_event_id', 'limit' : 100})
data = self.session.listBuildroots(tagID=tag_id,
queryOpts={'order': '-create_event_id', 'limit' : 100})
#XXX magic number (limit)
if data:
tag_name = data[0]['tag_name']
@ -350,7 +326,7 @@ class RepoManager(object):
def updateRepos(self):
#check on tasks
for tag_id, task_id in self.tasks.items():
tinfo = session.getTaskInfo(task_id)
tinfo = self.session.getTaskInfo(task_id)
tstate = koji.TASK_STATES[tinfo['state']]
if tstate == 'CLOSED':
self.logger.info("Finished: newRepo task %s for tag %s" % (task_id, tag_id))
@ -368,7 +344,7 @@ class RepoManager(object):
repo.expire()
#find out which tags require repos
tags = {}
for target in session.getBuildTargets():
for target in self.session.getBuildTargets():
tag_id = target['build_tag']
tags[tag_id] = target['build_tag_name']
#index repos by tag
@ -410,20 +386,20 @@ class RepoManager(object):
self.logger.debug("order: %s", regen)
# i.e. tags with oldest (or no) repos get precedence
for ts, tag_id in regen:
if len(self.tasks) >= options.max_repo_tasks:
if len(self.tasks) >= self.options.max_repo_tasks:
self.logger.info("Maximum number of repo tasks reached.")
break
tagname = tags[tag_id]
taskopts = {}
for pat in options.debuginfo_tags.split():
for pat in self.options.debuginfo_tags.split():
if fnmatch.fnmatch(tagname, pat):
taskopts['debuginfo'] = True
break
for pat in options.source_tags.split():
for pat in self.options.source_tags.split():
if fnmatch.fnmatch(tagname, pat):
taskopts['src'] = True
break
task_id = session.newRepo(tagname, **taskopts)
task_id = self.session.newRepo(tagname, **taskopts)
self.logger.info("Created newRepo task %s for tag %s (%s)" % (task_id, tag_id, tags[tag_id]))
self.tasks[tag_id] = task_id
#some cleanup
@ -435,7 +411,7 @@ class RepoManager(object):
if repo.ready():
repo.expire()
for repo in repolist:
if n_deletes >= options.delete_batch_size:
if n_deletes >= self.options.delete_batch_size:
break
if repo.expired():
#try to delete
@ -443,10 +419,13 @@ class RepoManager(object):
n_deletes += 1
def main():
repomgr = RepoManager()
def main(options, session):
repomgr = RepoManager(options, session)
repomgr.readCurrentRepos()
repomgr.pruneLocalRepos()
def shutdown(*args):
raise SystemExit
signal.signal(signal.SIGTERM,shutdown)
logger.info("Entering main loop")
while True:
try:
@ -475,11 +454,6 @@ def main():
finally:
sys.exit()
def _exit_signal_handler(signum, frame):
logger.error('Exiting on signal')
session.logout()
sys.exit(1)
def get_options():
"""process options from command line and config file"""
# parse command line args
@ -634,6 +608,4 @@ if __name__ == "__main__":
koji.daemonize()
else:
koji.add_stderr_logger("koji")
main()
main(options, session)

36
vm/Makefile Normal file
View file

@ -0,0 +1,36 @@
BINFILES = kojivmd
SHAREFILES = kojikamid
_default:
@echo "nothing to make. try make install"
clean:
rm -f *.o *.so *.pyc *~ kojikamid
kojikamid: kojikamid.py
bash fix_kojikamid.sh >kojikamid
install: kojikamid
@if [ "$(DESTDIR)" = "" ]; then \
echo " "; \
echo "ERROR: A destdir is required"; \
exit 1; \
fi
mkdir -p $(DESTDIR)/usr/sbin
install -p -m 755 $(BINFILES) $(DESTDIR)/usr/sbin
mkdir -p $(DESTDIR)/usr/share/kojivmd
install -p -m 644 $(SHAREFILES) $(DESTDIR)/usr/share/kojivmd
mkdir -p $(DESTDIR)/etc/rc.d/init.d
install -p -m 755 kojivmd.init $(DESTDIR)/etc/rc.d/init.d/kojivmd
mkdir -p $(DESTDIR)/etc/sysconfig
install -p -m 644 kojivmd.sysconfig $(DESTDIR)/etc/sysconfig/kojivmd
mkdir -p $(DESTDIR)/etc/kojivmd
install -p -m 644 kojivmd.conf $(DESTDIR)/etc/kojivmd/kojivmd.conf

10
vm/fix_kojikamid.sh Executable file
View file

@ -0,0 +1,10 @@
#!/bin/bash
awk '/^## INSERT kojikamid dup/ {exit} {print $0}' kojikamid.py
for fn in ../koji/__init__.py ../koji/daemon.py
do
awk '/^## END kojikamid dup/ {p=0} p {print $0} /^## BEGIN kojikamid dup/ {p=1}' $fn
done
awk 'p {print $0} /^## INSERT kojikamid dup/ {p=1}' kojikamid.py

674
vm/kojikamid.py Executable file
View file

@ -0,0 +1,674 @@
#!/usr/bin/python
# Koji daemon that runs in a Windows VM and executes commands associated
# with a task.
# Copyright (c) 2010 Red Hat
#
# Koji is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 2.1 of the License.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors:
# Mike Bonnet <mikeb@redhat.com>
# Jay Greguske <jgregusk@redhat.com>
#
# To register this script as a service on Windows 2008 (with Cygwin 1.7.5 installed) run:
# kojiwind --install
# in a cygwin shell.
from optparse import OptionParser
from ConfigParser import ConfigParser
import os
import subprocess
import sys
import tempfile
import time
import urlparse
import xmlrpclib
import base64
import hashlib
import logging
import traceback
import threading
import re
import glob
MANAGER_PORT = 7000
KOJIKAMID = True
## INSERT kojikamid dup
class fakemodule(object):
pass
#make parts of the above insert accessible as koji.X
koji = fakemodule()
koji.GenericError = GenericError
koji.BuildError = BuildError
class WindowsBuild(object):
LEADING_CHAR = re.compile('^[^A-Za-z_]')
VAR_CHARS = re.compile('[^A-Za-z0-9_]')
def __init__(self, server):
"""constructor: check ini spec file syntax, set build properties"""
self.logger = logging.getLogger('koji.vm')
self.server = server
info = server.getTaskInfo()
self.source_url = info[0]
self.build_tag = info[1]
if len(info) > 2:
self.task_opts = info[2]
else:
self.task_opts = {}
self.workdir = '/tmp/build'
ensuredir(self.workdir)
self.buildreq_dir = os.path.join(self.workdir, 'buildreqs')
ensuredir(self.buildreq_dir)
self.source_dir = None
self.spec_dir = None
# we initialize these here for clarity, but they are populated in loadConfig()
self.name = None
self.version = None
self.release = None
self.description = None
self.platform = None
self.preinstalled = []
self.buildrequires = []
self.provides = []
self.shell = None
self.execute = []
self.postbuild = []
self.output = {}
self.logs = []
def checkTools(self):
"""Is this environment fit to build in, based on the spec file?"""
errors = []
for entry in self.preinstalled:
checkdir = False
if entry.startswith('/'):
# Cygwin path
if entry.endswith('/'):
# directory
checkdir = True
elif entry[1:3] == ':\\':
# Windows path
if entry.endswith('\\'):
# directory
checkdir = True
else:
# Check in the path
ret, output = run(['which', entry], log=False, fatal=False)
output = output.strip()
if ret:
errors.append(output)
else:
self.logger.debug('command %s is available at %s', entry, output)
continue
if checkdir:
if not os.path.isdir(entry):
errors.append('directory %s does not exist' % entry)
else:
self.logger.debug('directory %s exists', entry)
else:
# file
if not os.path.isfile(entry):
errors.append('file %s does not exist' % entry)
else:
self.logger.debug('file %s exists', entry)
if errors:
raise BuildError, 'error validating build environment: %s' % \
', '.join(errors)
def updateClam(self):
"""update ClamAV virus definitions"""
ret, output = run(['freshclam'])
if ret and ret == 1:
self.logger.info('ClamAV database is already up to date')
elif ret:
raise BuildError, 'could not update ClamAV database: %s' % output
def checkEnv(self):
"""make the environment is fit for building in"""
# right now we just check for ClamAV executables
for clam_tool in ('freshclam', 'clamscan'):
ret, output = run(['which', clam_tool], log=False, fatal=False)
if ret:
raise BuildError, '%s appears to be missing, is ClamAV installed?' % clam_tool
def checkout(self):
"""Checkout sources, winspec, and patches, and apply patches"""
src_scm = SCM(self.source_url)
self.source_dir = src_scm.checkout(ensuredir(os.path.join(self.workdir, 'source')))
if 'winspec' in self.task_opts:
spec_scm = SCM(self.task_opts['winspec'])
self.spec_dir = spec_scm.checkout(ensuredir(os.path.join(self.workdir, 'spec')))
else:
self.spec_dir = self.source_dir
if 'patches' in self.task_opts:
patch_scm = SCM(self.task_opts['patches'])
patch_dir = patch_scm.checkout(ensuredir(os.path.join(self.workdir, 'patches')))
self.applyPatches(self.source_dir, patch_dir)
self.virusCheck(self.workdir)
def applyPatches(self, sourcedir, patchdir):
"""Apply patches in patchdir to files in sourcedir)"""
patches = [patch for patch in os.listdir(patchdir) if \
os.path.isfile(os.path.join(patchdir, patch)) and \
not patch.startswith('.')]
if not patches:
raise BuildError, 'no patches found at %s' % patchdir
patches.sort()
for patch in patches:
cmd = ['/usr/bin/patch', '--verbose', '-d', sourcedir, '-p1', '-i', os.path.join(patchdir, patch)]
run(cmd, fatal=True)
def loadConfig(self):
"""Load build configuration from the spec file."""
specfiles = [spec for spec in os.listdir(self.spec_dir) if spec.endswith('.ini')]
if len(specfiles) == 0:
raise BuildError, 'No .ini file found'
elif len(specfiles) > 1:
raise BuildError, 'Multiple .ini files found'
conf = ConfigParser()
conf.read(os.path.join(self.spec_dir, specfiles[0]))
# [naming] section
for entry in ('name', 'version', 'release', 'description'):
setattr(self, entry, conf.get('naming', entry))
if conf.has_option('naming', 'epoch'):
self.epoch = conf.get('naming', 'epoch')
else:
self.epoch = None
# [building] section
self.platform = conf.get('building', 'platform')
# preinstalled are paths to files or directories that must exist
# in the VM for it to execute the build.
# If the path ends in / or \ it must be a directory, otherwise it must
# be a file.
# They may be specified as Cygwin (/cygdrive/c/...) or Windows (C:\...)
# absolute paths, or without a path in which case it is searched for
# on the PATH.
if conf.has_option('building', 'preinstalled'):
self.preinstalled.extend([e.strip() for e in conf.get('building', 'preinstalled').split('\n') if e])
# buildrequires and provides are multi-valued (space-separated)
for br in conf.get('building', 'buildrequires').split():
# buildrequires is a space-separated list
# each item in the list is in the format:
# pkgname[:opt1:opt2=val2:...]
# the options are put into a dict
# if the option has no =val, the value in the dict will be None
if br:
br = br.split(':')
bropts = {}
for opt in br[1:]:
if '=' in opt:
key, val = opt.split('=', 1)
else:
key = opt
val = None
bropts[key] = val
self.buildrequires.append((br[0], bropts))
for prov in conf.get('building', 'provides').split():
if prov:
self.provides.append(prov)
# optionally specify a shell to use (defaults to bash)
# valid values are: cmd, cmd.exe (alias for cmd), and bash
if conf.has_option('building', 'shell'):
self.shell = conf.get('building', 'shell')
else:
self.shell = 'bash'
# execute is multi-valued (newline-separated)
self.execute.extend([e.strip() for e in conf.get('building', 'execute').split('\n') if e])
# postbuild are files or directories that must exist after the build is
# complete, but are not included in the build output
# they are specified as paths relative the source directory, and may be
# in Unix or Windows format
# each entry may contain shell-style globs, and one or more files
# matching the glob is considered valid
if conf.has_option('building', 'postbuild'):
self.postbuild.extend([e.strip() for e in conf.get('building', 'postbuild').split('\n') if e])
# [files] section
for entry in conf.get('files', 'output').split('\n'):
if not entry:
continue
tokens = entry.strip().split(':')
filename = tokens[0]
for var in ('name', 'version', 'release'):
filename = filename.replace('$' + var, getattr(self, var))
metadata = {}
metadata['platforms'] = tokens[1].split(',')
if len(tokens) > 2:
metadata['flags'] = tokens[2].split(',')
else:
metadata['flags'] = []
self.output[filename] = metadata
self.logs.extend([e.strip() for e in conf.get('files', 'logs').split('\n') if e])
def fetchFile(self, basedir, buildinfo, fileinfo, type):
"""Download the file from buildreq, at filepath, into the basedir"""
destpath = os.path.join(basedir, fileinfo['localpath'])
ensuredir(os.path.dirname(destpath))
destfile = file(destpath, 'w')
offset = 0
checksum = hashlib.md5()
while True:
encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576, type)
if not encoded:
break
data = base64.b64decode(encoded)
del encoded
destfile.write(data)
offset += len(data)
checksum.update(data)
destfile.close()
digest = checksum.hexdigest()
# rpms don't have a md5sum in the fileinfo, but check it for everything else
if ('md5sum' in fileinfo) and (digest != fileinfo['md5sum']):
raise BuildError, 'md5 checksum validation failed for %s, %s (computed) != %s (provided)' % \
(destpath, digest, fileinfo['md5sum'])
self.logger.info('Retrieved %s (%s bytes, md5: %s)', destpath, offset, digest)
def fetchBuildReqs(self):
"""Retrieve buildrequires listed in the spec file"""
for buildreq, brinfo in self.buildrequires:
# if no type is specified in the options, default to win
brtype = brinfo.get('type', 'win')
buildinfo = self.server.getLatestBuild(self.build_tag, buildreq,
self.task_opts.get('repo_id'))
br_dir = os.path.join(self.buildreq_dir, buildreq, brtype)
ensuredir(br_dir)
brinfo['dir'] = br_dir
brfiles = []
brinfo['files'] = brfiles
buildfiles = self.server.getFileList(buildinfo['id'], brtype, brinfo)
for fileinfo in buildfiles:
self.fetchFile(br_dir, buildinfo, fileinfo, brtype)
brfiles.append(fileinfo['localpath'])
self.virusCheck(self.buildreq_dir)
def build(self):
if self.shell in ('cmd', 'cmd.exe'):
self.cmdBuild()
else:
self.bashBuild()
def varname(self, name):
"""
Convert name to a valid shell variable name.
Converts leading characters that aren't letters or underscores
to underscores.
Converts any other characters that aren't letters, numbers,
or underscores to underscores.
"""
name = self.LEADING_CHAR.sub('_', name)
name = self.VAR_CHARS.sub('_', name)
return name
def cmdBuild(self):
"""Do the build: run the execute line(s) with cmd.exe"""
tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat', dir='/cygdrive/c/Windows/Temp')
script = os.fdopen(tmpfd, 'w')
for buildreq, brinfo in self.buildrequires:
buildreq = self.varname(buildreq)
ret, output = run(['cygpath', '-wa', brinfo['dir']], log=False, fatal=True)
br_dir = output.strip()
files = ' '.join(brinfo['files'])
files.replace('/', '\\')
if brinfo.get('type'):
# if the spec file qualifies the buildreq with a type,
# the env. var is named buildreq_type_{dir,files}
script.write('set %s_%s_dir=%s\r\n' % (buildreq, brinfo['type'], br_dir))
script.write('set %s_%s_files=%s\r\n' % (buildreq, brinfo['type'], files))
else:
# otherwise it's just buildreq_{dir,files}
script.write('set %s_dir=%s\r\n' % (buildreq, br_dir))
script.write('set %s_files=%s\r\n' % (buildreq, files))
script.write('\r\n')
script.write('set name=%s\r\n' % self.name)
script.write('set version=%s\r\n' % self.version)
script.write('set release=%s\r\n' % self.release)
for cmd in self.execute:
script.write(cmd)
script.write('\r\n')
script.close()
cmd = ['cmd.exe', '/C', 'C:\\Windows\\Temp\\' + os.path.basename(tmpname)]
ret, output = run(cmd, chdir=self.source_dir)
if ret:
raise BuildError, 'build command failed, see build.log for details'
def bashBuild(self):
"""Do the build: run the execute line(s) with bash"""
tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp.', dir='/tmp')
script = os.fdopen(tmpfd, 'w')
for buildreq, brinfo in self.buildrequires:
buildreq = self.varname(buildreq)
if brinfo.get('type'):
script.write("export %s_%s_dir='%s'\n" % (buildreq, brinfo['type'], brinfo['dir']))
script.write("export %s_%s_files='" % (buildreq, brinfo['type']))
else:
script.write("export %s_dir='%s'\n" % (buildreq, brinfo['dir']))
script.write("export %s_files='" % buildreq)
for filename in brinfo['files']:
script.write(filename)
script.write('\n')
script.write("'\n\n")
script.write('export name=%s\n' % self.name)
script.write('export version=%s\n' % self.version)
script.write('export release=%s\n' % self.release)
for cmd in self.execute:
script.write(cmd)
script.write('\n')
script.close()
cmd = ['/bin/bash', '-e', '-x', tmpname]
ret, output = run(cmd, chdir=self.source_dir)
if ret:
raise BuildError, 'build command failed, see build.log for details'
def checkBuild(self):
"""Verify that the build completed successfully."""
errors = []
for entry in self.postbuild:
relpath = entry
if '\\' in relpath:
relpath = relpath.replace('\\', '/')
fullpath = os.path.join(self.source_dir, relpath)
results = glob.glob(fullpath)
if fullpath.endswith('/'):
for result in results:
if os.path.isdir(result):
self.logger.debug('found directory %s at %s', entry, result)
break
else:
errors.append('directory %s does not exist' % entry)
else:
for result in results:
if os.path.isfile(result):
self.logger.debug('found file %s at %s', entry, result)
break
else:
errors.append('file %s does not exist' % entry)
self.virusCheck(self.workdir)
if errors:
raise BuildError, 'error validating build output: %s' % \
', '.join(errors)
def virusCheck(self, path):
"""ensure a path is virus free with ClamAV. path should be absolute"""
if not path.startswith('/'):
raise BuildError, 'Invalid path to scan for viruses: ' + path
run(['clamscan', '--quiet', '--recursive', path], fatal=True)
def gatherResults(self):
"""Gather information about the output from the build, return it"""
return {'name': self.name, 'version': self.version, 'release': self.release,
'epoch': self.epoch,
'description': self.description, 'platform': self.platform,
'provides': self.provides,
'output': self.output, 'logs': self.logs}
def run(self):
"""Run the entire build process"""
self.checkEnv()
self.updateClam()
self.checkout()
self.loadConfig()
self.checkTools()
self.fetchBuildReqs()
self.build()
self.checkBuild()
return self.gatherResults()
def run(cmd, chdir=None, fatal=False, log=True):
global logfd
output = ''
olddir = None
if chdir:
olddir = os.getcwd()
os.chdir(chdir)
if log:
logger = logging.getLogger('koji.vm')
logger.info('$ %s', ' '.join(cmd))
proc = subprocess.Popen(cmd, stdout=logfd, stderr=subprocess.STDOUT,
close_fds=True)
ret = proc.wait()
else:
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
close_fds=True)
output, dummy = proc.communicate()
ret = proc.returncode
if olddir:
os.chdir(olddir)
if ret and fatal:
msg = 'error running: %s, return code was %s' % (' '.join(cmd), ret)
if log:
msg += ', see %s for details' % (os.path.basename(logfd.name))
else:
msg += ', output: %s' % output
raise BuildError, msg
return ret, output
def find_net_info():
    """
    Parse `ipconfig /all` to discover this VM's MAC address and default
    gateway.  Returns (macaddr, gateway); either may be None if it was
    not found or failed a basic sanity check on its length.
    """
    ret, output = run(['ipconfig', '/all'], log=False)
    if ret:
        raise RuntimeError('error running ipconfig, output was: %s' % output)
    macaddr = None
    gateway = None
    for line in output.splitlines():
        line = line.strip()
        # keep only the first occurrence of each value
        if line.startswith('Physical Address'):
            if macaddr is None:
                # normalize to the libvirt-style colon-separated lowercase form
                macaddr = line.split()[-1].replace('-', ':').lower()
        elif line.startswith('Default Gateway'):
            if gateway is None:
                gateway = line.split()[-1]
    # sanity-check lengths: a MAC is 17 chars, an IPv4 address is 7-15 chars
    if macaddr is not None and len(macaddr) != 17:
        macaddr = None
    if gateway is not None and not (7 <= len(gateway) <= 15):
        gateway = None
    return macaddr, gateway
def upload_file(server, prefix, path):
    """Upload a single file to the vmd.

    Reads prefix/path in 128K chunks, sending each base64-encoded chunk
    via server.upload(), then asks the server to verify the md5 checksum
    of the completed upload.
    """
    logger = logging.getLogger('koji.vm')
    destpath = os.path.join(prefix, path)
    fobj = file(destpath, 'r')
    offset = 0
    checksum = hashlib.md5()
    for chunk in iter(lambda: fobj.read(131072), ''):
        server.upload(path, encode_int(offset), base64.b64encode(chunk))
        offset += len(chunk)
        checksum.update(chunk)
    fobj.close()
    digest = checksum.hexdigest()
    server.verifyChecksum(path, digest, 'md5')
    logger.info('Uploaded %s (%s bytes, md5: %s)', destpath, offset, digest)
def get_mgmt_server():
    """Get a ServerProxy object we can use to retrieve task info.

    Blocks until the VM's network is up (polling every 5 seconds), then
    contacts the management daemon on the default gateway at MANAGER_PORT,
    asks it for the port dedicated to this VM's task (keyed by MAC
    address), and returns a proxy connected to that task-specific port.
    """
    logger = logging.getLogger('koji.vm')
    macaddr, gateway = find_net_info()
    while not (macaddr and gateway):
        # wait for the network connection to come up and get an address
        time.sleep(5)
        macaddr, gateway = find_net_info()
    logger.debug('found MAC address %s, connecting to %s:%s',
                 macaddr, gateway, MANAGER_PORT)
    server = xmlrpclib.ServerProxy('http://%s:%s/' %
                                   (gateway, MANAGER_PORT), allow_none=True)
    # we would set a timeout on the socket here, but that is apparently not
    # supported by python/cygwin/Windows
    task_port = server.getPort(macaddr)
    logger.debug('found task-specific port %s', task_port)
    return xmlrpclib.ServerProxy('http://%s:%s/' % (gateway, task_port), allow_none=True)
def get_options():
    """Handle usage and parse command-line options.

    :returns: the parsed options object (positional args are ignored).
              All three flags default to False.
    """
    usage = """%prog [options]
    Run Koji tasks assigned to a VM.
    Run without any arguments to start this daemon.
    """
    parser = OptionParser(usage=usage)
    # default=False added for consistency with --install/--uninstall
    # (previously the attribute defaulted to None; both are falsy)
    parser.add_option('-d', '--debug', action='store_true', help='Log debug statements', default=False)
    parser.add_option('-i', '--install', action='store_true', help='Install this daemon as a service', default=False)
    parser.add_option('-u', '--uninstall', action='store_true', help='Uninstall this daemon if it was installed previously as a service', default=False)
    (options, args) = parser.parse_args()
    return options
def setup_logging(opts):
    """Configure the 'koji.vm' logger to write to the global log file.

    Opens the module-level logfile for writing (stored in the global
    logfd, which run() also uses as a command's stdout), attaches a
    StreamHandler for it, and returns that handler.  The caller attaches
    an `active` flag to the returned handler which incremental_upload()
    and fail() use as a shutdown signal.
    """
    global logfile, logfd
    logger = logging.getLogger('koji.vm')
    # log at DEBUG only when --debug was given, otherwise INFO
    level = logging.INFO
    if opts.debug:
        level = logging.DEBUG
    logger.setLevel(level)
    logfd = file(logfile, 'w')
    handler = logging.StreamHandler(logfd)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s'))
    logger.addHandler(handler)
    return handler
def log_local(msg):
    """Last-resort logging: write msg plus the current exception's
    traceback to stderr, for when the server/log-upload path is broken."""
    trace = ''.join(traceback.format_exception(*sys.exc_info()))
    sys.stderr.write('%s: %s\n' % (time.ctime(), msg))
    sys.stderr.write(trace)
def incremental_upload(server, handler):
    """Tail the local build log and stream it to the server.

    Runs until handler.active goes False (set by main()/fail()).  New log
    data is read in 64K chunks and sent base64-encoded together with its
    offset, size and md5 digest via server.uploadDirect().  A failed
    upload is retried indefinitely (best-effort: errors are only logged
    locally); when no new data is available we sleep for a second.
    """
    global logfile
    fd = file(logfile, 'r')
    while handler.active:
        # remember where this chunk starts so the server can place it
        offset = fd.tell()
        contents = fd.read(65536)
        if contents:
            size = len(contents)
            data = base64.b64encode(contents)
            digest = hashlib.md5(contents).hexdigest()
            # free the raw chunk; only the encoded copy is still needed
            del contents
            while handler.active:
                try:
                    server.uploadDirect(os.path.basename(logfile),
                                        offset, size, digest, data)
                    break
                except:
                    # upload failed (possibly a timeout), log and retry
                    log_local('error calling server.uploadDirect()')
        else:
            time.sleep(1)
def fail(server, handler):
    """do the right thing when a build fails

    Logs the current exception, stops the incremental-upload thread (by
    clearing handler.active), makes a best-effort final upload of the
    build log, then reports the traceback to the server via failTask()
    -- retrying forever, since that report is the last critical step --
    and exits with status 1.
    """
    global logfile, logfd
    logging.getLogger('koji.vm').error('error running build', exc_info=True)
    tb = ''.join(traceback.format_exception(*sys.exc_info()))
    # signal incremental_upload() to stop
    handler.active = False
    if server is not None:
        try:
            # flush so the uploaded log contains everything written so far
            logfd.flush()
            upload_file(server, os.path.dirname(logfile),
                        os.path.basename(logfile))
        except:
            log_local('error calling upload_file()')
        while True:
            try:
                # this is the very last thing we do, keep trying as long as we can
                server.failTask(tb)
                break
            except:
                log_local('error calling server.failTask()')
    sys.exit(1)
# Path of the local build log; run() streams command output here and it
# is uploaded to the server by incremental_upload()/fail()/main().
logfile = '/tmp/build.log'
# Open file object for logfile; created by setup_logging().
logfd = None
def main():
    """Entry point: install/uninstall the service, or run one build.

    With --install/--uninstall, registers or removes this script as a
    Windows (cygwin) service via cygrunsrv and exits.  Otherwise it
    connects to the management server, streams the build log from a
    daemon thread, runs a WindowsBuild, uploads the outputs and the log,
    and reports the results with closeTask().  Any failure is routed
    through fail(), which reports it to the server and exits non-zero.
    """
    prog = os.path.basename(sys.argv[0])
    opts = get_options()
    if opts.install:
        # register as an auto-start cygwin service, dependent on DHCP
        ret, output = run(['cygrunsrv', '--install', prog,
                           '--path', sys.executable, '--args', os.path.abspath(prog),
                           '--type', 'auto', '--dep', 'Dhcp',
                           '--disp', 'Koji Windows Daemon',
                           '--desc', 'Runs Koji tasks assigned to a VM'],
                          log=False)
        if ret:
            print 'Error installing %s service, output was: %s' % (prog, output)
            sys.exit(1)
        else:
            print 'Successfully installed the %s service' % prog
            sys.exit(0)
    elif opts.uninstall:
        ret, output = run(['cygrunsrv', '--remove', prog], log=False)
        if ret:
            print 'Error removing the %s service, output was: %s' % (prog, output)
            sys.exit(1)
        else:
            print 'Successfully removed the %s service' % prog
            sys.exit(0)
    handler = setup_logging(opts)
    # 'active' doubles as the run/stop flag for incremental_upload()
    handler.active = True
    server = None
    try:
        server = get_mgmt_server()
        # stream the build log to the server while the build runs
        thread = threading.Thread(target=incremental_upload,
                                  args=(server, handler))
        thread.daemon = True
        thread.start()
        build = WindowsBuild(server)
        results = build.run()
        # upload every output file and log produced by the build
        for filename in results['output'].keys() + results['logs']:
            upload_file(server, build.source_dir, filename)
        # stop the log-upload thread, then send the final complete log
        handler.active = False
        thread.join()
        upload_file(server, os.path.dirname(logfile),
                    os.path.basename(logfile))
        results['logs'].append(os.path.basename(logfile))
        server.closeTask(results)
    except:
        fail(server, handler)
    sys.exit(0)
# Run the daemon when executed directly (also how cygrunsrv starts it).
if __name__ == '__main__':
    main()

1056
vm/kojivmd Executable file

File diff suppressed because it is too large Load diff

45
vm/kojivmd.conf Normal file
View file

@ -0,0 +1,45 @@
[kojivmd]
; The number of seconds to sleep between tasks
; sleeptime=15
; The maximum number of jobs that kojivmd will handle at a time
; maxjobs=10
; Minimum amount of memory (in MBs) not allocated to a VM for kojivmd to take a new task
; minmem=4096
; The user the VM/emulator runs as (cloned disk images will be readable and writable by this user)
; vmuser=qemu
; The directory root for temporary storage
; workdir=/tmp/koji
; The url where the Koji root directory (/mnt/koji) can be accessed
topurl=http://koji.example.com/kojiroot
; The URL for the xmlrpc server
server=http://hub.example.com/kojihub
; A space-separated list of hostname:repository[:use_common] tuples that kojivmd is authorized to checkout from (no quotes).
; Wildcards (as supported by fnmatch) are allowed.
; If use_common is specified and is one of "false", "no", "off", or "0" (without quotes), then kojivmd will not attempt to checkout
; a common/ dir when checking out sources from the source control system. Otherwise, it will attempt to checkout a common/
; dir, and will raise an exception if it cannot.
allowed_scms=scm.example.com:/cvs/example git.example.org:/example svn.example.org:/users/*:no
; The mail host to use for sending email notifications
smtphost=example.com
; The From address used when sending email notifications
from_addr=Koji Build System <buildsys@example.com>
;configuration for SSL authentication
;client certificate
;cert = /etc/kojivmd/client.crt
;certificate of the CA that issued the client certificate
;ca = /etc/kojivmd/clientca.crt
;certificate of the CA that issued the HTTP server certificate
;serverca = /etc/kojivmd/serverca.crt

79
vm/kojivmd.init Executable file
View file

@ -0,0 +1,79 @@
#! /bin/sh
#
# kojivmd Start/Stop kojivmd
#
# chkconfig: - 99 99
# description: kojivmd server
# processname: kojivmd
# Source function library.
. /etc/init.d/functions
# Check that we're a privileged user
[ `id -u` = 0 ] || exit 1
[ -f /etc/sysconfig/kojivmd ] && . /etc/sysconfig/kojivmd
prog="kojivmd"
# Check that networking is up.
if [ "$NETWORKING" = "no" ]
then
exit 0
fi
[ -f /usr/sbin/kojivmd ] || exit 1
RETVAL=0
start() {
echo -n $"Starting $prog: "
cd /
ARGS=""
[ "$FORCE_LOCK" == "Y" ] && ARGS="$ARGS --force-lock"
[ "$KOJIVMD_DEBUG" == "Y" ] && ARGS="$ARGS --debug"
[ "$KOJIVMD_VERBOSE" == "Y" ] && ARGS="$ARGS --verbose"
daemon /usr/sbin/kojivmd $ARGS
RETVAL=$?
echo
[ $RETVAL -eq 0 ] && touch /var/lock/subsys/kojivmd
return $RETVAL
}
stop() {
echo -n $"Stopping $prog: "
killproc kojivmd
RETVAL=$?
echo
[ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/kojivmd
return $RETVAL
}
restart() {
stop
start
}
# See how we were called.
case "$1" in
start)
start
;;
stop)
stop
;;
status)
status $prog
;;
restart|reload|force-reload)
restart
;;
condrestart|try-restart)
[ -f /var/lock/subsys/kojivmd ] && restart || :
;;
*)
echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}"
exit 1
esac
exit $?

3
vm/kojivmd.sysconfig Normal file
View file

@ -0,0 +1,3 @@
FORCE_LOCK=Y
KOJIVMD_DEBUG=N
KOJIVMD_VERBOSE=Y

49
vm/run-vm-task Executable file
View file

@ -0,0 +1,49 @@
#!/usr/bin/python
import koji
import optparse
# cli/koji -c ~/.koji/config-mead call --python makeTask '"vmExec"' '["Win2k8-x86-vstudio-devel", ["wget -q -O /tmp/test-build.sh http://download.lab.bos.redhat.com/devel/mikeb/mead/debug/test-build.sh && chmod 755 /tmp/test-build.sh && /tmp/test-build.sh &> /tmp/output/build.log && echo build successful"], {"cpus": 2, "mem": 2048}]' --kwargs '{"channel": "vm"}'
parser = optparse.OptionParser('%prog VM-NAME SCM-URL TARGET')
parser.add_option('--server', help='Koji hub')
parser.add_option('--cert', help='Client certificate')
parser.add_option('--ca', help='Client CA')
parser.add_option('--server-ca', help='Server CA')
parser.add_option('--cpus', help='Number of virtual CPUs to allocate to the VM (optional)',
type='int')
parser.add_option('--mem', help='Amount of memory (in megabytes) to allocate to the VM (optional)',
type='int')
parser.add_option('--channel', help='Channel to create the task in', default='vm')
parser.add_option('--specfile', help='Alternate SCM URL of the specfile')
parser.add_option('--patches', help='SCM URL of patches to apply before build')
parser.add_option('--scratch', help='Run a scratch build', action='store_true')
opts, args = parser.parse_args()
if len(args) < 3:
parser.error('You must specify a VM name, a SCM URL, and a build target')
vm_name = args[0]
scm_url = args[1]
target = args[2]
session = koji.ClientSession(opts.server)
session.ssl_login(opts.cert, opts.ca, opts.server_ca)
task_opts = {}
if opts.cpus:
task_opts['cpus'] = opts.cpus
if opts.mem:
task_opts['mem'] = opts.mem
if opts.specfile:
task_opts['specfile'] = opts.specfile
if opts.patches:
task_opts['patches'] = opts.patches
if opts.scratch:
task_opts['scratch'] = True
params = [vm_name, scm_url, target, task_opts]
task_id = session.makeTask('winbuild', params, channel=opts.channel)
print 'Created task %s' % task_id

View file

@ -5,30 +5,34 @@
#attr _PASSTHROUGH = ['archiveID', 'fileOrder', 'fileStart', 'buildrootOrder', 'buildrootStart']
#include "includes/header.chtml"
<h4>Information for archive $archive.filename</h4>
<h4>Information for archive <a href="archiveinfo?archiveID=$archive.id">$archive.filename</a></h4>
<table>
<tr>
<th>ID</th><td>$archive.id</td>
</tr>
<tr>
#if $wininfo
<th>File Name</th><td>$koji.pathinfo.winfile($archive)</td>
#else
<th>File Name</th><td>$archive.filename</td>
#end if
</tr>
<tr>
<th>File Type</th><td>$archive_type.name</td>
<th>File Type</th><td>$archive_type.description</td>
</tr>
<tr>
<th>Build</th><td><a href="buildinfo?buildID=$build.id">$koji.buildLabel($build)</a></td>
</tr>
#if $maveninfo
<tr>
<th>Maven groupId</th><td>$maveninfo.group_id</td>
<th>Maven groupId</th><td>$archive.group_id</td>
</tr>
<tr>
<th>Maven artifactId</th><td>$maveninfo.artifact_id</td>
<th>Maven artifactId</th><td>$archive.artifact_id</td>
</tr>
<tr>
<th>Maven version</th><td>$maveninfo.version</td>
<th>Maven version</th><td>$archive.version</td>
</tr>
#end if
<tr>
@ -37,6 +41,14 @@
<tr>
<th>MD5 Sum</th><td>$archive.md5sum</td>
</tr>
#if $wininfo
<tr>
<th>Platforms</th><td>$archive.platforms</td>
</tr>
<tr>
<th>Flags</th><td>$archive.flags</td>
</tr>
#end if
#if $builtInRoot
<tr>
<th>Buildroot</th><td><a href="buildrootinfo?buildrootID=$builtInRoot.id">$builtInRoot.tag_name-$builtInRoot.id-$builtInRoot.repo_id</a></td>

View file

@ -158,16 +158,24 @@
<tr>
<th>$ext</th>
<td>
#if $mavenbuild and $ext == 'pom'
(<a href="$downloadBase/$nvrpath/data/logs/maven2/">build logs</a>)
#end if
#if $ext == $exts[0]
#if $mavenbuild
(<a href="$downloadBase/$nvrpath/data/logs/maven2/">build logs</a>)
#elif $winbuild
(<a href="$downloadBase/$nvrpath/data/logs/win/">build logs</a>)
#end if
#end if
</td>
</tr>
#for $archive in $archivesByExt[$ext]
<tr>
<td/>
<td>
$archive.filename (<a href="archiveinfo?archiveID=$archive.id">info</a>) (<a href="$archiveurl/$archive.filename">download</a>)
#if $mavenbuild
$archive.filename (<a href="archiveinfo?archiveID=$archive.id">info</a>) (<a href="$archiveurl/$archive.filename">download</a>)
#elif $winbuild
$koji.pathinfo.winfile($archive) (<a href="archiveinfo?archiveID=$archive.id">info</a>) (<a href="$downloadBase/$nvrpath/win/$koji.pathinfo.winfile($archive)">download</a>)
#end if
</td>
</tr>
#end for

View file

@ -59,6 +59,9 @@
#if $mavenEnabled
<option value="maven">Maven Artifacts</option>
#end if
#if $winEnabled
<option value="win">Windows Artifacts</option>
#end if
</select>
<input type="text" name="terms"/>
<input type="submit" value="Search"/>

View file

@ -2,6 +2,7 @@ import os
import os.path
import re
import sys
import mimetypes
import mod_python
import mod_python.Cookie
import Cheetah.Filters
@ -358,6 +359,9 @@ _TASKS = ['build',
'chainbuild',
'maven',
'buildMaven',
'wrapperRPM',
'winbuild',
'vmExec',
'waitrepo',
'tagBuild',
'newRepo',
@ -368,9 +372,9 @@ _TASKS = ['build',
'createLiveCD',
'createAppliance']
# Tasks that can exist without a parent
_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'newRepo', 'tagBuild', 'tagNotification', 'waitrepo', 'createLiveCD', 'createAppliance']
_TOPLEVEL_TASKS = ['build', 'buildNotification', 'chainbuild', 'maven', 'wrapperRPM', 'winbuild', 'newRepo', 'tagBuild', 'tagNotification', 'waitrepo', 'createLiveCD', 'createAppliance']
# Tasks that can have children
_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'newRepo']
_PARENT_TASKS = ['build', 'chainbuild', 'maven', 'winbuild', 'newRepo']
def tasks(req, owner=None, state='active', view='tree', method='all', hostID=None, channelID=None, start=None, order='-id'):
values = _initValues(req, 'Tasks', 'tasks')
@ -663,16 +667,16 @@ def getfile(req, taskID, name, offset=None, size=None):
file_info = output.get(name)
if not file_info:
raise koji.GenericError, 'no file "%s" output by task %i' % (name, taskID)
if name.endswith('.rpm'):
req.content_type = 'application/x-rpm'
req.headers_out['Content-Disposition'] = 'attachment; filename=%s' % name
elif name.endswith('.log'):
req.content_type = 'text/plain'
elif name.endswith('.iso') or name.endswith('.raw') or \
name.endswith('.qcow') or name.endswith('.qcow2') or \
name.endswith('.vmx'):
req.content_type = 'application/octet-stream'
mime_guess = mimetypes.guess_type(name, strict=False)[0]
if mime_guess:
req.content_type = mime_guess
else:
if name.endswith('.log') or name.endswith('.ks'):
req.content_type = 'text/plain'
else:
req.content_type = 'application/octet-stream'
if req.content_type != 'text/plain':
req.headers_out['Content-Disposition'] = 'attachment; filename=%s' % name
file_size = int(file_info['st_size'])
@ -1027,7 +1031,14 @@ def buildinfo(req, buildID):
rpms = server.listBuildRPMs(build['id'])
rpms.sort(_sortbyname)
mavenbuild = server.getMavenBuild(buildID)
archives = server.listArchives(build['id'], queryOpts={'order': 'filename'})
winbuild = server.getWinBuild(buildID)
if mavenbuild:
archivetype = 'maven'
elif winbuild:
archivetype = 'win'
else:
archivetype = None
archives = server.listArchives(build['id'], type=archivetype, queryOpts={'order': 'filename'})
archivesByExt = {}
for archive in archives:
archivesByExt.setdefault(os.path.splitext(archive['filename'])[1][1:], []).append(archive)
@ -1097,6 +1108,7 @@ def buildinfo(req, buildID):
values['debuginfoByArch'] = debuginfoByArch
values['task'] = task
values['mavenbuild'] = mavenbuild
values['winbuild'] = winbuild
values['archives'] = archives
values['archivesByExt'] = archivesByExt
@ -1298,7 +1310,12 @@ def archiveinfo(req, archiveID, fileOrder='name', fileStart=None, buildrootOrder
archive = server.getArchive(archiveID)
archive_type = server.getArchiveType(type_id=archive['type_id'])
build = server.getBuild(archive['build_id'])
maveninfo = server.getMavenArchive(archive['id'])
maveninfo = False
if 'group_id' in archive:
maveninfo = True
wininfo = False
if 'relpath' in archive:
wininfo = True
builtInRoot = None
if archive['buildroot_id'] != None:
builtInRoot = server.getBuildroot(archive['buildroot_id'])
@ -1315,6 +1332,7 @@ def archiveinfo(req, archiveID, fileOrder='name', fileStart=None, buildrootOrder
values['archive_type'] = archive_type
values['build'] = build
values['maveninfo'] = maveninfo
values['wininfo'] = wininfo
values['builtInRoot'] = builtInRoot
values['buildroots'] = buildroots
@ -2075,7 +2093,8 @@ _infoURLs = {'package': 'packageinfo?packageID=%(id)i',
'user': 'userinfo?userID=%(id)i',
'host': 'hostinfo?hostID=%(id)i',
'rpm': 'rpminfo?rpmID=%(id)i',
'maven': 'archiveinfo?archiveID=%(id)i'}
'maven': 'archiveinfo?archiveID=%(id)i',
'win': 'archiveinfo?archiveID=%(id)i'}
_VALID_SEARCH_CHARS = r"""a-zA-Z0-9"""
_VALID_SEARCH_SYMS = r""" @.,_/\()%+-*?|[]^$"""
@ -2121,6 +2140,13 @@ def search(req, start=None, order='name'):
# (you're feeling lucky)
mod_python.util.redirect(req, infoURL % results[0])
else:
if type == 'maven':
typeLabel = 'Maven artifacts'
elif type == 'win':
typeLabel = 'Windows artifacts'
else:
typeLabel = '%ss' % type
values['typeLabel'] = typeLabel
return _genHTML(req, 'searchresults.chtml')
else:
return _genHTML(req, 'search.chtml')

View file

@ -24,6 +24,9 @@
#if $mavenEnabled
<option value="maven">Maven Artifacts</option>
#end if
#if $winEnabled
<option value="win">Windows Artifacts</option>
#end if
</select>
</td>
</tr>

View file

@ -3,7 +3,7 @@
#include "includes/header.chtml"
<h4>Search Results for ${type}s matching "$terms"</h4>
<h4>Search Results for $typeLabel matching "$terms"</h4>
<table class="data-list">
<tr>

View file

@ -144,11 +144,31 @@ $value
#end if
#elif $task.method == 'createLiveCD' or $task.method == 'createAppliance'
<strong>Arch:</strong> $params[0]<br/>
<strong>Target:</strong> <a href="buildtargetinfo?name=$params[1]">$params[1]</a><br/>
<strong>Build Target:</strong> <a href="buildtargetinfo?name=$params[1]">$params[1]</a><br/>
<strong>Kickstart File:</strong> $params[2]<br/>
#if $len($params) > 3
$printOpts($params[3])
#end if
#elif $task.method == 'winbuild'
<strong>VM:</strong> $params[0]<br/>
<strong>SCM URL:</strong> $params[1]<br/>
<strong>Build Target:</strong> <a href="buildtargetinfo?name=$params[2]">$params[2]</a><br/>
#if $len($params) > 3
$printOpts($params[3])
#end if
#elif $task.method == 'vmExec'
<strong>VM:</strong> $params[0]<br/>
<strong>Exec Params:</strong><br/>
#for $info in $params[1]
#if $isinstance($info, dict)
$printMap($info, '&nbsp;&nbsp;&nbsp;&nbsp;')
#else
&nbsp;&nbsp;$info<br/>
#end if
#end for
#if $len($params) > 2
$printOpts($params[2])
#end if
#elif $task.method == 'newRepo'
<strong>Tag:</strong> <a href="taginfo?tagID=$tag.id">$tag.name</a><br/>
#if $len($params) > 1
@ -349,7 +369,7 @@ ${excClass.__name__}: $cgi.escape($str($result))
<br/>
#end for
#if $task.state not in ($koji.TASK_STATES.CLOSED, $koji.TASK_STATES.CANCELED, $koji.TASK_STATES.FAILED) and \
$task.method in ('buildSRPMFromSCM', 'buildArch', 'createLiveCD', 'createAppliance', 'buildMaven', 'wrapperRPM', 'createrepo')
$task.method in ('buildSRPMFromSCM', 'buildArch', 'createLiveCD', 'createAppliance', 'buildMaven', 'wrapperRPM', 'vmExec', 'createrepo')
<br/>
<a href="watchlogs?taskID=$task.id">Watch logs</a>
#end if

View file

@ -81,6 +81,10 @@ All
#for $task_type in $alltasks
#if $task_type in ('maven', 'buildMaven') and not $mavenEnabled
#continue
#elif $task_type in ('winbuild', 'vmExec') and not $winEnabled
#continue
#elif $task_type == 'wrapperRPM' and not ($mavenEnabled or $winEnabled)
#continue
#else
<option value="$task_type" #if $method == $task_type then 'selected="selected"' else ''#>$task_type</option>
#end if

View file

@ -67,6 +67,8 @@ def _genHTML(req, fileName):
req._values['authToken'] = _genToken(req)
if not req._values.has_key('mavenEnabled'):
req._values['mavenEnabled'] = req._session.mavenEnabled()
if not req._values.has_key('winEnabled'):
req._values['winEnabled'] = req._session.winEnabled()
tmpl_class = TEMPLATES.get(fileName)
if not tmpl_class: