Merge remote-tracking branch 'origin/master' into cgen
This commit is contained in:
commit
d31e774a1c
13 changed files with 403 additions and 49 deletions
|
|
@ -239,6 +239,8 @@ class BuildRoot(object):
|
|||
opts['maven_envs'] = self.maven_envs
|
||||
opts['bind_opts'] = self.bind_opts
|
||||
opts['target_arch'] = self.target_arch
|
||||
if 'mock.package_manager' in self.config['extra']:
|
||||
opts['package_manager'] = self.config['extra']['mock.package_manager']
|
||||
output = koji.genMockConfig(self.name, self.br_arch, managed=True, **opts)
|
||||
|
||||
#write config
|
||||
|
|
@ -374,7 +376,7 @@ class BuildRoot(object):
|
|||
results = os.listdir(resultdir)
|
||||
except OSError:
|
||||
# will happen when mock hasn't created the resultdir yet
|
||||
continue
|
||||
results = []
|
||||
|
||||
for fname in results:
|
||||
if fname.endswith('.log') and not logs.has_key(fname):
|
||||
|
|
@ -3061,7 +3063,7 @@ class BaseImageTask(OzImageTask):
|
|||
Some image formats require others to be processed first, which is why
|
||||
we have to do this. raw files in particular may not be kept.
|
||||
"""
|
||||
supported = ('raw', 'raw-xz', 'vmdk', 'qcow', 'qcow2', 'vdi', 'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt', 'vpc')
|
||||
supported = ('raw', 'raw-xz', 'vmdk', 'qcow', 'qcow2', 'vdi', 'rhevm-ova', 'vsphere-ova', 'docker', 'vagrant-virtualbox', 'vagrant-libvirt', 'vagrant-vmware-fusion', 'vpc')
|
||||
for f in formats:
|
||||
if f not in supported:
|
||||
raise koji.ApplianceError('Invalid format: %s' % f)
|
||||
|
|
@ -3096,6 +3098,7 @@ class BaseImageTask(OzImageTask):
|
|||
'vsphere-ova': self._buildOVA,
|
||||
'vagrant-virtualbox': self._buildOVA,
|
||||
'vagrant-libvirt': self._buildOVA,
|
||||
'vagrant-vmware-fusion': self._buildOVA,
|
||||
'docker': self._buildDocker
|
||||
}
|
||||
# add a handler to the logger so that we capture ImageFactory's logging
|
||||
|
|
@ -3245,8 +3248,14 @@ class BaseImageTask(OzImageTask):
|
|||
if format == 'vagrant-libvirt':
|
||||
format = 'rhevm-ova'
|
||||
img_opts['rhevm_ova_format'] = 'vagrant-libvirt'
|
||||
if format == 'vagrant-vmware-fusion':
|
||||
format = 'vsphere-ova'
|
||||
img_opts['vsphere_ova_format'] = 'vagrant-vmware-fusion'
|
||||
# The initial disk image transform for VMWare Fusion/Workstation requires a "standard" VMDK
|
||||
# not the stream oriented format used for VirtualBox or regular VMWare OVAs
|
||||
img_opts['vsphere_vmdk_format'] = 'standard'
|
||||
targ = self._do_target_image(self.base_img.base_image.identifier,
|
||||
format.replace('-ova', ''))
|
||||
format.replace('-ova', ''), img_opts=img_opts)
|
||||
targ2 = self._do_target_image(targ.target_image.identifier, 'OVA',
|
||||
img_opts=img_opts)
|
||||
return {'image': targ2.target_image.data}
|
||||
|
|
|
|||
109
cli/koji
109
cli/koji
|
|
@ -762,6 +762,9 @@ def handle_add_pkg(options, session, args):
|
|||
opts['block'] = False
|
||||
# check if list of packages exists for that tag already
|
||||
dsttag=session.getTag(tag)
|
||||
if dsttag is None:
|
||||
print "No such tag: %s" % tag
|
||||
sys.exit(1)
|
||||
pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'])])
|
||||
ret = 0
|
||||
for package in args[1:]:
|
||||
|
|
@ -3895,6 +3898,13 @@ def _print_histline(entry, **kwargs):
|
|||
fmt = "new tag: %(tag.name)s"
|
||||
else:
|
||||
fmt = "tag deleted: %(tag.name)s"
|
||||
elif table == 'tag_extra':
|
||||
if edit:
|
||||
fmt = "tag option %(key)s for tag %(tag.name)s altered"
|
||||
elif create:
|
||||
fmt = "added tag option %(key)s for tag %(tag.name)s"
|
||||
else:
|
||||
fmt = "tag option %(key)s removed for %(tag.name)s"
|
||||
elif table == 'build_target_config':
|
||||
if edit:
|
||||
fmt = "build target configuration for %(build_target.name)s updated"
|
||||
|
|
@ -4005,6 +4015,7 @@ _table_keys = {
|
|||
'cg_users' : ['user_id', 'cg_id'],
|
||||
'tag_inheritance' : ['tag_id', 'parent_id'],
|
||||
'tag_config' : ['tag_id'],
|
||||
'tag_extra' : ['tag_id', 'key'],
|
||||
'build_target_config' : ['build_target_id'],
|
||||
'external_repo_config' : ['external_repo_id'],
|
||||
'tag_external_repos' : ['tag_id', 'external_repo_id'],
|
||||
|
|
@ -4396,6 +4407,12 @@ def anon_handle_taginfo(options, session, args):
|
|||
if session.mavenEnabled():
|
||||
print "Maven support?: %s" % (info['maven_support'] and 'yes' or 'no')
|
||||
print "Include all Maven archives?: %s" % (info['maven_include_all'] and 'yes' or 'no')
|
||||
if 'extra' in info:
|
||||
print "Tag options:"
|
||||
keys = info['extra'].keys()
|
||||
keys.sort()
|
||||
for key in keys:
|
||||
print " %s : %s" % (key, pprint.pformat(info['extra'][key]))
|
||||
dest_targets = session.getBuildTargets(destTagID=info['id'], **event_opts)
|
||||
build_targets = session.getBuildTargets(buildTagID=info['id'], **event_opts)
|
||||
repos = {}
|
||||
|
|
@ -4489,6 +4506,8 @@ def handle_edit_tag(options, session, args):
|
|||
parser.add_option("--no-maven-support", action="store_true", help=_("Disable creation of Maven repos for this tag"))
|
||||
parser.add_option("--include-all", action="store_true", help=_("Include all packages in this tag when generating Maven repos"))
|
||||
parser.add_option("--no-include-all", action="store_true", help=_("Do not include all packages in this tag when generating Maven repos"))
|
||||
parser.add_option("-x", "--extra", action="append", default=[], metavar="key=value",
|
||||
help=_("Set tag extra option"))
|
||||
(options, args) = parser.parse_args(args)
|
||||
if len(args) != 1:
|
||||
parser.error(_("Please specify a name for the tag"))
|
||||
|
|
@ -4516,6 +4535,13 @@ def handle_edit_tag(options, session, args):
|
|||
opts['maven_include_all'] = True
|
||||
if options.no_include_all:
|
||||
opts['maven_include_all'] = False
|
||||
if options.extra:
|
||||
extra = {}
|
||||
for xopt in options.extra:
|
||||
key, value = xopt.split('=')
|
||||
value = arg_filter(value)
|
||||
extra[key] = value
|
||||
opts['extra'] = extra
|
||||
#XXX change callname
|
||||
session.editTag2(tag,**opts)
|
||||
|
||||
|
|
@ -5285,7 +5311,7 @@ def handle_image_build(options, session, args):
|
|||
"""Create a disk image given an install tree"""
|
||||
formats = ('vmdk', 'qcow', 'qcow2', 'vdi', 'vpc', 'rhevm-ova',
|
||||
'vsphere-ova', 'vagrant-virtualbox', 'vagrant-libvirt',
|
||||
'docker', 'raw-xz')
|
||||
'vagrant-vmware-fusion', 'docker', 'raw-xz')
|
||||
usage = _("usage: %prog image-build [options] <name> <version> " +
|
||||
"<target> <install-tree-url> <arch> [<arch>...]")
|
||||
usage += _("\n %prog image-build --config FILE")
|
||||
|
|
@ -6272,15 +6298,21 @@ def anon_handle_download_logs(options, session, args):
|
|||
task_log_dir = os.path.join(parent_dir,
|
||||
"%s-%s" % (task_info["arch"], task_id))
|
||||
|
||||
count = 0
|
||||
state = koji.TASK_STATES[task_info['state']]
|
||||
if state == 'FAILED':
|
||||
if not match or koji.util.multi_fnmatch(FAIL_LOG, match):
|
||||
write_fail_log(task_log_dir, task_id)
|
||||
count += 1
|
||||
elif state not in ['CLOSED', 'CANCELED']:
|
||||
sys.stderr.write(_("Warning: task %s is %s\n") % (task_id, state))
|
||||
|
||||
for log_filename in logs:
|
||||
download_log(task_log_dir, task_id, log_filename)
|
||||
count += 1
|
||||
|
||||
if count == 0 and not recurse:
|
||||
sys.stderr.write(_("No logs found for task %i. Perhaps try --recurse?\n") % task_id)
|
||||
|
||||
if recurse:
|
||||
child_tasks = session.getTaskChildren(task_id)
|
||||
|
|
@ -6305,6 +6337,81 @@ def anon_handle_download_logs(options, session, args):
|
|||
save_logs(task_id, suboptions.match, suboptions.dir, suboptions.recurse)
|
||||
|
||||
|
||||
def anon_handle_download_task(options, sessions, args):
|
||||
"Download the output of a build task "
|
||||
usage = _("usage: %prog download-task <task_id>")
|
||||
usage += _("\n(Specify the --help global option for a list of other help options)")
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.add_option("--arch", dest="arches", metavar="ARCH", action="append", default=[],
|
||||
help=_("Only download packages for this arch (may be used multiple times)"))
|
||||
parser.add_option("--logs", dest="logs", action="store_true", default=False, help=_("Also download build logs"))
|
||||
|
||||
(suboptions, args) = parser.parse_args(args)
|
||||
if len(args) == 0:
|
||||
parser.error(_("Please specify a task ID"))
|
||||
elif len(args) > 1:
|
||||
parser.error(_("Only one task ID may be specified"))
|
||||
|
||||
base_task_id = int(args.pop())
|
||||
if len(suboptions.arches) > 0:
|
||||
suboptions.arches = ",".join(suboptions.arches).split(",")
|
||||
|
||||
# get downloadable tasks
|
||||
|
||||
base_task = session.getTaskInfo(base_task_id)
|
||||
|
||||
check_downloadable = lambda task: task["method"] == "buildArch"
|
||||
downloadable_tasks = []
|
||||
|
||||
if check_downloadable(base_task):
|
||||
downloadable_tasks.append(base_task)
|
||||
else:
|
||||
subtasks = sessions.getTaskChildren(base_task_id)
|
||||
downloadable_tasks.extend(filter(check_downloadable, subtasks))
|
||||
|
||||
# get files for download
|
||||
|
||||
downloads = []
|
||||
|
||||
for task in downloadable_tasks:
|
||||
files = session.listTaskOutput(task["id"])
|
||||
for filename in files:
|
||||
if filename.endswith(".log") and suboptions.logs:
|
||||
# rename logs, they would conflict
|
||||
new_filename = "%s.%s.log" % (filename.rstrip(".log"), task["arch"])
|
||||
downloads.append((task, filename, new_filename))
|
||||
continue
|
||||
|
||||
if filename.endswith(".rpm"):
|
||||
filearch = filename.split(".")[-2]
|
||||
if len(suboptions.arches) == 0 or filearch in suboptions.arches:
|
||||
downloads.append((task, filename, filename))
|
||||
continue
|
||||
|
||||
if len(downloads) == 0:
|
||||
error(_("No files for download found."))
|
||||
|
||||
required_tasks = {}
|
||||
for (task, nop, nop) in downloads:
|
||||
if task["id"] not in required_tasks:
|
||||
required_tasks[task["id"]] = task
|
||||
|
||||
for task_id in required_tasks:
|
||||
if required_tasks[task_id]["state"] != koji.TASK_STATES.get("CLOSED"):
|
||||
if task_id == base_task_id:
|
||||
error(_("Task %d has not finished yet.") % task_id)
|
||||
else:
|
||||
error(_("Child task %d has not finished yet.") % task_id)
|
||||
|
||||
# perform the download
|
||||
|
||||
number = 0
|
||||
for (task, filename, new_filename) in downloads:
|
||||
number += 1
|
||||
print _("Downloading [%d/%d]: %s") % (number, len(downloads), new_filename)
|
||||
with open(new_filename, "wb") as output_file:
|
||||
output_file.write(session.downloadTaskOutput(task["id"], filename))
|
||||
|
||||
def anon_handle_wait_repo(options, session, args):
|
||||
"Wait for a repo to be regenerated"
|
||||
usage = _("usage: %prog wait-repo [options] <tag>")
|
||||
|
|
|
|||
76
docs/Migrating_to_1.10.txt
Normal file
76
docs/Migrating_to_1.10.txt
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
Migrating to Koji 1.10
|
||||
======================
|
||||
|
||||
// asciidoc formatted
|
||||
|
||||
The 1.10 release of Koji includes a few changes that you should consider when
|
||||
migrating.
|
||||
|
||||
DB Updates
|
||||
----------
|
||||
|
||||
The new +tag_extra+ table tracks extra data for tags.
|
||||
|
||||
There is a new entry in the +channels+ table and some additions and updates to
|
||||
the +archivetypes+ table.
|
||||
|
||||
As in previous releases, we provide a migration script that updates the
|
||||
database.
|
||||
|
||||
# psql koji koji </usr/share/doc/koji-1.10.0/docs/schema-upgrade-1.9-1.10.sql
|
||||
|
||||
|
||||
Command line changes
|
||||
--------------------
|
||||
|
||||
A few commands support new arguments
|
||||
|
||||
* maven-build
|
||||
** --ini : Pass build parameters via a .ini file
|
||||
** --section : Get build parameters from this section of the .ini
|
||||
* wrapper-rpm
|
||||
** --ini : Pass build parameters via a .ini file
|
||||
** --section : Get build parameters from this section of the .ini
|
||||
* import
|
||||
** --link : Attempt to hardlink instead of uploading
|
||||
* list-tagged
|
||||
** --latest-n : Only show the latest N builds/rpms
|
||||
* list-history
|
||||
** --watch : Monitor history data
|
||||
* edit-tag
|
||||
** --extra : Set tag extra option
|
||||
* list-tasks
|
||||
** --user : Only tasks for this user
|
||||
** --arch : Only tasks for this architecture
|
||||
** --method : Only tasks of this method
|
||||
** --channel : Only tasks in this channel
|
||||
** --host : Only tasks for this host
|
||||
* download-build
|
||||
** --task-id : Interpret id as a task id
|
||||
|
||||
And there are three new commands
|
||||
|
||||
* image-build-indirection
|
||||
* maven-chain
|
||||
* runroot
|
||||
|
||||
|
||||
Other Configuration changes
|
||||
---------------------------
|
||||
|
||||
The Koji web interface can now treat +extra-footer.html+ as a Cheetah template.
|
||||
This behavior can be enabled by setting the +LiteralFooter+ option to +False+ in
|
||||
the kojiweb config.
|
||||
|
||||
|
||||
RPC API Changes
|
||||
---------------
|
||||
|
||||
The +readTaggedBuilds+ and +readTaggedRPMS+ now treat an integer value for the optional
|
||||
latest argument differently. Before it was simply treated as a boolean flag, which
|
||||
if true caused the call to return only the latest build for each package. Now, if
|
||||
the value is a positive integer N, it will return the N latest builds for each
|
||||
package. The behavior is unchanged for other values.
|
||||
|
||||
New rpc calls: +chainMaven+, +buildImageIndirection+, and +mergeScratch+
|
||||
|
||||
50
docs/schema-upgrade-1.9-1.10.sql
Normal file
50
docs/schema-upgrade-1.9-1.10.sql
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
|
||||
BEGIN;
|
||||
|
||||
INSERT INTO channels (name) VALUES ('image');
|
||||
|
||||
|
||||
CREATE TABLE tag_extra (
|
||||
tag_id INTEGER NOT NULL REFERENCES tag(id),
|
||||
key TEXT NOT NULL,
|
||||
value TEXT NOT NULL, -- TODO - move this to jsonb when we can
|
||||
-- versioned - see desc above
|
||||
create_event INTEGER NOT NULL REFERENCES events(id) DEFAULT get_event(),
|
||||
revoke_event INTEGER REFERENCES events(id),
|
||||
creator_id INTEGER NOT NULL REFERENCES users(id),
|
||||
revoker_id INTEGER REFERENCES users(id),
|
||||
active BOOLEAN DEFAULT 'true' CHECK (active),
|
||||
CONSTRAINT active_revoke_sane CHECK (
|
||||
(active IS NULL AND revoke_event IS NOT NULL AND revoker_id IS NOT NULL)
|
||||
OR (active IS NOT NULL AND revoke_event IS NULL AND revoker_id IS NULL)),
|
||||
PRIMARY KEY (create_event, tag_id, key),
|
||||
UNIQUE (tag_id, key, active)
|
||||
) WITHOUT OIDS;
|
||||
|
||||
|
||||
update archivetypes set extensions='jar war rar ear sar jdocbook jdocbook-style' where name='jar';
|
||||
update archivetypes set description='Zip file' where name='zip';
|
||||
update archivetypes set extensions='tar tar.gz tar.bz2 tar.xz' where name='tar';
|
||||
update archivetypes set description='Open Virtualization Archive' where name='ova';
|
||||
|
||||
insert into archivetypes (name, description, extensions) values ('vdi', 'VirtualBox Virtual Disk Image', 'vdi');
|
||||
insert into archivetypes (name, description, extensions) values ('aar', 'Binary distribution of an Android Library project', 'aar');
|
||||
insert into archivetypes (name, description, extensions) values ('apklib', 'Source distribution of an Android Library project', 'apklib');
|
||||
insert into archivetypes (name, description, extensions) values ('cab', 'Windows cabinet file', 'cab');
|
||||
insert into archivetypes (name, description, extensions) values ('dylib', 'OS X dynamic library', 'dylib');
|
||||
insert into archivetypes (name, description, extensions) values ('gem', 'Ruby gem', 'gem');
|
||||
insert into archivetypes (name, description, extensions) values ('ini', 'INI config file', 'ini');
|
||||
insert into archivetypes (name, description, extensions) values ('js', 'Javascript file', 'js');
|
||||
insert into archivetypes (name, description, extensions) values ('ldif', 'LDAP Data Interchange Format file', 'ldif');
|
||||
insert into archivetypes (name, description, extensions) values ('manifest', 'Runtime environment for .NET applications', 'manifest');
|
||||
insert into archivetypes (name, description, extensions) values ('msm', 'Windows merge module', 'msm');
|
||||
insert into archivetypes (name, description, extensions) values ('properties', 'Properties file', 'properties');
|
||||
insert into archivetypes (name, description, extensions) values ('sig', 'Signature file', 'sig signature');
|
||||
insert into archivetypes (name, description, extensions) values ('so', 'Shared library', 'so');
|
||||
insert into archivetypes (name, description, extensions) values ('txt', 'Text file', 'txt');
|
||||
insert into archivetypes (name, description, extensions) values ('vhd', 'Hyper-V image', 'vhd');
|
||||
insert into archivetypes (name, description, extensions) values ('wsf', 'Windows script file', 'wsf');
|
||||
insert into archivetypes (name, description, extensions) values ('box', 'Vagrant Box Image', 'box');
|
||||
insert into archivetypes (name, description, extensions) values ('raw-xz', 'xz compressed raw disk image', 'raw.xz');
|
||||
|
||||
COMMIT;
|
||||
|
|
@ -378,6 +378,22 @@ CREATE TABLE tag_config (
|
|||
UNIQUE (tag_id,active)
|
||||
) WITHOUT OIDS;
|
||||
|
||||
CREATE TABLE tag_extra (
|
||||
tag_id INTEGER NOT NULL REFERENCES tag(id),
|
||||
key TEXT NOT NULL,
|
||||
value TEXT NOT NULL, -- TODO - move this to jsonb when we can
|
||||
-- versioned - see desc above
|
||||
create_event INTEGER NOT NULL REFERENCES events(id) DEFAULT get_event(),
|
||||
revoke_event INTEGER REFERENCES events(id),
|
||||
creator_id INTEGER NOT NULL REFERENCES users(id),
|
||||
revoker_id INTEGER REFERENCES users(id),
|
||||
active BOOLEAN DEFAULT 'true' CHECK (active),
|
||||
CONSTRAINT active_revoke_sane CHECK (
|
||||
(active IS NULL AND revoke_event IS NOT NULL AND revoker_id IS NOT NULL)
|
||||
OR (active IS NOT NULL AND revoke_event IS NULL AND revoker_id IS NULL)),
|
||||
PRIMARY KEY (create_event, tag_id, key),
|
||||
UNIQUE (tag_id, key, active)
|
||||
) WITHOUT OIDS;
|
||||
|
||||
-- the tag_updates table provides a mechanism to indicate changes relevant to tag
|
||||
-- that are not reflected in a versioned table. For example: builds changing volumes,
|
||||
|
|
|
|||
111
hub/kojihub.py
111
hub/kojihub.py
|
|
@ -44,6 +44,7 @@ import os
|
|||
import re
|
||||
import rpm
|
||||
import shutil
|
||||
import simplejson as json
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
|
|
@ -2768,14 +2769,15 @@ def get_tag(tagInfo, strict=False, event=None):
|
|||
a string (the tag name) or an int (the tag ID).
|
||||
Returns a map containing the following keys:
|
||||
|
||||
- id
|
||||
- name
|
||||
- perm_id (may be null)
|
||||
- perm (name, may be null)
|
||||
- arches (may be null)
|
||||
- locked
|
||||
- maven_support
|
||||
- maven_include_all
|
||||
- id : unique id for the tag
|
||||
- name : name of the tag
|
||||
- perm_id : permission id (may be null)
|
||||
- perm : permission name (may be null)
|
||||
- arches : tag arches (string, may be null)
|
||||
- locked : lock setting (boolean)
|
||||
- maven_support : maven support flag (boolean)
|
||||
- maven_include_all : maven include all flag (boolean)
|
||||
- extra : extra tag parameters (dictionary)
|
||||
|
||||
If there is no tag matching the given tagInfo, and strict is False,
|
||||
return None. If strict is True, raise a GenericError.
|
||||
|
|
@ -2814,8 +2816,28 @@ def get_tag(tagInfo, strict=False, event=None):
|
|||
if strict:
|
||||
raise koji.GenericError, "Invalid tagInfo: %r" % tagInfo
|
||||
return None
|
||||
result['extra'] = get_tag_extra(result)
|
||||
return result
|
||||
|
||||
|
||||
def get_tag_extra(tagInfo, event=None):
    """Get tag extra info (no inheritance).

    tagInfo is a tag info map (must contain the tag's 'id'); event optionally
    limits the query to a historical event via eventCondition.
    Returns a {key: value} dict with the JSON-encoded values decoded.
    Raises koji.GenericError if a stored value is not valid JSON.
    """
    tables = ['tag_extra']
    fields = ['key', 'value']
    clauses = [eventCondition(event, table='tag_extra'), "tag_id = %(id)i"]
    query = QueryProcessor(columns=fields, tables=tables, clauses=clauses, values=tagInfo,
                           opts={'asList': True})
    result = {}
    for key, value in query.execute():
        try:
            value = json.loads(value)
        except Exception:
            # this should not happen — values are json.dumps()ed on insert.
            # Format the message explicitly: passing (fmt, args...) to the
            # exception constructor never interpolates the placeholders.
            raise koji.GenericError("Invalid tag extra data: %s : %r" % (key, value))
        result[key] = value
    return result
|
||||
|
||||
|
||||
def edit_tag(tagInfo, **kwargs):
|
||||
"""Edit information for an existing tag.
|
||||
|
||||
|
|
@ -2871,18 +2893,36 @@ def edit_tag(tagInfo, **kwargs):
|
|||
if kwargs.has_key(key) and data[key] != kwargs[key]:
|
||||
changed = True
|
||||
data[key] = kwargs[key]
|
||||
if not changed:
|
||||
return
|
||||
if changed:
|
||||
update = UpdateProcessor('tag_config', values=data, clauses=['tag_id = %(id)i'])
|
||||
update.make_revoke()
|
||||
update.execute()
|
||||
|
||||
update = UpdateProcessor('tag_config', values=data, clauses=['tag_id = %(id)i'])
|
||||
update.make_revoke()
|
||||
update.execute()
|
||||
insert = InsertProcessor('tag_config', data=dslice(data, ('arches', 'perm_id', 'locked')))
|
||||
insert.set(tag_id=data['id'])
|
||||
insert.set(**dslice(data, ('maven_support', 'maven_include_all')))
|
||||
insert.make_create()
|
||||
insert.execute()
|
||||
|
||||
# handle extra data
|
||||
if 'extra' in kwargs:
|
||||
for key in kwargs['extra']:
|
||||
value = kwargs['extra'][key]
|
||||
if key not in tag['extra'] or tag['extra'] != value:
|
||||
data = {
|
||||
'tag_id' : tag['id'],
|
||||
'key' : key,
|
||||
'value' : json.dumps(kwargs['extra'][key]),
|
||||
}
|
||||
# revoke old entry, if any
|
||||
update = UpdateProcessor('tag_extra', values=data, clauses=['tag_id = %(tag_id)i', 'key=%(key)s'])
|
||||
update.make_revoke()
|
||||
update.execute()
|
||||
# add new entry
|
||||
insert = InsertProcessor('tag_extra', data=data)
|
||||
insert.make_create()
|
||||
insert.execute()
|
||||
|
||||
insert = InsertProcessor('tag_config', data=dslice(data, ('arches', 'perm_id', 'locked')))
|
||||
insert.set(tag_id=data['id'])
|
||||
insert.set(**dslice(data, ('maven_support', 'maven_include_all')))
|
||||
insert.make_create()
|
||||
insert.execute()
|
||||
|
||||
def old_edit_tag(tagInfo, name, arches, locked, permissionID):
|
||||
"""Edit information for an existing tag."""
|
||||
|
|
@ -3394,9 +3434,13 @@ def get_rpm(rpminfo, strict=False, multi=False):
|
|||
tables=['rpminfo'], joins=joins, clauses=clauses,
|
||||
values=data)
|
||||
if multi:
|
||||
return query.execute()
|
||||
data = query.execute()
|
||||
for row in data:
|
||||
row['size'] = koji.encode_int(row['size'])
|
||||
return data
|
||||
ret = query.executeOne()
|
||||
if ret:
|
||||
ret['size'] = koji.encode_int(ret['size'])
|
||||
return ret
|
||||
if retry:
|
||||
#at this point we have just an NVRA with no internal match. Open it up to externals
|
||||
|
|
@ -3406,6 +3450,7 @@ def get_rpm(rpminfo, strict=False, multi=False):
|
|||
if strict:
|
||||
raise koji.GenericError, "No such rpm: %r" % data
|
||||
return None
|
||||
ret['size'] = koji.encode_int(ret['size'])
|
||||
return ret
|
||||
|
||||
def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID=None, hostID=None, arches=None, queryOpts=None):
|
||||
|
|
@ -3480,7 +3525,10 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID
|
|||
query = QueryProcessor(columns=[f[0] for f in fields], aliases=[f[1] for f in fields],
|
||||
tables=['rpminfo'], joins=joins, clauses=clauses,
|
||||
values=locals(), opts=queryOpts)
|
||||
return query.execute()
|
||||
data = query.execute()
|
||||
for row in data:
|
||||
row['size'] = koji.encode_int(row['size'])
|
||||
return data
|
||||
|
||||
def get_maven_build(buildInfo, strict=False):
|
||||
"""
|
||||
|
|
@ -5629,6 +5677,7 @@ def query_history(tables=None, **kwargs):
|
|||
'cg_users' : ['user_id', 'cg_id'],
|
||||
'tag_inheritance' : ['tag_id', 'parent_id', 'priority', 'maxdepth', 'intransitive', 'noconfig', 'pkg_filter'],
|
||||
'tag_config' : ['tag_id', 'arches', 'perm_id', 'locked', 'maven_support', 'maven_include_all'],
|
||||
'tag_extra' : ['tag_id', 'key', 'value'],
|
||||
'build_target_config' : ['build_target_id', 'build_tag', 'dest_tag'],
|
||||
'external_repo_config' : ['external_repo_id', 'url'],
|
||||
'tag_external_repos' : ['tag_id', 'external_repo_id', 'priority'],
|
||||
|
|
@ -9007,17 +9056,17 @@ class RootExports(object):
|
|||
def getBuildConfig(self,tag,event=None):
|
||||
"""Return build configuration associated with a tag"""
|
||||
taginfo = get_tag(tag,strict=True,event=event)
|
||||
arches = taginfo['arches']
|
||||
if arches is None:
|
||||
#follow inheritance for arches
|
||||
order = readFullInheritance(taginfo['id'],event=event)
|
||||
for link in order:
|
||||
if link['noconfig']:
|
||||
continue
|
||||
arches = get_tag(link['parent_id'],strict=True,event=event)['arches']
|
||||
if arches is not None:
|
||||
taginfo['arches'] = arches
|
||||
break
|
||||
order = readFullInheritance(taginfo['id'], event=event)
|
||||
#follow inheritance for arches and extra
|
||||
for link in order:
|
||||
if link['noconfig']:
|
||||
continue
|
||||
ancestor = get_tag(link['parent_id'], strict=True, event=event)
|
||||
if taginfo['arches'] is None and ancestor['arches'] is not None:
|
||||
taginfo['arches'] = ancestor['arches']
|
||||
for key in ancestor['extra']:
|
||||
if key not in taginfo['extra']:
|
||||
taginfo['extra'][key] = ancestor['extra'][key]
|
||||
return taginfo
|
||||
|
||||
def getRepo(self,tag,state=None,event=None):
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@
|
|||
%define release %{baserelease}
|
||||
%endif
|
||||
Name: koji
|
||||
Version: 1.9.0
|
||||
Version: 1.10.0
|
||||
Release: %{release}%{?dist}
|
||||
License: LGPLv2 and GPLv2+
|
||||
# koji.ssl libs (from plague) are GPLv2+
|
||||
|
|
@ -320,6 +320,9 @@ fi
|
|||
%endif
|
||||
|
||||
%changelog
|
||||
* Tue Jul 14 2015 Mike McLean <mikem at redhat.com> - 1.10.0-1
|
||||
- 1.10.0 release
|
||||
|
||||
* Mon Mar 24 2014 Mike McLean <mikem at redhat.com> - 1.9.0-1
|
||||
- 1.9.0 release
|
||||
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ import xmlrpclib
|
|||
import xml.sax
|
||||
import xml.sax.handler
|
||||
from xmlrpclib import loads, dumps, Fault
|
||||
#import OpenSSL.SSL
|
||||
import OpenSSL
|
||||
import zipfile
|
||||
|
||||
def _(args):
|
||||
|
|
@ -1291,6 +1291,8 @@ def genMockConfig(name, arch, managed=False, repoid=None, tag_name=None, **opts)
|
|||
# Don't let a build last more than 24 hours
|
||||
'rpmbuild_timeout': opts.get('rpmbuild_timeout', 86400)
|
||||
}
|
||||
if opts.get('package_manager'):
|
||||
config_opts['package_manager'] = opts['package_manager']
|
||||
|
||||
# bind_opts are used to mount parts (or all of) /dev if needed.
|
||||
# See kojid::LiveCDTask for a look at this option in action.
|
||||
|
|
@ -1727,7 +1729,7 @@ class ClientSession(object):
|
|||
def _serverPrincipal(self, cprinc):
|
||||
"""Get the Kerberos principal of the server we're connecting
|
||||
to, based on baseurl."""
|
||||
servername = self._host
|
||||
servername = socket.getfqdn(self._host)
|
||||
#portspec = servername.find(':')
|
||||
#if portspec != -1:
|
||||
# servername = servername[:portspec]
|
||||
|
|
@ -1945,6 +1947,34 @@ class ClientSession(object):
|
|||
raise
|
||||
except Exception, e:
|
||||
self._close_connection()
|
||||
if isinstance(e, OpenSSL.SSL.Error):
|
||||
# pyOpenSSL doesn't use different exception
|
||||
# subclasses, we have to actually parse the args
|
||||
for arg in e.args:
|
||||
# First, check to see if 'arg' is iterable because
|
||||
# it can be anything..
|
||||
try:
|
||||
iter(arg)
|
||||
except TypeError:
|
||||
continue
|
||||
|
||||
# We do all this so that we can detect cert expiry
|
||||
# so we can avoid retrying those over and over.
|
||||
for items in arg:
|
||||
try:
|
||||
iter(items)
|
||||
except TypeError:
|
||||
continue
|
||||
|
||||
if len(items) != 3:
|
||||
continue
|
||||
|
||||
_, _, ssl_reason = items
|
||||
|
||||
if ('certificate revoked' in ssl_reason or
|
||||
'certificate expired' in ssl_reason):
|
||||
# There's no point in retrying for this
|
||||
raise
|
||||
if not self.logged_in:
|
||||
#in the past, non-logged-in sessions did not retry. For compatibility purposes
|
||||
#this behavior is governed by the anon_retry opt.
|
||||
|
|
@ -2043,7 +2073,7 @@ class ClientSession(object):
|
|||
result = self._callMethod('checkUpload', (path, name), chk_opts)
|
||||
if int(result['size']) != ofs:
|
||||
raise GenericError, "Uploaded file is wrong length: %s/%s, %s != %s" \
|
||||
% (path, name, result['sumlength'], ofs)
|
||||
% (path, name, result['size'], ofs)
|
||||
if problems and result['hexdigest'] != full_chksum.hexdigest():
|
||||
raise GenericError, "Uploaded file has wrong checksum: %s/%s, %s != %s" \
|
||||
% (path, name, result['hexdigest'], full_chksum.hexdigest())
|
||||
|
|
@ -2391,11 +2421,6 @@ def _taskLabel(taskInfo):
|
|||
else:
|
||||
return '%s (%s)' % (method, arch)
|
||||
|
||||
def _forceAscii(value):
|
||||
"""Replace characters not in the 7-bit ASCII range
|
||||
with "?"."""
|
||||
return ''.join([(ord(c) <= 127) and c or '?' for c in value])
|
||||
|
||||
def fixEncoding(value, fallback='iso8859-15'):
|
||||
"""
|
||||
Convert value to a 'str' object encoded as UTF-8.
|
||||
|
|
|
|||
|
|
@ -51,6 +51,7 @@ class RunRootTask(tasks.BaseTaskHandler):
|
|||
else:
|
||||
options.append(o)
|
||||
rel_path = path[len(mount_data['mountpoint']):]
|
||||
rel_path = rel_path[1:] if rel_path.startswith('/') else rel_path
|
||||
res = (os.path.join(mount_data['path'], rel_path), path, mount_data['fstype'], ','.join(options))
|
||||
return res
|
||||
|
||||
|
|
|
|||
|
|
@ -127,7 +127,7 @@
|
|||
#end if
|
||||
</td>
|
||||
</tr>
|
||||
#for $rpm in $rpmsByArch[$arch] + $debuginfoByArch.get($arch, [])
|
||||
#for $rpm in $rpmsByArch[$arch]
|
||||
<tr>
|
||||
#set $rpmfile = '%(name)s-%(version)s-%(release)s.%(arch)s.rpm' % $rpm
|
||||
#set $rpmpath = $pathinfo.rpm($rpm)
|
||||
|
|
|
|||
|
|
@ -1102,12 +1102,15 @@ def buildinfo(environ, buildID):
|
|||
archivesByExt.setdefault(os.path.splitext(archive['filename'])[1][1:], []).append(archive)
|
||||
|
||||
rpmsByArch = {}
|
||||
debuginfoByArch = {}
|
||||
debuginfos = []
|
||||
for rpm in rpms:
|
||||
if koji.is_debuginfo(rpm['name']):
|
||||
debuginfoByArch.setdefault(rpm['arch'], []).append(rpm)
|
||||
debuginfos.append(rpm)
|
||||
else:
|
||||
rpmsByArch.setdefault(rpm['arch'], []).append(rpm)
|
||||
# add debuginfos at the end
|
||||
for rpm in debuginfos:
|
||||
rpmsByArch.setdefault(rpm['arch'], []).append(rpm)
|
||||
|
||||
if rpmsByArch.has_key('src'):
|
||||
srpm = rpmsByArch['src'][0]
|
||||
|
|
@ -1163,7 +1166,6 @@ def buildinfo(environ, buildID):
|
|||
values['build'] = build
|
||||
values['tags'] = tags
|
||||
values['rpmsByArch'] = rpmsByArch
|
||||
values['debuginfoByArch'] = debuginfoByArch
|
||||
values['task'] = task
|
||||
values['mavenbuild'] = mavenbuild
|
||||
values['winbuild'] = winbuild
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
#from kojiweb import util
|
||||
#import pprint
|
||||
|
||||
#include "includes/header.chtml"
|
||||
|
||||
|
|
@ -153,6 +154,17 @@
|
|||
<td colspan="2"><a href="tagdelete?tagID=$tag.id$util.authToken($self)">Delete tag</a></td>
|
||||
</tr>
|
||||
#end if
|
||||
#if $tag.get('extra')
|
||||
<tr>
|
||||
<th>Extra options:</th>
|
||||
</tr>
|
||||
#for $key in $tag['extra']
|
||||
<tr>
|
||||
<th>$key</th>
|
||||
<td>$pprint.pformat($tag['extra'][$key])</td>
|
||||
</tr>
|
||||
#end for
|
||||
#end if
|
||||
</table>
|
||||
|
||||
#include "includes/footer.chtml"
|
||||
|
|
|
|||
|
|
@ -464,8 +464,12 @@ def taskScratchClass(task_object):
|
|||
""" Return a css class indicating whether or not this task is a scratch
|
||||
build.
|
||||
"""
|
||||
method = task_object['method']
|
||||
request = task_object['request']
|
||||
if len(request) >= 3:
|
||||
if method == 'build' and len(request) >= 3:
|
||||
# Each task method has its own signature for what gets put in the
|
||||
# request list. Builds should have an `opts` dict at index 2.
|
||||
# See www/kojiweb/taskinfo.chtml for the grimoire.
|
||||
opts = request[2]
|
||||
if opts.get('scratch'):
|
||||
return "scratch"
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue