initial support for importing Maven builds
commit d327821a2a
parent 7677f63099
4 changed files with 326 additions and 0 deletions
cli/koji (+64)

@@ -1624,6 +1624,70 @@ def handle_import_in_place(options, session, args):
    print _("done")
    sys.stdout.flush()

def handle_import_archive(options, session, args):
    "[admin] Import an archive file and associate it with a build"
    usage = _("usage: %prog import-archive build-id|n-v-r /path/to/archive...")
    usage += _("\n(Specify the --help global option for a list of other help options)")
    parser = OptionParser(usage=usage)
    parser.add_option("--noprogress", action="store_true",
                      help=_("Do not display progress of the upload"))
    parser.add_option("--create-build", action="store_true",
                      help=_("Auto-create builds as needed (must be used with --pom)"))
    parser.add_option("--pom", help=_("POM file to use when auto-creating builds"), metavar="FILE")
    (suboptions, args) = parser.parse_args(args)

    if len(args) < 2:
        parser.error(_("You must specify a build ID or N-V-R and the path to the archive to import"))
    if suboptions.create_build and not suboptions.pom:
        parser.error(_("--create-build must be used with --pom"))

    activate_session(session)

    if not session.hasPerm('admin'):
        print _("This action requires admin privileges")
        return 1

    needs_create = False

    buildinfo = session.getBuild(args[0])
    if not buildinfo:
        if not suboptions.create_build:
            print _("Could not find information for build: %s" % args[0])
            return 1
        else:
            needs_create = True

    maveninfo = None
    if buildinfo:
        maveninfo = session.getMavenBuild(buildinfo['id'])
    if not maveninfo:
        if not suboptions.create_build:
            print _("Could not find Maven information for build: %s" % args[0])
            return 1
        else:
            if not suboptions.pom:
                print _("No Maven information available to create build for %s, please specify a POM file with --pom" % args[0])
                return 1
            else:
                needs_create = True

    if needs_create:
        if not buildinfo:
            buildinfo = koji.parse_NVR(args[0])
        pominfo = koji.parse_pom(suboptions.pom)
        session.createMavenBuild(buildinfo, pominfo['groupId'], pominfo['artifactId'])
        # re-fetch so buildinfo includes the build id assigned by the hub
        buildinfo = session.getBuild(args[0])

    for filepath in args[1:]:
        filename = os.path.basename(filepath)
        print "Uploading archive: %s" % filename
        serverdir = _unique_path('cli-import')
        if _running_in_bg() or suboptions.noprogress:
            callback = None
        else:
            callback = _progress_callback
        session.uploadWrapper(filepath, serverdir, callback=callback, blocksize=65536)
        print
        serverpath = "%s/%s" % (serverdir, filename)
        session.importArchive(serverpath, buildinfo)
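
For illustration only (none of these names appear in the commit), a hypothetical run of the new subcommand could look like:

    koji import-archive --create-build --pom mylib-1.0.pom mylib-1.0-1 mylib-1.0-1.zip

The hub requires the archive filename, minus its extension, to be in name-version-release format, so mylib-1.0-1.zip matches the build mylib-1.0-1.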

def handle_grant_permission(options, session, args):
    "[admin] Grant a permission to a user"
    usage = _("usage: %prog grant-permission <permission> <user> [<user> ...]")


docs/schema.sql (+52)

@@ -603,4 +603,56 @@ rpmfiles TO PUBLIC;
-- where users.name in ('admin')
-- and permissions.name = 'admin';

-- Schema additions for multiplatform support

-- we need to track some additional metadata about Maven builds
CREATE TABLE mavenbuilds (
        build_id INTEGER NOT NULL PRIMARY KEY REFERENCES build(id),
        group_id TEXT NOT NULL,
        artifact_id TEXT NOT NULL
) WITHOUT OIDS;

-- Even though we call this archiveinfo, we can probably use it for
-- any filetype output by a build process.  In general they will be
-- archives (.zip, .jar, .tar.gz) but could also be installer executables (.exe)
CREATE TABLE archivetypes (
        id SERIAL NOT NULL PRIMARY KEY,
        name TEXT NOT NULL UNIQUE,
        description TEXT NOT NULL,
        extensions TEXT NOT NULL
) WITHOUT OIDS;

insert into archivetypes (name, description, extensions) values ('zip', 'Zip archives, including jars', 'zip jar war rar ear');
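
Additional types can be seeded the same way; for illustration (not part of this commit), a hypothetical 'tar' entry might look like:

-- insert into archivetypes (name, description, extensions) values ('tar', 'Tar files, optionally compressed', 'tar tar.gz tar.bz2');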

-- Do we want to enforce a constraint that a build can only generate one
-- archive with a given name?
CREATE TABLE archiveinfo (
        id SERIAL NOT NULL PRIMARY KEY,
        type_id INTEGER NOT NULL REFERENCES archivetypes (id),
        build_id INTEGER NOT NULL REFERENCES build (id),
        buildroot_id INTEGER REFERENCES buildroot (id),
        filename TEXT NOT NULL,
        size INTEGER NOT NULL,
        md5sum TEXT NOT NULL
) WITHOUT OIDS;
CREATE INDEX archiveinfo_build_idx ON archiveinfo (build_id);
CREATE INDEX archiveinfo_buildroot_idx ON archiveinfo (buildroot_id);
CREATE INDEX archiveinfo_type_idx ON archiveinfo (type_id);

CREATE TABLE buildroot_archives (
        buildroot_id INTEGER NOT NULL REFERENCES buildroot (id),
        archive_id INTEGER NOT NULL REFERENCES archiveinfo (id),
        PRIMARY KEY (buildroot_id, archive_id)
) WITHOUT OIDS;
CREATE INDEX buildroot_archives_archive_idx ON buildroot_archives (archive_id);

CREATE TABLE archivefiles (
        archive_id INTEGER NOT NULL REFERENCES archiveinfo (id),
        filename TEXT NOT NULL,
        size INTEGER NOT NULL,
        md5sum TEXT NOT NULL,
        PRIMARY KEY (filename, archive_id)
) WITHOUT OIDS;
CREATE INDEX archivefiles_by_archive_id ON archivefiles (archive_id);

COMMIT WORK;
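
As a sketch of how these tables join (written as a commented example in the style of the queries above; the build id 1234 is illustrative):

-- SELECT archiveinfo.filename, archiveinfo.size, archivetypes.name
--   FROM archiveinfo JOIN archivetypes ON archiveinfo.type_id = archivetypes.id
--   WHERE archiveinfo.build_id = 1234;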

hub/kojihub.py (+161)

@@ -41,6 +41,7 @@ import sys
import tempfile
import time
import xmlrpclib
import zipfile
from koji.context import context

def log_error(msg):
@@ -2446,6 +2447,30 @@ def get_rpm(rpminfo,strict=False):
        return None
    return dict(zip(fields,row))

def get_maven_build(buildInfo, strict=False):
    """
    Retrieve Maven-specific information about a build.
    buildInfo can be either a string (n-v-r) or an integer
    (build ID).  Returns a map containing the following keys:

    build_id: id of the build (integer)
    group_id: Maven groupId (string)
    artifact_id: Maven artifactId (string)
    """
    fields = ('build_id', 'group_id', 'artifact_id')

    build_id = find_build_id(buildInfo)
    if not build_id:
        if strict:
            raise koji.GenericError, 'No matching build found: %s' % buildInfo
        else:
            return None

    query = """SELECT build_id, group_id, artifact_id
    FROM mavenbuilds
    WHERE build_id = %(build_id)i"""
    return _singleRow(query, locals(), fields, strict)
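
For example (all values illustrative), a successful lookup such as getMavenBuild('mylib-1.0-1') returns a map like:

    {'build_id': 1234, 'group_id': 'org.example', 'artifact_id': 'mylib'}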

def _fetchMulti(query, values):
    """Run the query and return all rows"""
    c = context.cnx.cursor()
@@ -3061,6 +3086,126 @@ def import_build_in_place(build):
    _dml(update,locals())
    return build_id

def get_archive_type(filename, strict=False):
    """
    Get the archive type for the given filename, based on the file extension.
    """
    parts = filename.split('.')
    if len(parts) < 2:
        raise koji.GenericError, '%s does not have an extension, unable to determine file type' \
              % filename
    ext = parts[-1]
    # special-case .tar.*
    if len(parts) > 2 and parts[-2] == 'tar':
        ext = '%s.%s' % tuple(parts[-2:])
    # \m and \M are PostgreSQL regex word-boundary markers, so the extension
    # must match a whole word in the space-separated extensions list
    select = r"""SELECT id, name, description, extensions FROM archivetypes
    WHERE extensions ~ E'\\m%s\\M'""" % ext
    results = _multiRow(select, locals(), ('id', 'name', 'description', 'extensions'))
    if len(results) == 0:
        if strict:
            raise koji.GenericError, 'unsupported file extension: %s' % ext
        else:
            return None
    elif len(results) > 1:
        # this should never happen, and is a misconfiguration in the database
        raise koji.GenericError, 'multiple matches for file extension: %s' % ext
    else:
        return results[0]
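
For instance, with the 'zip' row seeded by the schema above, get_archive_type('mylib-1.0-1.jar') matches the 'jar' extension and returns that row (the id value is illustrative):

    {'id': 1, 'name': 'zip', 'description': 'Zip archives, including jars', 'extensions': 'zip jar war rar ear'}

A name like 'foo-1.0.tar.gz' is looked up under the combined extension 'tar.gz', thanks to the special case above.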

def new_maven_build(build, group_id, artifact_id):
    """
    Add Maven metadata to an existing build.  build is the build
    map (as returned by get_build()); group_id and artifact_id
    are the Maven coordinates to record.

    Note: the artifact_id must match the name of the build
    """
    if artifact_id != build['name']:
        raise koji.GenericError, 'mismatch between artifact_id (%s) and build name (%s)' % \
              (artifact_id, build['name'])
    build_id = build['id']
    insert = """INSERT INTO mavenbuilds (build_id, group_id, artifact_id)
    VALUES (%(build_id)i, %(group_id)s, %(artifact_id)s)"""
    _dml(insert, locals())

def import_archive(filepath, buildinfo, buildroot_id=None):
    """
    Import an archive file and associate it with a build.  The archive can
    be any non-rpm filetype supported by Koji.

    filepath: path to the archive file (relative to the Koji workdir)
    buildinfo: dict of information about the build to associate the archive with (as returned by getBuild())
    buildroot_id: the id of the buildroot the archive was built in (may be null)
    """
    maveninfo = get_maven_build(buildinfo['id'])
    if not maveninfo:
        raise koji.GenericError, 'no Maven info for build: %s' % koji.buildLabel(buildinfo)

    filepath = '%s/%s' % (koji.pathinfo.work(), filepath)
    if not os.path.exists(filepath):
        raise koji.GenericError, 'no such file: %s' % filepath

    filename = koji.fixEncoding(os.path.basename(filepath))
    expected = '%(name)s-%(version)s-%(release)s' % buildinfo
    if expected != os.path.splitext(filename)[0]:
        raise koji.GenericError, 'filename is not in name-version-release format: %s' % filename
    archivetype = get_archive_type(filename, strict=True)
    type_id = archivetype['id']
    build_id = buildinfo['id']
    size = os.path.getsize(filepath)
    # checksum the file in 8k chunks to avoid reading it all into memory
    archivefp = file(filepath)
    m = md5.new()
    while True:
        contents = archivefp.read(8192)
        if not contents:
            break
        m.update(contents)
    archivefp.close()
    md5sum = m.hexdigest()

    # XXX verify that the buildroot is associated with a task that's associated with the build
    archive_id = _singleValue("SELECT nextval('archiveinfo_id_seq')", strict=True)
    insert = """INSERT INTO archiveinfo
    (id, type_id, build_id, buildroot_id, filename, size, md5sum)
    VALUES
    (%(archive_id)i, %(type_id)i, %(build_id)i, %(buildroot_id)s, %(filename)s, %(size)i, %(md5sum)s)"""
    _dml(insert, locals())

    if archivetype['name'] == 'zip':
        import_zip_archive(archive_id, filepath, buildinfo, maveninfo)
    else:
        raise koji.GenericError, 'unsupported archive type: %s' % archivetype['name']
    import_archive_file(filepath, buildinfo, maveninfo)

def import_zip_archive(archive_id, filepath, buildinfo, maveninfo):
    """
    Import information about the file entries in the zip file.
    """
    archive = zipfile.ZipFile(filepath, 'r')
    for entry in archive.infolist():
        filename = koji.fixEncoding(entry.filename)
        size = entry.file_size
        m = md5.new()
        m.update(archive.read(entry.filename))
        md5sum = m.hexdigest()
        insert = """INSERT INTO archivefiles (archive_id, filename, size, md5sum)
        VALUES
        (%(archive_id)i, %(filename)s, %(size)i, %(md5sum)s)"""
        _dml(insert, locals())
    archive.close()

def import_archive_file(filepath, buildinfo, maveninfo):
    """Move the archive file to its final location on the filesystem"""
    final_path = "%s/%s" % (koji.pathinfo.mavenbuild(buildinfo, maveninfo),
                            koji.fixEncoding(os.path.basename(filepath)))
    koji.ensuredir(os.path.dirname(final_path))
    if os.path.exists(final_path):
        raise koji.GenericError("Error importing archive file, %s already exists" % final_path)
    if os.path.islink(filepath) or not os.path.isfile(filepath):
        raise koji.GenericError("Error importing archive file, %s is not a regular file" % filepath)
    os.rename(filepath, final_path)
    # leave a symlink at the old workdir path so existing references remain valid
    os.symlink(final_path, filepath)

def add_rpm_sig(an_rpm, sighdr):
    """Store a signature header for an rpm"""
    #calling function should perform permission checks, if applicable
@@ -4080,6 +4225,8 @@ class RootExports(object):
    importBuildInPlace = staticmethod(import_build_in_place)
    resetBuild = staticmethod(reset_build)

    importArchive = staticmethod(import_archive)

    untaggedBuilds = staticmethod(untagged_builds)
    tagHistory = staticmethod(tag_history)

@@ -4096,6 +4243,19 @@
        data['owner'] = owner
        return new_build(data)

    def createMavenBuild(self, build_info, group_id, artifact_id):
        """
        Associate Maven metadata with an existing build.  The build must
        not already have associated Maven metadata.
        """
        context.session.assertPerm('admin')
        build = get_build(build_info)
        if not build:
            build_id = self.createEmptyBuild(build_info['name'], build_info['version'],
                                             build_info['release'], build_info['epoch'])
            build = get_build(build_id, strict=True)
        new_maven_build(build, group_id, artifact_id)
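
For illustration (names and values assumed, not from the commit), an admin client could call this over XML-RPC as:

    # session is an authenticated ClientSession with the admin permission
    nvr = {'name': 'mylib', 'version': '1.0', 'release': '1', 'epoch': None}
    session.createMavenBuild(nvr, 'org.example', 'mylib')

Note that new_maven_build() requires the artifact_id ('mylib' here) to match the build name.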

    def importRPM(self, path, basename):
        """Import an RPM into the database.
@@ -4316,6 +4476,7 @@
        return query.execute()

    getBuild = staticmethod(get_build)
    getMavenBuild = staticmethod(get_maven_build)
    getChangelogEntries = staticmethod(get_changelog_entries)

    def cancelBuild(self, buildID):

koji/__init__.py (+49)

@@ -47,6 +47,8 @@ import urllib2
import urlparse
import xmlrpclib
from xmlrpclib import loads, Fault
import xml.sax
import xml.sax.handler
import ssl.XMLRPCServerProxy
import OpenSSL.SSL

@@ -714,6 +716,48 @@ def canonArch(arch):
    else:
        return arch

class POMHandler(xml.sax.handler.ContentHandler):
    def __init__(self, values, fields):
        xml.sax.handler.ContentHandler.__init__(self)
        self.tag_stack = []
        self.tag_content = None
        self.values = values
        self.fields = fields

    def startElement(self, name, attrs):
        self.tag_stack.append(name)
        self.tag_content = ''

    def characters(self, content):
        self.tag_content += content

    def endElement(self, name):
        if len(self.tag_stack) == 2 and self.tag_stack[-2] == 'project' and \
           self.tag_stack[-1] in self.fields:
            self.values[self.tag_stack[-1]] = self.tag_content
        self.tag_content = ''
        self.tag_stack.pop()

def parse_pom(pomfile):
    """
    Parse the Maven .pom file and return a map containing information
    extracted from it.  The map will contain at least the following
    fields:

    groupId
    artifactId
    name (human-readable)
    version
    """
    fields = ('groupId', 'artifactId', 'name', 'version')
    values = {}
    handler = POMHandler(values, fields)
    xml.sax.parse(pomfile, handler)
    for field in fields:
        if field not in values.keys():
            raise GenericError, 'could not extract %s from POM: %s' % (field, pomfile)
    return values
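
A sketch of the round trip (the POM content and path below are assumed for illustration):

    pom = '''<project>
      <groupId>org.example</groupId>
      <artifactId>mylib</artifactId>
      <name>My Library</name>
      <version>1.0</version>
    </project>'''
    open('/tmp/mylib-1.0.pom', 'w').write(pom)
    print parse_pom('/tmp/mylib-1.0.pom')
    # -> {'groupId': 'org.example', 'artifactId': 'mylib', 'name': 'My Library', 'version': '1.0'}

If any of the four required fields is missing from the POM, parse_pom raises GenericError.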

def hex_string(s):
    """Converts a string to a string of hex digits"""
    return ''.join([ '%02x' % ord(x) for x in s ])
@@ -1036,6 +1080,11 @@ class PathInfo(object):
        """Return the directory where a build belongs"""
        return self.topdir + ("/packages/%(name)s/%(version)s/%(release)s" % build)

    def mavenbuild(self, build, maveninfo):
        """Return the directory where a Maven build belongs"""
        return self.topdir + ("/maven/%(group_id)s/%(artifact_id)s" % maveninfo) + \
               ("/%(version)s-%(release)s" % build)

    def rpm(self,rpminfo):
        """Return the path (relative to build_dir) where an rpm belongs"""
        return "%(arch)s/%(name)s-%(version)s-%(release)s.%(arch)s.rpm" % rpminfo