maven/image import tests from first branch

This commit is contained in:
Mike McLean 2017-10-02 12:32:18 -04:00
parent bdf04adb23
commit 3a638314d8
23 changed files with 750 additions and 0 deletions

View file

@ -0,0 +1,51 @@
{
"x86_64": {
"files": [
"tdl-x86_64.xml",
"image.ks",
"image-base.ks",
"foo-x86_64.xml",
"my-image-7.4.2-2.x86_64.ova"
],
"rpmlist": [
{
"name": "some-package",
"buildtime": 1421172150,
"epoch": null,
"version": "1.0",
"release": "8.el7",
"arch": "x86_64",
"payloadhash": "28f1f3e6194573da1069bc82acdb79bd",
"size": 638
},
{
"name": "another-package",
"buildtime": 1462364954,
"epoch": null,
"version": "2.8.71",
"release": "7.el7",
"arch": "noarch",
"payloadhash": "0a445b4440665e1b435ed08b792f4811",
"size": 696715
},
{
"name": "yet-another-package",
"buildtime": 1435146325,
"epoch": null,
"version": "2.6.0",
"release": "1.el7_1",
"arch": "noarch",
"payloadhash": "ca7829d6a999db3835ab50902e44ce05",
"size": 350906
}
],
"logs": [
"oz-x86_64.log"
],
"task_id": 999,
"version": "7.4.2",
"release": "2",
"arch": "x86_64",
"name": "my-image"
}
}

View file

@ -0,0 +1,144 @@
{
"inserts": [
[
"INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)",
{
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "19a674d997af7098a444b60d7b51cee6",
"filename": "tdl-x86_64.xml",
"checksum_type": 0,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1001,
"size": 36
},
{}
],
[
"INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)",
{
"archive_id": 1001,
"arch": "x86_64"
},
{}
],
[
"INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)",
{
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "a5114a20d790cf17eca1b1115a4546f8",
"filename": "image.ks",
"checksum_type": 0,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1002,
"size": 30
},
{}
],
[
"INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)",
{
"archive_id": 1002,
"arch": "x86_64"
},
{}
],
[
"INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)",
{
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "9828cf75d9d17ac8e79e53ed71c6a71c",
"filename": "image-base.ks",
"checksum_type": 0,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1003,
"size": 35
},
{}
],
[
"INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)",
{
"archive_id": 1003,
"arch": "x86_64"
},
{}
],
[
"INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)",
{
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "f601c0f647d7cdd4c92aa511876f8533",
"filename": "foo-x86_64.xml",
"checksum_type": 0,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1004,
"size": 36
},
{}
],
[
"INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)",
{
"archive_id": 1004,
"arch": "x86_64"
},
{}
],
[
"INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)",
{
"build_id": 137,
"archive_id": "ARCHIVE_ID",
"type_id": "ARCHIVETYPE",
"checksum": "84547200ef5002292ecdd50c62de518e",
"filename": "my-image-7.4.2-2.x86_64.ova",
"checksum_type": 0,
"btype_id": "BTYPEID:image",
"buildroot_id": null,
"id": 1005,
"size": 49
},
{}
],
[
"INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)",
{
"archive_id": 1005,
"arch": "x86_64"
},
{}
],
[
"INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",
{
"archive_id": 1005,
"rpm_id": 1002
},
{}
]
],
"updates": [
[
"UPDATE build SET state = %(data.state)s, id = %(data.id)s, completion_time = (now())\nWHERE ( id=%(build_id)i )",
{
"state": 1,
"id": "BUILD_ID"
},
{
"completion_time": "now()"
}
]
]
}

View file

@ -0,0 +1 @@
This is build.log

View file

@ -0,0 +1 @@
This is checkout.log

View file

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata modelVersion="1.1.0">
<groupId>com.mycompany.app</groupId>
<artifactId>my-app</artifactId>
<version>1.0-SNAPSHOT</version>
<versioning>
<snapshot>
<timestamp>20170927.043542</timestamp>
<buildNumber>1</buildNumber>
</snapshot>
<lastUpdated>20170927043542</lastUpdated>
<snapshotVersions>
<snapshotVersion>
<extension>jar</extension>
<value>1.0-20170927.043542-1</value>
<updated>20170927043542</updated>
</snapshotVersion>
<snapshotVersion>
<extension>pom</extension>
<value>1.0-20170927.043542-1</value>
<updated>20170927043542</updated>
</snapshotVersion>
</snapshotVersions>
</versioning>
</metadata>

View file

@ -0,0 +1 @@
3a7199598278308bc0dace024e5ad231

View file

@ -0,0 +1 @@
5703d4c0b313fa8580000f49d3c2d54018dbabd3

View file

@ -0,0 +1 @@
1bd36fe06d1a9ee0eb1772469e922ba1

View file

@ -0,0 +1 @@
66c246f8602ff2087a90196ee463ff3721305aac

View file

@ -0,0 +1,19 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.mycompany.app</groupId>
<artifactId>my-app</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<name>my-app</name>
<url>http://maven.apache.org</url>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View file

@ -0,0 +1 @@
eaa80817dcfdaab0114174a1b6a5a610

View file

@ -0,0 +1 @@
c75e355eb8a46076a680dcf050dde848e55b6d4d

View file

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<metadata>
<groupId>com.mycompany.app</groupId>
<artifactId>my-app</artifactId>
<versioning>
<versions>
<version>1.0-SNAPSHOT</version>
</versions>
<lastUpdated>20170927043542</lastUpdated>
</versioning>
</metadata>

View file

@ -0,0 +1 @@
c5e046db861fb27a0a008b931b2381c4

View file

@ -0,0 +1 @@
4ed2391cce9de027b1b24ddc572feb198df7b9a3

View file

@ -0,0 +1,35 @@
{
"files": {
"": [
"my-app-1.0-SNAPSHOT-scm-sources.zip"
],
"com/mycompany/app/my-app": [
"maven-metadata.xml.md5",
"maven-metadata.xml.sha1",
"maven-metadata.xml"
],
"com/mycompany/app/my-app/1.0-SNAPSHOT": [
"my-app-1.0-20170927.043542-1.pom.md5",
"maven-metadata.xml.md5",
"my-app-1.0-20170927.043542-1.pom.sha1",
"maven-metadata.xml.sha1",
"my-app-1.0-20170927.043542-1.jar.sha1",
"maven-metadata.xml",
"my-app-1.0-20170927.043542-1.jar",
"my-app-1.0-20170927.043542-1.pom",
"my-app-1.0-20170927.043542-1.jar.md5"
]
},
"buildroot_id": 3492408,
"logs": [
"checkout.log",
"state.log",
"build.log",
"root.log"
],
"maven_info": {
"group_id": "com.mycompany.app",
"artifact_id": "my-app",
"version": "1.0-SNAPSHOT"
}
}

View file

@ -0,0 +1,20 @@
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/data/logs/maven/root.log
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/data/logs/maven/build.log
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/data/logs/maven/state.log
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/data/logs/maven/checkout.log
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/maven-metadata.xml.sha1
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/maven-metadata.xml.md5
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/maven-metadata.xml
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-20170927.043542-1.pom.sha1
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-20170927.043542-1.pom.md5
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-20170927.043542-1.pom
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-20170927.043542-1.jar.sha1
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-20170927.043542-1.jar.md5
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-20170927.043542-1.jar
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/maven-metadata.xml.sha1
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/maven-metadata.xml.md5
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/maven-metadata.xml
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-SNAPSHOT-scm-sources.zip.sha1
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-SNAPSHOT-scm-sources.zip.md5
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/maven/com/mycompany/app/my-app/1.0-SNAPSHOT/my-app-1.0-SNAPSHOT-scm-sources.zip
packages/com.mycompany.app-my-app/1.0_SNAPSHOT/1/metadata.json

View file

@ -0,0 +1 @@
This is root.log

View file

@ -0,0 +1 @@
This is state.log

View file

@ -0,0 +1,265 @@
import copy
import json
import mock
import os
import os.path
import shutil
import tempfile
import unittest
import koji
import koji.util
import kojihub
orig_import_archive_internal = kojihub.import_archive_internal
IP = kojihub.InsertProcessor
UP = kojihub.UpdateProcessor
class TestCompleteImageBuild(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.pathinfo = koji.PathInfo(self.tempdir)
mock.patch('koji.pathinfo', new=self.pathinfo).start()
self.hostcalls = kojihub.HostExports()
self.context = mock.patch('kojihub.context').start()
mock.patch('kojihub.Host').start()
self.Task = mock.patch('kojihub.Task').start()
self.Task.return_value.assertHost = mock.MagicMock()
self.get_build = mock.patch('kojihub.get_build').start()
mock.patch('kojihub.get_rpm', new=self.my_get_rpm).start()
self.get_image_build = mock.patch('kojihub.get_image_build').start()
mock.patch('kojihub.get_archive_type', new=self.my_get_archive_type).start()
mock.patch('kojihub.lookup_name', new=self.my_lookup_name).start()
mock.patch.object(kojihub.BuildRoot, 'load', new=self.my_buildroot_load).start()
mock.patch('kojihub.import_archive_internal',
new=self.my_import_archive_internal).start()
self._dml = mock.patch('kojihub._dml').start()
mock.patch('kojihub.build_notification').start()
mock.patch('kojihub.assert_policy').start()
mock.patch('kojihub.check_volume_policy',
return_value={'id':0, 'name': 'DEFAULT'}).start()
self.set_up_callbacks()
self.rpms = {}
mock.patch('kojihub.InsertProcessor', new=self.get_insert).start()
mock.patch('kojihub.UpdateProcessor', new=self.get_update).start()
self.inserts = []
self.updates = []
mock.patch('kojihub.nextval', new=self.my_nextval).start()
self.sequences = {}
def tearDown(self):
mock.patch.stopall()
shutil.rmtree(self.tempdir)
def get_insert(self, *a, **kw):
insert = IP(*a, **kw)
insert.execute = mock.MagicMock()
self.inserts.append(insert)
return insert
def get_update(self, *a, **kw):
update = UP(*a, **kw)
update.execute = mock.MagicMock()
self.updates.append(update)
return update
def set_up_files(self, name):
datadir = os.path.join(os.path.dirname(__file__), 'data/image', name)
# load image result data for our test build
data = json.load(file(datadir + '/data.json'))
self.db_expect = json.load(file(datadir + '/db.json'))
for arch in data:
taskdir = koji.pathinfo.task(data[arch]['task_id'])
os.makedirs(taskdir)
filenames = data[arch]['files'] + data[arch]['logs']
for filename in filenames:
path = os.path.join(taskdir, filename)
with file(path, 'w') as fp:
fp.write('Test file for %s\n%s\n' % (arch, filename))
self.image_data = data
def get_expected_files(self, buildinfo):
data = self.image_data
imgdir = koji.pathinfo.imagebuild(buildinfo)
logdir = koji.pathinfo.build_logs(buildinfo)
paths = []
for arch in data:
for filename in data[arch]['files']:
paths.append(os.path.join(imgdir, filename))
for filename in data[arch]['logs']:
paths.append(os.path.join(logdir, 'image', filename))
bdir = koji.pathinfo.build(buildinfo)
paths.append(os.path.join(bdir, 'metadata.json'))
return paths
def my_nextval(self, sequence):
self.sequences.setdefault(sequence, 1000)
self.sequences[sequence] += 1
return self.sequences[sequence]
def my_get_rpm(self, rpminfo, **kw):
key = '%(name)s-%(version)s-%(release)s.%(arch)s' % rpminfo
ret = self.rpms.get(key)
if ret is not None:
return ret
ret = rpminfo.copy()
ret['id'] = len(self.rpms) + 1000
self.rpms[key] = rpminfo
return ret
def my_lookup_name(self, table, info, **kw):
if table == 'btype':
return {
'id': 'BTYPEID:%s' % info,
'name': 'BTYPE:%s' % info,
}
else:
raise Exception("Cannot fake call")
def my_get_archive_type(self, *a, **kw):
return dict.fromkeys(['id', 'name', 'description', 'extensions'],
'ARCHIVETYPE')
@staticmethod
def my_buildroot_load(br, id):
# br is the BuildRoot instance
br.id = id
br.is_standard = True
br.data = {
'br_type': koji.BR_TYPES['STANDARD'],
'id': id,
}
def my_import_archive_internal(self, *a, **kw):
# this is kind of odd, but we need this to fake the archiveinfo
share = {}
old_ip = kojihub.InsertProcessor
def my_ip(table, *a, **kw):
if table == 'archiveinfo':
data = kw['data']
data.setdefault('archive_id', 'ARCHIVE_ID')
share['archiveinfo'] = data
# TODO: need to add id
return old_ip(table, *a, **kw)
def my_ga(archive_id, **kw):
return share['archiveinfo']
with mock.patch('kojihub.InsertProcessor', new=my_ip), \
mock.patch('kojihub.get_archive', new=my_ga):
return orig_import_archive_internal(*a, **kw)
def set_up_callbacks(self):
new_callbacks = copy.deepcopy(koji.plugin.callbacks)
mock.patch('koji.plugin.callbacks', new=new_callbacks).start()
self.callbacks = []
for cbtype in koji.plugin.callbacks.keys():
koji.plugin.register_callback(cbtype, self.callback)
def callback(self, cbtype, *args, **kwargs):
self.callbacks.append([cbtype, args, kwargs])
def test_complete_image_build(self):
self.set_up_files('import_1')
buildinfo = {
'id': 137,
'task_id': 'TASK_ID',
'name': 'some-image',
'version': '1.2.3.4',
'release': '3',
'epoch': None,
'source': None,
'state': koji.BUILD_STATES['BUILDING'],
'volume_id': 0,
}
image_info = {'build_id': buildinfo['id']}
self.get_build.return_value = buildinfo
self.get_image_build.return_value = image_info
# run the import call
self.hostcalls.completeImageBuild('TASK_ID', 'BUILD_ID', self.image_data)
# make sure we wrote the files we expect
expected = self.get_expected_files(buildinfo)
files = []
for dirpath, dirnames, filenames in os.walk(self.tempdir + '/packages'):
files.extend([os.path.join(dirpath, fn) for fn in filenames])
self.assertEqual(set(files), set(expected))
# check callbacks
cbtypes = [c[0] for c in self.callbacks]
cb_expect = [
'preBuildStateChange', # building -> completed
'postBuildStateChange',
'preImport', # build
'preImport', # archive 1...
'postImport',
'preImport', # archive 2...
'postImport',
'preImport', # archive 3...
'postImport',
'preImport', # archive 4...
'postImport',
'preImport', # archive 5...
'postImport',
'postImport', # build
]
self.assertEqual(cbtypes, cb_expect)
cb_idx = {}
for c in self.callbacks:
# no callbacks should use *args
self.assertEqual(c[1], ())
cbtype = c[0]
if 'type' in c[2]:
key = "%s:%s" % (cbtype, c[2]['type'])
else:
key = cbtype
cb_idx.setdefault(key, [])
cb_idx[key].append(c[2])
key_expect = [
'postBuildStateChange', 'preBuildStateChange',
'preImport:archive', 'postImport:archive',
'preImport:image', 'postImport:image',
]
self.assertEqual(set(cb_idx.keys()), set(key_expect))
for key in ['preImport:image']:
callbacks = cb_idx[key]
self.assertEqual(len(callbacks), 1)
for cbargs in callbacks:
keys = set(cbargs.keys())
k_expect = set(['type', 'image'])
self.assertEqual(keys, k_expect)
self.assertEqual(cbargs['type'], 'image')
for key in ['postImport:image']:
callbacks = cb_idx[key]
self.assertEqual(len(callbacks), 1)
for cbargs in callbacks:
keys = set(cbargs.keys())
k_expect = set(['type', 'image', 'build', 'fullpath'])
self.assertEqual(keys, k_expect)
self.assertEqual(cbargs['type'], 'image')
self.assertEqual(cbargs['build'], buildinfo)
for key in ['preImport:archive', 'postImport:archive']:
callbacks = cb_idx[key]
self.assertEqual(len(callbacks), 5)
for cbargs in callbacks:
keys = set(cbargs.keys())
k_expect = set(['filepath', 'build_type', 'build', 'fileinfo', 'type', 'archive'])
self.assertEqual(keys, k_expect)
self.assertEqual(cbargs['type'], 'archive')
self.assertEqual(cbargs['build'], buildinfo)
# db operations
# with our other mocks, we should never reach _dml
self._dml.assert_not_called()
inserts = []
for insert in self.inserts:
info = [str(insert), insert.data, insert.rawdata]
inserts.append(info)
updates = []
for update in self.updates:
info = [str(update), update.data, update.rawdata]
updates.append(info)
data = {'inserts': inserts, 'updates': updates}
self.assertEqual(data, self.db_expect)

View file

@ -0,0 +1,168 @@
import copy
import json
import mock
import os
import os.path
import shutil
import tempfile
import unittest
import koji
import koji.util
import kojihub
orig_import_archive_internal = kojihub.import_archive_internal
class TestCompleteMavenBuild(unittest.TestCase):
def setUp(self):
    # Redirect all koji path lookups into a per-test scratch directory.
    self.tempdir = tempfile.mkdtemp()
    self.pathinfo = koji.PathInfo(self.tempdir)
    mock.patch('koji.pathinfo', new=self.pathinfo).start()
    self.hostcalls = kojihub.HostExports()
    # fake hub call context; maven support must be enabled for these calls
    self.context = mock.patch('kojihub.context').start()
    self.context.opts = {'EnableMaven': True}
    mock.patch('kojihub.Host').start()
    self.Task = mock.patch('kojihub.Task').start()
    self.Task.return_value.assertHost = mock.MagicMock()
    # hub lookups that the test will drive via return_value
    self.get_build = mock.patch('kojihub.get_build').start()
    self.get_maven_build = mock.patch('kojihub.get_maven_build').start()
    self.get_archive_type = mock.patch('kojihub.get_archive_type').start()
    mock.patch('kojihub.lookup_name', new=self.my_lookup_name).start()
    mock.patch.object(kojihub.BuildRoot, 'load', new=self.my_buildroot_load).start()
    mock.patch('kojihub.import_archive_internal',
               new=self.my_import_archive_internal).start()
    # block direct db access; with the mocks above these should never run
    mock.patch('kojihub._dml').start()
    mock.patch('kojihub._fetchSingle').start()
    mock.patch('kojihub.build_notification').start()
    mock.patch('kojihub.assert_policy').start()
    mock.patch('kojihub.check_volume_policy',
               return_value={'id':0, 'name': 'DEFAULT'}).start()
    self.set_up_callbacks()
def tearDown(self):
    # undo every mock.patch(...).start() from setUp, then drop scratch files
    mock.patch.stopall()
    shutil.rmtree(self.tempdir)
def set_up_files(self, name):
    """Copy the named maven fixture's outputs into a fake task directory.

    Loads <datadir>/data.json, copies the listed files and logs under the
    fake task dir, and reads <datadir>/files as the expected final list of
    relative paths (one per line) into self.expected_files.
    """
    datadir = os.path.join(os.path.dirname(__file__), 'data/maven', name)
    # load maven result data for our test build
    # use open() in a context manager rather than the deprecated py2
    # file() constructor, so the handles are closed promptly
    with open(datadir + '/data.json') as fp:
        data = json.load(fp)
    data['task_id'] = 9999
    taskdir = koji.pathinfo.task(data['task_id'])
    for subdir in data['files']:
        path = os.path.join(taskdir, subdir)
        os.makedirs(path)
        for fn in data['files'][subdir]:
            src = os.path.join(datadir, subdir, fn)
            dst = os.path.join(path, fn)
            shutil.copy(src, dst)
    for fn in data['logs']:
        src = os.path.join(datadir, fn)
        dst = os.path.join(taskdir, fn)
        shutil.copy(src, dst)
    self.maven_data = data
    # expected file list, one relative path per line
    with open(datadir + '/files') as fp:
        files = [l.strip() for l in fp.readlines()]
    self.expected_files = files
def my_lookup_name(self, table, info, **kw):
if table == 'btype':
return mock.MagicMock()
else:
raise Exception("Cannot fake call")
@staticmethod
def my_buildroot_load(br, id):
    """Stand-in for BuildRoot.load: mark br as a standard buildroot.

    br is the BuildRoot instance; id is the buildroot id to assume.
    """
    br.id = id
    br.is_standard = True
    br.data = {'id': id, 'br_type': koji.BR_TYPES['STANDARD']}
def my_import_archive_internal(self, *a, **kw):
    """Run the real import_archive_internal with a faked archiveinfo row.

    InsertProcessor is patched so the archiveinfo insert is captured and
    served back by a fake get_archive, which the real code calls right
    after inserting.
    """
    # this is kind of odd, but we need this to fake the archiveinfo
    share = {}

    def my_ip(table, *a, **kw):
        # capture the archiveinfo row so my_ga below can serve it
        if table == 'archiveinfo':
            share['archiveinfo'] = kw['data']
            # TODO: need to add id
        return mock.MagicMock()

    def my_ga(archive_id, **kw):
        return share['archiveinfo']

    with mock.patch('kojihub.InsertProcessor', new=my_ip), \
            mock.patch('kojihub.get_archive', new=my_ga):
        # propagate the archiveinfo result, matching the real
        # kojihub.import_archive_internal contract (and the image-test
        # wrapper); the original wrapper dropped it and returned None
        return orig_import_archive_internal(*a, **kw)
def set_up_callbacks(self):
    """Register self.callback for every plugin callback type.

    A deep copy of the global registry is patched in so that the
    registrations cannot leak between tests.
    """
    registry_copy = copy.deepcopy(koji.plugin.callbacks)
    mock.patch('koji.plugin.callbacks', new=registry_copy).start()
    self.callbacks = []
    for cbtype in list(koji.plugin.callbacks):
        koji.plugin.register_callback(cbtype, self.callback)
def callback(self, cbtype, *args, **kwargs):
self.callbacks.append([cbtype, args, kwargs])
def test_complete_maven_build(self):
self.set_up_files('import_1')
buildinfo = koji.maven_info_to_nvr(self.maven_data['maven_info'])
buildinfo['id'] = 137
buildinfo['task_id'] = 'TASK_ID'
buildinfo['release'] = '1'
buildinfo['source'] = None
buildinfo['state'] = koji.BUILD_STATES['BUILDING']
maven_info = self.maven_data['maven_info'].copy()
maven_info['build_id'] = buildinfo['id']
self.get_build.return_value = buildinfo
self.get_maven_build.return_value = maven_info
self.hostcalls.completeMavenBuild('TASK_ID', 'BUILD_ID', self.maven_data, None)
# make sure we wrote the files we expect
files = []
for dirpath, dirnames, filenames in os.walk(self.tempdir + '/packages'):
relpath = koji.util.relpath(dirpath, self.tempdir)
files.extend([os.path.join(relpath, fn) for fn in filenames])
self.assertEqual(set(files), set(self.expected_files))
# check callbacks
cbtypes = [c[0] for c in self.callbacks]
cb_expect = [
'preBuildStateChange', # building -> completed
'postBuildStateChange',
'preImport', # archive 1...
'postImport',
'preImport', # archive 2...
'postImport',
'preImport', # archive 3...
'postImport',
]
self.assertEqual(cbtypes, cb_expect)
cb_idx = {}
cb_idx = {}
for c in self.callbacks:
# no callbacks should use *args
self.assertEqual(c[1], ())
cbtype = c[0]
if 'type' in c[2]:
key = "%s:%s" % (cbtype, c[2]['type'])
else:
key = cbtype
cb_idx.setdefault(key, [])
cb_idx[key].append(c[2])
key_expect = ['postBuildStateChange', 'preBuildStateChange', 'preImport:archive', 'postImport:archive']
self.assertEqual(set(cb_idx.keys()), set(key_expect))
# in this case, pre and post data is similar
for key in ['preImport:archive', 'postImport:archive']:
callbacks = cb_idx[key]
self.assertEqual(len(callbacks), 3)
for cbargs in callbacks:
keys = set(cbargs.keys())
k_expect = set(['filepath', 'build_type', 'build', 'fileinfo', 'type', 'archive'])
self.assertEqual(keys, k_expect)
self.assertEqual(cbargs['type'], 'archive')
self.assertEqual(cbargs['build'], buildinfo)