PR#232 Allow uploading files to non-default volumes

Merges #232
https://pagure.io/koji/pull-request/232

Related #200
https://pagure.io/koji/pull-request/200
Mike McLean 2017-03-28 08:25:08 -04:00
commit f2390dad1c
12 changed files with 375 additions and 190 deletions
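For orientation, a minimal client-side sketch of what this change enables: uploading a file to a named, non-default volume. This is illustrative only; the hub URL, certificate paths, upload path and volume name are placeholders, and the 'archive' volume must already exist on the hub.

import koji

session = koji.ClientSession('https://koji.example.com/kojihub')   # placeholder hub
session.ssl_login('/etc/pki/koji/client.pem', None,
                  '/etc/pki/koji/serverca.crt')                    # any supported login method works

# uploadWrapper/fastUpload now accept a volume keyword (see koji/__init__.py below);
# omitting it keeps the old behaviour and uses the default volume
session.uploadWrapper('/tmp/results.tar.gz', 'cli-upload/example-dir', volume='archive')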

View file

@ -1587,19 +1587,21 @@ class WrapperRPMTask(BaseBuildTask):
if task:
# called as a subtask of a build
artifact_paths = self.session.listTaskOutput(task['id'])
artifact_data = self.session.listTaskOutput(task['id'], all_volumes=True)
for artifact_path in artifact_paths:
for artifact_path in artifact_data:
artifact_name = os.path.basename(artifact_path)
base, ext = os.path.splitext(artifact_name)
if ext == '.log':
# Exclude log files for consistency with the output of listArchives() used below
continue
relpath = os.path.join(self.pathinfo.task(task['id']), artifact_path)[1:]
artifact_relpaths.append(relpath)
artifacts.setdefault(ext, []).append(artifact_name)
all_artifacts.append(artifact_name)
all_artifacts_with_path.append(artifact_path)
for volume in artifact_data[artifact_path]:
volume_path = os.path.join(self.pathinfo.volumedir(volume), relpath)
artifact_relpaths.append(volume_path)
artifacts.setdefault(ext, []).append(artifact_name)
all_artifacts.append(artifact_name)
all_artifacts_with_path.append(volume_path)
else:
# called as a top-level task to create wrapper rpms for an existing build
# verify that the build is complete
@ -4081,7 +4083,6 @@ class BuildIndirectionImageTask(OzImageTask):
raise koji.BuildError("Input task method must be 'createImage' - actual method (%s)" %
(taskmethod))
result = self.session.getTaskResult(task_id)
files = self.session.listTaskOutput(task_id)
# This approach works for both scratch and saved/formal images
# The downside is that we depend on the output file naming convention
@ -4600,15 +4601,25 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
if not result:
result = 'Unknown'
files = self.session.listTaskOutput(task_id)
logs = [filename for filename in files if filename.endswith('.log')]
rpms = [filename for filename in files if filename.endswith('.rpm') and not filename.endswith('.src.rpm')]
srpms = [filename for filename in files if filename.endswith('.src.rpm')]
misc = [filename for filename in files if filename not in logs + rpms + srpms]
logs, rpms, srpms, misc = [], [], [], []
files_data = self.session.listTaskOutput(task_id)
for filename in files_data:
if filename.endswith('.log'):
logs += [(filename, volume) for volume in files_data[filename]]
# all rpms + srpms are expected to be in builddir
elif filename.endswith('.src.rpm'):
srpms.append(filename)
elif filename.endswith('.rpm'):
rpms.append(filename)
else:
misc += [(filename, volume) for volume in files_data[filename]]
logs.sort()
# sort by volumes and filenames
logs.sort(key=lambda x: x[1])
misc.sort(key=lambda x: x[1])
logs.sort(key=lambda x: x[0])
misc.sort(key=lambda x: x[0])
rpms.sort()
misc.sort()
data[task_id] = {}
data[task_id]['id'] = taskinfo['id']
@ -4704,16 +4715,16 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r
for filetype in ['logs', 'rpms', 'misc']:
if task[filetype]:
output += "%s:\r\n" % filetype
for file in task[filetype]:
for (file, volume) in task[filetype]:
if filetype == 'rpms':
output += " %s\r\n" % '/'.join([buildurl, task['build_arch'], file])
elif filetype == 'logs':
if tasks[task_state] != 'closed':
output += " %s/getfile?taskID=%s&name=%s\r\n" % (weburl, task['id'], file)
output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file, volume)
else:
output += " %s\r\n" % '/'.join([buildurl, 'data', 'logs', task['build_arch'], file])
elif task[filetype] == 'misc':
output += " %s/getfile?taskID=%s&name=%s\r\n" % (weburl, task['id'], file)
output += " %s/getfile?taskID=%s&name=%s&volume=%s\r\n" % (weburl, task['id'], file, volume)
output += "\r\n"
output += "\r\n"
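A loose, standalone sketch of the per-volume path construction the hunks above rely on; it is not the exact builder logic. The topdir, task id, filenames and volume names are illustrative, and the output dict mirrors the new listTaskOutput(all_volumes=True) shape.

import os
import koji

pathinfo = koji.PathInfo(topdir='/mnt/koji')    # illustrative topdir
task_id = 1234                                  # illustrative task id

# shape returned by listTaskOutput(task_id, all_volumes=True)
output = {'foo-1.0-1.zip': ['DEFAULT'], 'build.log': ['DEFAULT', 'archive']}

for filename, volumes in output.items():
    for volume in volumes:
        # work(volume=...) plus taskrelpath() gives the per-volume task directory
        full_path = os.path.join(pathinfo.work(volume=volume),
                                 pathinfo.taskrelpath(task_id),
                                 filename)
        print(full_path)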

cli/koji
View file

@ -508,24 +508,28 @@ def watch_logs(session, tasklist, opts):
if _isDone(session, task_id):
tasklist.remove(task_id)
output = session.listTaskOutput(task_id)
output = session.listTaskOutput(task_id, all_volumes=True)
# convert to list of (file, volume)
files = []
for filename, volumes in output.iteritems():
files += [(filename, volume) for volume in volumes]
if opts.log:
logs = [filename for filename in output if filename == opts.log]
logs = [file_volume for file_volume in files if file_volume[0] == opts.log]
else:
logs = [filename for filename in output if filename.endswith('.log')]
logs = [file_volume for file_volume in files if file_volume[0].endswith('log')]
taskoffsets = offsets[task_id]
for log in logs:
for log, volume in logs:
contents = 'placeholder'
while contents:
if log not in taskoffsets:
taskoffsets[log] = 0
if (log, volume) not in taskoffsets:
taskoffsets[(log, volume)] = 0
contents = session.downloadTaskOutput(task_id, log, taskoffsets[log], 16384)
taskoffsets[log] += len(contents)
contents = session.downloadTaskOutput(task_id, log, taskoffsets[(log, volume)], 16384, volume=volume)
taskoffsets[(log, volume)] += len(contents)
if contents:
currlog = "%d:%s:" % (task_id, log)
currlog = "%d:%s:%s:" % (task_id, volume, log)
if currlog != lastlog:
if lastlog:
sys.stdout.write("\n")
@ -4670,10 +4674,18 @@ def _printTaskInfo(session, task_id, level=0, recurse=True, verbose=True):
buildroot_infos = session.listBuildroots(taskID=task_id)
build_info = session.listBuilds(taskID=task_id)
files = session.listTaskOutput(task_id)
logs = [filename for filename in files if filename.endswith('.log')]
output = [filename for filename in files if not filename.endswith('.log')]
files_dir = '%s/%s' % (koji.pathinfo.work(), koji.pathinfo.taskrelpath(task_id))
files = session.listTaskOutput(task_id, all_volumes=True)
logs = []
output = []
for filename in files:
if filename.endswith('.log'):
logs += [os.path.join(koji.pathinfo.work(volume=volume),
koji.pathinfo.taskrelpath(task_id),
filename) for volume in files[filename]]
else:
output += [os.path.join(koji.pathinfo.work(volume=volume),
koji.pathinfo.taskrelpath(task_id),
filename) for volume in files[filename]]
owner = session.getUser(info['owner'])['name']
@ -4700,12 +4712,12 @@ def _printTaskInfo(session, task_id, level=0, recurse=True, verbose=True):
print("%s %s/%s-%d-%d/" % (indent, BUILDDIR, root['tag_name'], root['id'], root['repo_id']))
if logs:
print("%sLog Files:" % indent)
for log in logs:
print("%s %s/%s" % (indent, files_dir, log))
for log_path in logs:
print("%s %s" % (indent, log_path))
if output:
print("%sOutput:" % indent)
for filename in output:
print("%s %s/%s" % (indent, files_dir, filename))
for file_path in output:
print("%s %s" % (indent, file_path))
# white space
print('')
@ -6761,9 +6773,13 @@ def anon_handle_download_logs(options, session, args):
sys.stdout.write("Writing: %s\n" % full_filename)
file(full_filename, 'w').write(content)
def download_log(task_log_dir, task_id, filename, blocksize=102400):
#Create directories only if there is any log file to write to
full_filename = os.path.normpath(os.path.join(task_log_dir, filename))
def download_log(task_log_dir, task_id, filename, blocksize=102400, volume=None):
# Create directories only if there is any log file to write to
# For each non-default volume create special sub-directory
if volume not in (None, 'DEFAULT'):
full_filename = os.path.normpath(os.path.join(task_log_dir, volume, filename))
else:
full_filename = os.path.normpath(os.path.join(task_log_dir, filename))
koji.ensuredir(os.path.dirname(full_filename))
contents = 'IGNORE ME!'
if suboptions.cont and os.path.exists(full_filename):
@ -6776,7 +6792,7 @@ def anon_handle_download_logs(options, session, args):
offset = 0
try:
while contents:
contents = session.downloadTaskOutput(task_id, filename, offset, blocksize)
contents = session.downloadTaskOutput(task_id, filename, offset=offset, size=blocksize, volume=volume)
offset += len(contents)
if contents:
fd.write(contents)
@ -6788,14 +6804,14 @@ def anon_handle_download_logs(options, session, args):
task_info = session.getTaskInfo(task_id)
if task_info is None:
error(_("No such task id: %i" % task_id))
files = session.listTaskOutput(task_id)
logs = []
files = session.listTaskOutput(task_id, all_volumes=True)
logs = [] # list of tuples (filename, volume)
for filename in files:
if not filename.endswith(".log"):
continue
if match and not koji.util.multi_fnmatch(filename, match):
continue
logs.append(filename)
logs += [(filename, volume) for volume in files[filename]]
task_log_dir = os.path.join(parent_dir,
"%s-%s" % (task_info["arch"], task_id))
@ -6809,8 +6825,8 @@ def anon_handle_download_logs(options, session, args):
elif state not in ['CLOSED', 'CANCELED']:
sys.stderr.write(_("Warning: task %s is %s\n") % (task_id, state))
for log_filename in logs:
download_log(task_log_dir, task_id, log_filename)
for log_filename, log_volume in logs:
download_log(task_log_dir, task_id, log_filename, volume=log_volume)
count += 1
if count == 0 and not recurse:
@ -6876,25 +6892,27 @@ def anon_handle_download_task(options, session, args):
downloads = []
for task in downloadable_tasks:
files = session.listTaskOutput(task["id"])
files = session.listTaskOutput(task["id"], all_volumes=True)
for filename in files:
if filename.endswith(".log") and suboptions.logs:
# rename logs, they would conflict
new_filename = "%s.%s.log" % (filename.rstrip(".log"), task["arch"])
downloads.append((task, filename, new_filename))
continue
for volume in files[filename]:
# rename logs, they would conflict
new_filename = "%s.%s.log" % (filename.rstrip(".log"), task["arch"])
downloads.append((task, filename, volume, new_filename))
continue
if filename.endswith(".rpm"):
filearch = filename.split(".")[-2]
if len(suboptions.arches) == 0 or filearch in suboptions.arches:
downloads.append((task, filename, filename))
continue
for volume in files[filename]:
filearch = filename.split(".")[-2]
if len(suboptions.arches) == 0 or filearch in suboptions.arches:
downloads.append((task, filename, volume, filename))
continue
if len(downloads) == 0:
error(_("No files for download found."))
required_tasks = {}
for (task, nop, nop) in downloads:
for (task, nop, nop, nop) in downloads:
if task["id"] not in required_tasks:
required_tasks[task["id"]] = task
@ -6908,11 +6926,14 @@ def anon_handle_download_task(options, session, args):
# perform the download
number = 0
for (task, filename, new_filename) in downloads:
for (task, filename, volume, new_filename) in downloads:
number += 1
if volume not in (None, 'DEFAULT'):
koji.ensuredir(volume)
new_filename = os.path.join(volume, new_filename)
print(_("Downloading [%d/%d]: %s") % (number, len(downloads), new_filename))
output_file = open(new_filename, "wb")
output_file.write(session.downloadTaskOutput(task["id"], filename))
output_file.write(session.downloadTaskOutput(task["id"], filename, volume=volume))
output_file.close()
def anon_handle_wait_repo(options, session, args):
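A condensed sketch of the local naming convention the download hunks above adopt: files from the default volume keep their plain name, anything from another volume goes into a subdirectory named after that volume. Filenames and volume names are illustrative.

import os

def local_name(filename, volume):
    # mirrors the convention above; the caller still creates the directory
    # (koji.ensuredir(volume)) before writing the file
    if volume in (None, 'DEFAULT'):
        return filename
    return os.path.join(volume, filename)

print(local_name('build.log', 'DEFAULT'))    # build.log
print(local_name('build.log', 'archive'))    # archive/build.log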
@ -7168,11 +7189,11 @@ def handle_runroot(options, session, args):
print("User interrupt: canceling runroot task")
session.cancelTask(task_id)
raise
output = None
if "runroot.log" in session.listTaskOutput(task_id):
output = session.downloadTaskOutput(task_id, "runroot.log")
if output:
sys.stdout.write(output)
output = session.listTaskOutput(task_id, all_volumes=True)
if 'runroot.log' in output:
for volume in output['runroot.log']:
log = session.downloadTaskOutput(task_id, 'runroot.log', volume=volume)
sys.stdout.write(log)
info = session.getTaskInfo(task_id)
if info is None:
sys.exit(1)

View file

@ -4260,7 +4260,7 @@ def get_archive_file(archive_id, filename):
#otherwise
return None
def list_task_output(taskID, stat=False):
def list_task_output(taskID, stat=False, all_volumes=False):
"""List the files generated by the task with the given ID. This
will usually include one or more RPMs, and one or more log files.
If the task did not generate any files, or the output directory
@ -4268,29 +4268,62 @@ def list_task_output(taskID, stat=False):
If stat is True, return a map of filename -> stat_info where stat_info
is a map containing the values of the st_* attributes returned by
os.stat()."""
taskDir = '%s/%s' % (koji.pathinfo.work(), koji.pathinfo.taskrelpath(taskID))
if stat:
os.stat().
If all_volumes is set, results are extended to deal with files in same
relative paths on different volumes.
With all_volumes=True, stat=False, return a map of filename -> list_of_volumes,
{'stdout.log': ['DEFAULT']}
With all_volumes=True, stat=True, return a map of
filename -> map_of_volumes -> stat_info,
{'stdout.log':
    {'DEFAULT': {
        'st_atime': 1488902587.2141163,
        'st_ctime': 1488902588.2281106,
        'st_mtime': 1488902588.2281106,
        'st_size': '526'
        }
    }
}
"""
if stat or all_volumes:
result = {}
else:
result = []
if not os.path.isdir(taskDir):
return result
for path, dirs, files in os.walk(taskDir):
relpath = path[len(taskDir) + 1:]
for filename in files:
relfilename = os.path.join(relpath, filename)
if stat:
stat_info = os.stat(os.path.join(path, filename))
stat_map = {}
for attr in dir(stat_info):
if attr == 'st_size':
stat_map[attr] = str(getattr(stat_info, attr))
elif attr in ('st_atime', 'st_mtime', 'st_ctime'):
stat_map[attr] = getattr(stat_info, attr)
result[relfilename] = stat_map
else:
result.append(relfilename)
if all_volumes:
volumes = [x['name'] for x in list_volumes()]
else:
volumes = ['DEFAULT']
for volume in volumes:
taskDir = '%s/%s' % (koji.pathinfo.work(volume=volume), koji.pathinfo.taskrelpath(taskID))
if not os.path.isdir(taskDir):
continue
for path, dirs, files in os.walk(taskDir):
for filename in files:
relpath = path[len(taskDir) + 1:]
relfilename = os.path.join(relpath, filename)
if stat:
stat_info = os.stat(os.path.join(path, filename))
stat_map = {}
for attr in dir(stat_info):
if attr == 'st_size':
stat_map[attr] = str(getattr(stat_info, attr))
elif attr in ('st_atime', 'st_mtime', 'st_ctime'):
stat_map[attr] = getattr(stat_info, attr)
if all_volumes:
result.setdefault(relfilename, {})[volume] = stat_map
else:
result[relfilename] = stat_map
else:
if all_volumes:
result.setdefault(relfilename, []).append(volume)
else:
result.append(relfilename)
return result
def _fetchMulti(query, values):
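For quick reference, the four return shapes of listTaskOutput after this change, as documented in the docstring above. The hub URL and task id are placeholders and the returned values are illustrative.

import koji

session = koji.ClientSession('https://koji.example.com/kojihub')    # placeholder hub
task_id = 1                                                          # placeholder task

session.listTaskOutput(task_id)                               # ['stdout.log']
session.listTaskOutput(task_id, stat=True)                    # {'stdout.log': {'st_size': '526', ...}}
session.listTaskOutput(task_id, all_volumes=True)             # {'stdout.log': ['DEFAULT']}
session.listTaskOutput(task_id, stat=True, all_volumes=True)  # {'stdout.log': {'DEFAULT': {'st_size': '526', ...}}}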
@ -8683,7 +8716,7 @@ class RootExports(object):
context.session.assertPerm('admin')
return make_task(*args, **opts)
def uploadFile(self, path, name, size, md5sum, offset, data):
def uploadFile(self, path, name, size, md5sum, offset, data, volume=None):
#path: the relative path to upload to
#name: the name of the file
#size: size of contents (bytes)
@ -8715,7 +8748,7 @@ class RootExports(object):
if verify is not None:
if digest != sum_cls(contents).hexdigest():
return False
fn = get_upload_path(path, name, create=True)
fn = get_upload_path(path, name, create=True, volume=volume)
try:
st = os.lstat(fn)
except OSError, e:
@ -8781,9 +8814,9 @@ class RootExports(object):
os.close(fd)
return True
def checkUpload(self, path, name, verify=None, tail=None):
def checkUpload(self, path, name, verify=None, tail=None, volume=None):
"""Return basic information about an uploaded file"""
fn = get_upload_path(path, name)
fn = get_upload_path(path, name, volume=volume)
data = {}
try:
fd = os.open(fn, os.O_RDONLY)
@ -8824,12 +8857,12 @@ class RootExports(object):
os.close(fd)
def downloadTaskOutput(self, taskID, fileName, offset=0, size=-1):
def downloadTaskOutput(self, taskID, fileName, offset=0, size=-1, volume=None):
"""Download the file with the given name, generated by the task with the
given ID."""
if '..' in fileName:
raise koji.GenericError('Invalid file name: %s' % fileName)
filePath = '%s/%s/%s' % (koji.pathinfo.work(), koji.pathinfo.taskrelpath(taskID), fileName)
filePath = '%s/%s/%s' % (koji.pathinfo.work(volume), koji.pathinfo.taskrelpath(taskID), fileName)
filePath = os.path.normpath(filePath)
if not os.path.isfile(filePath):
raise koji.GenericError('no file "%s" output by task %i' % (fileName, taskID))
@ -12240,7 +12273,7 @@ class HostExports(object):
return host.isEnabled()
def get_upload_path(reldir, name, create=False):
def get_upload_path(reldir, name, create=False, volume=None):
orig_reldir = reldir
orig_name = name
# lots of sanity checks
@ -12250,6 +12283,9 @@ def get_upload_path(reldir, name, create=False):
reldir = os.path.normpath(reldir)
if not reldir or reldir.startswith('..'):
raise koji.GenericError("Invalid upload directory: %s" % orig_reldir)
if volume is not None:
# make sure the volume is valid
lookup_name('volume', volume, strict=True)
parts = reldir.split('/')
check_user = True
if create and parts[0] == "tasks":
@ -12264,7 +12300,7 @@ def get_upload_path(reldir, name, create=False):
host.verify()
Task(task_id).assertHost(host.id)
check_user = False
udir = os.path.join(koji.pathinfo.work(), reldir)
udir = os.path.join(koji.pathinfo.work(volume=volume), reldir)
if create:
koji.ensuredir(udir)
if check_user:
@ -12305,7 +12341,8 @@ def handle_upload(environ):
overwrite = args.get('overwrite', ('',))[0]
offset = args.get('offset', ('0',))[0]
offset = int(offset)
fn = get_upload_path(path, name, create=True)
volume = args.get('volume', ('DEFAULT',))[0]
fn = get_upload_path(path, name, create=True, volume=volume)
if os.path.exists(fn):
if not os.path.isfile(fn):
raise koji.GenericError("destination not a file: %s" % fn)

View file

@ -1823,24 +1823,24 @@ class PathInfo(object):
"""Return the relative path for the task work directory"""
return "tasks/%s/%s" % (task_id % 10000, task_id)
def work(self):
def work(self, volume=None):
"""Return the work dir"""
return self.topdir + '/work'
return self.volumedir(volume) + '/work'
def tmpdir(self):
def tmpdir(self, volume=None):
"""Return a path to a unique directory under work()/tmp/"""
tmp = None
while tmp is None or os.path.exists(tmp):
tmp = self.work() + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS) for dummy in '123456'])
tmp = self.work(volume) + '/tmp/' + ''.join([random.choice(self.ASCII_CHARS) for dummy in '123456'])
return tmp
def scratch(self):
"""Return the main scratch dir"""
return self.topdir + '/scratch'
def task(self, task_id):
def task(self, task_id, volume=None):
"""Return the output directory for the task with the given id"""
return self.work() + '/' + self.taskrelpath(task_id)
return self.work(volume=volume) + '/' + self.taskrelpath(task_id)
pathinfo = PathInfo()
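A quick illustration of the paths these helpers now produce. The topdir and volume name are illustrative; in koji's layout, non-default volumes resolve under topdir/vol/<name>, while 'DEFAULT' (or None) maps to topdir itself.

import koji

pathinfo = koji.PathInfo(topdir='/mnt/koji')     # illustrative topdir

print(pathinfo.work())                           # /mnt/koji/work
print(pathinfo.work(volume='DEFAULT'))           # /mnt/koji/work
print(pathinfo.work(volume='archive'))           # /mnt/koji/vol/archive/work
print(pathinfo.task(1234, volume='archive'))     # /mnt/koji/vol/archive/work/tasks/1234/1234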
@ -2451,7 +2451,7 @@ class ClientSession(object):
# raise AttributeError("no attribute %r" % name)
return VirtualMethod(self._callMethod, name)
def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=False):
def fastUpload(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=False, volume=None):
if blocksize is None:
blocksize = self.opts.get('upload_blocksize', 1048576)
@ -2476,7 +2476,7 @@ class ClientSession(object):
if not chunk and not first_cycle:
break
first_cycle = False
result = self._callMethod('rawUpload', (chunk, ofs, path, name), {'overwrite':overwrite})
result = self._callMethod('rawUpload', (chunk, ofs, path, name), {'overwrite':overwrite, 'volume': volume})
if self.retries > 1:
problems = True
hexdigest = util.adler32_constructor(chunk).hexdigest()
@ -2495,7 +2495,7 @@ class ClientSession(object):
callback(ofs, size, len(chunk), t1, t2)
if ofs != size:
self.logger.error("Local file changed size: %s, %s -> %s", localfile, size, ofs)
chk_opts = {}
chk_opts = {'volume': volume}
if problems:
chk_opts['verify'] = 'adler32'
result = self._callMethod('checkUpload', (path, name), chk_opts)
@ -2509,7 +2509,7 @@ class ClientSession(object):
% (path, name, result['hexdigest'], full_chksum.hexdigest()))
self.logger.debug("Fast upload: %s complete. %i bytes in %.1f seconds", localfile, size, t2)
def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False):
def _prepUpload(self, chunk, offset, path, name, verify="adler32", overwrite=False, volume=None):
"""prep a rawUpload call"""
if not self.logged_in:
raise ActionNotAllowed("you must be logged in to upload")
@ -2521,6 +2521,8 @@ class ClientSession(object):
args['offset'] = str(offset)
if overwrite:
args['overwrite'] = "1"
if volume is not None:
args['volume'] = volume
size = len(chunk)
self.callnum += 1
handler = "%s?%s" % (self.baseurl, urllib.urlencode(args))
@ -2532,26 +2534,26 @@ class ClientSession(object):
request = chunk
return handler, headers, request
def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=True):
def uploadWrapper(self, localfile, path, name=None, callback=None, blocksize=None, overwrite=True, volume=None):
"""upload a file in chunks using the uploadFile call"""
if blocksize is None:
blocksize = self.opts.get('upload_blocksize', 1048576)
if self.opts.get('use_fast_upload'):
self.fastUpload(localfile, path, name, callback, blocksize, overwrite)
self.fastUpload(localfile, path, name, callback, blocksize, overwrite, volume=volume)
return
if name is None:
name = os.path.basename(localfile)
# check if server supports fast upload
try:
self._callMethod('checkUpload', (path, name))
self._callMethod('checkUpload', (path, name), {'volume': volume})
# fast upload was introduced in 1.7.1, earlier servers will not
# recognise this call and return an error
except GenericError:
pass
else:
self.fastUpload(localfile, path, name, callback, blocksize, overwrite)
self.fastUpload(localfile, path, name, callback, blocksize, overwrite, volume=volume)
return
start = time.time()
@ -2584,7 +2586,7 @@ class ClientSession(object):
while True:
if debug:
self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)" %(path, name, sz, digest, offset))
if self.callMethod('uploadFile', path, name, encode_int(sz), digest, encode_int(offset), data):
if self.callMethod('uploadFile', path, name, encode_int(sz), digest, encode_int(offset), data, volume=volume):
break
if tries <= retries:
tries += 1
@ -2609,7 +2611,7 @@ class ClientSession(object):
callback(ofs, totalsize, size, t1, t2)
fo.close()
def downloadTaskOutput(self, taskID, fileName, offset=0, size=-1):
def downloadTaskOutput(self, taskID, fileName, offset=0, size=-1, volume=None):
"""Download the file with the given name, generated by the task with the
given ID.
@ -2617,7 +2619,7 @@ class ClientSession(object):
"""
if self.multicall:
raise GenericError('downloadTaskOutput() may not be called during a multicall')
result = self.callMethod('downloadTaskOutput', taskID, fileName, offset, size)
result = self.callMethod('downloadTaskOutput', taskID, fileName, offset=offset, size=size, volume=volume)
return base64.decodestring(result)
class DBHandler(logging.Handler):
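On the client side, the same keyword flows through listing and downloading. A minimal sketch that saves every log from every volume of one task; the hub URL, task id and local naming are placeholders.

import koji

session = koji.ClientSession('https://koji.example.com/kojihub')    # placeholder hub
task_id = 1234                                                       # placeholder task

for filename, volumes in session.listTaskOutput(task_id, all_volumes=True).items():
    if not filename.endswith('.log'):
        continue
    for volume in volumes:
        contents = session.downloadTaskOutput(task_id, filename, volume=volume)
        with open('%s.%s' % (volume, filename), 'wb') as fo:
            fo.write(contents)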

View file

@ -262,7 +262,7 @@ class BaseTaskHandler(object):
def getUploadDir(self):
return koji.pathinfo.taskrelpath(self.id)
def uploadFile(self, filename, relPath=None, remoteName=None):
def uploadFile(self, filename, relPath=None, remoteName=None, volume=None):
"""Upload the file with the given name to the task output directory
on the hub."""
uploadPath = self.getUploadDir()
@ -271,9 +271,9 @@ class BaseTaskHandler(object):
uploadPath += '/' + relPath
# Only upload files with content
if os.path.isfile(filename) and os.stat(filename).st_size > 0:
self.session.uploadWrapper(filename, uploadPath, remoteName)
self.session.uploadWrapper(filename, uploadPath, remoteName, volume=volume)
def uploadTree(self, dirpath, flatten=False):
def uploadTree(self, dirpath, flatten=False, volume=None):
"""Upload the directory tree at dirpath to the task directory on the
hub, preserving the directory structure"""
dirpath = dirpath.rstrip('/')
@ -283,7 +283,7 @@ class BaseTaskHandler(object):
else:
relpath = path[len(dirpath) + 1:]
for filename in files:
self.uploadFile(os.path.join(path, filename), relpath)
self.uploadFile(os.path.join(path, filename), relpath, volume=volume)
def chownTree(self, dirpath, uid, gid):
"""chown the given path and all files and directories under

View file

@ -42,7 +42,7 @@ class TestListCommands(unittest.TestCase):
# Mock out the xmlrpc server
self.session.getTaskInfo.return_value = {'state': 1}
self.session.downloadTaskOutput.return_value = 'task output'
self.session.listTaskOutput.return_value = ['runroot.log']
self.session.listTaskOutput.return_value = {'runroot.log': ['DEFAULT']}
self.session.runroot.return_value = 1
# Run it and check immediate output
@ -54,9 +54,9 @@ class TestListCommands(unittest.TestCase):
# Finally, assert that things were called as we expected.
self.session.getTaskInfo.assert_called_once_with(1)
self.session.listTaskOutput.assert_called_once_with(1)
self.session.listTaskOutput.assert_called_once_with(1, all_volumes=True)
self.session.downloadTaskOutput.assert_called_once_with(
1, 'runroot.log')
1, 'runroot.log', volume='DEFAULT')
self.session.runroot.assert_called_once_with(
tag, arch, command, repo_id=mock.ANY, weight=mock.ANY,
mounts=mock.ANY, packages=mock.ANY, skip_setarch=mock.ANY,

View file

@ -64,95 +64,115 @@ class TestClientSession(unittest.TestCase):
my_rsession.close.assert_called()
self.assertNotEqual(ksession.rsession, my_rsession)
class TestFastUpload(unittest.TestCase):
@mock.patch('koji.compatrequests.Session')
@mock.patch('requests.Session')
@mock.patch('__builtin__.file')
@mock.patch('os.path.getsize')
def test_fastUpload(self, getsize_mock, file_mock, rsession, compat_session):
ksession = koji.ClientSession('http://koji.example.com/kojihub', {})
def setUp(self):
self.ksession = koji.ClientSession('http://koji.example.com/kojihub', {})
self.do_fake_login()
# mocks
self.ksession._callMethod = mock.MagicMock()
self.compat_session = mock.patch('koji.compatrequests.Session').start()
self.rsession = mock.patch('requests.Session').start()
self.file_mock = mock.patch('__builtin__.file').start()
self.getsize_mock = mock.patch('os.path.getsize').start()
def tearDown(self):
del self.ksession
mock.patch.stopall()
def do_fake_login(self):
self.ksession.logged_in = True
self.ksession.sinfo = {}
self.ksession.callnum = 1
def test_fastUpload_nologin(self):
# without login (ActionNotAllowed)
ksession.logged_in = False
self.ksession.logged_in = False
with self.assertRaises(koji.ActionNotAllowed):
ksession.fastUpload('nonexistent_file', 'target')
self.ksession.fastUpload('nonexistent_file', 'target')
# fake login
ksession.logged_in = True
ksession.sinfo = {}
ksession.callnum = 1
ksession._callMethod = mock.MagicMock()
def test_fastUpload_nofile(self):
# fail with nonexistent file (IOError)
file_mock.side_effect = IOError('mocked exception')
self.file_mock.side_effect = IOError('mocked exception')
with self.assertRaises(IOError):
ksession.fastUpload('file', 'target')
# inaccessible file, permissions (IOError)
file_mock.side_effect = IOError('mocked exception')
with self.assertRaises(IOError):
ksession.fastUpload('file', 'target')
self.ksession.fastUpload('file', 'target')
def test_fastUpload_empty_file(self):
# upload empty file (success)
file_mock.side_effect = None
fileobj = mock.MagicMock()
fileobj.read.return_value = ''
file_mock.return_value = fileobj
ksession._callMethod.return_value = {
self.file_mock.return_value = fileobj
self.ksession._callMethod.return_value = {
'size': 0,
'hexdigest': koji.util.adler32_constructor().hexdigest()
}
ksession.fastUpload('file', 'target')
self.ksession.fastUpload('file', 'target')
def test_fastUpload_regular_file(self):
# upload regular file (success)
file_mock.side_effect = None
fileobj = mock.MagicMock()
fileobj.read.side_effect = ['123123', '']
file_mock.return_value = fileobj
ksession._callMethod.reset_mock()
ksession._callMethod.side_effect = [
self.file_mock.return_value = fileobj
self.ksession._callMethod.side_effect = [
{'size': 6, 'hexdigest': '041c012d'}, # rawUpload
{'size': 6, 'hexdigest': '041c012d'}, # checkUpload
]
ksession.fastUpload('file', 'target', blocksize=1024)
self.ksession.fastUpload('file', 'target', blocksize=1024)
def test_fastUpload_size_change(self):
# change file size during upload (success)
file_mock.side_effect = None
fileobj = mock.MagicMock()
fileobj.read.side_effect = ['123123', '']
file_mock.return_value = fileobj
getsize_mock.return_value = 123456
ksession._callMethod.reset_mock()
ksession._callMethod.side_effect = [
self.file_mock.return_value = fileobj
self.getsize_mock.return_value = 123456
self.ksession._callMethod.side_effect = [
{'size': 6, 'hexdigest': '041c012d'}, # rawUpload
{'size': 6, 'hexdigest': '041c012d'}, # checkUpload
]
ksession.fastUpload('file', 'target', blocksize=1024)
self.ksession.fastUpload('file', 'target', blocksize=1024)
def test_fastUpload_wrong_length(self):
# uploaded file is corrupted (length) (GenericError)
file_mock.side_effect = None
fileobj = mock.MagicMock()
fileobj.read.side_effect = ['123123', '']
file_mock.return_value = fileobj
getsize_mock.return_value = 123456
ksession._callMethod.reset_mock()
ksession._callMethod.side_effect = [
self.file_mock.return_value = fileobj
self.getsize_mock.return_value = 123456
self.ksession._callMethod.side_effect = [
{'size': 6, 'hexdigest': '041c012d'}, # rawUpload
{'size': 3, 'hexdigest': '041c012d'}, # checkUpload
]
with self.assertRaises(koji.GenericError):
ksession.fastUpload('file', 'target', blocksize=1024)
self.ksession.fastUpload('file', 'target', blocksize=1024)
def test_fastUpload_wrong_checksum(self):
# uploaded file is corrupted (checksum) (GenericError)
file_mock.side_effect = None
fileobj = mock.MagicMock()
fileobj.read.side_effect = ['123123', '']
file_mock.return_value = fileobj
getsize_mock.return_value = 123456
ksession._callMethod.reset_mock()
ksession._callMethod.side_effect = [
self.file_mock.return_value = fileobj
self.getsize_mock.return_value = 123456
self.ksession._callMethod.side_effect = [
{'size': 6, 'hexdigest': '041c012d'}, # rawUpload
{'size': 3, 'hexdigest': 'deadbeef'}, # checkUpload
]
with self.assertRaises(koji.GenericError):
ksession.fastUpload('file', 'target', blocksize=1024)
self.ksession.fastUpload('file', 'target', blocksize=1024)
def test_fastUpload_nondefault_volume(self):
# upload regular file (success)
fileobj = mock.MagicMock()
fileobj.read.side_effect = ['123123', '']
self.file_mock.return_value = fileobj
self.ksession._callMethod.side_effect = [
{'size': 6, 'hexdigest': '041c012d'}, # rawUpload
{'size': 6, 'hexdigest': '041c012d'}, # checkUpload
]
self.ksession.fastUpload('file', 'target', blocksize=1024, volume='foobar')
for call in self.ksession._callMethod.call_args_list:
# both calls should pass volume as a named arg to the method
# (note: not literally a named arg to _callMethod)
# _callMethod args are: method, method_args, method_kwargs
kwargs = call[0][2]
self.assertTrue('volume' in kwargs)
self.assertEqual(kwargs['volume'], 'foobar')

View file

@ -0,0 +1,88 @@
import unittest
import mock
import kojihub
class TestListTaskOutput(unittest.TestCase):
@mock.patch('os.path.isdir')
@mock.patch('os.walk')
def test_empty(self, walk, isdir):
isdir.return_value = True
walk.return_value = []
result = kojihub.list_task_output(1)
self.assertEqual(result, [])
@mock.patch('os.path.isdir')
@mock.patch('os.walk')
def test_simple(self, walk, isdir):
isdir.return_value = True
walk.return_value = (('dir', [], ['file']),)
result = kojihub.list_task_output(1)
self.assertEqual(result, ['file'])
@mock.patch('os.stat')
@mock.patch('os.path.isdir')
@mock.patch('os.walk')
def test_simple_stat(self, walk, isdir, stat):
isdir.return_value = True
walk.return_value = (('dir', [], ['file']),)
st_mock = mock.MagicMock()
st_mock.st_size = 123
st_mock.st_atime = 345
st_mock.st_mtime = 678
st_mock.st_ctime = 901
stat.return_value = st_mock
result = kojihub.list_task_output(1, stat=True)
self.assertEqual(result, {
'file': {
'st_size': '123',
'st_atime': 345,
'st_mtime': 678,
'st_ctime': 901,
}
})
@mock.patch('kojihub.list_volumes')
@mock.patch('os.stat')
@mock.patch('os.path.isdir')
@mock.patch('os.walk')
def test_volumes(self, walk, isdir, stat, list_volumes):
isdir.return_value = True
walk.return_value = (('dir', [], ['file']),)
st_mock = mock.MagicMock()
st_mock.st_size = 123
st_mock.st_atime = 345
st_mock.st_mtime = 678
st_mock.st_ctime = 901
stat.return_value = st_mock
list_volumes.return_value = [{'name': 'DEFAULT'}]
result = kojihub.list_task_output(1, all_volumes=True)
self.assertEqual(result, {'file': ['DEFAULT']})
@mock.patch('kojihub.list_volumes')
@mock.patch('os.stat')
@mock.patch('os.path.isdir')
@mock.patch('os.walk')
def test_volumes_stat(self, walk, isdir, stat, list_volumes):
isdir.return_value = True
walk.return_value = (('dir', [], ['file']),)
st_mock = mock.MagicMock()
st_mock.st_size = 123
st_mock.st_atime = 345
st_mock.st_mtime = 678
st_mock.st_ctime = 901
stat.return_value = st_mock
list_volumes.return_value = [{'name': 'DEFAULT'}]
result = kojihub.list_task_output(1, stat=True, all_volumes=True)
self.assertEqual(result, {
'file': {
'DEFAULT': {
'st_size': '123',
'st_atime': 345,
'st_mtime': 678,
'st_ctime': 901,
}
}
})

View file

@ -380,7 +380,7 @@ class TasksTestCase(TestCase):
obj = TestTask(123, 'some_method', ['random_arg'], None, None, temp_path)
obj.session = Mock()
self.assertEquals(obj.uploadFile(temp_file), None)
obj.session.uploadWrapper.assert_called_once_with(temp_file, 'tasks/123/123', None)
obj.session.uploadWrapper.assert_called_once_with(temp_file, 'tasks/123/123', None, volume=None)
# This patch removes the dependence on getUploadDir functioning
@patch('{0}.TestTask.getUploadDir'.format(__name__), return_value='tasks/123/123')
@ -421,7 +421,7 @@ class TasksTestCase(TestCase):
obj.uploadFile = Mock()
obj.uploadFile.return_value = None
self.assertEquals(obj.uploadTree(temp_path), None)
obj.uploadFile.assert_has_calls([call(dummy_file, ''), call(dummy_file2, 'some_directory')])
obj.uploadFile.assert_has_calls([call(dummy_file, '', volume=None), call(dummy_file2, 'some_directory', volume=None)])
@patch('os.lchown', return_value=None)
def test_BaseTaskHandler_chownTree(self, mock_lchown):

View file

@ -672,17 +672,19 @@ def taskinfo(environ, taskID):
values['full_result_text'] = full_result_text
values['abbr_result_text'] = abbr_result_text
output = server.listTaskOutput(task['id'])
output.sort(_sortByExtAndName)
values['output'] = output
topurl = environ['koji.options']['KojiFilesURL']
pathinfo = koji.PathInfo(topdir=topurl)
values['pathinfo'] = pathinfo
paths = [] # (volume, relpath) tuples
for relname, volumes in server.listTaskOutput(task['id'], all_volumes=True).iteritems():
paths += [(volume, relname) for volume in volumes]
values['output'] = sorted(paths, cmp = _sortByExtAndName)
if environ['koji.currentUser']:
values['perms'] = server.getUserPerms(environ['koji.currentUser']['id'])
else:
values['perms'] = []
topurl = environ['koji.options']['KojiFilesURL']
values['pathinfo'] = koji.PathInfo(topdir=topurl)
return _genHTML(environ, 'taskinfo.chtml')
def taskstatus(environ, taskID):
@ -692,11 +694,11 @@ def taskstatus(environ, taskID):
task = server.getTaskInfo(taskID)
if not task:
return ''
files = server.listTaskOutput(taskID, stat=True)
files = server.listTaskOutput(taskID, stat=True, all_volumes=True)
output = '%i:%s\n' % (task['id'], koji.TASK_STATES[task['state']])
for filename, file_stats in files.items():
output += '%s:%s\n' % (filename, file_stats['st_size'])
for filename, volumes_data in files.iteritems():
for volume, file_stats in volumes_data.iteritems():
output += '%s:%s:%s\n' % (volume, filename, file_stats['st_size'])
return output
def resubmittask(environ, taskID):
@ -716,18 +718,19 @@ def canceltask(environ, taskID):
_redirect(environ, 'taskinfo?taskID=%i' % taskID)
def _sortByExtAndName(a, b):
"""Sort two filenames, first by extension, and then by name."""
aRoot, aExt = os.path.splitext(a)
bRoot, bExt = os.path.splitext(b)
"""Sort two filename tuples, first by extension, and then by name."""
aRoot, aExt = os.path.splitext(os.path.basename(a[1]))
bRoot, bExt = os.path.splitext(os.path.basename(b[1]))
return cmp(aExt, bExt) or cmp(aRoot, bRoot)
def getfile(environ, taskID, name, offset=None, size=None):
def getfile(environ, taskID, name, volume='DEFAULT', offset=None, size=None):
server = _getServer(environ)
taskID = int(taskID)
output = server.listTaskOutput(taskID, stat=True)
file_info = output.get(name)
if not file_info:
output = server.listTaskOutput(taskID, stat=True, all_volumes=True)
try:
file_info = output[name][volume]
except KeyError:
raise koji.GenericError('no file "%s" output by task %i' % (name, taskID))
mime_guess = mimetypes.guess_type(name, strict=False)[0]
@ -764,10 +767,10 @@ def getfile(environ, taskID, name, offset=None, size=None):
size = file_size - offset
#environ['koji.headers'].append(['Content-Length', str(size)])
return _chunk_file(server, environ, taskID, name, offset, size)
return _chunk_file(server, environ, taskID, name, offset, size, volume)
def _chunk_file(server, environ, taskID, name, offset, size):
def _chunk_file(server, environ, taskID, name, offset, size, volume):
remaining = size
encode_int = koji.encode_int
while True:
@ -776,7 +779,7 @@ def _chunk_file(server, environ, taskID, name, offset, size):
chunk_size = 1048576
if remaining < chunk_size:
chunk_size = remaining
content = server.downloadTaskOutput(taskID, name, offset=encode_int(offset), size=chunk_size)
content = server.downloadTaskOutput(taskID, name, offset=encode_int(offset), size=chunk_size, volume=volume)
if not content:
break
yield content
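The corresponding kojiweb URLs simply gain a volume query argument. A small sketch that builds the "tail" link used by the taskinfo template below; the web base URL, task id, file and volume names are placeholders.

import urllib

params = urllib.urlencode({
    'taskID': 1234,           # placeholder task
    'name': 'build.log',      # placeholder file
    'volume': 'archive',      # 'DEFAULT' (or omitting it) selects the default volume
    'offset': -4000,          # negative offset requests the tail of the file
})
url = 'https://koji.example.com/koji/getfile?' + params   # placeholder web base
print(url)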

View file

@ -412,10 +412,10 @@ $value
<tr>
<th>Output</th>
<td>
#for $filename in $output
<a href="$pathinfo.task($task.id)/$urllib.quote($filename)">$filename</a>
#for $volume, $filename in $output
<a href="$pathinfo.task($task.id, volume=$volume)/$urllib.quote($filename)">$filename</a>
#if $filename.endswith('.log')
(<a href="getfile?taskID=$task.id&name=$urllib.quote($filename)&offset=-4000">tail</a>)
(<a href="getfile?taskID=$task.id&volume=$volume&name=$urllib.quote($filename)&offset=-4000">tail</a>)
#end if
<br/>
#end for

View file

@ -51,8 +51,8 @@ function handleStatus(event) {
var logs = {};
for (var i = 1; i < lines.length; i++) {
data = lines[i].split(":");
var filename = data[0];
var filesize = parseInt(data[1]);
var filename = data[0] + ":" + data[1];
var filesize = parseInt(data[2]);
if (filename.indexOf(".log") != -1) {
logs[filename] = filesize;
}
@ -140,8 +140,11 @@ function outputLog() {
chunkSize = currentSize - currentOffset;
}
var req = new XMLHttpRequest();
req.open("GET", baseURL + "/getfile?taskID=" + currentTaskID + "&name=" + currentLog +
"&offset=" + currentOffset + "&size=" + chunkSize, true);
var data = currentLog.split(':');
var volume = data[0];
var filename = data[1];
req.open("GET", baseURL + "/getfile?taskID=" + currentTaskID + "&name=" + filename +
"&volume=" + volume + "&offset=" + currentOffset + "&size=" + chunkSize, true);
req.onreadystatechange = handleLog;
req.send(null);
if (headerElement != null) {