explicit encoding for text file operations

Fixes: https://pagure.io/koji/issue/2641
This commit is contained in:
Tomas Kopecek 2021-01-14 16:37:47 +01:00
parent ffa0912bc0
commit 96ae0ecef5
29 changed files with 136 additions and 156 deletions

View file

@ -27,7 +27,6 @@ import copy
import glob
import grp
import io
import json
import logging
import logging.handlers
import os
@ -311,7 +310,7 @@ class BuildRoot(object):
output = koji.genMockConfig(self.name, self.br_arch, managed=True, **opts)
# write config
with open(configfile, 'w') as fo:
with open(configfile, 'wt', encoding='utf-8') as fo:
fo.write(output)
def _repositoryEntries(self, pi, plugin=False):
@ -410,8 +409,8 @@ class BuildRoot(object):
</settings>
"""
settings = settings % locals()
with open(self.rootdir() + destfile, 'w') as fo:
fo.write(settings)
with open(self.rootdir() + destfile, 'wt', encoding='utf-8') as fo:
fo.write(settings)
def mock(self, args):
"""Run mock"""
@ -457,13 +456,13 @@ class BuildRoot(object):
ts_name = '%s-ts.log' % fname
fpath = os.path.join(resultdir, ts_name)
if os.path.exists(fpath):
with open(fpath, 'rt') as ts_file:
with open(fpath, 'rt', encoding='utf-8') as ts_file:
lines = ts_file.readlines()
if lines:
last = int(lines[-1].split()[1])
ts_offsets[fname] = last
else:
with open(fpath, 'a') as ts_file:
with open(fpath, 'at', encoding='utf-8') as ts_file:
ts_file.write('%.0f 0\n' % time.time())
logs[ts_name] = (None, None, 0, fpath)
if workdir and mocklog not in logs:
@ -474,13 +473,13 @@ class BuildRoot(object):
ts_name = '%s-ts.log' % mocklog
fpath = os.path.join(workdir, ts_name)
if os.path.exists(fpath):
with open(fpath, 'rt') as ts_file:
with open(fpath, 'rt', encoding='utf-8') as ts_file:
lines = ts_file.readlines()
if lines:
last = int(lines[-1].split()[1])
ts_offsets[mocklog] = last
else:
with open(fpath, 'a') as ts_file:
with open(fpath, 'at', encoding='utf-8') as ts_file:
ts_file.write('%.0f 0\n' % time.time())
logs[ts_name] = (None, None, 0, fpath)
@ -511,7 +510,7 @@ class BuildRoot(object):
ts_offsets.setdefault(fname, 0)
if ts_offsets[fname] < position:
fpath = os.path.join(resultdir, '%s-ts.log' % fname)
with open(fpath, 'a') as ts_file:
with open(fpath, 'at', encoding='utf-8') as ts_file:
ts_file.write('%.0f %i\n' % (time.time(), position))
ts_offsets[fname] = position
incremental_upload(self.session, fname, fd, uploadpath, logger=self.logger)
@ -1438,8 +1437,7 @@ class BuildArchTask(BaseBuildTask):
if rpmdiff_hash[self.id]:
log_name = 'noarch_rpmdiff.json'
noarch_hash_path = os.path.join(broot.workdir, log_name)
with open(noarch_hash_path, 'wt') as f:
json.dump(rpmdiff_hash, f, indent=2, sort_keys=True)
koji.dump_json(noarch_hash_path, rpmdiff_hash, indent=2, sort_keys=True)
log_files.append(log_name)
self.logger.debug("rpms: %r" % rpm_files)
@ -1827,7 +1825,7 @@ class WrapperRPMTask(BaseBuildTask):
tgt[field] = src.get(field)
def spec_sanity_checks(self, filename):
spec = open(filename).read()
spec = open(filename, encoding='utf-8').read()
for tag in ("Packager", "Distribution", "Vendor"):
if re.match("%s:" % tag, spec, re.M):
raise koji.BuildError("%s is not allowed to be set in spec file" % tag)
@ -3052,7 +3050,7 @@ class ImageTask(BaseTaskHandler):
kskoji = os.path.join(broot.tmpdir(), 'koji-image-%s-%i.ks' %
(target_info['build_tag_name'], self.id))
koji.ensuredir(broot.tmpdir())
with open(kskoji, 'w') as outfile:
with open(kskoji, 'wt', encoding='utf-8') as outfile:
outfile.write(str(self.ks.handler))
# put the new ksfile in the output directory
@ -3250,7 +3248,7 @@ class LiveCDTask(ImageTask):
Using iso9660 from pycdio, get the file manifest of the given image,
and save it to the text file manifile.
"""
fd = open(manifile, 'w')
fd = open(manifile, 'wt', encoding='utf-8')
if not fd:
raise koji.GenericError(
'Unable to open manifest file (%s) for writing!' % manifile)
@ -3439,7 +3437,7 @@ class LiveMediaTask(ImageTask):
Using iso9660 from pycdio, get the file manifest of the given image,
and save it to the text file manifile.
"""
fd = open(manifile, 'w')
fd = open(manifile, 'wt', encoding='utf-8')
if not fd:
raise koji.GenericError(
'Unable to open manifest file (%s) for writing!' % manifile)
@ -3772,7 +3770,7 @@ class OzImageTask(BaseTaskHandler):
an absolute path to the kickstart file we wrote
"""
kspath = os.path.join(self.workdir, ksname)
with open(kspath, 'w') as outfile:
with open(kspath, 'wt', encoding='utf-8') as outfile:
outfile.write(str(ksobj.handler))
# put the new ksfile in the output directory
@ -3906,7 +3904,7 @@ class OzImageTask(BaseTaskHandler):
edriver = newxml.getElementsByTagName('driver')[0]
edriver.setAttribute('type', format)
xml_path = os.path.join(self.workdir, filename)
with open(xml_path, 'w') as xmlfd:
with open(xml_path, 'wt', encoding='utf-8') as xmlfd:
xmlfd.write(newxml.toprettyxml())
return xml_path
@ -4356,7 +4354,7 @@ class BaseImageTask(OzImageTask):
ApplicationConfiguration(configuration=config)
tdl_path = os.path.join(self.workdir, 'tdl-%s.xml' % self.arch)
with open(tdl_path, 'w') as tdl:
with open(tdl_path, 'wt', encoding='utf-8') as tdl:
tdl.write(template)
self.uploadFile(tdl_path)
@ -4506,7 +4504,7 @@ class BuildIndirectionImageTask(OzImageTask):
tops['tempdir'] = self.workdir
final_path = os.path.join(self.workdir, os.path.basename(filepath))
with koji.openRemoteFile(filepath, **tops) as remote_fileobj:
with open(final_path, 'w') as final_fileobj:
with open(final_path, 'wb') as final_fileobj:
shutil.copyfileobj(remote_fileobj, final_fileobj)
self.logger.debug('uploading retrieved file from here: %s' % final_path)
self.uploadFile(final_path) # upload the original ks file
@ -4560,7 +4558,7 @@ class BuildIndirectionImageTask(OzImageTask):
# Factory doesn't attempt to modify a disk image after it is COMPLETE so
# this will work safely on read-only NFS mounts
factory_base_image.data = diskimage_full
factory_base_image.template = open(tdl_full).read()
factory_base_image.template = open(tdl_full, encoding='utf-8').read()
factory_base_image.status = 'COMPLETE'
# Now save it
pim.save_image(factory_base_image)
@ -4612,7 +4610,7 @@ class BuildIndirectionImageTask(OzImageTask):
# Factory doesn't attempt to modify a disk image after it is COMPLETE so
# this will work safely on read-only NFS mounts
factory_base_image.data = diskimage_full
factory_base_image.template = open(tdl_full).read()
factory_base_image.template = open(tdl_full, encoding='utf-8').read()
factory_base_image.status = 'COMPLETE'
# Now save it
pim.save_image(factory_base_image)
@ -4702,7 +4700,7 @@ class BuildIndirectionImageTask(OzImageTask):
rm = ReservationManager()
rm._listen_port = rm.MIN_PORT + self.id % (rm.MAX_PORT - rm.MIN_PORT)
utility_customizations = open(indirection_template).read()
utility_customizations = open(indirection_template, encoding='utf-8').read()
results_loc = opts.get('results_loc', None)
if results_loc[0] != "/":
results_loc = "/" + results_loc
@ -4720,7 +4718,7 @@ class BuildIndirectionImageTask(OzImageTask):
pim = PersistentImageManager.default_manager()
pim.add_image(target_image)
target.target_image = target_image
with open(target_image.data, "w") as f:
with open(target_image.data, "wt", encoding='utf-8') as f:
f.write("Mock build from task ID: %s" % self.id)
target_image.status = 'COMPLETE'
else:
@ -4880,7 +4878,7 @@ class BuildSRPMFromSCMTask(BaseBuildTask):
_taskWeight = 1.0
def spec_sanity_checks(self, filename):
spec = open(filename).read()
spec = open(filename, encoding='utf-8').read()
for tag in ("Packager", "Distribution", "Vendor"):
if re.match("%s:" % tag, spec, re.M):
raise koji.BuildError("%s is not allowed to be set in spec file" % tag)
@ -5452,7 +5450,7 @@ class CreaterepoTask(BaseTaskHandler):
if external_repos:
self.merge_repos(external_repos, arch, groupdata)
elif pkglist is None:
with open(os.path.join(self.datadir, "EMPTY_REPO"), 'w') as fo:
with open(os.path.join(self.datadir, "EMPTY_REPO"), 'wt') as fo:
fo.write("This repo is empty because its tag has no content for this arch\n")
uploadpath = self.getUploadDir()
@ -5751,7 +5749,7 @@ class createDistRepoTask(BaseTaskHandler):
zck_dict_dir=opts.get('zck_dict_dir'))
if len(self.kojipkgs) == 0:
fn = os.path.join(self.repodir, "repodata", "EMPTY_REPO")
with open(fn, 'w') as fp:
with open(fn, 'wt') as fp:
fp.write("This repo is empty because its tag has no content "
"for this arch\n")
@ -5793,8 +5791,7 @@ class createDistRepoTask(BaseTaskHandler):
def upload_repo_manifest(self):
"""Upload a list of the repo files we've uploaded"""
fn = '%s/repo_manifest' % self.workdir
with open(fn, 'w') as fp:
json.dump(self.repo_files, fp, indent=4)
koji.dump_json(fn, self.repo_files, indent=4)
self.session.uploadWrapper(fn, self.uploadpath)
def do_createrepo(self, repodir, pkglist, groupdata, oldpkgs=None,
@ -5862,7 +5859,7 @@ class createDistRepoTask(BaseTaskHandler):
# read pkgs data from multilib repo
ml_pkgfile = os.path.join(mldir, 'kojipkgs')
ml_pkgs = json.load(open(ml_pkgfile, 'r'))
ml_pkgs = koji.load_json(ml_pkgfile)
# step 1: figure out which packages are multilib (should already exist)
dnfbase = dnf.Base()
@ -5918,7 +5915,7 @@ enabled=1
# step 3: proceed with dnf config and set up
yconfig_path = os.path.join(dnfdir, 'dnf.conf-koji-%s' % arch)
with open(yconfig_path, 'w') as f:
with open(yconfig_path, 'wt', encoding='utf-8') as f:
f.write(dnfconfig)
self.session.uploadWrapper(yconfig_path, self.uploadpath,
os.path.basename(yconfig_path))
@ -5955,7 +5952,7 @@ enabled=1
if len(fs_missing) > 0:
missing_log = os.path.join(self.workdir, 'missing_multilib.log')
with open(missing_log, 'w') as outfile:
with open(missing_log, 'wt', encoding='utf-8') as outfile:
outfile.write('The following multilib files were missing:\n')
for ml_path in fs_missing:
outfile.write(ml_path + '\n')
@ -6058,7 +6055,7 @@ enabled=1
# report problems
if len(fs_missing) > 0:
missing_log = os.path.join(self.workdir, 'missing_files.log')
with open(missing_log, 'w') as outfile:
with open(missing_log, 'wt', encoding='utf-8') as outfile:
outfile.write('Some rpm files were missing.\n'
'Most likely, you want to create these signed copies.\n\n'
'Missing files:\n')
@ -6071,7 +6068,7 @@ enabled=1
if sig_missing:
# log missing signatures and possibly error
missing_log = os.path.join(self.workdir, 'missing_signatures.log')
with open(missing_log, 'w') as outfile:
with open(missing_log, 'wt', encoding='utf-8') as outfile:
outfile.write('Some rpms were missing requested signatures.\n')
if opts['skip_missing_signatures']:
outfile.write('The skip_missing_signatures option was specified, so '
@ -6120,19 +6117,18 @@ enabled=1
else:
pkgs.append('Packages/%s/%s\n' % (bnplet, bnp))
with open('%s/pkglist' % self.repodir, 'w') as fo:
with open('%s/pkglist' % self.repodir, 'wt', encoding='utf-8') as fo:
for line in pkgs:
fo.write(line)
for subrepo in subrepo_pkgs:
koji.ensuredir('%s/%s' % (self.repodir, subrepo))
with open('%s/%s/pkglist' % (self.repodir, subrepo), 'w') as fo:
with open('%s/%s/pkglist' % (self.repodir, subrepo), 'wt', encoding='utf-8') as fo:
for line in subrepo_pkgs[subrepo]:
fo.write(line)
def write_kojipkgs(self):
filename = os.path.join(self.repodir, 'kojipkgs')
with open(filename, 'w') as datafile:
json.dump(self.kojipkgs, datafile, indent=4, sort_keys=True)
koji.dump_json(filename, self.kojipkgs, sort_keys=True)
class WaitrepoTask(BaseTaskHandler):

View file

@ -252,7 +252,7 @@ class RepoMerge(object):
include_srpms[srpm_name] = (pkg.sourcerpm, pkg.repoid)
pkgorigins = os.path.join(self.yumbase.conf.cachedir, 'pkgorigins')
origins = open(pkgorigins, 'w')
origins = open(pkgorigins, 'wt', encoding='utf-8')
seen_rpms = {}
for repo in repos:
@ -307,7 +307,7 @@ class RepoMerge(object):
pkg._return_remote_location = make_const_func(loc)
pkgorigins = os.path.join(self.yumbase.conf.cachedir, 'pkgorigins')
origins = open(pkgorigins, 'w')
origins = open(pkgorigins, 'wt', encoding='utf-8')
seen_rpms = {}
for repo in repos:
@ -348,9 +348,8 @@ def main(args):
opts = parse_args(args)
if opts.blocked:
blocked_fo = open(opts.blocked)
with open(opts.blocked, encoding='utf-8') as blocked_fo:
blocked_list = blocked_fo.readlines()
blocked_fo.close()
blocked = dict([(b.strip(), 1) for b in blocked_list])
else:
blocked = {}

View file

@ -1035,7 +1035,7 @@ def anon_handle_mock_config(goptions, session, args):
opts['tag_macros'][macro] = buildcfg['extra'][key]
output = koji.genMockConfig(name, arch, **opts)
if options.ofile:
with open(options.ofile, 'w') as fo:
with open(options.ofile, 'wt', encoding='utf-8') as fo:
fo.write(output)
else:
print(output)
@ -1319,7 +1319,7 @@ def handle_import_cg(goptions, session, args):
if json is None:
parser.error(_("Unable to find json module"))
activate_session(session, goptions)
metadata = json.load(open(args[0], 'r'))
metadata = koji.load_json(args[0])
if 'output' not in metadata:
print(_("Metadata contains no output"))
sys.exit(1)
@ -1586,10 +1586,10 @@ def handle_prune_signed_copies(goptions, session, args):
# (with the modification that we check to see if the build was latest within
# the last N days)
if options.ignore_tag_file:
with open(options.ignore_tag_file) as fo:
with open(options.ignore_tag_file, encoding='utf-8') as fo:
options.ignore_tag.extend([line.strip() for line in fo.readlines()])
if options.protect_tag_file:
with open(options.protect_tag_file) as fo:
with open(options.protect_tag_file, encoding='utf-8') as fo:
options.protect_tag.extend([line.strip() for line in fo.readlines()])
if options.debug:
options.verbose = True
@ -6884,7 +6884,7 @@ def anon_handle_download_logs(options, session, args):
full_filename = os.path.normpath(os.path.join(task_log_dir, FAIL_LOG))
koji.ensuredir(os.path.dirname(full_filename))
sys.stdout.write("Writing: %s\n" % full_filename)
with open(full_filename, 'w') as fo:
with open(full_filename, 'wt', encoding='utf-8') as fo:
fo.write(content)
def download_log(task_log_dir, task_id, filename, blocksize=102400, volume=None):

View file

@ -2546,7 +2546,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None, with_separa
groupsdir = "%s/groups" % (repodir)
koji.ensuredir(groupsdir)
comps = koji.generate_comps(groups, expand_groups=True)
with open("%s/comps.xml" % groupsdir, 'w') as fo:
with open("%s/comps.xml" % groupsdir, 'wt', encoding='utf-8') as fo:
fo.write(comps)
# write repo info to disk
@ -2559,7 +2559,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None, with_separa
'with_separate_src': with_separate_src,
'with_debuginfo': with_debuginfo,
}
with open('%s/repo.json' % repodir, 'w') as fp:
with open('%s/repo.json' % repodir, 'wt', encoding='utf-8') as fp:
json.dump(repo_info, fp, indent=2)
# get build dirs
@ -2577,7 +2577,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None, with_separa
top_relpath = os.path.relpath(koji.pathinfo.topdir, archdir)
top_link = joinpath(archdir, 'toplink')
os.symlink(top_relpath, top_link)
pkglist[repoarch] = open(joinpath(archdir, 'pkglist'), 'w')
pkglist[repoarch] = open(joinpath(archdir, 'pkglist'), 'wt', encoding='utf-8')
# NOTE - rpms is now an iterator
for rpminfo in rpms:
if not with_debuginfo and koji.is_debuginfo(rpminfo['name']):
@ -2606,7 +2606,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None, with_separa
# write blocked package lists
for repoarch in repo_arches:
blocklist = open(joinpath(repodir, repoarch, 'blocklist'), 'w')
blocklist = open(joinpath(repodir, repoarch, 'blocklist'), 'wt', encoding='utf-8')
for pkg in blocks:
blocklist.write(pkg['package_name'])
blocklist.write('\n')
@ -2676,7 +2676,7 @@ def _write_maven_repo_metadata(destdir, artifacts):
</versioning>
</metadata>
""" % datetime.datetime.now().strftime('%Y%m%d%H%M%S')
with open(joinpath(destdir, 'maven-metadata.xml'), 'w') as mdfile:
with open(joinpath(destdir, 'maven-metadata.xml'), 'wt', encoding='utf-8') as mdfile:
mdfile.write(contents)
_generate_maven_metadata(destdir)
@ -2727,8 +2727,7 @@ def dist_repo_init(tag, keys, task_opts):
'volume': volume,
'task_opts': task_opts,
}
with open('%s/repo.json' % repodir, 'w') as fp:
json.dump(repo_info, fp, indent=2)
koji.dump_json('%s/repo.json' % repodir, repo_info, indent=2)
# note: we need to match args from the other postRepoInit callback
koji.plugin.run_callbacks('postRepoInit', tag=tinfo, with_src=False,
with_debuginfo=False, event=event, repo_id=repo_id,
@ -4933,7 +4932,7 @@ def _get_tarball_list(archive_id, tarpath):
result = []
if not os.path.exists(tarpath):
return result
with tarfile.open(tarpath, 'r') as archive:
with tarfile.open(tarpath, 'r') as archive:
for entry in archive:
filename = koji.fixEncoding(entry.name)
result.append({'archive_id': archive_id,
@ -5875,7 +5874,7 @@ def check_noarch_rpms(basepath, rpms, logs=None):
for arch in logs:
for log in logs[arch]:
if os.path.basename(log) == 'noarch_rpmdiff.json':
task_hash = json.load(open(joinpath(basepath, log), 'rt'))
task_hash = koji.load_json(joinpath(basepath, log))
for task_id in task_hash:
hashes[task_id] = task_hash[task_id]
@ -6279,7 +6278,7 @@ class CG_Importer(object):
path = joinpath(workdir, directory, metadata)
if not os.path.exists(path):
raise koji.GenericError("No such file: %s" % metadata)
with open(path, 'rt') as fo:
with open(path, 'rt', encoding='utf-8') as fo:
metadata = fo.read()
self.raw_metadata = metadata
self.metadata = parse_json(metadata, desc='metadata')
@ -6482,7 +6481,7 @@ class CG_Importer(object):
builddir = koji.pathinfo.build(self.buildinfo)
koji.ensuredir(builddir)
path = joinpath(builddir, 'metadata.json')
with open(path, 'w') as fo:
with open(path, 'wt', encoding='utf-8') as fo:
fo.write(self.raw_metadata)
def prep_brs(self):
@ -7385,7 +7384,7 @@ def _generate_maven_metadata(mavendir):
if not content:
break
sum.update(content)
with open('%s/%s' % (mavendir, sumfile), 'w') as sumobj:
with open('%s/%s' % (mavendir, sumfile), 'wt') as sumobj:
sumobj.write(sum.hexdigest())
@ -14581,15 +14580,13 @@ class HostExports(object):
fn = '%s/%s/repo_manifest' % (workdir, uploadpath)
if not os.path.isfile(fn):
raise koji.GenericError('Missing repo manifest')
with open(fn) as fp:
files = json.load(fp)
files = koji.load_json(fn)
# Read package data
fn = '%s/%s/kojipkgs' % (workdir, uploadpath)
if not os.path.isfile(fn):
raise koji.GenericError('Missing kojipkgs file')
with open(fn) as fp:
kojipkgs = json.load(fp)
kojipkgs = koji.load_json(fn)
# Figure out where to send the uploaded files
file_moves = []
@ -14697,12 +14694,12 @@ def get_upload_path(reldir, name, create=False, volume=None):
# assuming login was asserted earlier
u_fn = joinpath(udir, '.user')
if os.path.exists(u_fn):
user_id = int(open(u_fn, 'r').read())
user_id = int(open(u_fn, 'rt').read())
if context.session.user_id != user_id:
raise koji.GenericError("Invalid upload directory, not owner: %s" %
orig_reldir)
else:
with open(u_fn, 'w') as fo:
with open(u_fn, 'wt') as fo:
fo.write(str(context.session.user_id))
return joinpath(udir, name)

View file

@ -29,6 +29,7 @@ import datetime
import errno
import hashlib
import imp
import json
import logging
import logging.handlers
import optparse
@ -1282,9 +1283,8 @@ def parse_pom(path=None, contents=None):
values = {}
handler = POMHandler(values, fields)
if path:
fd = open(path)
with open(path, encoding='utf-8') as fd:
contents = fd.read()
fd.close()
if not contents:
raise GenericError(
@ -1353,6 +1353,16 @@ def hex_string(s):
return ''.join(['%02x' % _ord(x) for x in s])
def load_json(filepath):
    """Read and deserialize JSON data from a file.

    :param str filepath: path to the JSON file (read as UTF-8 text)
    :returns: the deserialized data
    :raises OSError: if the file cannot be opened
    :raises json.JSONDecodeError: if the file is not valid JSON
    """
    # context manager ensures the file handle is closed promptly
    # (the original left it open until garbage collection)
    with open(filepath, 'rt', encoding='utf-8') as fp:
        return json.load(fp)
def dump_json(filepath, data, indent=4, sort_keys=False):
    """Serialize data as JSON and write it to a file as UTF-8 text.

    :param str filepath: path of the file to (over)write
    :param data: any json-serializable object
    :param int indent: indentation level passed to json.dump (default 4)
    :param bool sort_keys: sort dictionary keys in the output (default False)
    :raises OSError: if the file cannot be opened for writing
    :raises TypeError: if data is not JSON-serializable
    """
    # context manager guarantees the handle is flushed and closed;
    # the original never closed it, risking truncated output
    with open(filepath, 'wt', encoding='utf-8') as fp:
        json.dump(data, fp, indent=indent, sort_keys=sort_keys)
def make_groups_spec(grplist, name='buildsys-build', buildgroup=None):
"""Return specfile contents representing the group"""
if buildgroup is None:
@ -1611,9 +1621,7 @@ def genMockConfig(name, arch, managed=False, repoid=None, tag_name=None, **opts)
if opts.get('use_host_resolv', False) and os.path.exists('/etc/hosts'):
# if we're setting up DNS,
# also copy /etc/hosts from the host
etc_hosts = open('/etc/hosts')
files['etc/hosts'] = etc_hosts.read()
etc_hosts.close()
files['etc/hosts'] = open('/etc/hosts', 'rt', encoding='utf-8').read()
mavenrc = ''
if opts.get('maven_opts'):
mavenrc = 'export MAVEN_OPTS="%s"\n' % ' '.join(opts['maven_opts'])
@ -1832,7 +1840,7 @@ def openRemoteFile(relpath, topurl=None, topdir=None, tempdir=None):
downloadFile(url, path=relpath, fo=fo)
elif topdir:
fn = "%s/%s" % (topdir, relpath)
fo = open(fn)
fo = open(fn, 'rb')
else:
raise GenericError("No access method for remote file: %s" % relpath)
return fo

View file

@ -237,7 +237,7 @@ def _try_read_cpuinfo():
""" Try to read /proc/cpuinfo ... if we can't ignore errors (ie. proc not
mounted). """
try:
return open("/proc/cpuinfo", "r")
return open("/proc/cpuinfo", "rt")
except Exception:
return []
@ -383,9 +383,8 @@ def getCanonX86_64Arch(arch):
def getCanonArch(skipRpmPlatform=0):
if not skipRpmPlatform and os.access("/etc/rpm/platform", os.R_OK):
try:
f = open("/etc/rpm/platform", "r")
with open("/etc/rpm/platform", "rt", encoding='utf-8') as f:
line = f.readline()
f.close()
(arch, vendor, opersys) = line.split("-", 2)
return arch
except Exception:

View file

@ -790,7 +790,7 @@ class TaskManager(object):
fn = "%s/%s" % (configdir, f)
if not os.path.isfile(fn):
continue
fo = open(fn, 'r')
fo = open(fn, 'rt', encoding='utf-8')
id = None
name = None
for n in range(10):
@ -1089,7 +1089,7 @@ class TaskManager(object):
proc_path = '/proc/%i/stat' % pid
if not os.path.isfile(proc_path):
return None
proc_file = open(proc_path)
proc_file = open(proc_path, 'rt', encoding='utf-8')
procstats = [not field.isdigit() and field or int(field)
for field in proc_file.read().split()]
proc_file.close()
@ -1097,7 +1097,7 @@ class TaskManager(object):
cmd_path = '/proc/%i/cmdline' % pid
if not os.path.isfile(cmd_path):
return None
cmd_file = open(cmd_path)
cmd_file = open(cmd_path, 'rt', encoding='utf-8')
procstats[1] = cmd_file.read().replace('\0', ' ').strip()
cmd_file.close()
if not procstats[1]:

View file

@ -267,7 +267,7 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
self.logger.info('New runroot')
self.logger.info("Runroot mounts: %s" % mounts)
fn = '%s/tmp/runroot_mounts' % rootdir
with open(fn, 'a') as fslog:
with open(fn, 'at', encoding='utf-8') as fslog:
logfile = "%s/do_mounts.log" % self.workdir
uploadpath = self.getUploadDir()
error = None
@ -309,7 +309,7 @@ class RunRootTask(koji.tasks.BaseTaskHandler):
mounts = set()
fn = '%s/tmp/runroot_mounts' % rootdir
if os.path.exists(fn):
with open(fn, 'r') as fslog:
with open(fn, 'rt', encoding='utf-8') as fslog:
for line in fslog.readlines():
mounts.add(line.strip())
# also, check /proc/mounts just in case

View file

@ -23,8 +23,7 @@ class MyClientSession(koji.ClientSession):
def load_calls(self, name):
fn = os.path.join(os.path.dirname(__file__), 'data/calls', name,'calls.json')
with open(fn) as fp:
data = json.load(fp)
data = koji.load_json(fn)
for call in data:
key = self._munge([call['method'], call['args'], call['kwargs']])
self._testcalls[key] = call
@ -79,8 +78,7 @@ class TestBuildNotification(unittest.TestCase):
# task_info['id'], method, params, self.session, self.options
task_id = 999
fn = os.path.join(os.path.dirname(__file__), 'data/calls', 'build_notif_1', 'params.json')
with open(fn) as fp:
kwargs = json.load(fp)
kwargs = koji.load_json(fn)
self.session = MyClientSession('https://koji.example.com/kojihub')
self.session.load_calls('build_notif_1')
self.options.from_addr = "koji@example.com"

View file

@ -36,7 +36,7 @@ class TestImportCG(utils.CliTestCase):
@mock.patch('koji_cli.commands._running_in_bg', return_value=False)
@mock.patch('koji_cli.commands.linked_upload')
@mock.patch('koji_cli.commands.activate_session')
@mock.patch('koji_cli.commands.json')
@mock.patch('koji.json')
def test_handle_import_cg(
self,
json_mock,
@ -153,15 +153,7 @@ class TestImportCG(utils.CliTestCase):
activate_session=None)
# Case 2. JSON module does not exist
expected = self.format_error_message('Unable to find json module')
with mock.patch('koji_cli.commands.json', new=None):
self.assert_system_exit(
handle_import_cg,
options,
session,
arguments,
stderr=expected,
activate_session=None)
# dropped - it is now part of stdlib
metadata = {
'output': [
@ -177,7 +169,7 @@ class TestImportCG(utils.CliTestCase):
with mock.patch(utils.get_builtin_open()):
with mock.patch('os.path.exists', new=self.mock_os_path_exists):
with mock.patch('koji_cli.commands.json') as json_mock:
with mock.patch('koji.json') as json_mock:
# Case 3. metafile doesn't have output section
json_mock.load.return_value = {}

View file

@ -333,7 +333,7 @@ config_opts['macros']['%distribution'] = 'Koji Testing'
fobj = mock.MagicMock()
openf.return_value.__enter__.return_value = fobj
anon_handle_mock_config(options, session, arguments)
openf.assert_called_with('/tmp/mock.out', 'w')
openf.assert_called_with('/tmp/mock.out', 'wt', encoding='utf-8')
fobj.write.assert_called_once_with(self.mock_output)
gen_config_mock.assert_called_with(
self.progname, arch, **opts)

View file

@ -9,6 +9,7 @@ import unittest
from mock import call
from six.moves import range
import koji
from koji_cli.lib import watch_tasks
from .fakeclient import FakeClientSession, RecordingClientSession
@ -27,8 +28,7 @@ class TestWatchTasks(unittest.TestCase):
if self.recording:
# save recorded calls
if self.record_file:
with open(self.record_file, 'w') as fp:
json.dump(self.session.get_calls(), fp, indent=4)
koji.dump_json(self.record_file, self.session.get_calls())
else:
json.dump(self.session.get_calls(), sys.stderr, indent=4)
self.recording = False
@ -54,8 +54,7 @@ class TestWatchTasks(unittest.TestCase):
def test_watch_tasks(self, stdout):
# self.setup_record('foo.json')
cfile = os.path.dirname(__file__) + '/data/calls/watchtasks1.json'
with open(cfile) as fp:
cdata = json.load(fp)
cdata = koji.load_json(cfile)
self.session.load_calls(cdata)
rv = watch_tasks(self.session, [1188], quiet=False, poll_interval=0,
topurl=self.options.topurl)
@ -75,8 +74,7 @@ class TestWatchTasks(unittest.TestCase):
def test_watch_tasks_fail(self, stdout, sleep):
# self.setup_record('foo.json')
cfile = os.path.dirname(__file__) + '/data/calls/watchtasks2.json'
with open(cfile) as fp:
cdata = json.load(fp)
cdata = koji.load_json(cfile)
self.session.load_calls(cdata)
rv = watch_tasks(self.session, [1208], quiet=False, poll_interval=5, topurl=None)
self.assertEqual(rv, 1)
@ -100,8 +98,7 @@ class TestWatchTasks(unittest.TestCase):
"""Raise KeyboardInterrupt inner watch_tasks.
Raising it by SIGNAL might be better"""
cfile = os.path.dirname(__file__) + '/data/calls/watchtasks2.json'
with open(cfile) as fp:
cdata = json.load(fp)
cdata = koji.load_json(cfile)
self.session.load_calls(cdata)
sleep.side_effect = [None] * 10 + [KeyboardInterrupt]
with self.assertRaises(KeyboardInterrupt):
@ -125,8 +122,7 @@ Running Tasks:
def test_watch_tasks_with_keyboardinterrupt_handler(self, stdout, sleep):
"""Raise KeyboardInterrupt inner watch_tasks with a ki_handler"""
cfile = os.path.dirname(__file__) + '/data/calls/watchtasks2.json'
with open(cfile) as fp:
cdata = json.load(fp)
cdata = koji.load_json(cfile)
self.session.load_calls(cdata)
sleep.side_effect = [None] * 10 + [KeyboardInterrupt]

View file

@ -96,15 +96,15 @@ class TestCompleteImageBuild(unittest.TestCase):
def set_up_files(self, name):
datadir = os.path.join(os.path.dirname(__file__), 'data/image', name)
# load image result data for our test build
data = json.load(open(datadir + '/data.json'))
self.db_expect = json.load(open(datadir + '/db.json'))
data = koji.load_json(datadir + '/data.json')
self.db_expect = koji.load_json(datadir + '/db.json')
for arch in data:
taskdir = koji.pathinfo.task(data[arch]['task_id'])
os.makedirs(taskdir)
filenames = data[arch]['files'] + data[arch]['logs']
for filename in filenames:
path = os.path.join(taskdir, filename)
with open(path, 'w') as fp:
with open(path, 'wt', encoding='utf-8') as fp:
fp.write('Test file for %s\n%s\n' % (arch, filename))
self.image_data = data

View file

@ -49,7 +49,7 @@ class TestCompleteMavenBuild(unittest.TestCase):
def set_up_files(self, name):
datadir = os.path.join(os.path.dirname(__file__), 'data/maven', name)
# load maven result data for our test build
data = json.load(open(datadir + '/data.json'))
data = koji.load_json(datadir + '/data.json')
data['task_id'] = 9999
taskdir = koji.pathinfo.task(data['task_id'])
for subdir in data['files']:
@ -64,7 +64,7 @@ class TestCompleteMavenBuild(unittest.TestCase):
dst = os.path.join(taskdir, fn)
shutil.copy(src, dst)
self.maven_data = data
files = open(datadir + '/files').readlines()
files = open(datadir + '/files', 'rt', encoding='utf-8').readlines()
files = [l.strip() for l in files]
self.expected_files = files

View file

@ -1,5 +1,4 @@
import unittest
import json
import mock
import os
import shutil
@ -138,7 +137,7 @@ class TestDistRepoMove(unittest.TestCase):
for fn in self.files:
path = os.path.join(uploaddir, fn)
koji.ensuredir(os.path.dirname(path))
with open(path, 'w') as fo:
with open(path, 'wt', encoding='utf-8') as fo:
fo.write('%s' % os.path.basename(fn))
# generate pkglist file
@ -148,7 +147,7 @@ class TestDistRepoMove(unittest.TestCase):
self.rpms = {}
self.builds ={}
self.key = '4c8da725'
with open(plist, 'w') as f_pkglist:
with open(plist, 'wt', encoding='utf-8') as f_pkglist:
for nvr in nvrs:
binfo = koji.parse_NVR(nvr)
rpminfo = binfo.copy()
@ -158,7 +157,7 @@ class TestDistRepoMove(unittest.TestCase):
path = os.path.join(builddir, relpath)
koji.ensuredir(os.path.dirname(path))
basename = os.path.basename(path)
with open(path, 'w') as fo:
with open(path, 'wt', encoding='utf-8') as fo:
fo.write('%s' % basename)
f_pkglist.write(path)
f_pkglist.write('\n')
@ -179,13 +178,11 @@ class TestDistRepoMove(unittest.TestCase):
for rpminfo in self.rpms.values():
bnp = '%(name)s-%(version)s-%(release)s.%(arch)s.rpm' % rpminfo
kojipkgs[bnp] = rpminfo
with open("%s/kojipkgs" % uploaddir, "w") as fp:
json.dump(kojipkgs, fp, indent=4)
koji.dump_json("%s/kojipkgs" % uploaddir, kojipkgs)
self.files.append('kojipkgs')
# write manifest
with open("%s/repo_manifest" % uploaddir, "w") as fp:
json.dump(self.files, fp, indent=4)
koji.dump_json("%s/repo_manifest" % uploaddir, self.files)
# mocks
self.repo_info = mock.patch('kojihub.repo_info').start()
@ -221,7 +218,7 @@ class TestDistRepoMove(unittest.TestCase):
basename = os.path.basename(path)
if not os.path.exists(path):
raise Exception("Missing file: %s" % path)
data = open(path).read()
data = open(path, 'rt', encoding='utf-8').read()
data.strip()
self.assertEquals(data, basename)

View file

@ -31,7 +31,7 @@ class TestGetBuildLogs(unittest.TestCase):
dirpath = os.path.dirname(path)
koji.ensuredir(dirpath)
if path:
with open(path, 'w') as fo:
with open(path, 'wt') as fo:
fo.write('TEST LOG FILE CONTENTS\n')
def test_get_build_logs_basic(self):
@ -54,7 +54,7 @@ class TestGetBuildLogs(unittest.TestCase):
def test_get_build_logs_notadir(self):
fn = "%s/SOMEFILE" % self.tempdir
with open(fn, 'w') as fo:
with open(fn, 'wt') as fo:
fo.write('NOT A DIRECTORY\n')
koji.pathinfo.build_logs.return_value = fn
try:

View file

@ -43,7 +43,7 @@ class TestGetUploadPath(unittest.TestCase):
fullpath = '%s/work/%s' % (self.topdir, reldir)
os.makedirs(fullpath)
with open('{0}/.user'.format(fullpath), 'wt') as f:
with open('{0}/.user'.format(fullpath), 'wt', encoding='utf-8') as f:
f.write('1')
with self.assertRaises(GenericError):

View file

@ -32,7 +32,7 @@ class TestWriteMavenRepoMetadata(unittest.TestCase):
_write_maven_repo_metadata(destdir, artifacts)
openf_mock.assert_called_with(
os.path.join(destdir, 'maven-metadata.xml'), 'w')
os.path.join(destdir, 'maven-metadata.xml'), 'wt', encoding='utf-8')
handle = openf_mock().__enter__()
expected = """\

View file

@@ -18,10 +18,10 @@ class TestGenMockConfig(unittest.TestCase):
if not fn.endswith('.data'):
continue
path = os.path.join(datadir, fn)
with open(path) as fo:
with open(path, 'rt', encoding='utf-8') as fo:
s = fo.read()
params = ast.literal_eval(s)
with open(path[:-5] + '.out') as fo:
with open(path[:-5] + '.out', 'rt', encoding='utf-8') as fo:
expected = fo.read()
output = koji.genMockConfig(**params)
self.assertMultiLineEqual(output, expected)

View file

@@ -131,7 +131,7 @@ class HeaderTestCase(unittest.TestCase):
rpmdir = os.path.join(os.path.dirname(__file__), 'data/rpms')
def setUp(self):
self.fd = open(self.rpm_path)
self.fd = open(self.rpm_path, 'rb')
def tearDown(self):
self.fd.close()

View file

@@ -364,7 +364,7 @@ class TasksTestCase(unittest.TestCase):
temp_path = get_tmp_dir_path('TestTask')
makedirs(temp_path)
temp_file = path.join(temp_path, 'test.txt')
with open(temp_file, 'w') as temp_file_handler:
with open(temp_file, 'wt') as temp_file_handler:
temp_file_handler.write('Test')
obj = TestTask(123, 'some_method', ['random_arg'], None, None, temp_path)
@@ -400,11 +400,11 @@ class TasksTestCase(unittest.TestCase):
makedirs(dummy_dir)
dummy_file = path.join(temp_path, 'test.txt')
with open(dummy_file, 'w') as temp_file_handler:
with open(dummy_file, 'wt') as temp_file_handler:
temp_file_handler.write('Test')
dummy_file2 = path.join(dummy_dir, 'test2.txt')
with open(dummy_file2, 'w') as temp_file_handler2:
with open(dummy_file2, 'wt') as temp_file_handler2:
temp_file_handler2.write('Test2')
obj = TestTask(123, 'some_method', ['random_arg'], None, None, temp_path)

View file

@@ -145,7 +145,7 @@ class MiscFunctionTestCase(unittest.TestCase):
fo = koji.openRemoteFile(path, topdir=topdir)
self.assertEqual(m_requests.call_count, 0)
m_TemporaryFile.assert_not_called()
m_open.assert_called_once_with(filename)
m_open.assert_called_once_with(filename, 'rb')
assert fo is m_open.return_value
for m in mocks:
@@ -736,7 +736,7 @@ class MavenUtilTestCase(unittest.TestCase):
def _read_conf(self, cfile):
path = os.path.dirname(__file__)
with open(path + cfile, 'r') as conf_file:
with open(path + cfile, 'rt', encoding='utf-8') as conf_file:
if six.PY2:
config = six.moves.configparser.SafeConfigParser()
config.readfp(conf_file)

View file

@@ -382,7 +382,7 @@ def send_warning_notice(owner_name, builds):
print("Warning: empty build list. No notice sent")
return
with open(options.email_template, 'r') as f:
with open(options.email_template, 'rt', encoding='utf-8') as f:
tpl = Template(f.read())
fmt = """\
@@ -782,7 +782,7 @@ def read_policies(fn=None):
The expected format as follows
test [params] [&& test [params] ...] :: (keep|untag|skip)
"""
fo = open(fn, 'r')
fo = open(fn, 'rt', encoding='utf-8')
tests = koji.policy.findSimpleTests(globals())
ret = koji.policy.SimpleRuleSet(fo, tests)
fo.close()

View file

@@ -414,7 +414,7 @@ class TrackedBuild(object):
fsrc = urllib2.urlopen(url)
fn = "%s/%s.src.rpm" % (options.workpath, self.nvr)
koji.ensuredir(os.path.dirname(fn))
fdst = open(fn, 'w')
fdst = open(fn, 'wb')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
@@ -857,7 +857,7 @@ class BuildTracker(object):
os.chown(os.path.dirname(dst), 48, 48) # XXX - hack
log("Downloading %s to %s" % (url, dst))
fsrc = urllib2.urlopen(url)
fdst = open(fn, 'w')
fdst = open(fn, 'wb')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()
@@ -871,7 +871,7 @@ class BuildTracker(object):
dst = "%s/%s" % (options.workpath, fn)
log("Downloading %s to %s..." % (url, dst))
fsrc = urllib2.urlopen(url)
fdst = open(dst, 'w')
fdst = open(dst, 'wb')
shutil.copyfileobj(fsrc, fdst)
fsrc.close()
fdst.close()

View file

@@ -21,7 +21,6 @@
# Mike McLean <mikem@redhat.com>
import errno
import json
import logging
import logging.handlers
import os
@@ -119,8 +118,7 @@ class ManagedRepo(object):
if not os.path.exists(fn):
self.logger.warning('Repo info file missing: %s', fn)
return None
with open(fn, 'r') as fp:
return json.load(fp)
return koji.load_json(fn)
def get_path(self, volume=None):
"""Return the path to the repo directory"""
@@ -732,7 +730,7 @@ class RepoManager(object):
self.updateTagScores()
if self.options.queue_file:
with open(self.options.queue_file, "wt") as f:
with open(self.options.queue_file, "wt", encoding='utf-8') as f:
fmt = "%-40s %7s %5s\n"
f.write(fmt % ("Tag", "Expired", "Score"))
for tag in sorted(self.needed_tags.values(), key=lambda t: t['score'],

View file

@@ -331,7 +331,7 @@ class WindowsBuild(object):
raise BuildError('Unknown checksum type %s for %s' % ( # noqa: F821
checksum_type,
os.path.basename(fileinfo['localpath'])))
with open(destpath, 'w') as destfile:
with open(destpath, 'wt', encoding='utf-8') as destfile:
offset = 0
while True:
encoded = self.server.getFile(buildinfo, fileinfo, encode_int(offset), 1048576,
@@ -412,7 +412,7 @@ class WindowsBuild(object):
"""Do the build: run the execute line(s) with cmd.exe"""
tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp', suffix='.bat',
dir='/cygdrive/c/Windows/Temp')
script = os.fdopen(tmpfd, 'w')
script = os.fdopen(tmpfd, 'wt', encoding='utf-8')
for attr in ['source_dir', 'spec_dir', 'patches_dir']:
val = getattr(self, attr)
if val:
@@ -449,7 +449,7 @@ class WindowsBuild(object):
def bashBuild(self):
"""Do the build: run the execute line(s) with bash"""
tmpfd, tmpname = tempfile.mkstemp(prefix='koji-tmp.', dir='/tmp')
script = os.fdopen(tmpfd, 'w')
script = os.fdopen(tmpfd, 'wt', encoding='utf-8')
script.write("export source_dir='%s'\n" % self.source_dir)
script.write("export spec_dir='%s'\n" % self.spec_dir)
if self.patches_dir:
@@ -599,7 +599,7 @@ def upload_file(server, prefix, path):
"""upload a single file to the vmd"""
logger = logging.getLogger('koji.vm')
destpath = os.path.join(prefix, path)
fobj = open(destpath, 'r')
fobj = open(destpath, 'rb')
offset = 0
sum = hashlib.sha256()
while True:
@@ -657,7 +657,7 @@ def setup_logging(opts):
if opts.debug:
level = logging.DEBUG
logger.setLevel(level)
logfd = open(logfile, 'w')
logfd = open(logfile, 'wt', encoding='utf-8')
handler = logging.StreamHandler(logfd)
handler.setLevel(level)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s'))
@@ -688,7 +688,7 @@ def stream_logs(server, handler, builds):
if not fd:
if os.path.isfile(log):
try:
fd = open(log, 'r')
fd = open(log, 'rb')
logs[log] = (relpath, fd)
except Exception:
log_local('Error opening %s' % log)

View file

@@ -722,7 +722,7 @@ class VMExecTask(BaseTaskHandler):
self.verifyChecksum(localpath, fileinfo['checksum'],
koji.CHECKSUM_TYPES[fileinfo['checksum_type']])
return open(localpath, 'r')
return open(localpath, 'rb')
def getFile(self, buildinfo, archiveinfo, offset, length, type):
"""
@@ -751,7 +751,7 @@ class VMExecTask(BaseTaskHandler):
if offset == 0:
if os.path.exists(local_path):
raise koji.BuildError('cannot overwrite %s' % local_path)
fobj = open(local_path, 'w')
fobj = open(local_path, 'wb')
else:
if not os.path.isfile(local_path):
raise koji.BuildError('% does not exist' % local_path)
@@ -759,7 +759,7 @@ class VMExecTask(BaseTaskHandler):
if offset != size:
raise koji.BuildError('cannot write to %s at offset %s, size is %s' %
(local_path, offset, size))
fobj = open(local_path, 'r+')
fobj = open(local_path, 'rb+')
fobj.seek(offset)
data = base64.b64decode(contents)
fobj.write(data)
@@ -792,7 +792,7 @@ class VMExecTask(BaseTaskHandler):
else:
raise koji.BuildError('unsupported checksum algorithm: %s' % algo)
with open(local_path, 'r') as f:
with open(local_path, 'rb') as f:
while True:
data = f.read(1048576)
if not data:

View file

@@ -9,7 +9,7 @@
#set $localfooterpath=$util.themePath("extra-footer.html", local=True)
#if os.path.exists($localfooterpath)
#if $literalFooter
#set $localfooter="".join(open($localfooterpath).readlines())
#set $localfooter="".join(open($localfooterpath, 'rt', encoding='utf-8').readlines())
$localfooter
#else
#include $localfooterpath
@@ -20,7 +20,7 @@ $localfooter
</div>
#set $localbottompath=$util.themePath("extra-bottom.html", local=True)
#if os.path.exists($localbottompath)
#set $localbottom="".join(open($localbottompath).readlines())
#set $localbottom="".join(open($localbottompath, 'rt', encoding='utf-8').readlines())
$localbottom
#end if
</body>

View file

@@ -29,7 +29,7 @@
<img src="$util.themePath('images/koji.png')" alt="Koji Logo" id="kojiLogo"/>
#set $localnavpath=$util.themePath("extra-nav.html", local=True)
#if os.path.exists($localnavpath)
#set $localnav="".join(open($localnavpath).readlines())
#set $localnav="".join(open($localnavpath, 'rt', encoding='utf-8').readlines())
$localnav
#end if
<form action="search" id="headerSearch">