test: explicit encodings for open()
parent 3703328751
commit 38d2ab685c
14 changed files with 63 additions and 61 deletions
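
The change is mechanical: the touched `open()` calls and text-mode `subprocess` invocations in the test suite now pass an explicit `encoding` instead of relying on the locale default, and existing `"utf-8"` spellings are normalized to `"utf8"` (both name the same codec in Python). A minimal before/after sketch of the pattern; `path` is a placeholder, not taken from the diff:

    # before: the text encoding is whatever the locale default happens to be
    with open(path) as f:
        data = f.read()

    # after: the encoding is explicit ("utf8" is an alias for "utf-8")
    with open(path, encoding="utf8") as f:
        data = f.read()
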
@@ -168,7 +168,7 @@ class Initrd:
 encoding=None,
 stdin=image,
 stderr=subprocess.DEVNULL)
-return output.strip().decode('utf-8')
+return output.strip().decode('utf8')


 def read_initrd(path):

@@ -55,14 +55,14 @@ def test_runner_fail(tempdir):
 logfile = os.path.join(tempdir, "log.txt")

 with BuildRoot("/", runner, libdir, var) as root, \
-open(logfile, "w") as log:
+open(logfile, "w", encoding="utf8") as log:

 monitor = LogMonitor(log.fileno())

 r = root.run(["/usr/bin/true"], monitor)

 assert r.returncode == 1
-with open(logfile) as f:
+with open(logfile, encoding="utf8") as f:
 log = f.read()
 assert log
 assert r.output

@@ -208,7 +208,7 @@ def test_env_isolation(tempdir):
 r = root.run(cmd, monitor, binds=[f"{ipc}:/ipc"])

 assert r.returncode == 0
-with open(os.path.join(ipc, "env.txt")) as f:
+with open(os.path.join(ipc, "env.txt"), encoding="utf8") as f:
 data = f.read().strip()
 assert data
 have = dict(map(lambda x: x.split("=", 1), data.split("\n")))

@@ -247,7 +247,7 @@ def test_caps(tempdir):
 r = root.run(cmd, monitor, binds=[f"{ipc}:/ipc"])

 assert r.returncode == 0
-with open(os.path.join(ipc, "status"), encoding="utf-8") as f:
+with open(os.path.join(ipc, "status"), encoding="utf8") as f:
 data = f.readlines()
 assert data

@@ -67,13 +67,13 @@ class TestMonitor(unittest.TestCase):

 logfile = os.path.join(tmpdir, "log.txt")

-with open(logfile, "w") as log, ObjectStore(storedir) as store:
+with open(logfile, "w", encoding="utf8") as log, ObjectStore(storedir) as store:
 monitor = LogMonitor(log.fileno())
 res = pipeline.run(store,
 monitor,
 libdir=os.path.abspath(os.curdir))

-with open(logfile) as f:
+with open(logfile, encoding="utf8") as f:
 log = f.read()

 assert res

@@ -122,7 +122,7 @@ class TestObjectStore(unittest.TestCase):
 with object_store.new() as tree:
 path = tree.write()
 with tree.write() as path, \
-open(os.path.join(path, "data"), "w") as f:
+open(os.path.join(path, "data"), "w", encoding="utf8") as f:
 f.write(data)
 st = os.fstat(f.fileno())
 data_inode = st.st_ino

@@ -134,7 +134,7 @@ class TestObjectStore(unittest.TestCase):
 # check that "data" is still the very
 # same file after committing
 with tree.read() as path:
-with open(os.path.join(path, "data"), "r") as f:
+with open(os.path.join(path, "data"), "r", encoding="utf8") as f:
 st = os.fstat(f.fileno())
 self.assertEqual(st.st_ino, data_inode)
 data_read = f.read()

@@ -147,7 +147,7 @@ class TestObjectStore(unittest.TestCase):
 with object_store.new(base_id="x") as tree:
 self.assertEqual(tree.base, "x")
 with tree.read() as path:
-with open(os.path.join(path, "data"), "r") as f:
+with open(os.path.join(path, "data"), "r", encoding="utf8") as f:
 # copy-on-write: since we have not written
 # to the tree yet, "data" should be the
 # very same file as that one of object "x"

@@ -303,7 +303,7 @@ class TestObjectStore(unittest.TestCase):
 assert Path(path) == mountpoint
 filepath = Path(mountpoint, "file.txt")
 assert filepath.exists()
-txt = filepath.read_text()
+txt = filepath.read_text(encoding="utf8")
 assert txt == "osbuild"

 # check we can mount subtrees via `read_tree_at`

@@ -314,7 +314,7 @@ class TestObjectStore(unittest.TestCase):
 path = client.read_tree_at("42", filemount, "/file.txt")
 filepath = Path(path)
 assert filepath.is_file()
-txt = filepath.read_text()
+txt = filepath.read_text(encoding="utf8")
 assert txt == "osbuild"

 dirmount = Path(tmpdir, "dir")

@@ -26,7 +26,7 @@ def test_ioctl_get_immutable(tmpdir):
 # as intended.
 #

-with open(f"{tmpdir}/immutable", "x") as f:
+with open(f"{tmpdir}/immutable", "x", encoding="utf8") as f:
 assert not linux.ioctl_get_immutable(f.fileno())


@@ -37,7 +37,7 @@ def test_ioctl_toggle_immutable(tmpdir):
 # as intended.
 #

-with open(f"{tmpdir}/immutable", "x") as f:
+with open(f"{tmpdir}/immutable", "x", encoding="utf8") as f:
 # Check the file is mutable by default and if we clear it again.
 assert not linux.ioctl_get_immutable(f.fileno())
 linux.ioctl_toggle_immutable(f.fileno(), False)

@@ -42,13 +42,13 @@ class TestUtilLorax(test.TestBase):
 os.makedirs(root)
 os.makedirs(tree)

-with open(os.path.join(root, "hello.txt"), "w") as f:
+with open(os.path.join(root, "hello.txt"), "w", encoding="utf8") as f:
 f.write("Hello World\n")

 self.assertExists(root, "hello.txt")

 template = os.path.join(tmp, "template.tmpl")
-with open(os.path.join(tmp, template), "w") as f:
+with open(os.path.join(tmp, template), "w", encoding="utf8") as f:
 f.write(BASIC_TEMPLATE)

 # parse the template and render it

@@ -76,7 +76,7 @@ class TestUtilLorax(test.TestBase):
 self.assertExists(tree, "foo.txt")

 for fn in ["a.txt", "b.txt"]:
-with open(os.path.join(tree, fn), "r") as f:
+with open(os.path.join(tree, fn), "r", encoding="utf8") as f:
 data = f.read().strip()
 self.assertEqual(data, "osbuild-42")

@@ -24,7 +24,7 @@ def have_lvm() -> bool:
 try:
 r = subprocess.run(
 ["vgs"],
-encoding="utf-8",
+encoding="utf8",
 stdout=subprocess.PIPE,
 check=False
 )

@@ -15,7 +15,7 @@ from osbuild.util import ostree
 from .. import test


-def run(*args, check=True, encoding="utf-8", **kwargs):
+def run(*args, check=True, encoding="utf8", **kwargs):
 res = subprocess.run(*args,
 encoding=encoding,
 check=check,

@@ -56,7 +56,7 @@ class TestObjectStore(test.TestBase):
 tf["ref"] = test_ref

 with tf.as_tmp_file() as path:
-with open(path, "r") as f:
+with open(path, "r", encoding="utf8") as f:
 js = json.load(f)
 self.assertEqual(js["ref"], test_ref)
 self.assertEqual(tf["ref"], test_ref)

@@ -118,16 +118,16 @@ class TestPasswdLike(unittest.TestCase):
 "lp:x:4:7:lp:/var/spool/lpd:/sbin/nologin\n",
 "sync:x:5:0:sync:/sbin:/bin/sync\n"
 ]
-with open(os.path.join(tmpdir, "primary"), "w") as f:
+with open(os.path.join(tmpdir, "primary"), "w", encoding="utf8") as f:
 f.writelines(primary_file_lines)
-with open(os.path.join(tmpdir, "secondary"), "w") as f:
+with open(os.path.join(tmpdir, "secondary"), "w", encoding="utf8") as f:
 f.writelines(secondary_file_lines)

 passwd = ostree.PasswdLike.from_file(os.path.join(tmpdir, "primary"))
 passwd.merge_with_file(os.path.join(tmpdir, "secondary"))
 passwd.dump_to_file(os.path.join(tmpdir, "result"))

-with open(os.path.join(tmpdir, "result"), "r") as f:
+with open(os.path.join(tmpdir, "result"), "r", encoding="utf8") as f:
 self.assertEqual(sorted(f.readlines()), sorted(result_file_lines))

 def test_merge_group(self):

@@ -145,16 +145,16 @@ class TestPasswdLike(unittest.TestCase):
 "bin:x:1:\n",
 "daemon:x:2:\n"
 ]
-with open(os.path.join(tmpdir, "primary"), "w") as f:
+with open(os.path.join(tmpdir, "primary"), "w", encoding="utf8") as f:
 f.writelines(primary_file_lines)
-with open(os.path.join(tmpdir, "secondary"), "w") as f:
+with open(os.path.join(tmpdir, "secondary"), "w", encoding="utf8") as f:
 f.writelines(secondary_file_lines)

 passwd = ostree.PasswdLike.from_file(os.path.join(tmpdir, "primary"))
 passwd.merge_with_file(os.path.join(tmpdir, "secondary"))
 passwd.dump_to_file(os.path.join(tmpdir, "result"))

-with open(os.path.join(tmpdir, "result"), "r") as f:
+with open(os.path.join(tmpdir, "result"), "r", encoding="utf8") as f:
 self.assertEqual(sorted(f.readlines()), sorted(result_file_lines))

 #pylint: disable=no-self-use

@@ -29,7 +29,8 @@ class TestAssemblers(test.TestBase):
 @contextlib.contextmanager
 def run_assembler(self, osb, name, options, output_path):
 with open(os.path.join(self.locate_test_data(),
-"manifests/filesystem.json")) as f:
+"manifests/filesystem.json"),
+encoding="utf8") as f:
 manifest = json.load(f)
 manifest["pipeline"] = dict(
 manifest["pipeline"],

@@ -51,7 +52,7 @@ class TestAssemblers(test.TestBase):
 self.assertEqual(info["virtual-size"], expected_size)

 def assertFilesystem(self, device, uuid, fstype, tree):
-output = subprocess.check_output(["blkid", "--output", "export", device], encoding="utf-8")
+output = subprocess.check_output(["blkid", "--output", "export", device], encoding="utf8")
 blkid = dict(line.split("=") for line in output.strip().split("\n"))
 self.assertEqual(blkid["UUID"], uuid)
 self.assertEqual(blkid["TYPE"], fstype)

@@ -113,7 +114,8 @@ class TestAssemblers(test.TestBase):
 def test_ostree(self):
 with self.osbuild as osb:
 with open(os.path.join(self.locate_test_data(),
-"manifests/fedora-ostree-commit.json")) as f:
+"manifests/fedora-ostree-commit.json"),
+encoding="utf8") as f:
 manifest = json.load(f)

 data = json.dumps(manifest)

@@ -122,7 +124,7 @@ class TestAssemblers(test.TestBase):
 compose_file = os.path.join(output_dir, "ostree-commit", "compose.json")
 repo = os.path.join(output_dir, "ostree-commit", "repo")

-with open(compose_file) as f:
+with open(compose_file, encoding="utf8") as f:
 compose = json.load(f)
 commit_id = compose["ostree-commit"]
 ref = compose["ref"]

@@ -146,7 +148,7 @@ class TestAssemblers(test.TestBase):
 "--repo", repo,
 "--print-metadata-key=rpmostree.inputhash",
 commit_id
-], encoding="utf-8").strip()
+], encoding="utf8").strip()
 self.assertEqual(md, f"'{rpmostree_inputhash}'")

 md = subprocess.check_output(

@@ -156,7 +158,7 @@ class TestAssemblers(test.TestBase):
 "--repo", repo,
 "--print-metadata-key=version",
 commit_id
-], encoding="utf-8").strip()
+], encoding="utf8").strip()
 self.assertEqual(md, f"'{os_version}'")

 @unittest.skipUnless(test.TestBase.have_tree_diff(), "tree-diff missing")

@@ -223,7 +225,7 @@ class TestAssemblers(test.TestBase):
 "org.osbuild.tar",
 options,
 filename) as (tree, image):
-output = subprocess.check_output(["file", "--mime-type", image], encoding="utf-8")
+output = subprocess.check_output(["file", "--mime-type", image], encoding="utf8")
 _, mimetype = output.strip().split(": ") # "filename: mimetype"
 self.assertIn(mimetype, expected_mimetypes)


@@ -240,7 +242,7 @@ class TestAssemblers(test.TestBase):
 "--xattrs", "--xattrs-include", "*",
 "-xaf", image,
 "-C", tmp]
-subprocess.check_output(args, encoding="utf-8")
+subprocess.check_output(args, encoding="utf8")
 diff = self.tree_diff(tree, tmp)
 self.assertEqual(diff["added_files"], [])
 self.assertEqual(diff["deleted_files"], [])

@@ -46,7 +46,7 @@ class TestBoot(test.TestBase):
 "-device", "virtserialport,chardev=stdio",

 qcow2],
-encoding="utf-8",
+encoding="utf8",
 check=True)
-with open(output_file, "r") as f:
+with open(output_file, "r", encoding="utf8") as f:
 self.assertEqual(f.read().strip(), "running")

@@ -77,5 +77,5 @@ def test_loopback_basic(tmpdir):
 client.call("close", None)

 lo = loop.Loop(minor)
-with open(filename, "r") as f:
+with open(filename, "r", encoding="utf8") as f:
 assert not lo.is_bound_to(f.fileno())

@@ -35,7 +35,7 @@ libc.setns.errcheck = errcheck
 @contextlib.contextmanager
 def netns():
 # Grab a reference to the current namespace.
-with open("/proc/self/ns/net") as oldnet:
+with open("/proc/self/ns/net", encoding="utf8") as oldnet:
 # Create a new namespace and enter it.
 libc.unshare(CLONE_NEWNET)
 try:

@@ -76,7 +76,7 @@ def runFileServer(barrier, directory):

 def guess_type(self, path):
 try:
-with open(path + ".mimetype", "r") as f:
+with open(path + ".mimetype", "r", encoding="utf8") as f:
 return f.read().strip()
 except FileNotFoundError:
 pass

@@ -119,7 +119,7 @@ def test_sources(source, case, tmpdir):
 index = osbuild.meta.Index(os.curdir)
 sources = os.path.join(test.TestBase.locate_test_data(), "sources")

-with open(f"{sources}/{source}/cases/{case}") as f:
+with open(f"{sources}/{source}/cases/{case}", encoding="utf8") as f:
 case_options = json.load(f)

 info = index.get_module_info("Source", source)

@@ -25,7 +25,7 @@ def have_sfdisk_with_json():
 r = subprocess.run(["sfdisk", "--version"],
 stdout=subprocess.PIPE,
 stderr=subprocess.PIPE,
-encoding="utf-8",
+encoding="utf8",
 check=False)

 if r.returncode != 0:

@@ -187,7 +187,7 @@ class TestStages(test.TestBase):
 pprint.pformat(want).splitlines())
 txt = "\n".join(diff)
 path = f"/tmp/osbuild.metadata.{stageid}.json"
-with open(path, "w") as f:
+with open(path, "w", encoding="utf8") as f:
 json.dump(have, f, indent=2)
 self.fail(f"metadata for {stageid} differs:\n{txt}\n{path}")


@@ -224,14 +224,14 @@ class TestStages(test.TestBase):

 actual_diff = self.tree_diff(tree1, tree2)

-with open(os.path.join(test_dir, "diff.json")) as f:
+with open(os.path.join(test_dir, "diff.json"), encoding="utf8") as f:
 expected_diff = json.load(f)

 self.assertTreeDiffsEqual(expected_diff, actual_diff)

 md_path = os.path.join(test_dir, "metadata.json")
 if os.path.exists(md_path):
-with open(md_path, "r") as f:
+with open(md_path, "r", encoding="utf8") as f:
 metadata = json.load(f)

 self.assertMetadata(metadata, res)

@@ -245,7 +245,7 @@ class TestStages(test.TestBase):
 datadir = self.locate_test_data()
 base = os.path.join(datadir, "stages/dracut")

-with open(f"{base}/vanilla.json", "r") as f:
+with open(f"{base}/vanilla.json", "r", encoding="utf8") as f:
 refs = json.load(f)

 with self.osbuild as osb, tempfile.TemporaryDirectory(dir="/var/tmp") as outdir:

@@ -273,7 +273,7 @@ class TestStages(test.TestBase):
 testdir = os.path.join(datadir, "stages", "selinux")

 def load_manifest(manifest_name):
-with open(os.path.join(datadir, f"manifests/{manifest_name}")) as f:
+with open(os.path.join(datadir, f"manifests/{manifest_name}"), encoding="utf8") as f:
 manifest = json.load(f)
 return manifest

@@ -281,7 +281,7 @@ class TestStages(test.TestBase):

 for t in glob.glob(f"{testdir}/test_*.json"):
 manifest = load_manifest("f34-base.json")
-with open(t) as f:
+with open(t, encoding="utf8") as f:
 check = json.load(f)
 manifest["pipeline"]["stages"].append({
 "name": "org.osbuild.selinux",

@@ -308,7 +308,7 @@ class TestStages(test.TestBase):

 checks_path = os.path.join(testdir, "checks.json")
 checks = {}
-with open(checks_path) as f:
+with open(checks_path, encoding="utf8") as f:
 checks = json.load(f)

 for image_name, test_data in checks.items():

@@ -326,7 +326,7 @@ class TestStages(test.TestBase):
 ["qemu-img", "info", "--output=json", ip],
 capture_output=True,
 check=True,
-encoding="utf-8"
+encoding="utf8"
 )

 qemu_img_out = json.loads(qemu_img_run.stdout)

@@ -371,7 +371,7 @@ class TestStages(test.TestBase):

 imgname = "disk.img"

-with open(os.path.join(testdir, f"{imgname}.json"), "r") as f:
+with open(os.path.join(testdir, f"{imgname}.json"), "r", encoding="utf8") as f:
 want = json.load(f)

 with self.osbuild as osb, tempfile.TemporaryDirectory(dir="/var/tmp") as outdir:

@@ -388,7 +388,7 @@ class TestStages(test.TestBase):
 r = subprocess.run(["sfdisk", "--json", target],
 stdout=subprocess.PIPE,
 stderr=subprocess.PIPE,
-encoding="utf-8",
+encoding="utf8",
 check=False)

 have = json.loads(r.stdout)

@@ -419,7 +419,7 @@ class TestStages(test.TestBase):

 imgname = "disk.img"

-with open(os.path.join(testdir, f"{imgname}.json"), "r") as f:
+with open(os.path.join(testdir, f"{imgname}.json"), "r", encoding="utf8") as f:
 want = json.load(f)

 with self.osbuild as osb, tempfile.TemporaryDirectory(dir="/var/tmp") as outdir:

@@ -436,7 +436,7 @@ class TestStages(test.TestBase):
 r = subprocess.run(["sfdisk", "--json", target],
 stdout=subprocess.PIPE,
 stderr=subprocess.PIPE,
-encoding="utf-8",
+encoding="utf8",
 check=False)

 have = json.loads(r.stdout)

test/test.py (16 changed lines)

@@ -133,9 +133,9 @@ class TestBase(unittest.TestCase):
 original = os.path.join(tmpdir, "original")
 mnt = os.path.join(tmpdir, "mnt")

-with open(original, "w") as f:
+with open(original, "w", encoding="utf8") as f:
 f.write("foo")
-with open(mnt, "w") as f:
+with open(mnt, "w", encoding="utf8") as f:
 f.write("bar")

 try:

@@ -153,7 +153,7 @@ class TestBase(unittest.TestCase):
 stderr=subprocess.DEVNULL,
 check=True,
 )
-with open(mnt, "r") as f:
+with open(mnt, "r", encoding="utf8") as f:
 assert f.read() == "foo"
 return True
 except subprocess.CalledProcessError:

@@ -178,7 +178,7 @@ class TestBase(unittest.TestCase):
 try:
 r = subprocess.run(
 ["autopep8-3", "--version"],
-encoding="utf-8", stdout=subprocess.PIPE, check=False
+encoding="utf8", stdout=subprocess.PIPE, check=False
 )
 except FileNotFoundError:
 return False

@@ -196,7 +196,7 @@ class TestBase(unittest.TestCase):
 try:
 r = subprocess.run(
 ["rpm-ostree", "--version"],
-encoding="utf-8", stdout=subprocess.PIPE, check=False
+encoding="utf8", stdout=subprocess.PIPE, check=False
 )
 except FileNotFoundError:
 return False

@@ -333,7 +333,7 @@ class OSBuild(contextlib.AbstractContextManager):

 cmd_args += ["-"]

-logfile_context = tempfile.NamedTemporaryFile(dir="/var/tmp", mode="w+", encoding="utf-8")
+logfile_context = tempfile.NamedTemporaryFile(dir="/var/tmp", mode="w+", encoding="utf8")
 logfile = cm.enter_context(logfile_context)

 cmd_args += ["--monitor", "LogMonitor", "--monitor-fd", str(logfile.fileno())]

@@ -345,7 +345,7 @@ class OSBuild(contextlib.AbstractContextManager):
 try:
 p = subprocess.Popen(
 cmd_args,
-encoding="utf-8",
+encoding="utf8",
 stdin=subprocess.PIPE,
 stdout=subprocess.PIPE,
 stderr=subprocess.PIPE,

@@ -376,7 +376,7 @@ class OSBuild(contextlib.AbstractContextManager):
 to `compile()`.
 """

-with open(file_stdin, "r") as f:
+with open(file_stdin, "r", encoding="utf8") as f:
 data_stdin = f.read()
 return self.compile(data_stdin, output_dir, checkpoints=checkpoints, exports=exports)