test: explicit encodings for open()

Simon de Vlieger 2022-09-09 11:40:27 +02:00
parent 3703328751
commit 38d2ab685c
14 changed files with 63 additions and 61 deletions
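
The change applies one pattern across the test suite: every text-mode open() call and Path.read_text() now names its encoding explicitly instead of relying on the locale-dependent default, and existing encoding="utf-8" arguments (mostly on subprocess calls) are normalized to the equivalent "utf8" spelling. In Python, "utf8" is a built-in alias for the "utf-8" codec, so the spelling switches do not change behaviour. A minimal sketch of the before/after shape (illustrative only; `logfile` is a stand-in name, not a line taken from any file in this commit):

    # before: the encoding falls back to locale.getpreferredencoding(False)
    with open(logfile, "w") as log:
        log.write("hello\n")

    # after: the encoding is stated explicitly at the call site
    with open(logfile, "w", encoding="utf8") as log:
        log.write("hello\n")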


@@ -168,7 +168,7 @@ class Initrd:
             encoding=None,
             stdin=image,
             stderr=subprocess.DEVNULL)
-        return output.strip().decode('utf-8')
+        return output.strip().decode('utf8')
 def read_initrd(path):


@@ -55,14 +55,14 @@ def test_runner_fail(tempdir):
     logfile = os.path.join(tempdir, "log.txt")
     with BuildRoot("/", runner, libdir, var) as root, \
-         open(logfile, "w") as log:
+         open(logfile, "w", encoding="utf8") as log:
         monitor = LogMonitor(log.fileno())
         r = root.run(["/usr/bin/true"], monitor)
         assert r.returncode == 1
-    with open(logfile) as f:
+    with open(logfile, encoding="utf8") as f:
         log = f.read()
     assert log
     assert r.output
@@ -208,7 +208,7 @@ def test_env_isolation(tempdir):
         r = root.run(cmd, monitor, binds=[f"{ipc}:/ipc"])
         assert r.returncode == 0
-        with open(os.path.join(ipc, "env.txt")) as f:
+        with open(os.path.join(ipc, "env.txt"), encoding="utf8") as f:
             data = f.read().strip()
         assert data
         have = dict(map(lambda x: x.split("=", 1), data.split("\n")))
@@ -247,7 +247,7 @@ def test_caps(tempdir):
         r = root.run(cmd, monitor, binds=[f"{ipc}:/ipc"])
         assert r.returncode == 0
-        with open(os.path.join(ipc, "status"), encoding="utf-8") as f:
+        with open(os.path.join(ipc, "status"), encoding="utf8") as f:
             data = f.readlines()
         assert data


@@ -67,13 +67,13 @@ class TestMonitor(unittest.TestCase):
             logfile = os.path.join(tmpdir, "log.txt")
-            with open(logfile, "w") as log, ObjectStore(storedir) as store:
+            with open(logfile, "w", encoding="utf8") as log, ObjectStore(storedir) as store:
                 monitor = LogMonitor(log.fileno())
                 res = pipeline.run(store,
                                    monitor,
                                    libdir=os.path.abspath(os.curdir))
-            with open(logfile) as f:
+            with open(logfile, encoding="utf8") as f:
                 log = f.read()
             assert res


@@ -122,7 +122,7 @@ class TestObjectStore(unittest.TestCase):
             with object_store.new() as tree:
                 path = tree.write()
                 with tree.write() as path, \
-                     open(os.path.join(path, "data"), "w") as f:
+                     open(os.path.join(path, "data"), "w", encoding="utf8") as f:
                     f.write(data)
                     st = os.fstat(f.fileno())
                     data_inode = st.st_ino
@@ -134,7 +134,7 @@ class TestObjectStore(unittest.TestCase):
                 # check that "data" is still the very
                 # same file after committing
                 with tree.read() as path:
-                    with open(os.path.join(path, "data"), "r") as f:
+                    with open(os.path.join(path, "data"), "r", encoding="utf8") as f:
                         st = os.fstat(f.fileno())
                         self.assertEqual(st.st_ino, data_inode)
                         data_read = f.read()
@@ -147,7 +147,7 @@ class TestObjectStore(unittest.TestCase):
             with object_store.new(base_id="x") as tree:
                 self.assertEqual(tree.base, "x")
                 with tree.read() as path:
-                    with open(os.path.join(path, "data"), "r") as f:
+                    with open(os.path.join(path, "data"), "r", encoding="utf8") as f:
                         # copy-on-write: since we have not written
                         # to the tree yet, "data" should be the
                         # very same file as that one of object "x"
@@ -303,7 +303,7 @@ class TestObjectStore(unittest.TestCase):
         assert Path(path) == mountpoint
         filepath = Path(mountpoint, "file.txt")
         assert filepath.exists()
-        txt = filepath.read_text()
+        txt = filepath.read_text(encoding="utf8")
         assert txt == "osbuild"
         # check we can mount subtrees via `read_tree_at`
@@ -314,7 +314,7 @@ class TestObjectStore(unittest.TestCase):
         path = client.read_tree_at("42", filemount, "/file.txt")
         filepath = Path(path)
         assert filepath.is_file()
-        txt = filepath.read_text()
+        txt = filepath.read_text(encoding="utf8")
         assert txt == "osbuild"
         dirmount = Path(tmpdir, "dir")


@@ -26,7 +26,7 @@ def test_ioctl_get_immutable(tmpdir):
     # as intended.
     #
-    with open(f"{tmpdir}/immutable", "x") as f:
+    with open(f"{tmpdir}/immutable", "x", encoding="utf8") as f:
         assert not linux.ioctl_get_immutable(f.fileno())
@@ -37,7 +37,7 @@ def test_ioctl_toggle_immutable(tmpdir):
     # as intended.
     #
-    with open(f"{tmpdir}/immutable", "x") as f:
+    with open(f"{tmpdir}/immutable", "x", encoding="utf8") as f:
         # Check the file is mutable by default and if we clear it again.
         assert not linux.ioctl_get_immutable(f.fileno())
         linux.ioctl_toggle_immutable(f.fileno(), False)


@@ -42,13 +42,13 @@ class TestUtilLorax(test.TestBase):
             os.makedirs(root)
             os.makedirs(tree)
-            with open(os.path.join(root, "hello.txt"), "w") as f:
+            with open(os.path.join(root, "hello.txt"), "w", encoding="utf8") as f:
                 f.write("Hello World\n")
             self.assertExists(root, "hello.txt")
             template = os.path.join(tmp, "template.tmpl")
-            with open(os.path.join(tmp, template), "w") as f:
+            with open(os.path.join(tmp, template), "w", encoding="utf8") as f:
                 f.write(BASIC_TEMPLATE)
             # parse the template and render it
@@ -76,7 +76,7 @@ class TestUtilLorax(test.TestBase):
             self.assertExists(tree, "foo.txt")
             for fn in ["a.txt", "b.txt"]:
-                with open(os.path.join(tree, fn), "r") as f:
+                with open(os.path.join(tree, fn), "r", encoding="utf8") as f:
                     data = f.read().strip()
                     self.assertEqual(data, "osbuild-42")


@@ -24,7 +24,7 @@ def have_lvm() -> bool:
     try:
         r = subprocess.run(
             ["vgs"],
-            encoding="utf-8",
+            encoding="utf8",
             stdout=subprocess.PIPE,
             check=False
         )


@@ -15,7 +15,7 @@ from osbuild.util import ostree
 from .. import test
-def run(*args, check=True, encoding="utf-8", **kwargs):
+def run(*args, check=True, encoding="utf8", **kwargs):
     res = subprocess.run(*args,
                          encoding=encoding,
                          check=check,
@@ -56,7 +56,7 @@ class TestObjectStore(test.TestBase):
             tf["ref"] = test_ref
             with tf.as_tmp_file() as path:
-                with open(path, "r") as f:
+                with open(path, "r", encoding="utf8") as f:
                     js = json.load(f)
                     self.assertEqual(js["ref"], test_ref)
             self.assertEqual(tf["ref"], test_ref)
@@ -118,16 +118,16 @@ class TestPasswdLike(unittest.TestCase):
                 "lp:x:4:7:lp:/var/spool/lpd:/sbin/nologin\n",
                 "sync:x:5:0:sync:/sbin:/bin/sync\n"
             ]
-            with open(os.path.join(tmpdir, "primary"), "w") as f:
+            with open(os.path.join(tmpdir, "primary"), "w", encoding="utf8") as f:
                 f.writelines(primary_file_lines)
-            with open(os.path.join(tmpdir, "secondary"), "w") as f:
+            with open(os.path.join(tmpdir, "secondary"), "w", encoding="utf8") as f:
                 f.writelines(secondary_file_lines)
             passwd = ostree.PasswdLike.from_file(os.path.join(tmpdir, "primary"))
             passwd.merge_with_file(os.path.join(tmpdir, "secondary"))
             passwd.dump_to_file(os.path.join(tmpdir, "result"))
-            with open(os.path.join(tmpdir, "result"), "r") as f:
+            with open(os.path.join(tmpdir, "result"), "r", encoding="utf8") as f:
                 self.assertEqual(sorted(f.readlines()), sorted(result_file_lines))
     def test_merge_group(self):
@@ -145,16 +145,16 @@ class TestPasswdLike(unittest.TestCase):
                 "bin:x:1:\n",
                 "daemon:x:2:\n"
             ]
-            with open(os.path.join(tmpdir, "primary"), "w") as f:
+            with open(os.path.join(tmpdir, "primary"), "w", encoding="utf8") as f:
                 f.writelines(primary_file_lines)
-            with open(os.path.join(tmpdir, "secondary"), "w") as f:
+            with open(os.path.join(tmpdir, "secondary"), "w", encoding="utf8") as f:
                 f.writelines(secondary_file_lines)
             passwd = ostree.PasswdLike.from_file(os.path.join(tmpdir, "primary"))
             passwd.merge_with_file(os.path.join(tmpdir, "secondary"))
             passwd.dump_to_file(os.path.join(tmpdir, "result"))
-            with open(os.path.join(tmpdir, "result"), "r") as f:
+            with open(os.path.join(tmpdir, "result"), "r", encoding="utf8") as f:
                 self.assertEqual(sorted(f.readlines()), sorted(result_file_lines))
     #pylint: disable=no-self-use


@@ -29,7 +29,8 @@ class TestAssemblers(test.TestBase):
     @contextlib.contextmanager
     def run_assembler(self, osb, name, options, output_path):
         with open(os.path.join(self.locate_test_data(),
-                               "manifests/filesystem.json")) as f:
+                               "manifests/filesystem.json"),
+                  encoding="utf8") as f:
             manifest = json.load(f)
         manifest["pipeline"] = dict(
             manifest["pipeline"],
@@ -51,7 +52,7 @@ class TestAssemblers(test.TestBase):
         self.assertEqual(info["virtual-size"], expected_size)
     def assertFilesystem(self, device, uuid, fstype, tree):
-        output = subprocess.check_output(["blkid", "--output", "export", device], encoding="utf-8")
+        output = subprocess.check_output(["blkid", "--output", "export", device], encoding="utf8")
         blkid = dict(line.split("=") for line in output.strip().split("\n"))
         self.assertEqual(blkid["UUID"], uuid)
         self.assertEqual(blkid["TYPE"], fstype)
@@ -113,7 +114,8 @@ class TestAssemblers(test.TestBase):
     def test_ostree(self):
         with self.osbuild as osb:
             with open(os.path.join(self.locate_test_data(),
-                                   "manifests/fedora-ostree-commit.json")) as f:
+                                   "manifests/fedora-ostree-commit.json"),
+                      encoding="utf8") as f:
                 manifest = json.load(f)
             data = json.dumps(manifest)
@@ -122,7 +124,7 @@ class TestAssemblers(test.TestBase):
             compose_file = os.path.join(output_dir, "ostree-commit", "compose.json")
             repo = os.path.join(output_dir, "ostree-commit", "repo")
-            with open(compose_file) as f:
+            with open(compose_file, encoding="utf8") as f:
                 compose = json.load(f)
             commit_id = compose["ostree-commit"]
             ref = compose["ref"]
@@ -146,7 +148,7 @@ class TestAssemblers(test.TestBase):
                 "--repo", repo,
                 "--print-metadata-key=rpmostree.inputhash",
                 commit_id
-            ], encoding="utf-8").strip()
+            ], encoding="utf8").strip()
             self.assertEqual(md, f"'{rpmostree_inputhash}'")
             md = subprocess.check_output(
@@ -156,7 +158,7 @@ class TestAssemblers(test.TestBase):
                 "--repo", repo,
                 "--print-metadata-key=version",
                 commit_id
-            ], encoding="utf-8").strip()
+            ], encoding="utf8").strip()
             self.assertEqual(md, f"'{os_version}'")
     @unittest.skipUnless(test.TestBase.have_tree_diff(), "tree-diff missing")
@@ -223,7 +225,7 @@ class TestAssemblers(test.TestBase):
                                 "org.osbuild.tar",
                                 options,
                                 filename) as (tree, image):
-            output = subprocess.check_output(["file", "--mime-type", image], encoding="utf-8")
+            output = subprocess.check_output(["file", "--mime-type", image], encoding="utf8")
             _, mimetype = output.strip().split(": ") # "filename: mimetype"
             self.assertIn(mimetype, expected_mimetypes)
@@ -240,7 +242,7 @@ class TestAssemblers(test.TestBase):
                     "--xattrs", "--xattrs-include", "*",
                     "-xaf", image,
                     "-C", tmp]
-            subprocess.check_output(args, encoding="utf-8")
+            subprocess.check_output(args, encoding="utf8")
             diff = self.tree_diff(tree, tmp)
             self.assertEqual(diff["added_files"], [])
             self.assertEqual(diff["deleted_files"], [])


@@ -46,7 +46,7 @@ class TestBoot(test.TestBase):
                         "-device", "virtserialport,chardev=stdio",
                         qcow2],
-                       encoding="utf-8",
+                       encoding="utf8",
                        check=True)
-        with open(output_file, "r") as f:
+        with open(output_file, "r", encoding="utf8") as f:
            self.assertEqual(f.read().strip(), "running")


@@ -77,5 +77,5 @@ def test_loopback_basic(tmpdir):
     client.call("close", None)
     lo = loop.Loop(minor)
-    with open(filename, "r") as f:
+    with open(filename, "r", encoding="utf8") as f:
         assert not lo.is_bound_to(f.fileno())


@@ -35,7 +35,7 @@ libc.setns.errcheck = errcheck
 @contextlib.contextmanager
 def netns():
     # Grab a reference to the current namespace.
-    with open("/proc/self/ns/net") as oldnet:
+    with open("/proc/self/ns/net", encoding="utf8") as oldnet:
         # Create a new namespace and enter it.
         libc.unshare(CLONE_NEWNET)
         try:
@@ -76,7 +76,7 @@ def runFileServer(barrier, directory):
         def guess_type(self, path):
             try:
-                with open(path + ".mimetype", "r") as f:
+                with open(path + ".mimetype", "r", encoding="utf8") as f:
                     return f.read().strip()
             except FileNotFoundError:
                 pass
@@ -119,7 +119,7 @@ def test_sources(source, case, tmpdir):
     index = osbuild.meta.Index(os.curdir)
     sources = os.path.join(test.TestBase.locate_test_data(), "sources")
-    with open(f"{sources}/{source}/cases/{case}") as f:
+    with open(f"{sources}/{source}/cases/{case}", encoding="utf8") as f:
         case_options = json.load(f)
     info = index.get_module_info("Source", source)


@@ -25,7 +25,7 @@ def have_sfdisk_with_json():
     r = subprocess.run(["sfdisk", "--version"],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
-                       encoding="utf-8",
+                       encoding="utf8",
                        check=False)
     if r.returncode != 0:
@@ -187,7 +187,7 @@ class TestStages(test.TestBase):
                                 pprint.pformat(want).splitlines())
             txt = "\n".join(diff)
             path = f"/tmp/osbuild.metadata.{stageid}.json"
-            with open(path, "w") as f:
+            with open(path, "w", encoding="utf8") as f:
                 json.dump(have, f, indent=2)
             self.fail(f"metadata for {stageid} differs:\n{txt}\n{path}")
@@ -224,14 +224,14 @@ class TestStages(test.TestBase):
                 actual_diff = self.tree_diff(tree1, tree2)
-                with open(os.path.join(test_dir, "diff.json")) as f:
+                with open(os.path.join(test_dir, "diff.json"), encoding="utf8") as f:
                     expected_diff = json.load(f)
                 self.assertTreeDiffsEqual(expected_diff, actual_diff)
                 md_path = os.path.join(test_dir, "metadata.json")
                 if os.path.exists(md_path):
-                    with open(md_path, "r") as f:
+                    with open(md_path, "r", encoding="utf8") as f:
                         metadata = json.load(f)
                     self.assertMetadata(metadata, res)
@@ -245,7 +245,7 @@ class TestStages(test.TestBase):
         datadir = self.locate_test_data()
         base = os.path.join(datadir, "stages/dracut")
-        with open(f"{base}/vanilla.json", "r") as f:
+        with open(f"{base}/vanilla.json", "r", encoding="utf8") as f:
             refs = json.load(f)
         with self.osbuild as osb, tempfile.TemporaryDirectory(dir="/var/tmp") as outdir:
@@ -273,7 +273,7 @@ class TestStages(test.TestBase):
         testdir = os.path.join(datadir, "stages", "selinux")
         def load_manifest(manifest_name):
-            with open(os.path.join(datadir, f"manifests/{manifest_name}")) as f:
+            with open(os.path.join(datadir, f"manifests/{manifest_name}"), encoding="utf8") as f:
                 manifest = json.load(f)
             return manifest
@@ -281,7 +281,7 @@ class TestStages(test.TestBase):
         for t in glob.glob(f"{testdir}/test_*.json"):
             manifest = load_manifest("f34-base.json")
-            with open(t) as f:
+            with open(t, encoding="utf8") as f:
                 check = json.load(f)
             manifest["pipeline"]["stages"].append({
                 "name": "org.osbuild.selinux",
@@ -308,7 +308,7 @@ class TestStages(test.TestBase):
         checks_path = os.path.join(testdir, "checks.json")
         checks = {}
-        with open(checks_path) as f:
+        with open(checks_path, encoding="utf8") as f:
             checks = json.load(f)
         for image_name, test_data in checks.items():
@@ -326,7 +326,7 @@ class TestStages(test.TestBase):
                 ["qemu-img", "info", "--output=json", ip],
                 capture_output=True,
                 check=True,
-                encoding="utf-8"
+                encoding="utf8"
             )
             qemu_img_out = json.loads(qemu_img_run.stdout)
@@ -371,7 +371,7 @@ class TestStages(test.TestBase):
         imgname = "disk.img"
-        with open(os.path.join(testdir, f"{imgname}.json"), "r") as f:
+        with open(os.path.join(testdir, f"{imgname}.json"), "r", encoding="utf8") as f:
             want = json.load(f)
         with self.osbuild as osb, tempfile.TemporaryDirectory(dir="/var/tmp") as outdir:
@@ -388,7 +388,7 @@ class TestStages(test.TestBase):
         r = subprocess.run(["sfdisk", "--json", target],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
-                           encoding="utf-8",
+                           encoding="utf8",
                            check=False)
         have = json.loads(r.stdout)
@@ -419,7 +419,7 @@ class TestStages(test.TestBase):
         imgname = "disk.img"
-        with open(os.path.join(testdir, f"{imgname}.json"), "r") as f:
+        with open(os.path.join(testdir, f"{imgname}.json"), "r", encoding="utf8") as f:
             want = json.load(f)
         with self.osbuild as osb, tempfile.TemporaryDirectory(dir="/var/tmp") as outdir:
@@ -436,7 +436,7 @@ class TestStages(test.TestBase):
         r = subprocess.run(["sfdisk", "--json", target],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
-                           encoding="utf-8",
+                           encoding="utf8",
                            check=False)
         have = json.loads(r.stdout)


@@ -133,9 +133,9 @@ class TestBase(unittest.TestCase):
             original = os.path.join(tmpdir, "original")
             mnt = os.path.join(tmpdir, "mnt")
-            with open(original, "w") as f:
+            with open(original, "w", encoding="utf8") as f:
                 f.write("foo")
-            with open(mnt, "w") as f:
+            with open(mnt, "w", encoding="utf8") as f:
                 f.write("bar")
             try:
@@ -153,7 +153,7 @@ class TestBase(unittest.TestCase):
                     stderr=subprocess.DEVNULL,
                     check=True,
                 )
-                with open(mnt, "r") as f:
+                with open(mnt, "r", encoding="utf8") as f:
                     assert f.read() == "foo"
                 return True
             except subprocess.CalledProcessError:
@@ -178,7 +178,7 @@ class TestBase(unittest.TestCase):
         try:
             r = subprocess.run(
                 ["autopep8-3", "--version"],
-                encoding="utf-8", stdout=subprocess.PIPE, check=False
+                encoding="utf8", stdout=subprocess.PIPE, check=False
             )
         except FileNotFoundError:
             return False
@@ -196,7 +196,7 @@ class TestBase(unittest.TestCase):
         try:
             r = subprocess.run(
                 ["rpm-ostree", "--version"],
-                encoding="utf-8", stdout=subprocess.PIPE, check=False
+                encoding="utf8", stdout=subprocess.PIPE, check=False
             )
         except FileNotFoundError:
             return False
@@ -333,7 +333,7 @@ class OSBuild(contextlib.AbstractContextManager):
             cmd_args += ["-"]
-            logfile_context = tempfile.NamedTemporaryFile(dir="/var/tmp", mode="w+", encoding="utf-8")
+            logfile_context = tempfile.NamedTemporaryFile(dir="/var/tmp", mode="w+", encoding="utf8")
             logfile = cm.enter_context(logfile_context)
             cmd_args += ["--monitor", "LogMonitor", "--monitor-fd", str(logfile.fileno())]
@@ -345,7 +345,7 @@ class OSBuild(contextlib.AbstractContextManager):
             try:
                 p = subprocess.Popen(
                     cmd_args,
-                    encoding="utf-8",
+                    encoding="utf8",
                     stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE,
@@ -376,7 +376,7 @@ class OSBuild(contextlib.AbstractContextManager):
         to `compile()`.
         """
-        with open(file_stdin, "r") as f:
+        with open(file_stdin, "r", encoding="utf8") as f:
            data_stdin = f.read()
         return self.compile(data_stdin, output_dir, checkpoints=checkpoints, exports=exports)