autopep8: Update with changes to make autopep8 -a -a -a happy

This commit is contained in:
Brian C. Lane 2023-08-07 10:08:32 -07:00 committed by Simon de Vlieger
parent a7541ad8b4
commit 44c28c8c16
26 changed files with 66 additions and 59 deletions

View file

@ -10,7 +10,7 @@ def skipcpio(fd):
pos = 0
while True:
os.lseek(fd, pos, os.SEEK_SET)
data = os.read(fd, 2*cpio_len)
data = os.read(fd, 2 * cpio_len)
if data == b'':
# end of file, cpio_end not found, cat it all
pos = 0
@ -24,7 +24,7 @@ def skipcpio(fd):
if pos == 0:
return pos
# skip zeros
n = 2*cpio_len
n = 2 * cpio_len
while True:
data = os.read(fd, n)
if data == b'':

View file

@ -158,7 +158,7 @@ def test_clear_fd_wait(tempdir):
thread.start()
barrier.wait()
lo.clear_fd_wait(f.fileno(), 4*delay_time, delay_time/10)
lo.clear_fd_wait(f.fileno(), 4 * delay_time, delay_time / 10)
# no timeout exception has occurred and thus the device
# must not be bound to the original file anymore

View file

@ -85,12 +85,12 @@ def test_runner_detection(tempdir):
},
"rhel": {
"base": 90,
"versions": [91, 92, 93],
"versions": [91, 92, 93],
"check": {94: 93},
},
"future": {
"base": 100,
"versions": [101, 102, 103],
"versions": [101, 102, 103],
"check": {110: 103},
}
}

View file

@ -38,7 +38,7 @@ def store_fixture():
def test_basic(object_store):
object_store.maximum_size = 1024*1024*1024
object_store.maximum_size = 1024 * 1024 * 1024
# No objects or references should be in the store
assert len(os.listdir(object_store.objects)) == 0
@ -85,7 +85,7 @@ def test_basic(object_store):
def test_cleanup(tmpdir):
with objectstore.ObjectStore(tmpdir) as object_store:
object_store.maximum_size = 1024*1024*1024
object_store.maximum_size = 1024 * 1024 * 1024
stage = os.path.join(object_store, "stage")
tree = object_store.new("a")
@ -156,7 +156,7 @@ def test_metadata(tmpdir):
# use tmpdir fixture from here on
with objectstore.ObjectStore(tmpdir) as store:
store.maximum_size = 1024*1024*1024
store.maximum_size = 1024 * 1024 * 1024
obj = store.new("a")
p = Path(obj, "A")
p.touch()

View file

@ -56,13 +56,13 @@ def test_pathlike(tmpdir):
dir_str: str = os.fspath(tmpdir)
cache1 = fscache.FsCache("osbuild-test-appid", dir_str)
assert os.fspath(cache1) == tmpdir
assert os.path.join(cache1, "foobar") == os.path.join(tmpdir, "foobar")
assert os.path.join(cache1, "foobar") == os.path.join(tmpdir, "foobar")
# Test with a wrapper-type as argument
dir_pathlike: Wrapper = Wrapper(os.fspath(tmpdir))
cache2 = fscache.FsCache("osbuild-test-appid", dir_pathlike)
assert os.fspath(cache2) == tmpdir
assert os.path.join(cache2, "foobar") == os.path.join(tmpdir, "foobar")
assert os.path.join(cache2, "foobar") == os.path.join(tmpdir, "foobar")
def test_path(tmpdir):
@ -235,19 +235,19 @@ def test_cache_info(tmpdir):
cache.info = fscache.FsCacheInfo(maximum_size=1024)
assert cache.info.maximum_size == 1024
assert cache.info.creation_boot_id is None
cache.info = fscache.FsCacheInfo(creation_boot_id="0"*32)
cache.info = fscache.FsCacheInfo(creation_boot_id="0" * 32)
assert cache.info.maximum_size == 1024
assert cache.info.creation_boot_id == "0"*32
cache.info = fscache.FsCacheInfo(maximum_size=2048, creation_boot_id="1"*32)
assert cache.info.creation_boot_id == "0" * 32
cache.info = fscache.FsCacheInfo(maximum_size=2048, creation_boot_id="1" * 32)
assert cache.info.maximum_size == 2048
assert cache.info.creation_boot_id == "1"*32
assert cache.info.creation_boot_id == "1" * 32
assert not fscache.FsCacheInfo().to_json()
assert fscache.FsCacheInfo(creation_boot_id="0"*32).to_json() == {
"creation-boot-id": "0"*32,
assert fscache.FsCacheInfo(creation_boot_id="0" * 32).to_json() == {
"creation-boot-id": "0" * 32,
}
assert fscache.FsCacheInfo(creation_boot_id="0"*32, maximum_size=1024).to_json() == {
"creation-boot-id": "0"*32,
assert fscache.FsCacheInfo(creation_boot_id="0" * 32, maximum_size=1024).to_json() == {
"creation-boot-id": "0" * 32,
"maximum-size": 1024,
}
@ -255,21 +255,21 @@ def test_cache_info(tmpdir):
assert fscache.FsCacheInfo.from_json(None) == fscache.FsCacheInfo()
assert fscache.FsCacheInfo.from_json("foobar") == fscache.FsCacheInfo()
assert fscache.FsCacheInfo.from_json({
"creation-boot-id": "0"*32,
}) == fscache.FsCacheInfo(creation_boot_id="0"*32)
"creation-boot-id": "0" * 32,
}) == fscache.FsCacheInfo(creation_boot_id="0" * 32)
assert fscache.FsCacheInfo.from_json({
"creation-boot-id": "0"*32,
"creation-boot-id": "0" * 32,
"maximum-size": 1024,
}) == fscache.FsCacheInfo(creation_boot_id="0"*32, maximum_size=1024)
}) == fscache.FsCacheInfo(creation_boot_id="0" * 32, maximum_size=1024)
assert fscache.FsCacheInfo.from_json({
"creation-boot-id": "0"*32,
"creation-boot-id": "0" * 32,
"maximum-size": 1024,
}) == fscache.FsCacheInfo(creation_boot_id="0"*32, maximum_size=1024)
}) == fscache.FsCacheInfo(creation_boot_id="0" * 32, maximum_size=1024)
assert fscache.FsCacheInfo.from_json({
"creation-boot-id": "0"*32,
"creation-boot-id": "0" * 32,
"unknown0": "foobar",
"unknown1": ["foo", "bar"],
}) == fscache.FsCacheInfo(creation_boot_id="0"*32)
}) == fscache.FsCacheInfo(creation_boot_id="0" * 32)
def test_store(tmpdir):
@ -317,7 +317,7 @@ def test_store_tree(tmpdir):
cache.store_tree("foobar", "invalid/dir")
with cache:
cache.info = cache.info._replace(maximum_size=1024*1024*1024)
cache.info = cache.info._replace(maximum_size=1024 * 1024 * 1024)
with pytest.raises(ValueError):
cache.store_tree("", "invalid/dir")

View file

@ -156,7 +156,7 @@ def test_rename_vg_group(tempdir):
vg = find_vg(vgs, new_name)
if vg:
break
time.sleep(0.250 * (i+1))
time.sleep(0.250 * (i + 1))
if not vg:
raise RuntimeError(f"Could not find vg {new_name}")
finally:

View file

@ -73,7 +73,7 @@ def create_image(tmpdir):
env = os.environ.copy()
env["PYTHONPATH"] = os.curdir
subprocess.run(
[os.path.join(os.curdir, "stages", "org.osbuild.mkfs.fat")],
[os.path.join(os.curdir, "stages", "org.osbuild.mkfs.fat")],
env=env,
check=True,
stdout=sys.stdout,

View file

@ -66,7 +66,7 @@ def can_setup_netns() -> bool:
try:
with netns():
return True
except: # pylint: disable=bare-except
except BaseException: # pylint: disable=bare-except
return False

View file

@ -77,7 +77,7 @@ def mapping_is_subset(subset, other):
"""
if isinstance(subset, Mapping) and isinstance(other, Mapping):
for key, value in subset.items():
if not key in other:
if key not in other:
return False
other_value = other[key]