test_depsolve.py: add basic test cases for 'dump' command

Signed-off-by: Tomáš Hozza <thozza@redhat.com>
This commit is contained in:
Tomáš Hozza 2024-08-06 18:46:00 +02:00 committed by Tomáš Hozza
parent d0094bbe2b
commit 421663cb28

View file

@ -67,6 +67,34 @@ def depsolve(transactions, repos, root_dir, cache_dir, dnf_config, opt_metadata)
return json.loads(p.stdout), p.returncode
def dump(repos, root_dir, cache_dir, dnf_config, opt_metadata) -> Tuple[dict, int]:
    """Run the depsolver's 'dump' command and return (parsed JSON output, exit code).

    Builds the request payload for the given repos/root_dir/cache_dir and,
    when a solver config string is provided, passes it to the depsolver via
    a temporary file referenced by the OSBUILD_SOLVER_CONFIG env variable.
    """
    request = {
        "command": "dump",
        "arch": ARCH,
        "module_platform_id": f"platform:el{RELEASEVER}",
        "releasever": RELEASEVER,
        "cachedir": cache_dir,
        "arguments": {
            "root_dir": root_dir,
            "repos": repos,
            "optional-metadata": opt_metadata,
        }
    }

    # If there is a config file, write it to a temporary file and pass it to the depsolver
    with TemporaryDirectory() as cfg_dir:
        environment = None
        if dnf_config:
            config_path = pathlib.Path(cfg_dir) / "solver.json"
            config_path.write_text(dnf_config)
            environment = {"OSBUILD_SOLVER_CONFIG": os.fspath(config_path)}
        proc = sp.run(
            ["./tools/osbuild-depsolve-dnf"],
            input=json.dumps(request),
            env=environment,
            check=False,
            stdout=sp.PIPE,
            stderr=sys.stderr,
            universal_newlines=True,
        )
        return json.loads(proc.stdout), proc.returncode
def get_rand_port():
s = socket.socket()
s.bind(("", 0))
@ -957,6 +985,27 @@ depsolve_test_cases = [
]
# Test cases for the depsolver 'dump' command: either a successful dump of
# every available package ("packages_count") or a specific expected error.
dump_test_cases = [
    # Happy path: dump all packages from the default test repositories.
    {
        "id": "basic",
        "packages_count": 4444,
    },
    # Test repository error: an additional unreachable repo must fail the dump.
    {
        "id": "error_unreachable_repo",
        "additional_servers": [
            {
                "name": "broken",
                "address": "file:///non-existing-repo",
            },
        ],
        "error": True,
        "error_kind": "RepoError",
        "error_reason_re": r"There was a problem reading a repository: Failed to download metadata.*['\"]broken['\"].*",
    },
]
def make_dnf_scafolding(base_dir):
root_dir = pathlib.Path(TemporaryDirectory(dir=base_dir).name)
@ -1125,3 +1174,50 @@ def test_depsolve(tmp_path, repo_servers, dnf_config, detect_fn, test_case):
assert res["solver"] == "dnf5"
else:
assert res["solver"] == "dnf"
@pytest.mark.parametrize("test_case", dump_test_cases, ids=tcase_idfn)
@pytest.mark.parametrize("dnf_config, detect_fn", [
    (None, assert_dnf),
    ('{"use_dnf5": false}', assert_dnf),
    ('{"use_dnf5": true}', assert_dnf5),
], ids=["no-config", "dnf4", "dnf5"])
def test_dump(tmp_path, repo_servers, dnf_config, detect_fn, test_case):
    """Exercise the depsolver's 'dump' command against local test repositories.

    For every repo-config combination, either checks the expected error
    (kind and reason regex) or verifies that all packages are dumped with
    the expected metadata keys and repo ids, and that filelists metadata is
    only downloaded when requested.
    """
    try:
        detect_fn()
    except RuntimeError as e:
        # pytest.skip() expects a string reason, not the exception object
        # (newer pytest versions reject non-str reasons).
        pytest.skip(str(e))

    # Work on a copy so additional (broken) servers don't leak into the
    # repo_servers fixture shared with other test cases.
    repo_servers_copy = repo_servers.copy()
    if "additional_servers" in test_case:
        repo_servers_copy.extend(test_case["additional_servers"])

    for repo_configs, root_dir, opt_metadata in config_combos(tmp_path, repo_servers_copy):
        with TemporaryDirectory() as cache_dir:
            res, exit_code = dump(repo_configs, root_dir, cache_dir, dnf_config, opt_metadata)

            if test_case.get("error", False):
                assert exit_code != 0
                assert res["kind"] == test_case["error_kind"]
                assert re.match(test_case["error_reason_re"], res["reason"], re.DOTALL)
                continue

            assert exit_code == 0
            assert len(res) == test_case["packages_count"]
            for res_pkg in res:
                for key in ["arch", "buildtime", "description", "epoch", "license", "name", "release", "repo_id",
                            "summary", "url", "version"]:
                    assert key in res_pkg
                # only the dependency-free test package lives in the 'custom' repo
                if res_pkg["name"] == "pkg-with-no-deps":
                    assert res_pkg["repo_id"] == "custom"
                else:
                    assert res_pkg["repo_id"] == "baseos"

            # if opt_metadata includes 'filelists', then each repository 'repodata' must include a file that matches
            # *filelists*
            n_filelist_files = len(glob(f"{cache_dir}/*/repodata/*filelists*"))
            if "filelists" in opt_metadata:
                assert n_filelist_files == len(REPO_PATHS)
            else:
                assert n_filelist_files == 0