debian-forge-composer/dnf-json
Lars Karlitski 839a109c78 weldr: ensure a fresh dnf cache when making a new compose
dnf-json relies on dnf's ability to cache repository metadata. This is
important, because the API calls it frequently to serve requests for
package lists and depsolves.

However, osbuild's dnf stage always fetches fresh metadata, because it
doesn't have access to the host's cache. Since metadata remains valid
for some time even after a repository has changed, the checksum we put
in the pipeline might be stale.

Force a new metadata download when producing the pipeline. This is still
not perfect, but greatly reduces the probability of putting stale
metadata into the pipeline.
2019-12-19 21:46:02 +01:00
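
For reference, dnf-json reads a single JSON object from stdin. A request that
forces the fresh metadata download described above sets the "clean" flag; a
minimal sketch as a Python literal, with placeholder repository values:

request = {
    "command": "dump",
    "arguments": {
        "clean": True,
        "repos": [
            {"id": "base", "name": "Base",
             "baseurl": ["https://repo.example.com/base"]},
        ],
    },
}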


#!/usr/bin/python3
import datetime
import hashlib
import json
import shutil
import sys

import dnf

DNF_ERROR_EXIT_CODE = 10

def timestamp_to_rfc3339(timestamp):
    d = datetime.datetime.utcfromtimestamp(timestamp)
    return d.strftime('%Y-%m-%dT%H:%M:%SZ')

def dnfrepo(desc, parent_conf=None):
    """Makes a dnf.repo.Repo out of a JSON repository description"""
    repo = dnf.repo.Repo(desc["id"], parent_conf)
    repo.name = desc["name"]
    if "baseurl" in desc:
        repo.baseurl = desc["baseurl"]
    elif "metalink" in desc:
        repo.metalink = desc["metalink"]
    elif "mirrorlist" in desc:
        repo.mirrorlist = desc["mirrorlist"]
    else:
        assert False
    if desc.get("ignoressl", False):
        repo.sslverify = False
    return repo
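
# A repository description is a JSON object along these lines (illustrative
# values, not from a real configuration); at least one of "baseurl",
# "metalink" or "mirrorlist" must be present:
#   {"id": "base", "name": "Base",
#    "metalink": "https://repo.example.com/metalink", "ignoressl": false}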

def create_base(repos, clean=False):
    base = dnf.Base()

    # Wipe dnf's metadata cache when asked, forcing a fresh download of
    # repository metadata rather than reusing a possibly stale cache.
    if clean:
        shutil.rmtree(base.conf.cachedir, ignore_errors=True)

    for repo in repos:
        base.repos.add(dnfrepo(repo, base.conf))

    base.fill_sack(load_system_repo=False)
    return base

def exit_with_dnf_error(kind: str, reason: str):
    json.dump({"kind": kind, "reason": reason}, sys.stdout)
    sys.exit(DNF_ERROR_EXIT_CODE)

def repo_checksums(base):
    checksums = {}
    for repo in base.repos.iter_enabled():
        # Uses the same algorithm as libdnf to find the cache dir:
        # https://github.com/rpm-software-management/libdnf/blob/master/libdnf/repo/Repo.cpp#L1288
        if repo.metalink:
            url = repo.metalink
        elif repo.mirrorlist:
            url = repo.mirrorlist
        elif repo.baseurl:
            url = repo.baseurl[0]
        else:
            assert False
        digest = hashlib.sha256(url.encode()).hexdigest()[:16]
        with open(f"{base.conf.cachedir}/{repo.id}-{digest}/repodata/repomd.xml", "rb") as f:
            repomd = f.read()
        checksums[repo.id] = "sha256:" + hashlib.sha256(repomd).hexdigest()
    return checksums
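
# These per-repo checksums of repomd.xml identify the exact metadata used to
# answer a request; the caller puts them into the osbuild pipeline (see the
# commit message above).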

call = json.load(sys.stdin)
command = call["command"]
arguments = call.get("arguments", {})

if command == "dump":
    base = create_base(arguments.get("repos", {}), arguments.get("clean", False))
    packages = []
    for package in base.sack.query().available():
        packages.append({
            "name": package.name,
            "summary": package.summary,
            "description": package.description,
            "url": package.url,
            "epoch": package.epoch,
            "version": package.version,
            "release": package.release,
            "arch": package.arch,
            "buildtime": timestamp_to_rfc3339(package.buildtime),
            "license": package.license
        })
    json.dump({
        "checksums": repo_checksums(base),
        "packages": packages
    }, sys.stdout)
elif command == "depsolve":
    base = create_base(arguments.get("repos", {}), arguments.get("clean", False))
    try:
        base.install_specs(arguments["package-specs"])
    except dnf.exceptions.MarkingErrors as e:
        exit_with_dnf_error("MarkingErrors", f"Error occurred when marking packages for installation: {e}")
    try:
        base.resolve()
    except dnf.exceptions.DepsolveError as e:
        exit_with_dnf_error("DepsolveError", f"There was a problem depsolving {arguments['package-specs']}: {e}")
    dependencies = []
    for package in base.transaction.install_set:
        dependencies.append({
            "name": package.name,
            "epoch": package.epoch,
            "version": package.version,
            "release": package.release,
            "arch": package.arch
        })
    json.dump({
        "checksums": repo_checksums(base),
        "dependencies": dependencies
    }, sys.stdout)
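
A minimal sketch of driving dnf-json from Python over its stdin/stdout
protocol. It assumes the script is available as ./dnf-json; the repository
values are placeholders:

import json
import subprocess

request = {
    "command": "depsolve",
    "arguments": {
        "clean": True,  # force a fresh metadata download, per the commit above
        "repos": [{"id": "base", "name": "Base",
                   "baseurl": ["https://repo.example.com/base"]}],
        "package-specs": ["@core"],
    },
}

result = subprocess.run(["./dnf-json"], input=json.dumps(request),
                        capture_output=True, text=True)
reply = json.loads(result.stdout)
if result.returncode == 10:  # DNF_ERROR_EXIT_CODE
    print(reply["kind"], reply["reason"])
else:
    print(reply["checksums"], reply["dependencies"])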