tools/mpp: refactor dep-solving

Create a DepSolver class that carries global state, such as directories
and subscription information, as well as local state, like the
repositories and basedir. The latter can be reset so the class can
easily be reused for all dep-solve sections.
This avoids having any global state.
This commit is contained in:
Christian Kellner 2021-06-30 17:06:07 +00:00 committed by Tom Gundersen
parent 77c5c8e8a6
commit 802f401069

View file

@ -105,6 +105,7 @@ import os
import sys import sys
import pathlib import pathlib
import tempfile import tempfile
from typing import Dict
import urllib.parse import urllib.parse
import collections import collections
import dnf import dnf
@ -119,140 +120,152 @@ def element_enter(element, key, default):
return element[key] return element[key]
class DepSolver:
    """Wrap a `dnf.Base` so it can be re-used across dep-solve sections.

    Global state (cache dir, persist dir, host subscriptions) lives for the
    lifetime of the object; per-section state (basedir, repos, secrets) is
    cleared via `reset()` before each dep-solve.
    """

    def __init__(self, cachedir, persistdir):
        self.cachedir = cachedir
        self.persistdir = persistdir
        self.basedir = None
        # Host subscription info; fetched lazily, at most once (see get_secrets).
        self.subscriptions = None
        # Maps repo id -> secret type for repos that needed secrets.
        self.secrets = {}
        self.base = dnf.Base()

    def reset(self, basedir):
        """Clear all per-section state and re-initialize dnf for `basedir`."""
        base = self.base
        base.reset(goal=True, repos=True, sack=True)
        self.secrets.clear()

        if self.cachedir:
            base.conf.cachedir = self.cachedir
        # Never read the host's dnf configuration.
        base.conf.config_file_path = "/dev/null"
        base.conf.persistdir = self.persistdir

        self.base = base
        self.basedir = basedir

    def setup(self, arch, module_platform_id, ignore_weak_deps):
        """Configure architecture and platform parameters for the solver."""
        base = self.base

        base.conf.module_platform_id = module_platform_id
        base.conf.substitutions['arch'] = arch
        base.conf.substitutions['basearch'] = dnf.rpm.basearch(arch)
        base.conf.install_weak_deps = not ignore_weak_deps

    def expand_baseurl(self, baseurl):
        """Expand non-uris as paths relative to basedir into a file:/// uri"""
        basedir = self.basedir
        try:
            result = urllib.parse.urlparse(baseurl)
            if not result.scheme:
                path = basedir.joinpath(baseurl)
                return path.as_uri()
        except:  # pylint: disable=bare-except
            # Deliberate best-effort: anything unparseable is returned verbatim.
            pass
        return baseurl

    def get_secrets(self, url, desc):
        """Return rhsm secrets for `url`, or None if `desc` requests none.

        Raises ValueError for unknown secret types or retrieval failures.
        """
        if not desc:
            return None

        name = desc.get("name")
        if name != "org.osbuild.rhsm":
            raise ValueError(f"Unknown secret type: {name}")

        try:
            # rhsm secrets only need to be retrieved once and can then be reused
            if not self.subscriptions:
                self.subscriptions = Subscriptions.from_host_system()
            secrets = self.subscriptions.get_secrets(url)
        except RuntimeError as e:
            raise ValueError(f"Error getting secrets: {e.args[0]}") from None

        secrets["type"] = "org.osbuild.rhsm"

        return secrets

    def add_repo(self, desc, baseurl):
        """Create a dnf repo from description `desc` and register it.

        `baseurl` is the section-level fallback used when `desc` itself
        carries no url. Returns the created repo.
        """
        repo = dnf.repo.Repo(desc["id"], self.base.conf)
        url = None
        url_keys = ["baseurl", "metalink", "mirrorlist"]
        skip_keys = ["id", "secrets"]
        supported = ["baseurl", "metalink", "mirrorlist",
                     "enabled", "metadata_expire", "gpgcheck", "username", "password", "priority",
                     "sslverify", "sslcacert", "sslclientkey", "sslclientcert"]

        for key in desc.keys():
            if key in skip_keys:
                continue  # We handled this already

            if key in url_keys:
                url = desc[key]

            if key in supported:
                value = desc[key]
                if key == "baseurl":
                    value = self.expand_baseurl(value)
                setattr(repo, key, value)
            else:
                raise ValueError(f"Unknown repo config option {key}")

        if not url:
            url = self.expand_baseurl(baseurl)

        if not url:
            raise ValueError("repo description does not contain baseurl, metalink, or mirrorlist keys")

        secrets = self.get_secrets(url, desc.get("secrets"))

        if secrets:
            if "ssl_ca_cert" in secrets:
                repo.sslcacert = secrets["ssl_ca_cert"]
            if "ssl_client_key" in secrets:
                repo.sslclientkey = secrets["ssl_client_key"]
            if "ssl_client_cert" in secrets:
                repo.sslclientcert = secrets["ssl_client_cert"]
            # Remember only the secret type; it is attached to packages later.
            self.secrets[repo.id] = secrets["type"]

        self.base.repos.add(repo)

        return repo

    def resolve(self, packages, excludes):
        """Dep-solve `packages` (minus `excludes`) against the added repos.

        Returns a list of dicts with "checksum", "name", "url" and,
        where applicable, "secrets" for each resolved package.
        """
        base = self.base

        base.reset(goal=True, sack=True)
        base.fill_sack(load_system_repo=False)
        base.install_specs(packages, exclude=excludes)
        base.resolve()

        deps = []
        for tsi in base.transaction:
            if tsi.action not in dnf.transaction.FORWARD_ACTIONS:
                continue

            checksum_type = hawkey.chksum_name(tsi.pkg.chksum[0])
            checksum_hex = tsi.pkg.chksum[1].hex()
            path = tsi.pkg.relativepath
            reponame = tsi.pkg.reponame
            # NOTE(review): assumes every repo has a non-empty `baseurl` list;
            # metalink/mirrorlist-only repos would raise IndexError — confirm.
            baseurl = self.base.repos[reponame].baseurl[0]
            # dep["path"] often starts with a "/", even though it's meant to be
            # relative to `baseurl`. Strip any leading slashes, but ensure there's
            # exactly one between `baseurl` and the path.
            url = urllib.parse.urljoin(baseurl + "/", path.lstrip("/"))
            secret = self.secrets.get(reponame)
            pkg = {
                "checksum": f"{checksum_type}:{checksum_hex}",
                "name": tsi.pkg.name,
                "url": url,
            }
            if secret:
                pkg["secrets"] = secret
            deps.append(pkg)
        return deps
class ManifestFile: class ManifestFile:
@ -296,6 +309,22 @@ class ManifestFile:
raise FileNotFoundError(f"Could not find manifest '{path}'") raise FileNotFoundError(f"Could not find manifest '{path}'")
def depsolve(self, solver: DepSolver, desc: Dict):
    """Dep-solve one mpp section `desc` using `solver`.

    Resets the solver for this manifest's basedir, registers the
    section's repos, and returns the resolved package list (empty if
    the section requests no packages).
    """
    repos = desc.get("repos", [])
    packages = desc.get("packages", [])
    excludes = desc.get("excludes", [])
    baseurl = desc.get("baseurl")

    if not packages:
        return []

    solver.reset(self.basedir)

    for repo in repos:
        solver.add_repo(repo, baseurl)

    return solver.resolve(packages, excludes)
def add_packages(self, deps): def add_packages(self, deps):
checksums = [] checksums = []
@ -380,7 +409,7 @@ class ManifestFileV1(ManifestFile):
self._process_import(current, search_dirs) self._process_import(current, search_dirs)
current = current.get("pipeline", {}).get("build") current = current.get("pipeline", {}).get("build")
def _process_depsolve(self, stage): def _process_depsolve(self, solver, stage):
if stage.get("name", "") != "org.osbuild.rpm": if stage.get("name", "") != "org.osbuild.rpm":
return return
options = stage.get("options") options = stage.get("options")
@ -394,21 +423,21 @@ class ManifestFileV1(ManifestFile):
packages = element_enter(options, "packages", []) packages = element_enter(options, "packages", [])
deps = _dnf_resolve(mpp, self.basedir) deps = self.depsolve(solver, mpp)
checksums = self.add_packages(deps) checksums = self.add_packages(deps)
packages += checksums packages += checksums
def process_depsolves(self, pipeline=None): def process_depsolves(self, solver, pipeline=None):
if pipeline is None: if pipeline is None:
pipeline = self.pipeline pipeline = self.pipeline
stages = element_enter(pipeline, "stages", []) stages = element_enter(pipeline, "stages", [])
for stage in stages: for stage in stages:
self._process_depsolve(stage) self._process_depsolve(solver, stage)
build = pipeline.get("build") build = pipeline.get("build")
if build: if build:
if "pipeline" in build: if "pipeline" in build:
self.process_depsolves(build["pipeline"]) self.process_depsolves(solver, build["pipeline"])
class ManifestFileV2(ManifestFile): class ManifestFileV2(ManifestFile):
@ -456,7 +485,7 @@ class ManifestFileV2(ManifestFile):
for pipeline in self.pipelines: for pipeline in self.pipelines:
self._process_import(pipeline, search_dirs) self._process_import(pipeline, search_dirs)
def _process_depsolve(self, stage): def _process_depsolve(self, solver, stage):
if stage.get("type", "") != "org.osbuild.rpm": if stage.get("type", "") != "org.osbuild.rpm":
return return
inputs = element_enter(stage, "inputs", {}) inputs = element_enter(stage, "inputs", {})
@ -469,21 +498,19 @@ class ManifestFileV2(ManifestFile):
refs = element_enter(packages, "references", {}) refs = element_enter(packages, "references", {})
deps = _dnf_resolve(mpp, self.basedir) deps = self.depsolve(solver, mpp)
checksums = self.add_packages(deps) checksums = self.add_packages(deps)
for checksum in checksums: for checksum in checksums:
refs[checksum] = {} refs[checksum] = {}
def process_depsolves(self, solver):
    """Dep-solve every stage of every pipeline in this (v2) manifest."""
    for pipeline in self.pipelines:
        stages = element_enter(pipeline, "stages", [])
        for stage in stages:
            self._process_depsolve(solver, stage)
dnf_cache = None
if __name__ == "__main__": if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Manifest pre processor") parser = argparse.ArgumentParser(description="Manifest pre processor")
parser.add_argument( parser.add_argument(
@ -519,14 +546,14 @@ if __name__ == "__main__":
args = parser.parse_args(sys.argv[1:]) args = parser.parse_args(sys.argv[1:])
dnf_cache = args.dnf_cache
m = ManifestFile.load(args.src) m = ManifestFile.load(args.src)
# First resolve all imports # First resolve all imports
m.process_imports(args.searchdirs) m.process_imports(args.searchdirs)
m.process_depsolves() with tempfile.TemporaryDirectory() as persistdir:
solver = DepSolver(args.dnf_cache, persistdir)
m.process_depsolves(solver)
with sys.stdout if args.dst == "-" else open(args.dst, "w") as f: with sys.stdout if args.dst == "-" else open(args.dst, "w") as f:
m.write(f, args.sort_keys) m.write(f, args.sort_keys)