Add a new output-directory argument that specifies where to store
result objects. For now, it is purely optional and simply copies the
content of the old `output_id` object into the specified directory.
This allows a backwards-compatible transition towards removing any
external access to the osbuild cache.
Note that there is still plenty of room for improvement:
* We only support assembler output for now, but we could also easily
support entire trees as output in case no assembler was selected.
Alternatively, we could introduce a "copy" assembler that just
outputs the input tree.
* This parameter is optional, but should really be mandatory. There
is little reason for the default behavior to just drop any generated
content. This would be a breaking change, though.
* We could move data out of a temporary object-store entry rather
than copy it. But again, for backwards compatibility, we leave the
latest store object intact and do not move things out of it.
* We could now transition towards never committing anything to the
store, not even output IDs, unless explicitly checkpointed.
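To make the copy semantics concrete, here is a minimal sketch of what copying the old `output_id` object into the requested directory could look like. The `copy_output` helper and the `refs/<output_id>` store layout are illustrative assumptions, not osbuild's actual internals:

import os
import shutil


def copy_output(store, output_id, output_directory):
    # Illustrative only: copy the committed output object for `output_id`
    # out of the store into the requested directory, leaving the store
    # entry itself untouched (matching the backwards-compatible behavior
    # described above).
    source = os.path.join(store, "refs", output_id)
    os.makedirs(output_directory, exist_ok=True)
    for name in os.listdir(source):
        src = os.path.join(source, name)
        dst = os.path.join(output_directory, name)
        if os.path.isdir(src):
            shutil.copytree(src, dst, symlinks=True)
        else:
            shutil.copy2(src, dst)

In the script below, this responsibility lives behind `pipeline.run()`, which now receives the value of `--output-directory` as its `output_directory` argument.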
import argparse
import json
import os
import sys

import osbuild


RESET = "\033[0m"
BOLD = "\033[1m"
RED = "\033[31m"


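# Mark every stage whose id appears in `checkpoints` so that it gets
# committed to the object store; return the ids that matched no stage.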
def mark_checkpoints(pipeline, checkpoints):
    points = set(checkpoints)

    def mark_stage(stage):
        c = stage.id
        if c in points:
            stage.checkpoint = True
            points.remove(c)

    def mark_pipeline(pl):
        for stage in pl.stages:
            mark_stage(stage)
        if pl.build:
            mark_pipeline(pl.build)

    mark_pipeline(pipeline)
    return points


# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
def main():
    parser = argparse.ArgumentParser(description="Build operating system images")
    parser.add_argument("manifest_path", metavar="MANIFEST",
                        help="json file containing the manifest that should be built, or a '-' to read from stdin")
    parser.add_argument("--build-env", metavar="FILE", type=os.path.abspath,
                        help="json file containing a description of the build environment")
    parser.add_argument("--store", metavar="DIRECTORY", type=os.path.abspath,
                        default=".osbuild",
                        help="directory where intermediary os trees are stored")
    parser.add_argument("--sources", metavar="FILE", type=os.path.abspath,
                        help="json file containing a dictionary of source configuration")
    parser.add_argument("--secrets", metavar="FILE", type=os.path.abspath,
                        help="json file containing a dictionary of secrets that are passed to sources")
    parser.add_argument("-l", "--libdir", metavar="DIRECTORY", type=os.path.abspath,
                        help="the directory containing stages, assemblers, and the osbuild library")
    parser.add_argument("--checkpoint", metavar="ID", action="append", type=str, default=None,
                        help="stage to commit to the object store during build (can be passed multiple times)")
    parser.add_argument("--json", action="store_true",
                        help="output results in JSON format")
    parser.add_argument("--output-directory", metavar="DIRECTORY", type=os.path.abspath,
                        help="directory where result objects are stored")
    args = parser.parse_args()

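    # Read the manifest either from stdin ("-") or from the given file.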
    if args.manifest_path == "-":
        f = sys.stdin
    else:
        f = open(args.manifest_path)
    manifest = json.load(f)
    f.close()

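    # The pipeline description and the source options come from the
    # manifest; --sources overrides the latter.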
    pipeline = manifest.get("pipeline", {})
    sources_options = manifest.get("sources", {})

    if args.sources:
        with open(args.sources) as f:
            sources_options = json.load(f)

    pipeline = osbuild.load(pipeline, sources_options)

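    # Optionally prepend a build environment (build pipeline and runner).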
    if args.build_env:
        with open(args.build_env) as f:
            build_pipeline, runner = osbuild.load_build(json.load(f), sources_options)
        pipeline.prepend_build_env(build_pipeline, runner)

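    # Load the secrets that get passed on to the sources, if provided.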
    secrets = {}
    if args.secrets:
        with open(args.secrets) as f:
            secrets = json.load(f)

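    # Mark the requested checkpoints; bail out if any of them match no stage.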
    if args.checkpoint:
        missed = mark_checkpoints(pipeline, args.checkpoint)
        if missed:
            for checkpoint in missed:
                print(f"Checkpoint {BOLD}{checkpoint}{RESET} not found!")
            print(f"{RESET}{BOLD}{RED}Failed{RESET}")
            return 1

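    # Run the pipeline; --output-directory is forwarded so that the
    # result can be copied out of the store.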
    try:
        r = pipeline.run(
            args.store,
            interactive=not args.json,
            libdir=args.libdir,
            secrets=secrets,
            output_directory=args.output_directory
        )
    except KeyboardInterrupt:
        print()
        print(f"{RESET}{BOLD}{RED}Aborted{RESET}")
        return 130

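    # Report the result, either as machine-readable JSON or as plain text.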
    if args.json:
        json.dump(r, sys.stdout)
        sys.stdout.write("\n")
    else:
        if r["success"]:
            print("tree id:", pipeline.tree_id)
            print("output id:", pipeline.output_id)
        else:
            print()
            print(f"{RESET}{BOLD}{RED}Failed{RESET}")

    return 0 if r["success"] else 1


if __name__ == "__main__":
    sys.exit(main())