monitor: include build_result in jsonseq monitor streaming
In order to avoid having to rely on the output of `osbuild --json` when using `--progress=JSONSeqMonitor`, the monitor needs to include the `osbuild.pipeline.BuildResult` for each individual stage. This commit adds those results to the monitor.
This commit is contained in:
parent
2cb57f0ed8
commit
5ba7cadd8b
3 changed files with 35 additions and 15 deletions
|
|
@ -165,7 +165,8 @@ class Progress:
|
|||
|
||||
def log_entry(message: Optional[str] = None,
|
||||
context: Optional[Context] = None,
|
||||
progress: Optional[Progress] = None) -> dict:
|
||||
progress: Optional[Progress] = None,
|
||||
build_result: Optional[osbuild.pipeline.BuildResult] = None) -> dict:
|
||||
"""
|
||||
Create a single log entry dict with a given message, context, and progress objects.
|
||||
All arguments are optional. A timestamp is added to the message.
|
||||
|
|
@ -174,6 +175,7 @@ def log_entry(message: Optional[str] = None,
|
|||
# monitors support that
|
||||
return omitempty({
|
||||
"message": message,
|
||||
"build_result": build_result.as_dict() if build_result else None,
|
||||
"context": context.as_dict() if context else None,
|
||||
"progress": progress.as_dict() if progress else None,
|
||||
"timestamp": time.time(),
|
||||
|
|
@ -254,7 +256,7 @@ class LogMonitor(BaseMonitor):
|
|||
super().__init__(fd, total_steps)
|
||||
self.timer_start = 0
|
||||
|
||||
def result(self, result):
|
||||
def result(self, result: osbuild.pipeline.BuildResult):
|
||||
duration = int(time.time() - self.timer_start)
|
||||
self.out.write(f"\n⏱ Duration: {duration}s\n")
|
||||
|
||||
|
|
@ -339,13 +341,25 @@ class JSONSeqMonitor(BaseMonitor):
|
|||
# we may need to check pipeline ids here in the future
|
||||
if self._progress.sub_progress:
|
||||
self._progress.sub_progress.incr()
|
||||
self.log(f"Finished module {result.name}", origin="osbuild.monitor")
|
||||
|
||||
self._jsonseq(log_entry(
|
||||
f"Finished module {result.name}",
|
||||
context=self._context.with_origin("osbuild.monitor"),
|
||||
progress=self._progress,
|
||||
# We should probably remove the "output" key from the result
|
||||
# as it is redundant, each output already generates a "log()"
|
||||
# message that is streamed to the client.
|
||||
build_result=result,
|
||||
))
|
||||
|
||||
def log(self, message, origin: Optional[str] = None):
|
||||
entry = log_entry(message, self._context.with_origin(origin), self._progress)
|
||||
self._jsonseq(entry)
|
||||
self._jsonseq(log_entry(
|
||||
message,
|
||||
context=self._context.with_origin(origin),
|
||||
progress=self._progress,
|
||||
))
|
||||
|
||||
def _jsonseq(self, entry):
|
||||
def _jsonseq(self, entry: dict) -> None:
|
||||
# follow rfc7464 (application/json-seq)
|
||||
self.out.write("\x1e")
|
||||
json.dump(entry, self.out)
|
||||
|
|
|
|||
|
|
@ -44,14 +44,14 @@ def cleanup(*objs):
|
|||
|
||||
|
||||
class BuildResult:
|
||||
def __init__(self, origin, returncode, output, error):
|
||||
def __init__(self, origin: 'Stage', returncode: int, output: str, error: Dict[str, str]) -> None:
|
||||
self.name = origin.name
|
||||
self.id = origin.id
|
||||
self.success = returncode == 0
|
||||
self.output = output
|
||||
self.error = error
|
||||
|
||||
def as_dict(self):
|
||||
def as_dict(self) -> Dict[str, Any]:
|
||||
return vars(self)
|
||||
|
||||
|
||||
|
|
@ -69,11 +69,11 @@ class Stage:
|
|||
self.mounts = {}
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
return self.info.name
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
def id(self) -> str:
|
||||
m = hashlib.sha256()
|
||||
m.update(json.dumps(self.name, sort_keys=True).encode())
|
||||
m.update(json.dumps(self.build, sort_keys=True).encode())
|
||||
|
|
@ -82,11 +82,11 @@ class Stage:
|
|||
if self.source_epoch is not None:
|
||||
m.update(json.dumps(self.source_epoch, sort_keys=True).encode())
|
||||
if self.inputs:
|
||||
data = {n: i.id for n, i in self.inputs.items()}
|
||||
m.update(json.dumps(data, sort_keys=True).encode())
|
||||
data_inp = {n: i.id for n, i in self.inputs.items()}
|
||||
m.update(json.dumps(data_inp, sort_keys=True).encode())
|
||||
if self.mounts:
|
||||
data = [m.id for m in self.mounts.values()]
|
||||
m.update(json.dumps(data).encode())
|
||||
data_mnt = [m.id for m in self.mounts.values()]
|
||||
m.update(json.dumps(data_mnt).encode())
|
||||
return m.hexdigest()
|
||||
|
||||
@property
|
||||
|
|
|
|||
|
|
@ -208,7 +208,7 @@ def test_json_progress_monitor():
|
|||
mon.log("pipeline 1 message 2")
|
||||
mon.log("pipeline 1 finished", origin="org.osbuild")
|
||||
mon.result(osbuild.pipeline.BuildResult(
|
||||
fake_noop_stage, returncode=0, output="output", error=None))
|
||||
fake_noop_stage, returncode=0, output="some output", error=None))
|
||||
mon.finish({"success": True, "name": "test-pipeline-first"})
|
||||
mon.begin(manifest.pipelines["test-pipeline-second"])
|
||||
mon.log("pipeline 2 starting", origin="org.osbuild")
|
||||
|
|
@ -268,6 +268,12 @@ def test_json_progress_monitor():
|
|||
logitem = json.loads(log[i])
|
||||
assert logitem["message"] == "Finished module org.osbuild.noop"
|
||||
assert logitem["context"]["id"] == id_start_module
|
||||
assert logitem["result"] == {
|
||||
"id": fake_noop_stage.id,
|
||||
"name": "org.osbuild.noop",
|
||||
"output": "some output",
|
||||
"success": True,
|
||||
}
|
||||
i += 1
|
||||
|
||||
logitem = json.loads(log[i])
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue