monitor: add duration to JSON logger
Some checks are pending
Checks / Spelling (push) Waiting to run
Checks / Python Linters (push) Waiting to run
Checks / Shell Linters (push) Waiting to run
Checks / 📦 Packit config lint (push) Waiting to run
Checks / 🔍 Check for valid snapshot urls (push) Waiting to run
Checks / 🔍 Check JSON files for formatting consistency (push) Waiting to run
Generate / Documentation (push) Waiting to run
Generate / Test Data (push) Waiting to run
Tests / Unittest (push) Waiting to run
Tests / Assembler test (legacy) (push) Waiting to run
Tests / Smoke run: unittest as normal user on default runner (push) Waiting to run
Some checks are pending
Checks / Spelling (push) Waiting to run
Checks / Python Linters (push) Waiting to run
Checks / Shell Linters (push) Waiting to run
Checks / 📦 Packit config lint (push) Waiting to run
Checks / 🔍 Check for valid snapshot urls (push) Waiting to run
Checks / 🔍 Check JSON files for formatting consistency (push) Waiting to run
Generate / Documentation (push) Waiting to run
Generate / Test Data (push) Waiting to run
Tests / Unittest (push) Waiting to run
Tests / Assembler test (legacy) (push) Waiting to run
Tests / Smoke run: unittest as normal user on default runner (push) Waiting to run
Although the duration of each stage could be calculated from the start and end timestamps, it is more convenient to have it directly in the log entry. This way we can avoid calculating it in the client code.
This commit is contained in:
parent
5088c0ee69
commit
3e37ad3b92
2 changed files with 11 additions and 0 deletions
|
|
@ -168,6 +168,7 @@ class Progress:
|
|||
def log_entry(message: Optional[str] = None,
|
||||
context: Optional[Context] = None,
|
||||
progress: Optional[Progress] = None,
|
||||
duration: Optional[float] = None,
|
||||
result: Union[BuildResult, DownloadResult, None] = None,
|
||||
) -> dict:
|
||||
"""
|
||||
|
|
@ -182,6 +183,7 @@ def log_entry(message: Optional[str] = None,
|
|||
"context": context.as_dict() if context else None,
|
||||
"progress": progress.as_dict() if progress else None,
|
||||
"timestamp": time.time(),
|
||||
"duration": duration,
|
||||
})
|
||||
|
||||
|
||||
|
|
@ -320,6 +322,7 @@ class JSONSeqMonitor(BaseMonitor):
|
|||
self._progress = Progress("pipelines/sources", total_steps)
|
||||
self._context = Context(origin="org.osbuild")
|
||||
self._jsonseq_mu = Lock()
|
||||
self._module_start_time: Optional[float] = None
|
||||
|
||||
def begin(self, pipeline: osbuild.Pipeline):
|
||||
self._context.set_pipeline(pipeline)
|
||||
|
|
@ -341,6 +344,7 @@ class JSONSeqMonitor(BaseMonitor):
|
|||
def _module(self, module: osbuild.Stage):
|
||||
self._context.set_stage(module)
|
||||
self.log(f"Starting module {module.name}", origin="osbuild.monitor")
|
||||
self._module_start_time = time.monotonic()
|
||||
|
||||
def result(self, result: Union[BuildResult, DownloadResult]) -> None:
|
||||
""" Called when the module (stage or download) is finished
|
||||
|
|
@ -368,10 +372,16 @@ class JSONSeqMonitor(BaseMonitor):
|
|||
removed = len(result.output) - max_output_len
|
||||
result.output = f"[...{removed} bytes hidden...]\n{result.output[removed:]}"
|
||||
|
||||
# If the module has a timestamp, we can calculate the duration.
|
||||
duration = None
|
||||
if self._module_start_time is not None:
|
||||
duration = time.monotonic() - self._module_start_time
|
||||
|
||||
self._jsonseq(log_entry(
|
||||
f"Finished module {result.name}",
|
||||
context=self._context.with_origin("osbuild.monitor"),
|
||||
progress=self._progress,
|
||||
duration=duration,
|
||||
# We should probably remove the "output" key from the result
|
||||
# as it is redundant, each output already generates a "log()"
|
||||
# message that is streamed to the client.
|
||||
|
|
|
|||
|
|
@ -304,6 +304,7 @@ def test_json_progress_monitor():
|
|||
"output": "some output",
|
||||
"success": True,
|
||||
}
|
||||
assert logitem["duration"] > 0
|
||||
i += 1
|
||||
|
||||
logitem = json.loads(log[i])
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue