Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions docs/releases/pending/4642.fmf
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
description: |
All warnings encountered in a run are now saved in a
:tmt:story:`warnings.yaml</stories/cli/run/logs/warnings.yaml>` file under
the run workdir.
15 changes: 15 additions & 0 deletions stories/cli/run.fmf
Original file line number Diff line number Diff line change
Expand Up @@ -265,3 +265,18 @@ story: 'As a user I want to execute tests easily'
example: |
tmt run provision login
tmt run --last finish

/logs:
/warnings.yaml:
story: As a CI runner I want to forward the warnings to the user.
description: |
All warnings encountered during a run are recorded in a
``warnings.yaml`` file under the run workdir.
example: |
- logger: tmt.run
msg: User is feeling safe.
trace: 'tmt/base/core.py#3322: go()'

- logger: tmt.run.logger0
msg: '/: - ''invalid'' does not match any of the regexes: ''^extra-'''
trace: 'tmt/utils/__init__.py#5385: _validate_fmf_node()'
21 changes: 17 additions & 4 deletions tmt/base/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -2871,6 +2871,8 @@ class Run(HasRunWorkdir, HasEnvironment, tmt.utils.Common):

data: Optional[RunData] = None

WARNINGS_FILE_NAME: ClassVar[str] = "warnings.yaml"

def __init__(
self,
*,
Expand Down Expand Up @@ -2930,6 +2932,17 @@ def run_workdir(self) -> Path:

return self.workdir

def load_workdir(self, *, with_logfiles: bool = True) -> None:
    """
    Prepare the run workdir and its associated log handlers.

    :param with_logfiles: whether to attach logfile handlers
    """
    self._workdir_load(self._workdir_path)

    if not with_logfiles:
        return

    # Record all warnings encountered in the run in a dedicated file
    # under the run workdir.
    self._logger.add_runwarnings_handler(self.run_workdir / self.WARNINGS_FILE_NAME)

@functools.cached_property
def runner(self) -> 'tmt.steps.provision.local.GuestLocal':
import tmt.steps.provision.local
Expand Down Expand Up @@ -3067,7 +3080,7 @@ def load_from_workdir(self) -> None:
from tmt.base.plan import Plan

self._save_tree(self._tree)
self._workdir_load(self._workdir_path)
self.load_workdir()
try:
self.data = RunData.from_serialized(
tmt.utils.yaml_to_dict(self.read(Path('run.yaml')))
Expand Down Expand Up @@ -3282,7 +3295,7 @@ def prepare_for_try(self, tree: Tree) -> None:
"""
self.tree = tree
self._save_tree(self.tree)
self._workdir_load(self._workdir_path)
self.load_workdir()
self.config.last_run = self.run_workdir
self.info(str(self.run_workdir), color='magenta')

Expand All @@ -3297,7 +3310,7 @@ def go(self) -> None:

# Create the workdir and save last run
self._save_tree(self._tree)
self._workdir_load(self._workdir_path)
self.load_workdir()
assert self.tree is not None # narrow type
assert self._workdir is not None # narrow type
if self.tree.root and self._workdir.is_relative_to(self.tree.root):
Expand Down Expand Up @@ -3741,7 +3754,7 @@ def runs(self, id_: tuple[str, ...], keep: Optional[int]) -> bool:
# Pass the context containing --last to Run to choose
# the correct one.
last_run = Run(logger=self._logger, cli_invocation=self.cli_invocation)
last_run._workdir_load(last_run._workdir_path)
last_run.load_workdir(with_logfiles=False)
return self._clean_workdir(last_run.run_workdir)
all_workdirs = list(tmt.utils.generate_runs(self.workdir_root, id_, all_=True))
if keep is not None:
Expand Down
155 changes: 147 additions & 8 deletions tmt/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@
managing handlers themselves, which would be very messy given the propagation of messages.
"""

import copy
import enum
import io
import itertools
import logging
import os
Expand All @@ -33,16 +35,20 @@
TYPE_CHECKING,
Any,
Callable,
ClassVar,
Optional,
Protocol,
TextIO,
Union,
cast,
)

from ruamel.yaml import YAML

from tmt._compat.pathlib import Path
from tmt._compat.typing import Self, TypeAlias
from tmt._compat.warnings import deprecated
from tmt.container import container, simple_field
from tmt.container import SpecBasedContainer, container, simple_field

if TYPE_CHECKING:
import tmt.cli
Expand Down Expand Up @@ -288,15 +294,63 @@ class LogRecordDetails:
logger_topics: set[Topic] = simple_field(default_factory=set[Topic])
message_topic: Optional[Topic] = None

#: The source related to the log message. This is different from the stacktrace
#: which is automatically handled. This is meant to track sources such as those
#: from fmf file
source: Optional[str] = None
#: The reason for triggering the log.
reason: Optional[str] = None


class LogfileHandler(logging.FileHandler):
    """
    File handler that tolerates a log file which does not exist yet.

    Messages emitted before the target file exists are collected in an
    in-memory buffer; once the file appears, the buffered content is
    flushed into it and normal file logging takes over.
    """

    #: Paths of log files whose handler should also receive stacktraces.
    logfiles_with_stacktrace: ClassVar[list[Path]] = []

    #: Temporary buffer until the file is created
    _log_buffer: Optional[io.StringIO] = None

    #: Path reference to the file being written to
    _file: Path

    def __init__(self, filepath: Path, with_stacktrace: bool = False) -> None:
        """
        :param filepath: path of the log file to write to.
        :param with_stacktrace: if set, register the file for stacktrace
            rendering via :py:attr:`logfiles_with_stacktrace`.
        """
        # mode="a": We want to keep the old log file if we are running a new run on top of it
        # delay: If the file was not present don't try to open it
        super().__init__(filepath, mode="a", delay=not filepath.exists())
        self._file = filepath
        if not filepath.exists():
            self._log_buffer = io.StringIO()
            # logging.FileHandler always checks `self.stream` before opening or
            # writing to it. We switch this to the buffer so that it just writes
            # to it immediately instead of skipping the messages
            # ignore[assignment]: Intentional because we are using a different
            # stream type in the meantime
            self.stream = self._log_buffer  # type: ignore[assignment]
        if with_stacktrace:
            LogfileHandler.logfiles_with_stacktrace.append(filepath)

    def _check_file(self) -> None:
        """
        Check for the presence of :py:attr:`_file` and switch to it when it becomes
        available.
        """
        if not self._log_buffer:
            # We do not have any temporary buffer (anymore). Nothing to do here.
            return
        if not self._file.exists():
            # File is not yet available, continue as-is
            return
        # See logic in `logging.FileHandler.emit` when `stream is None` aka
        # `delay=True`. So far the only thing we need to do is to run `_open`
        self.stream = self._open()
        # Write everything we have accumulated so far
        content = self._log_buffer.getvalue()
        if content:
            self.stream.write(content)
        # Cleanup after ourselves
        self._log_buffer = None

    def emit(self, record: logging.LogRecord) -> None:
        """Emit the record, redirecting buffered content to the file if it appeared."""
        # Check if the file is created in the meantime
        self._check_file()
        super().emit(record)


# ignore[type-arg]: StreamHandler is a generic type, but such expression would be incompatible
Expand Down Expand Up @@ -364,6 +418,65 @@ def __init__(self, apply_colors: bool = True, show_timestamps: bool = False) ->
)


_RawRunWarningEntry: TypeAlias = dict[str, Any]


@container
class RunWarningEntry(SpecBasedContainer[_RawRunWarningEntry, _RawRunWarningEntry]):
    """
    A single entry of the run's ``warnings.yaml`` file.

    Mirrors the ``warning-entry`` definition in ``tmt/schemas/warnings.yaml``.
    """

    # The formatted warning message.
    msg: str
    # Name of the logger that emitted the warning.
    logger: str
    # Where the warning was emitted from, e.g. "tmt/log.py#123: emit()".
    trace: str
    # The source related to the message, e.g. coming from an fmf file; may be unset.
    source: Optional[str]
    # The reason for triggering the warning; may be unset.
    reason: Optional[str]

    @classmethod
    def from_spec(cls, spec: _RawRunWarningEntry) -> Self:
        # Spec keys map 1:1 onto container fields.
        return cls(**spec)


class RunWarningsFormatter(logging.Formatter):
    """
    Format warning records as YAML list items for the run's warnings file.

    Every formatted record is rendered as a one-element YAML list, so
    successive emits can simply be appended to the same document.
    """

    #: Yaml handler for formatting the content.
    _yaml_handler: YAML

    def __init__(self) -> None:
        from tmt.utils import _yaml

        # A safe, non-roundtrip loader: roundtrip would require rewriting the
        # whole content each time, while the streaming nature of the logger
        # assumes appending on each ``emit()`` call. Because the document is
        # composed of direct list items, each new item can simply be appended
        # as its own list.
        self._yaml_handler = _yaml(yaml_type="safe")
        super().__init__('%(message)s', datefmt='%H:%M:%S')

    def format(self, record: logging.LogRecord) -> str:
        # TODO: make this in a better yaml with a schema
        details: Optional[LogRecordDetails] = getattr(record, 'details', None)

        if details:
            # A tmt-owned warning: format the original raw value instead of
            # the pre-rendered message.
            patched_record = copy.copy(record)
            patched_record.msg = details.value
            message = super().format(patched_record)
        else:
            # Not a tmt-owned warning, format the record as-is.
            message = super().format(record)

        entry = RunWarningEntry(
            msg=message,
            logger=record.name,
            trace=f"{record.pathname}#{record.lineno}: {record.funcName}()",
            source=details.source if details else "(external)",
            reason=details.reason if details else None,
        )

        # Dump as a single-item list so it can be appended to previous content.
        buffer = io.StringIO()
        self._yaml_handler.dump([entry.to_minimal_spec()], buffer)
        return buffer.getvalue()


class VerbosityLevelFilter(logging.Filter):
def filter(self, record: logging.LogRecord) -> bool:
if record.levelno != logging.INFO:
Expand Down Expand Up @@ -434,6 +547,14 @@ def filter(self, record: logging.LogRecord) -> bool:
return False


class RunWarningsFilter(logging.Filter):
    """Let through only records at exactly the ``WARNING`` level."""

    def filter(self, record: logging.LogRecord) -> bool:
        return record.levelno == logging.WARNING


class LoggingFunction(Protocol):
def __call__(
self,
Expand Down Expand Up @@ -646,6 +767,15 @@ def add_logfile_handler(self, filepath: 'tmt.utils.Path') -> None:

self._logger.addHandler(handler)

def add_runwarnings_handler(self, filepath: Path) -> None:
    """
    Attach a handler that records run warnings into ``filepath``.

    Only warning-level records pass the filter, and each one is formatted
    as a YAML list item so the resulting file is a valid YAML list of
    warning entries.

    :param filepath: path of the ``warnings.yaml`` file to write to.
    """
    handler = LogfileHandler(filepath)

    handler.setFormatter(RunWarningsFormatter())

    handler.addFilter(RunWarningsFilter())

    self._logger.addHandler(handler)

def add_console_handler(self, show_timestamps: bool = False) -> None:
"""
Attach console handler to this logger.
Expand Down Expand Up @@ -907,10 +1037,19 @@ def warning(
message: str,
shift: int = 0,
stacklevel: int = 1,
source: Optional[str] = None,
reason: Optional[str] = None,
) -> None:
self._log(
logging.WARNING,
LogRecordDetails(key='warn', value=message, color='yellow', shift=shift),
LogRecordDetails(
key='warn',
value=message,
color='yellow',
shift=shift,
source=source,
reason=reason,
),
stacklevel=stacklevel,
)

Expand Down
33 changes: 33 additions & 0 deletions tmt/schemas/warnings.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
---

#
# JSON Schema definition for tmt `warnings.yaml` file
# Linked to `tmt.log.RunWarningEntry`
#

$id: /schemas/warnings
$schema: https://json-schema.org/draft-07/schema

definitions:
warning-entry:
type: object
additionalProperties: false
properties:
msg:
type: string
logger:
type: string
trace:
type: string
source:
type: string
reason:
type: string
required:
- msg
- logger
- trace

type: array
items:
$ref: "#/definitions/warning-entry"
2 changes: 1 addition & 1 deletion tmt/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3232,7 +3232,7 @@ def _render_exception_into_files(exception: BaseException, logger: tmt.log.Logge
logfile_streams: list[TextIO] = []

with contextlib.ExitStack() as stack:
for path in tmt.log.LogfileHandler.emitting_to:
for path in tmt.log.LogfileHandler.logfiles_with_stacktrace:
try:
# SIM115: all opened files are added on exit stack, and they
# will get collected and closed properly.
Expand Down
Loading