From 4cda4e5e705a1672dcbec4b81ba927dcdbcc7570 Mon Sep 17 00:00:00 2001 From: Twirre Meulenbelt <43213592+TwirreM@users.noreply.github.com> Date: Thu, 15 Jan 2026 17:07:49 +0100 Subject: [PATCH 1/4] feat: experiment log stream, to file and UI Adds a few new logging utility classes. One to save to files with a date, one to support optional fields in formats, last to filter partial log messages. ref: N25B-401 --- .gitignore | 3 + .logging_config.yaml | 37 ++- src/control_backend/agents/base.py | 3 +- src/control_backend/core/config.py | 14 ++ src/control_backend/logging/__init__.py | 3 + .../logging/dated_file_handler.py | 29 +++ .../logging/optional_field_formatter.py | 67 ++++++ src/control_backend/logging/partial_filter.py | 10 + src/control_backend/logging/setup_logging.py | 4 +- test/unit/logging/test_file_handler.py | 18 ++ .../logging/test_optional_field_formatter.py | 218 ++++++++++++++++++ test/unit/logging/test_partial_filter.py | 83 +++++++ 12 files changed, 479 insertions(+), 10 deletions(-) create mode 100644 src/control_backend/logging/dated_file_handler.py create mode 100644 src/control_backend/logging/optional_field_formatter.py create mode 100644 src/control_backend/logging/partial_filter.py create mode 100644 test/unit/logging/test_file_handler.py create mode 100644 test/unit/logging/test_optional_field_formatter.py create mode 100644 test/unit/logging/test_partial_filter.py diff --git a/.gitignore b/.gitignore index f58719a..47ef46d 100644 --- a/.gitignore +++ b/.gitignore @@ -222,6 +222,9 @@ __marimo__/ docs/* !docs/conf.py +# Generated files +experiment-*.log + diff --git a/.logging_config.yaml b/.logging_config.yaml index 4af5d56..bf4c8c5 100644 --- a/.logging_config.yaml +++ b/.logging_config.yaml @@ -1,36 +1,56 @@ version: 1 custom_levels: - OBSERVATION: 25 - ACTION: 26 + OBSERVATION: 24 + ACTION: 25 + CHAT: 26 LLM: 9 formatters: # Console output colored: - (): "colorlog.ColoredFormatter" + class: colorlog.ColoredFormatter format: 
"{log_color}{asctime}.{msecs:03.0f} | {levelname:11} | {name:70} | {message}" style: "{" datefmt: "%H:%M:%S" # User-facing UI (structured JSON) - json_experiment: - (): "pythonjsonlogger.jsonlogger.JsonFormatter" + json: + class: pythonjsonlogger.jsonlogger.JsonFormatter format: "{name} {levelname} {levelno} {message} {created} {relativeCreated}" style: "{" + # Experiment stream for console and file output, with optional `role` field + experiment: + class: control_backend.logging.OptionalFieldFormatter + format: "%(asctime)s %(levelname)s %(role?)s %(message)s" + defaults: + role: "-" + +filters: + # Filter out any log records that have the extra field "partial" set to True, indicating that they + # will be replaced later. + partial: + (): control_backend.logging.PartialFilter + handlers: console: class: logging.StreamHandler level: DEBUG formatter: colored + filters: [partial] stream: ext://sys.stdout ui: class: zmq.log.handlers.PUBHandler level: LLM - formatter: json_experiment + formatter: json + file: + class: control_backend.logging.DatedFileHandler + formatter: experiment + filters: [partial] + file_prefix: experiment_logs/experiment -# Level of external libraries +# Level for external libraries root: level: WARN handlers: [console] @@ -39,3 +59,6 @@ loggers: control_backend: level: LLM handlers: [ui] + experiment: + level: DEBUG + handlers: [ui, file] diff --git a/src/control_backend/agents/base.py b/src/control_backend/agents/base.py index ec50af5..beccdaa 100644 --- a/src/control_backend/agents/base.py +++ b/src/control_backend/agents/base.py @@ -1,9 +1,10 @@ import logging +from abc import ABC from control_backend.core.agent_system import BaseAgent as CoreBaseAgent -class BaseAgent(CoreBaseAgent): +class BaseAgent(CoreBaseAgent, ABC): """ The primary base class for all implementation agents. 
diff --git a/src/control_backend/core/config.py b/src/control_backend/core/config.py index c4a4db7..c8af094 100644 --- a/src/control_backend/core/config.py +++ b/src/control_backend/core/config.py @@ -142,6 +142,18 @@ class SpeechModelSettings(BaseModel): openai_model_name: str = "small.en" +class LoggingSettings(BaseModel): + """ + Configuration for logging. + + :ivar logging_config_file: Path to the logging configuration file. + :ivar experiment_logger_name: Name of the experiment logger, should match the logging config. + """ + + logging_config_file: str = ".logging_config.yaml" + experiment_logger_name: str = "experiment" + + class Settings(BaseSettings): """ Global application settings. @@ -163,6 +175,8 @@ class Settings(BaseSettings): ri_host: str = "localhost" + logging_settings: LoggingSettings = LoggingSettings() + zmq_settings: ZMQSettings = ZMQSettings() agent_settings: AgentSettings = AgentSettings() diff --git a/src/control_backend/logging/__init__.py b/src/control_backend/logging/__init__.py index c97af40..a08e9b8 100644 --- a/src/control_backend/logging/__init__.py +++ b/src/control_backend/logging/__init__.py @@ -1 +1,4 @@ +from .dated_file_handler import DatedFileHandler as DatedFileHandler +from .optional_field_formatter import OptionalFieldFormatter as OptionalFieldFormatter +from .partial_filter import PartialFilter as PartialFilter from .setup_logging import setup_logging as setup_logging diff --git a/src/control_backend/logging/dated_file_handler.py b/src/control_backend/logging/dated_file_handler.py new file mode 100644 index 0000000..b927f9d --- /dev/null +++ b/src/control_backend/logging/dated_file_handler.py @@ -0,0 +1,29 @@ +from datetime import datetime +from logging import FileHandler +from pathlib import Path + + +class DatedFileHandler(FileHandler): + def __init__(self, file_prefix: str, **kwargs): + if not file_prefix: + raise ValueError("`file_prefix` argument cannot be empty.") + self._file_prefix = file_prefix + kwargs["filename"] 
= self._make_filename()
+        super().__init__(**kwargs)
+
+    def _make_filename(self) -> str:
+        filepath = Path(f"{self._file_prefix}-{datetime.now():%Y%m%d-%H%M%S}.log")
+        if not filepath.parent.is_dir():
+            filepath.parent.mkdir(parents=True, exist_ok=True)
+        return str(filepath)
+
+    def do_rollover(self):
+        self.acquire()
+        try:
+            if self.stream:
+                self.stream.close()
+
+            self.baseFilename = self._make_filename()
+            self.stream = self._open()
+        finally:
+            self.release()
diff --git a/src/control_backend/logging/optional_field_formatter.py b/src/control_backend/logging/optional_field_formatter.py
new file mode 100644
index 0000000..886e9a4
--- /dev/null
+++ b/src/control_backend/logging/optional_field_formatter.py
@@ -0,0 +1,67 @@
+import logging
+import re
+
+
+class OptionalFieldFormatter(logging.Formatter):
+    """
+    Logging formatter that supports optional fields marked by `?`.
+
+    Optional fields are denoted by placing a `?` after the field name inside
+    the parentheses, e.g., `%(role?)s`. If the field is not provided in the
+    log call's `extra` dict, it will use the default value from `defaults`
+    or `None` if no default is specified.
+
+    :param fmt: Format string with optional `%(name?)s` style fields.
+    :type fmt: str or None
+    :param datefmt: Date format string, passed to parent :class:`logging.Formatter`.
+    :type datefmt: str or None
+    :param style: Formatting style, must be '%'. Passed to parent.
+    :type style: str
+    :param defaults: Default values for optional fields when not provided.
+    :type defaults: dict or None
+
+    :example:
+
+    >>> formatter = OptionalFieldFormatter(
+    ...     fmt="%(asctime)s %(levelname)s %(role?)s %(message)s",
+    ...     defaults={"role": "-"}
+    ... )
+    >>> handler = logging.StreamHandler()
+    >>> handler.setFormatter(formatter)
+    >>> logger = logging.getLogger(__name__)
+    >>> logger.addHandler(handler)
+    >>>
+    >>> logger.chat("Hello there!", extra={"role": "USER"})
+    2025-01-15 10:30:00 CHAT USER Hello there!
+ >>> + >>> logger.info("A logging message") + 2025-01-15 10:30:01 INFO - A logging message + + .. note:: + Only `%`-style formatting is supported. The `{` and `$` styles are not + compatible with this formatter. + + .. seealso:: + :class:`logging.Formatter` for base formatter documentation. + """ + + # Match %(name?)s or %(name?)d etc. + OPTIONAL_PATTERN = re.compile(r"%\((\w+)\?\)([sdifFeEgGxXocrba%])") + + def __init__(self, fmt=None, datefmt=None, style="%", defaults=None): + self.defaults = defaults or {} + + self.optional_fields = set(self.OPTIONAL_PATTERN.findall(fmt or "")) + + # Convert %(name?)s to %(name)s for standard formatting + normalized_fmt = self.OPTIONAL_PATTERN.sub(r"%(\1)\2", fmt or "") + + super().__init__(normalized_fmt, datefmt, style) + + def format(self, record): + for field, _ in self.optional_fields: + if not hasattr(record, field): + default = self.defaults.get(field, None) + setattr(record, field, default) + + return super().format(record) diff --git a/src/control_backend/logging/partial_filter.py b/src/control_backend/logging/partial_filter.py new file mode 100644 index 0000000..1b121cb --- /dev/null +++ b/src/control_backend/logging/partial_filter.py @@ -0,0 +1,10 @@ +import logging + + +class PartialFilter(logging.Filter): + """ + Class to filter any log records that have the "partial" attribute set to ``True``. 
+ """ + + def filter(self, record): + return getattr(record, "partial", False) is not True diff --git a/src/control_backend/logging/setup_logging.py b/src/control_backend/logging/setup_logging.py index 05ae85a..7147dcc 100644 --- a/src/control_backend/logging/setup_logging.py +++ b/src/control_backend/logging/setup_logging.py @@ -37,7 +37,7 @@ def add_logging_level(level_name: str, level_num: int, method_name: str | None = setattr(logging, method_name, log_to_root) -def setup_logging(path: str = ".logging_config.yaml") -> None: +def setup_logging(path: str = settings.logging_settings.logging_config_file) -> None: """ Setup logging configuration of the CB. Tries to load the logging configuration from a file, in which we specify custom loggers, formatters, handlers, etc. @@ -65,7 +65,7 @@ def setup_logging(path: str = ".logging_config.yaml") -> None: # Patch ZMQ PUBHandler to know about custom levels if custom_levels: - for logger_name in ("control_backend",): + for logger_name in config.get("loggers", {}): logger = logging.getLogger(logger_name) for handler in logger.handlers: if isinstance(handler, PUBHandler): diff --git a/test/unit/logging/test_file_handler.py b/test/unit/logging/test_file_handler.py new file mode 100644 index 0000000..9d1ee90 --- /dev/null +++ b/test/unit/logging/test_file_handler.py @@ -0,0 +1,18 @@ +from unittest.mock import MagicMock, patch + +from control_backend.logging.dated_file_handler import DatedFileHandler + + +@patch("control_backend.logging.file_handler.DatedFileHandler._open") +def test_reset(open_): + stream = MagicMock() + open_.return_value = stream + + # A file should be opened when the logger is created + handler = DatedFileHandler(prefix="anything") + assert open_.call_count == 1 + + # Upon reset, the current file should be closed, and a new one should be opened + handler.do_rollover() + assert stream.close.call_count == 1 + assert open_.call_count == 2 diff --git a/test/unit/logging/test_optional_field_formatter.py 
b/test/unit/logging/test_optional_field_formatter.py new file mode 100644 index 0000000..ae75bd9 --- /dev/null +++ b/test/unit/logging/test_optional_field_formatter.py @@ -0,0 +1,218 @@ +import logging + +import pytest + +from control_backend.logging.optional_field_formatter import OptionalFieldFormatter + + +@pytest.fixture +def logger(): + """Create a fresh logger for each test.""" + logger = logging.getLogger(f"test_{id(object())}") + logger.setLevel(logging.DEBUG) + logger.handlers = [] + return logger + + +@pytest.fixture +def log_output(logger): + """Capture log output and return a function to get it.""" + + class ListHandler(logging.Handler): + def __init__(self): + super().__init__() + self.records = [] + + def emit(self, record): + self.records.append(self.format(record)) + + handler = ListHandler() + logger.addHandler(handler) + + def get_output(): + return handler.records + + return get_output + + +def test_optional_field_present(logger, log_output): + """Optional field should appear when provided in extra.""" + formatter = OptionalFieldFormatter("%(levelname)s - %(role?)s - %(message)s") + logger.handlers[0].setFormatter(formatter) + + logger.info("test message", extra={"role": "user"}) + + assert log_output() == ["INFO - user - test message"] + + +def test_optional_field_missing_no_default(logger, log_output): + """Missing optional field with no default should be None.""" + formatter = OptionalFieldFormatter("%(levelname)s - %(role?)s - %(message)s") + logger.handlers[0].setFormatter(formatter) + + logger.info("test message") + + assert log_output() == ["INFO - None - test message"] + + +def test_optional_field_missing_with_default(logger, log_output): + """Missing optional field should use provided default.""" + formatter = OptionalFieldFormatter( + "%(levelname)s - %(role?)s - %(message)s", defaults={"role": "assistant"} + ) + logger.handlers[0].setFormatter(formatter) + + logger.info("test message") + + assert log_output() == ["INFO - assistant - 
test message"] + + +def test_optional_field_overrides_default(logger, log_output): + """Provided extra value should override default.""" + formatter = OptionalFieldFormatter( + "%(levelname)s - %(role?)s - %(message)s", defaults={"role": "assistant"} + ) + logger.handlers[0].setFormatter(formatter) + + logger.info("test message", extra={"role": "user"}) + + assert log_output() == ["INFO - user - test message"] + + +def test_multiple_optional_fields(logger, log_output): + """Multiple optional fields should work independently.""" + formatter = OptionalFieldFormatter( + "%(levelname)s - %(role?)s - %(request_id?)s - %(message)s", defaults={"role": "assistant"} + ) + logger.handlers[0].setFormatter(formatter) + + logger.info("test", extra={"request_id": "123"}) + + assert log_output() == ["INFO - assistant - 123 - test"] + + +def test_mixed_optional_and_required_fields(logger, log_output): + """Standard fields should work alongside optional fields.""" + formatter = OptionalFieldFormatter("%(levelname)s %(name)s %(role?)s %(message)s") + logger.handlers[0].setFormatter(formatter) + + logger.info("test", extra={"role": "user"}) + + output = log_output()[0] + assert "INFO" in output + assert "user" in output + assert "test" in output + + +def test_no_optional_fields(logger, log_output): + """Formatter should work normally with no optional fields.""" + formatter = OptionalFieldFormatter("%(levelname)s %(message)s") + logger.handlers[0].setFormatter(formatter) + + logger.info("test message") + + assert log_output() == ["INFO test message"] + + +def test_integer_format_specifier(logger, log_output): + """Optional fields with %d specifier should work.""" + formatter = OptionalFieldFormatter( + "%(levelname)s %(count?)d %(message)s", defaults={"count": 0} + ) + logger.handlers[0].setFormatter(formatter) + + logger.info("test", extra={"count": 42}) + + assert log_output() == ["INFO 42 test"] + + +def test_float_format_specifier(logger, log_output): + """Optional fields with %f 
specifier should work.""" + formatter = OptionalFieldFormatter( + "%(levelname)s %(duration?)f %(message)s", defaults={"duration": 0.0} + ) + logger.handlers[0].setFormatter(formatter) + + logger.info("test", extra={"duration": 1.5}) + + assert "1.5" in log_output()[0] + + +def test_empty_string_default(logger, log_output): + """Empty string default should work.""" + formatter = OptionalFieldFormatter("%(levelname)s %(role?)s %(message)s", defaults={"role": ""}) + logger.handlers[0].setFormatter(formatter) + + logger.info("test") + + assert log_output() == ["INFO test"] + + +def test_none_format_string(): + """None format string should not raise.""" + formatter = OptionalFieldFormatter(fmt=None) + assert formatter.optional_fields == set() + + +def test_optional_fields_parsed_correctly(): + """Check that optional fields are correctly identified.""" + formatter = OptionalFieldFormatter("%(asctime)s %(role?)s %(level?)d %(name)s") + + assert formatter.optional_fields == {("role", "s"), ("level", "d")} + + +def test_format_string_normalized(): + """Check that ? is removed from format string.""" + formatter = OptionalFieldFormatter("%(role?)s %(message)s") + + assert "?" 
not in formatter._style._fmt + assert "%(role)s" in formatter._style._fmt + + +def test_field_with_underscore(logger, log_output): + """Field names with underscores should work.""" + formatter = OptionalFieldFormatter("%(levelname)s %(user_id?)s %(message)s") + logger.handlers[0].setFormatter(formatter) + + logger.info("test", extra={"user_id": "abc123"}) + + assert log_output() == ["INFO abc123 test"] + + +def test_field_with_numbers(logger, log_output): + """Field names with numbers should work.""" + formatter = OptionalFieldFormatter("%(levelname)s %(field2?)s %(message)s") + logger.handlers[0].setFormatter(formatter) + + logger.info("test", extra={"field2": "value"}) + + assert log_output() == ["INFO value test"] + + +def test_multiple_log_calls(logger, log_output): + """Formatter should work correctly across multiple log calls.""" + formatter = OptionalFieldFormatter( + "%(levelname)s %(role?)s %(message)s", defaults={"role": "other"} + ) + logger.handlers[0].setFormatter(formatter) + + logger.info("first", extra={"role": "assistant"}) + logger.info("second") + logger.info("third", extra={"role": "user"}) + + assert log_output() == [ + "INFO assistant first", + "INFO other second", + "INFO user third", + ] + + +def test_default_not_mutated(logger, log_output): + """Original defaults dict should not be mutated.""" + defaults = {"role": "other"} + formatter = OptionalFieldFormatter("%(levelname)s %(role?)s %(message)s", defaults=defaults) + logger.handlers[0].setFormatter(formatter) + + logger.info("test") + + assert defaults == {"role": "other"} diff --git a/test/unit/logging/test_partial_filter.py b/test/unit/logging/test_partial_filter.py new file mode 100644 index 0000000..bd5ef10 --- /dev/null +++ b/test/unit/logging/test_partial_filter.py @@ -0,0 +1,83 @@ +import logging + +import pytest + +from control_backend.logging import PartialFilter + + +@pytest.fixture +def logger(): + """Create a fresh logger for each test.""" + logger = 
logging.getLogger(f"test_{id(object())}") + logger.setLevel(logging.DEBUG) + logger.handlers = [] + return logger + + +@pytest.fixture +def log_output(logger): + """Capture log output and return a function to get it.""" + + class ListHandler(logging.Handler): + def __init__(self): + super().__init__() + self.records = [] + + def emit(self, record): + self.records.append(self.format(record)) + + handler = ListHandler() + handler.addFilter(PartialFilter()) + handler.setFormatter(logging.Formatter("%(message)s")) + logger.addHandler(handler) + + return lambda: handler.records + + +def test_no_partial_attribute(logger, log_output): + """Records without partial attribute should pass through.""" + logger.info("normal message") + + assert log_output() == ["normal message"] + + +def test_partial_true_filtered(logger, log_output): + """Records with partial=True should be filtered out.""" + logger.info("partial message", extra={"partial": True}) + + assert log_output() == [] + + +def test_partial_false_passes(logger, log_output): + """Records with partial=False should pass through.""" + logger.info("complete message", extra={"partial": False}) + + assert log_output() == ["complete message"] + + +def test_partial_none_passes(logger, log_output): + """Records with partial=None should pass through.""" + logger.info("message", extra={"partial": None}) + + assert log_output() == ["message"] + + +def test_partial_truthy_value_passes(logger, log_output): + """ + Records with truthy but non-True partial should pass through, that is, only when it's exactly + ``True`` should it pass. 
+ """ + logger.info("message", extra={"partial": "yes"}) + + assert log_output() == ["message"] + + +def test_multiple_records_mixed(logger, log_output): + """Filter should handle mixed records correctly.""" + logger.info("first") + logger.info("second", extra={"partial": True}) + logger.info("third", extra={"partial": False}) + logger.info("fourth", extra={"partial": True}) + logger.info("fifth") + + assert log_output() == ["first", "third", "fifth"] From ba79d09c5d2ca3a449ba56c9aa0293029e91d6f2 Mon Sep 17 00:00:00 2001 From: Twirre Meulenbelt <43213592+TwirreM@users.noreply.github.com> Date: Fri, 16 Jan 2026 16:32:51 +0100 Subject: [PATCH 2/4] feat: log download endpoints ref: N25B-401 --- .logging_config.yaml | 3 +- src/control_backend/api/v1/endpoints/logs.py | 31 ++++++++++++++++++-- src/control_backend/core/config.py | 4 ++- 3 files changed, 34 insertions(+), 4 deletions(-) diff --git a/.logging_config.yaml b/.logging_config.yaml index bf4c8c5..f7cccf9 100644 --- a/.logging_config.yaml +++ b/.logging_config.yaml @@ -48,6 +48,7 @@ handlers: class: control_backend.logging.DatedFileHandler formatter: experiment filters: [partial] + # Directory must match config.logging_settings.experiment_log_directory file_prefix: experiment_logs/experiment # Level for external libraries @@ -59,6 +60,6 @@ loggers: control_backend: level: LLM handlers: [ui] - experiment: + experiment: # This name must match config.logging_settings.experiment_logger_name level: DEBUG handlers: [ui, file] diff --git a/src/control_backend/api/v1/endpoints/logs.py b/src/control_backend/api/v1/endpoints/logs.py index ccccf44..0e2dff9 100644 --- a/src/control_backend/api/v1/endpoints/logs.py +++ b/src/control_backend/api/v1/endpoints/logs.py @@ -1,8 +1,9 @@ import logging +from pathlib import Path import zmq -from fastapi import APIRouter -from fastapi.responses import StreamingResponse +from fastapi import APIRouter, HTTPException +from fastapi.responses import FileResponse, StreamingResponse from 
zmq.asyncio import Context
 
 from control_backend.core.config import settings
@@ -38,3 +39,29 @@ async def log_stream():
         yield f"data: {message}\n\n"
 
     return StreamingResponse(gen(), media_type="text/event-stream")
+
+
+LOGGING_DIR = Path(settings.logging_settings.experiment_log_directory).resolve()
+
+
+@router.get("/logs/files")
+@router.get("/api/logs/files")
+async def log_directory():
+    """
+    Get a list of all log files stored in the experiment log file directory.
+    """
+    return [f.name for f in LOGGING_DIR.glob("*.log")]
+
+
+@router.get("/logs/files/{filename}")
+@router.get("/api/logs/files/{filename}")
+async def log_file(filename: str):
+    # Prevent path-traversal
+    file_path = (LOGGING_DIR / filename).resolve()  # This .resolve() is important
+    if not file_path.is_relative_to(LOGGING_DIR):
+        raise HTTPException(status_code=400, detail="Invalid filename.")
+
+    if not file_path.is_file():
+        raise HTTPException(status_code=404, detail="File not found.")
+
+    return FileResponse(file_path, filename=file_path.name)
diff --git a/src/control_backend/core/config.py b/src/control_backend/core/config.py
index c8af094..2dbde02 100644
--- a/src/control_backend/core/config.py
+++ b/src/control_backend/core/config.py
@@ -147,10 +147,12 @@ class LoggingSettings(BaseModel):
     Configuration for logging.
 
     :ivar logging_config_file: Path to the logging configuration file.
-    :ivar experiment_logger_name: Name of the experiment logger, should match the logging config.
+    :ivar experiment_log_directory: Location of the experiment logs. Must match the logging config.
+    :ivar experiment_logger_name: Name of the experiment logger. Must match the logging config.
""" logging_config_file: str = ".logging_config.yaml" + experiment_log_directory: str = "experiment_logs" experiment_logger_name: str = "experiment" From 58881b5914fb7bbf6d5e7caccb4172e81888c3c6 Mon Sep 17 00:00:00 2001 From: Twirre Meulenbelt <43213592+TwirreM@users.noreply.github.com> Date: Mon, 19 Jan 2026 12:47:59 +0100 Subject: [PATCH 3/4] test: add test cases ref: N25B-401 --- .../api/v1/endpoints/test_logs_endpoint.py | 68 ++++++++++++++++++- test/unit/logging/test_dated_file_handler.py | 45 ++++++++++++ test/unit/logging/test_file_handler.py | 18 ----- 3 files changed, 111 insertions(+), 20 deletions(-) create mode 100644 test/unit/logging/test_dated_file_handler.py delete mode 100644 test/unit/logging/test_file_handler.py diff --git a/test/unit/api/v1/endpoints/test_logs_endpoint.py b/test/unit/api/v1/endpoints/test_logs_endpoint.py index 50ee740..4aaa90e 100644 --- a/test/unit/api/v1/endpoints/test_logs_endpoint.py +++ b/test/unit/api/v1/endpoints/test_logs_endpoint.py @@ -1,7 +1,7 @@ -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest -from fastapi import FastAPI +from fastapi import FastAPI, HTTPException from fastapi.testclient import TestClient from starlette.responses import StreamingResponse @@ -61,3 +61,67 @@ async def test_log_stream_endpoint_lines(client): # Optional: assert subscribe/connect were called assert dummy_socket.subscribed # at least some log levels subscribed assert dummy_socket.connected # connect was called + + +@patch("control_backend.api.v1.endpoints.logs.LOGGING_DIR") +def test_files_endpoint(LOGGING_DIR, client): + file_1, file_2 = MagicMock(), MagicMock() + file_1.name = "file_1" + file_2.name = "file_2" + LOGGING_DIR.glob.return_value = [file_1, file_2] + result = client.get("/api/logs/files") + + assert result.status_code == 200 + assert result.json() == ["file_1", "file_2"] + + +@patch("control_backend.api.v1.endpoints.logs.FileResponse") 
+@patch("control_backend.api.v1.endpoints.logs.LOGGING_DIR") +def test_log_file_endpoint_success(LOGGING_DIR, MockFileResponse, client): + mock_file_path = MagicMock() + mock_file_path.is_relative_to.return_value = True + mock_file_path.is_file.return_value = True + mock_file_path.name = "test.log" + + LOGGING_DIR.__truediv__ = MagicMock(return_value=mock_file_path) + mock_file_path.resolve.return_value = mock_file_path + + MockFileResponse.return_value = MagicMock() + + result = client.get("/api/logs/files/test.log") + + assert result.status_code == 200 + MockFileResponse.assert_called_once_with(mock_file_path, filename="test.log") + + +@pytest.mark.asyncio +@patch("control_backend.api.v1.endpoints.logs.LOGGING_DIR") +async def test_log_file_endpoint_path_traversal(LOGGING_DIR): + from control_backend.api.v1.endpoints.logs import log_file + + mock_file_path = MagicMock() + mock_file_path.is_relative_to.return_value = False + + LOGGING_DIR.__truediv__ = MagicMock(return_value=mock_file_path) + mock_file_path.resolve.return_value = mock_file_path + + with pytest.raises(HTTPException) as exc_info: + await log_file("../secret.txt") + + assert exc_info.value.status_code == 400 + assert exc_info.value.detail == "Invalid filename." + + +@patch("control_backend.api.v1.endpoints.logs.LOGGING_DIR") +def test_log_file_endpoint_file_not_found(LOGGING_DIR, client): + mock_file_path = MagicMock() + mock_file_path.is_relative_to.return_value = True + mock_file_path.is_file.return_value = False + + LOGGING_DIR.__truediv__ = MagicMock(return_value=mock_file_path) + mock_file_path.resolve.return_value = mock_file_path + + result = client.get("/api/logs/files/nonexistent.log") + + assert result.status_code == 404 + assert result.json()["detail"] == "File not found." 
diff --git a/test/unit/logging/test_dated_file_handler.py b/test/unit/logging/test_dated_file_handler.py new file mode 100644 index 0000000..14809fb --- /dev/null +++ b/test/unit/logging/test_dated_file_handler.py @@ -0,0 +1,45 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from control_backend.logging.dated_file_handler import DatedFileHandler + + +@patch("control_backend.logging.dated_file_handler.DatedFileHandler._open") +def test_reset(open_): + stream = MagicMock() + open_.return_value = stream + + # A file should be opened when the logger is created + handler = DatedFileHandler(file_prefix="anything") + assert open_.call_count == 1 + + # Upon reset, the current file should be closed, and a new one should be opened + handler.do_rollover() + assert stream.close.call_count == 1 + assert open_.call_count == 2 + + +@patch("control_backend.logging.dated_file_handler.Path") +@patch("control_backend.logging.dated_file_handler.DatedFileHandler._open") +def test_creates_dir(open_, Path_): + stream = MagicMock() + open_.return_value = stream + + test_path = MagicMock() + test_path.parent.is_dir.return_value = False + Path_.return_value = test_path + + DatedFileHandler(file_prefix="anything") + + # The directory should've been created + test_path.parent.mkdir.assert_called_once() + + +@patch("control_backend.logging.dated_file_handler.DatedFileHandler._open") +def test_invalid_constructor(_): + with pytest.raises(ValueError): + DatedFileHandler(file_prefix=None) + + with pytest.raises(ValueError): + DatedFileHandler(file_prefix="") diff --git a/test/unit/logging/test_file_handler.py b/test/unit/logging/test_file_handler.py deleted file mode 100644 index 9d1ee90..0000000 --- a/test/unit/logging/test_file_handler.py +++ /dev/null @@ -1,18 +0,0 @@ -from unittest.mock import MagicMock, patch - -from control_backend.logging.dated_file_handler import DatedFileHandler - - -@patch("control_backend.logging.file_handler.DatedFileHandler._open") -def 
test_reset(open_): - stream = MagicMock() - open_.return_value = stream - - # A file should be opened when the logger is created - handler = DatedFileHandler(prefix="anything") - assert open_.call_count == 1 - - # Upon reset, the current file should be closed, and a new one should be opened - handler.do_rollover() - assert stream.close.call_count == 1 - assert open_.call_count == 2 From a74ecc6c4549a645402303a3e5ea9f6787211e78 Mon Sep 17 00:00:00 2001 From: Twirre Meulenbelt <43213592+TwirreM@users.noreply.github.com> Date: Thu, 22 Jan 2026 11:48:02 +0100 Subject: [PATCH 4/4] docs: add docstrings to dated file handler ref: N25B-401 --- src/control_backend/logging/dated_file_handler.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/control_backend/logging/dated_file_handler.py b/src/control_backend/logging/dated_file_handler.py index b927f9d..3a405bb 100644 --- a/src/control_backend/logging/dated_file_handler.py +++ b/src/control_backend/logging/dated_file_handler.py @@ -12,12 +12,21 @@ class DatedFileHandler(FileHandler): super().__init__(**kwargs) def _make_filename(self) -> str: + """ + Create the filename for the current logfile, using the configured file prefix and the + current date and time. If the directory does not exist, it gets created. + + :return: A filepath. + """ filepath = Path(f"{self._file_prefix}-{datetime.now():%Y%m%d-%H%M%S}.log") if not filepath.parent.is_dir(): filepath.parent.mkdir(parents=True, exist_ok=True) return str(filepath) def do_rollover(self): + """ + Close the current logfile and create a new one with the current date and time. + """ self.acquire() try: if self.stream: