Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 31 additions & 1 deletion psi/logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,45 @@

from __future__ import annotations

import json
import sys
from typing import TYPE_CHECKING

from loguru import logger

if TYPE_CHECKING:
from loguru import Message

# Colorized, human-readable line format used when stderr is an interactive TTY
# (loguru color markup: <green>/<level> tags are interpreted by colorize=True).
_TTY_FORMAT = (
    "<green>{time:YYYY-MM-DD HH:mm:ss}</green> <level>{level: <8}</level> <level>{message}</level>"
)


def _json_sink(message: Message) -> None:
"""Emit one structured JSON object per log record, no redundant text field.

Loguru's ``serialize=True`` wraps output as ``{"text": "...\\n", "record":
{...}}`` where ``text`` is a pre-formatted duplicate of the structured data
with an embedded newline. Log aggregators then show literal ``\\n`` in the
JSON. Emit just the record fields so the output is flat and clean.
"""
record = message.record
payload: dict[str, object] = {
"time": record["time"].isoformat(),
"level": record["level"].name,
"message": record["message"],
"module": record["module"],
"function": record["function"],
"line": record["line"],
"name": record["name"],
}
if record["extra"]:
payload["extra"] = dict(record["extra"])
if record["exception"] is not None:
payload["exception"] = str(record["exception"])
sys.stderr.write(json.dumps(payload, default=str) + "\n")


def configure_logging(level: str = "INFO", json_output: bool | None = None) -> None:
"""Configure loguru handlers.

Expand All @@ -30,6 +60,6 @@ def configure_logging(level: str = "INFO", json_output: bool | None = None) -> N
use_json = json_output if json_output is not None else not sys.stderr.isatty()

if use_json:
logger.add(sys.stderr, serialize=True, level=level)
logger.add(_json_sink, level=level)
else:
logger.add(sys.stderr, format=_TTY_FORMAT, level=level, colorize=True)
32 changes: 18 additions & 14 deletions tests/test_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,28 +27,32 @@ def _reset_logger() -> Iterator[None]:


class TestConfigureLogging:
def test_json_mode_serializes(self) -> None:
sink = io.StringIO()
logger.remove()
logger.add(sink, serialize=True, level="INFO")
def test_json_mode_emits_flat_structured_record(
self, capsys: pytest.CaptureFixture[str]
) -> None:
"""JSON output is one clean object per line, no redundant text field."""
configure_logging(level="INFO", json_output=True)

logger.bind(event="test.event", key="value").info("test message")

output = sink.getvalue().strip()
output = capsys.readouterr().err.strip()
record = json.loads(output)
assert record["record"]["message"] == "test message"
assert record["record"]["extra"]["event"] == "test.event"
assert record["record"]["extra"]["key"] == "value"

def test_level_filtering(self) -> None:
sink = io.StringIO()
logger.remove()
logger.add(sink, serialize=True, level="WARNING")
assert record["message"] == "test message"
assert record["level"] == "INFO"
assert record["extra"]["event"] == "test.event"
assert record["extra"]["key"] == "value"
# No embedded pre-formatted text field with newlines
assert "text" not in record
assert "\\n" not in output
assert output.count("\n") == 0 # one line only

def test_level_filtering(self, capsys: pytest.CaptureFixture[str]) -> None:
configure_logging(level="WARNING", json_output=True)

logger.info("should not appear")
logger.warning("should appear")

output = sink.getvalue()
output = capsys.readouterr().err
assert "should not appear" not in output
assert "should appear" in output

Expand Down
Loading