Commit 7898832

Add TRACE logging level for verbose payload logging across event writers
Parent: 59ba78f

6 files changed, +150 −0 lines

src/event_gate_lambda.py

Lines changed: 6 additions & 0 deletions
@@ -29,6 +29,12 @@
 from jsonschema import validate
 from jsonschema.exceptions import ValidationError

+# Register custom TRACE level before using LOG_LEVEL env var
+try:
+    from .logging_levels import TRACE_LEVEL  # noqa: F401
+except Exception:  # pragma: no cover - defensive
+    TRACE_LEVEL = 5  # type: ignore
+
 # Import configuration directory symbols with explicit ImportError fallback
 try:
     from .conf_path import CONF_DIR, INVALID_CONF_ENV  # type: ignore[no-redef]

src/logging_levels.py

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
+"""Custom logging levels.
+
+Adds a TRACE level below DEBUG for very verbose payload logging.
+"""
+
+from __future__ import annotations
+import logging
+
+TRACE_LEVEL = 5
+
+# Register level name only once (idempotent)
+if not hasattr(logging, "TRACE"):
+    logging.addLevelName(TRACE_LEVEL, "TRACE")
+
+def trace(self: logging.Logger, message: str, *args, **kws):  # type: ignore[override]
+    if self.isEnabledFor(TRACE_LEVEL):
+        self._log(TRACE_LEVEL, message, args, **kws)  # pylint: disable=protected-access
+
+logging.Logger.trace = trace  # type: ignore[attr-defined]
+
+__all__ = ["TRACE_LEVEL"]
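
For context (not part of this commit's diff): importing the module registers the level name and patches Logger.trace, so application code can opt in. A minimal usage sketch, assuming the package is importable as src and using an illustrative logger name:

    import logging
    from src.logging_levels import TRACE_LEVEL  # import side effect registers TRACE and Logger.trace

    logging.basicConfig(level=TRACE_LEVEL)   # let handlers pass TRACE (5) records through
    log = logging.getLogger("example")       # hypothetical logger name for illustration
    log.trace("raw payload=%s", {"k": 1})    # emitted because the effective level is TRACE (5)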

src/writer_eventbridge.py

Lines changed: 13 additions & 0 deletions
@@ -10,6 +10,10 @@
 import boto3
 from botocore.exceptions import BotoCoreError, ClientError

+# Ensure TRACE level is registered
+from . import logging_levels  # noqa: F401
+from .logging_levels import TRACE_LEVEL
+
 STATE: Dict[str, Any] = {"logger": logging.getLogger(__name__), "event_bus_arn": "", "client": None}


@@ -52,6 +56,15 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]
         logger.debug("EventBridge client not initialized - skipping")
         return True, None

+    # TRACE-level payload logging
+    if logger.isEnabledFor(TRACE_LEVEL):
+        try:
+            logger.trace(  # type: ignore[attr-defined]
+                "EventBridge payload topic=%s payload=%s", topic_name, json.dumps(message, separators=(",", ":"))
+            )
+        except Exception:  # pragma: no cover - defensive serialization guard
+            logger.trace("EventBridge payload topic=%s <unserializable>", topic_name)  # type: ignore[attr-defined]
+
     try:
         logger.debug("Sending to eventBridge %s", topic_name)
         response = client.put_events(
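
Each writer applies the same pattern shown above: the isEnabledFor check keeps json.dumps off the normal path when TRACE is disabled, and the broad except falls back to a placeholder for payloads that cannot be serialized. A standalone sketch of that guard, with illustrative names not taken from the diff:

    import json
    import logging

    from src.logging_levels import TRACE_LEVEL

    logger = logging.getLogger("writer.example")  # hypothetical logger for illustration

    def trace_payload(topic_name: str, message: dict) -> None:
        # Serialize only when TRACE is actually enabled; never let logging break the write path.
        if logger.isEnabledFor(TRACE_LEVEL):
            try:
                logger.trace("payload topic=%s payload=%s", topic_name,
                             json.dumps(message, separators=(",", ":")))
            except Exception:  # fall back when the payload is not JSON-serializable
                logger.trace("payload topic=%s <unserializable>", topic_name)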

src/writer_kafka.py

Lines changed: 12 additions & 0 deletions
@@ -10,6 +10,9 @@
 
 from confluent_kafka import Producer
 
+# Add TRACE level import
+from .logging_levels import TRACE_LEVEL  # type: ignore
+
 try:  # KafkaException may not exist in stubbed test module
     from confluent_kafka import KafkaException  # type: ignore
 except (ImportError, ModuleNotFoundError):  # pragma: no cover - fallback for test stub
@@ -75,6 +78,15 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]
         logger.debug("Kafka producer not initialized - skipping")
         return True, None

+    # TRACE-level payload logging prior to produce
+    if logger.isEnabledFor(TRACE_LEVEL):
+        try:
+            logger.trace(  # type: ignore[attr-defined]
+                "Kafka payload topic=%s payload=%s", topic_name, json.dumps(message, separators=(",", ":"))
+            )
+        except Exception:  # pragma: no cover - defensive
+            logger.trace("Kafka payload topic=%s <unserializable>", topic_name)  # type: ignore[attr-defined]
+
     errors: list[Any] = []
     try:
         logger.debug(f"Sending to kafka {topic_name}")

src/writer_postgres.py

Lines changed: 12 additions & 0 deletions
@@ -15,6 +15,9 @@
 except ImportError:  # pragma: no cover - environment without psycopg2
     psycopg2 = None  # type: ignore

+# Ensure TRACE level is registered
+from .logging_levels import TRACE_LEVEL  # type: ignore
+
 # Module level globals for typing
 _logger: logging.Logger = logging.getLogger(__name__)
 POSTGRES: Dict[str, Any] = {"database": ""}
@@ -240,6 +243,15 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]
         _logger.debug("psycopg2 not available - skipping actual Postgres write")
         return True, None

+    # TRACE-level payload logging (only when we intend to write)
+    if _logger.isEnabledFor(TRACE_LEVEL):
+        try:
+            _logger.trace(  # type: ignore[attr-defined]
+                "Postgres payload topic=%s payload=%s", topic_name, json.dumps(message, separators=(",", ":"))
+            )
+        except Exception:  # pragma: no cover - defensive
+            _logger.trace("Postgres payload topic=%s <unserializable>", topic_name)  # type: ignore[attr-defined]
+
     with psycopg2.connect(  # type: ignore[attr-defined]
         database=POSTGRES["database"],
         host=POSTGRES["host"],

tests/test_trace_logging.py

Lines changed: 86 additions & 0 deletions
@@ -0,0 +1,86 @@
+import logging
+from unittest.mock import MagicMock
+
+from src.logging_levels import TRACE_LEVEL
+import src.writer_eventbridge as we
+import src.writer_kafka as wk
+import src.writer_postgres as wp
+
+
+def test_trace_eventbridge(caplog):
+    logger = logging.getLogger("trace.eventbridge")
+    logger.setLevel(TRACE_LEVEL)
+    we.STATE["logger"] = logger
+    we.STATE["event_bus_arn"] = "arn:aws:events:region:acct:event-bus/test"
+    mock_client = MagicMock()
+    mock_client.put_events.return_value = {"FailedEntryCount": 0, "Entries": []}
+    we.STATE["client"] = mock_client
+    caplog.set_level(TRACE_LEVEL)
+    ok, err = we.write("topic.eb", {"k": 1})
+    assert ok and err is None
+    assert any("EventBridge payload" in rec.message for rec in caplog.records)
+
+
+def test_trace_kafka(caplog):
+    class FakeProducer:
+        def produce(self, *a, **kw):
+            cb = kw.get("callback")
+            if cb:
+                cb(None, object())
+
+        def flush(self, *a, **kw):  # noqa: D401
+            return 0
+
+    logger = logging.getLogger("trace.kafka")
+    logger.setLevel(TRACE_LEVEL)
+    wk.STATE["logger"] = logger
+    wk.STATE["producer"] = FakeProducer()
+    caplog.set_level(TRACE_LEVEL)
+    ok, err = wk.write("topic.kf", {"k": 2})
+    assert ok and err is None
+    assert any("Kafka payload" in rec.message for rec in caplog.records)
+
+
+def test_trace_postgres(caplog, monkeypatch):
+    # Prepare dummy psycopg2 connection machinery
+    store = []
+
+    class DummyCursor:
+        def execute(self, sql, params):
+            store.append((sql, params))
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, exc_type, exc, tb):
+            return False
+
+    class DummyConnection:
+        def cursor(self):
+            return DummyCursor()
+
+        def commit(self):
+            pass
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, exc_type, exc, tb):
+            return False
+
+    class DummyPsycopg2:
+        def connect(self, **kwargs):  # noqa: D401
+            return DummyConnection()
+
+    monkeypatch.setattr(wp, "psycopg2", DummyPsycopg2())
+
+    logger = logging.getLogger("trace.postgres")
+    logger.setLevel(TRACE_LEVEL)
+    wp._logger = logger  # type: ignore
+    wp.POSTGRES = {"database": "db", "host": "h", "user": "u", "password": "p", "port": 5432}
+
+    caplog.set_level(TRACE_LEVEL)
+    message = {"event_id": "e", "tenant_id": "t", "source_app": "a", "environment": "dev", "timestamp": 1}
+    ok, err = wp.write("public.cps.za.test", message)
+    assert ok and err is None
+    assert any("Postgres payload" in rec.message for rec in caplog.records)