Add TRACE logging level for verbose payload logging across event writers #61
base: master
Changes from all commits
src/logging_levels.py (new file)

@@ -0,0 +1,21 @@
"""Custom logging levels. | ||
|
||
Adds a TRACE level below DEBUG for very verbose payload logging. | ||
""" | ||
|
||
from __future__ import annotations | ||
import logging | ||
|
||
TRACE_LEVEL = 5 | ||
|
||
# Register level name only once (idempotent) | ||
if not hasattr(logging, "TRACE"): | ||
logging.addLevelName(TRACE_LEVEL, "TRACE") | ||
|
||
def trace(self: logging.Logger, message: str, *args, **kws): # type: ignore[override] | ||
if self.isEnabledFor(TRACE_LEVEL): | ||
self._log(TRACE_LEVEL, message, args, **kws) # pylint: disable=protected-access | ||
|
||
logging.Logger.trace = trace # type: ignore[attr-defined] | ||
|
||
__all__ = ["TRACE_LEVEL"] |
src/writer_kafka.py

@@ -26,6 +26,9 @@

from confluent_kafka import Producer

# Add TRACE level import
from .logging_levels import TRACE_LEVEL  # type: ignore

try:  # KafkaException may not exist in stubbed test module
    from confluent_kafka import KafkaException  # type: ignore
except (ImportError, ModuleNotFoundError):  # pragma: no cover - fallback for test stub
@@ -91,6 +94,15 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]]:

        logger.debug("Kafka producer not initialized - skipping")
        return True, None

    # TRACE-level payload logging prior to produce
    if logger.isEnabledFor(TRACE_LEVEL):
        try:
            logger.trace(  # type: ignore[attr-defined]
                "Kafka payload topic=%s payload=%s", topic_name, json.dumps(message, separators=(",", ":"))
            )
        except Exception:  # pragma: no cover - defensive
            logger.trace("Kafka payload topic=%s <unserializable>", topic_name)  # type: ignore[attr-defined]

Review comment on lines +97 to +105:

Consider a payload size/PII guard at TRACE. Even at TRACE, raw payloads can contain secrets/PII and blow up log volume. Add a redaction/cap mechanism (e.g., a redact-keys list plus a max length). Would you like a small helper?

Ruff (0.12.2) — 103-103: Do not catch blind exception: Exception (BLE001)

    errors: list[Any] = []
    try:
        logger.debug("Sending to kafka %s", topic_name)
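
A minimal sketch of the helper the reviewer is asking for; the names safe_payload, redact_keys, and max_len are illustrative and not part of this PR, and it only masks top-level keys:

import json
from typing import Any, Dict, Iterable

_REDACTED = "***"


def safe_payload(
    message: Dict[str, Any],
    redact_keys: Iterable[str] = ("password", "token", "secret"),
    max_len: int = 2048,
) -> str:
    """Serialize a payload for TRACE logs, masking sensitive top-level keys and capping length."""
    lowered = {k.lower() for k in redact_keys}
    masked = {k: (_REDACTED if k.lower() in lowered else v) for k, v in message.items()}
    try:
        text = json.dumps(masked, separators=(",", ":"), default=str)
    except (TypeError, ValueError):  # narrow catch; also satisfies Ruff BLE001
        return "<unserializable>"
    if len(text) > max_len:
        return text[:max_len] + f"...<truncated {len(text) - max_len} chars>"
    return text

Catching only (TypeError, ValueError) instead of a blind Exception would also address the Ruff BLE001 warnings flagged on both writers.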
src/writer_postgres.py

@@ -31,6 +31,9 @@

except ImportError:  # pragma: no cover - environment without psycopg2
    psycopg2 = None  # type: ignore

# Ensure TRACE level is registered
from .logging_levels import TRACE_LEVEL  # type: ignore

# Define a unified psycopg2 error base for safe exception handling even if psycopg2 is missing
if psycopg2 is not None:  # type: ignore
    try:  # pragma: no cover - attribute presence depends on installed psycopg2 variant
@@ -271,6 +274,15 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]]:

        _logger.debug("psycopg2 not available - skipping actual Postgres write")
        return True, None

    # TRACE-level payload logging (only when we intend to write)
    if _logger.isEnabledFor(TRACE_LEVEL):
        try:
            _logger.trace(  # type: ignore[attr-defined]
                "Postgres payload topic=%s payload=%s", topic_name, json.dumps(message, separators=(",", ":"))
            )
        except Exception:  # pragma: no cover - defensive
            _logger.trace("Postgres payload topic=%s <unserializable>", topic_name)  # type: ignore[attr-defined]

Review comment on lines +277 to +285:

TRACE payloads may leak sensitive fields. Introduce optional redaction (a configurable list of keys) and an upper bound on logged payload length. I can add a shared helper.

Ruff (0.12.2) — 283-283: Do not catch blind exception: Exception (BLE001)

    with psycopg2.connect(  # type: ignore[attr-defined]
        database=POSTGRES["database"],
        host=POSTGRES["host"],
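
If a shared helper along those lines were adopted, the inline serialization here could collapse to a single call (hypothetical wiring, not in this PR):

    # Hypothetical: delegate redaction, capping, and the unserializable fallback to safe_payload
    if _logger.isEnabledFor(TRACE_LEVEL):
        _logger.trace(  # type: ignore[attr-defined]
            "Postgres payload topic=%s payload=%s", topic_name, safe_payload(message)
        )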
New test file

@@ -0,0 +1,86 @@
import logging
from unittest.mock import MagicMock

from src.logging_levels import TRACE_LEVEL
import src.writer_eventbridge as we
import src.writer_kafka as wk
import src.writer_postgres as wp


def test_trace_eventbridge(caplog):
    logger = logging.getLogger("trace.eventbridge")
    logger.setLevel(TRACE_LEVEL)
    we.STATE["logger"] = logger
    we.STATE["event_bus_arn"] = "arn:aws:events:region:acct:event-bus/test"
    mock_client = MagicMock()
    mock_client.put_events.return_value = {"FailedEntryCount": 0, "Entries": []}
    we.STATE["client"] = mock_client
    caplog.set_level(TRACE_LEVEL)
    ok, err = we.write("topic.eb", {"k": 1})
    assert ok and err is None
    assert any("EventBridge payload" in rec.message for rec in caplog.records)


def test_trace_kafka(caplog):
    class FakeProducer:
        def produce(self, *a, **kw):
            cb = kw.get("callback")
            if cb:
                cb(None, object())

        def flush(self, *a, **kw):  # noqa: D401
            return 0

    logger = logging.getLogger("trace.kafka")
    logger.setLevel(TRACE_LEVEL)
    wk.STATE["logger"] = logger
    wk.STATE["producer"] = FakeProducer()
    caplog.set_level(TRACE_LEVEL)
    ok, err = wk.write("topic.kf", {"k": 2})
    assert ok and err is None
    assert any("Kafka payload" in rec.message for rec in caplog.records)


def test_trace_postgres(caplog, monkeypatch):
    # Prepare dummy psycopg2 connection machinery
    store = []

    class DummyCursor:
        def execute(self, sql, params):
            store.append((sql, params))

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            return False

    class DummyConnection:
        def cursor(self):
            return DummyCursor()

        def commit(self):
            pass

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            return False

    class DummyPsycopg2:
        def connect(self, **kwargs):  # noqa: D401
            return DummyConnection()

    monkeypatch.setattr(wp, "psycopg2", DummyPsycopg2())

    logger = logging.getLogger("trace.postgres")
    logger.setLevel(TRACE_LEVEL)
    wp._logger = logger  # type: ignore
    wp.POSTGRES = {"database": "db", "host": "h", "user": "u", "password": "p", "port": 5432}

    caplog.set_level(TRACE_LEVEL)
    message = {"event_id": "e", "tenant_id": "t", "source_app": "a", "environment": "dev", "timestamp": 1}
    ok, err = wp.write("public.cps.za.test", message)
    assert ok and err is None
    assert any("Postgres payload" in rec.message for rec in caplog.records)
Review comment (verification agent):

Guard against PII/large payloads at TRACE. Add configurable redaction and a size cap to avoid leaking secrets and unbounded log volume.

Implement configurable redaction plus a size cap for TRACE payload logs: raw payload JSON is being logged at TRACE, which can leak PII and create unbounded log volume. Fix by centralizing safe serialization and applying it consistently across the EventBridge, Kafka, and Postgres writers.

Ruff (0.12.2) — 81-81: Do not catch blind exception: Exception (BLE001)