4 changes: 4 additions & 0 deletions ddtrace/debugging/_encoding.py
@@ -20,6 +20,7 @@
from ddtrace.debugging._signal.log import LogSignal
from ddtrace.debugging._signal.snapshot import Snapshot
from ddtrace.internal import forksafe
from ddtrace.internal import process_tags
from ddtrace.internal._encoding import BufferFull
from ddtrace.internal.logger import get_logger
from ddtrace.internal.utils.formats import format_trace_id
@@ -113,6 +114,9 @@ def _build_log_track_payload(
"timestamp": int(signal.timestamp * 1e3), # milliseconds,
}

if p_tags := process_tags.process_tags:
payload["process_tags"] = p_tags

# Add the correlation IDs if available
if context is not None and context.trace_id is not None:
payload["dd"] = {
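For context, this is the only runtime change: when the process tags module exposes a value, it is attached verbatim to the log-track payload. A minimal sketch of the resulting payload shape, assuming the propagation flag is enabled; only the keys shown in the hunk above come from the diff, while the values and the tag serialization format are placeholders:

# Hypothetical payload produced by _build_log_track_payload with process tag
# propagation enabled (values are illustrative, not real encoder output).
payload = {
    "service": "test-service",
    "timestamp": 1700000000000,  # milliseconds
    "process_tags": "entrypoint.name:gunicorn,entrypoint.type:script",  # assumed format
}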
55 changes: 55 additions & 0 deletions tests/debugging/test_encoding.py
@@ -250,6 +250,61 @@ def test_batch_json_encoder():
assert queue.count == 0


def test_process_tags_are_not_included_by_default():
s = Snapshot(
probe=create_snapshot_line_probe(probe_id="batch-test", source_file="foo.py", line=42),
frame=inspect.currentframe(),
thread=threading.current_thread(),
)
buffer_size = 30 * (1 << 20)
queue = SignalQueue(encoder=LogSignalJsonEncoder(None), buffer_size=buffer_size)

s.line({})

queue = SignalQueue(encoder=LogSignalJsonEncoder("test-service"))
queue.put(s)
data = queue.flush()
assert data is not None
payload, _ = data
decoded = json.loads(payload.decode())
assert "process_tags" not in decoded[0]


@pytest.mark.subprocess(
env=dict(
DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED="true",
)
)
def test_process_tags_are_included():
import inspect
import json
import threading

from ddtrace.debugging._encoding import LogSignalJsonEncoder
from ddtrace.debugging._encoding import SignalQueue
from ddtrace.debugging._signal.snapshot import Snapshot
from tests.debugging.utils import create_snapshot_line_probe

s = Snapshot(
probe=create_snapshot_line_probe(probe_id="batch-test", source_file="foo.py", line=42),
frame=inspect.currentframe(),
thread=threading.current_thread(),
)
buffer_size = 30 * (1 << 20)
queue = SignalQueue(encoder=LogSignalJsonEncoder(None), buffer_size=buffer_size)

s.line({})

queue = SignalQueue(encoder=LogSignalJsonEncoder("test-service"))
queue.put(s)
data = queue.flush()
assert data is not None
payload, _ = data
decoded = json.loads(payload.decode())

assert "process_tags" in decoded[0]


def test_batch_flush_reencode():
s = Snapshot(
probe=create_snapshot_line_probe(probe_id="batch-test", source_file="foo.py", line=42),
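Taken together, the two new tests pin down the gating contract: by default process_tags.process_tags is empty and the encoded payload omits the key, while DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED=true makes the tracer populate it, so the key is present after decoding. A standalone sketch of that contract, with a made-up tag string standing in for the real propagated value:

import json

def encode(fields, process_tags_value=None):
    # Stand-in for the encoder path above: attach process_tags only when a value exists.
    if p_tags := process_tags_value:
        fields["process_tags"] = p_tags
    return json.dumps([fields])

# Flag off (default): the key never appears in the decoded item.
assert "process_tags" not in json.loads(encode({"service": "test-service"}))[0]

# Flag on: the propagated tags are carried through verbatim.
decoded = json.loads(encode({"service": "test-service"}, "entrypoint.name:pytest"))[0]
assert decoded["process_tags"] == "entrypoint.name:pytest"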