Merged
11 changes: 11 additions & 0 deletions sentry_sdk/_metrics_batcher.py
@@ -13,15 +13,18 @@

class MetricsBatcher:
    MAX_METRICS_BEFORE_FLUSH = 1000
+    MAX_METRICS_BEFORE_DROP = 10_000
    FLUSH_WAIT_TIME = 5.0

    def __init__(
        self,
        capture_func,  # type: Callable[[Envelope], None]
+        record_lost_func,  # type: Callable[..., None]
    ):
        # type: (...) -> None
        self._metric_buffer = []  # type: List[Metric]
        self._capture_func = capture_func
+        self._record_lost_func = record_lost_func
        self._running = True
        self._lock = threading.Lock()

@@ -72,6 +75,14 @@ def add(
            return None

        with self._lock:
+            if len(self._metric_buffer) >= self.MAX_METRICS_BEFORE_DROP:
+                self._record_lost_func(
+                    reason="queue_overflow",
+                    data_category="trace_metric",
+                    quantity=1,
+                )
+                return None
+
            self._metric_buffer.append(metric)
            if len(self._metric_buffer) >= self.MAX_METRICS_BEFORE_FLUSH:
                self._flush_event.set()
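To illustrate the behavior this file change introduces (a minimal, self-contained sketch with made-up names and small limits, not the SDK's actual code): once the buffer already holds MAX_METRICS_BEFORE_DROP items, add() stops appending, reports each rejected item through the record_lost_func callback with reason "queue_overflow", and returns early, while flushing is still requested at the lower MAX_METRICS_BEFORE_FLUSH threshold.

import threading


class BoundedBatcher:
    # Simplified stand-in for MetricsBatcher's overflow guard (illustration only).
    MAX_BEFORE_FLUSH = 3  # ask the flusher thread to wake up at this size
    MAX_BEFORE_DROP = 5   # hard cap: reject and report anything beyond this

    def __init__(self, record_lost_func):
        self._buffer = []
        self._record_lost_func = record_lost_func
        self._flush_event = threading.Event()
        self._lock = threading.Lock()

    def add(self, item):
        with self._lock:
            if len(self._buffer) >= self.MAX_BEFORE_DROP:
                # Saturated: count the item as lost instead of growing without bound.
                self._record_lost_func(
                    reason="queue_overflow", data_category="trace_metric", quantity=1
                )
                return
            self._buffer.append(item)
            if len(self._buffer) >= self.MAX_BEFORE_FLUSH:
                self._flush_event.set()  # a real batcher's worker thread would flush here


lost = []
batcher = BoundedBatcher(lambda **kwargs: lost.append(kwargs))
for i in range(7):  # two items over the hard cap of five
    batcher.add(i)
print(len(batcher._buffer), len(lost))  # -> 5 2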
20 changes: 18 additions & 2 deletions sentry_sdk/client.py
@@ -62,7 +62,7 @@
    from typing import Union
    from typing import TypeVar

-    from sentry_sdk._types import Event, Hint, SDKInfo, Log, Metric
+    from sentry_sdk._types import Event, Hint, SDKInfo, Log, Metric, EventDataCategory
    from sentry_sdk.integrations import Integration
    from sentry_sdk.scope import Scope
    from sentry_sdk.session import Session

@@ -357,6 +357,19 @@ def _capture_envelope(envelope):
            if self.transport is not None:
                self.transport.capture_envelope(envelope)

+        def _record_lost_event(
+            reason,  # type: str
+            data_category,  # type: EventDataCategory
+            quantity=1,  # type: int
+        ):
+            # type: (...) -> None
+            if self.transport is not None:
+                self.transport.record_lost_event(
+                    reason=reason,
+                    data_category=data_category,
+                    quantity=quantity,
+                )
+
        try:
            _client_init_debug.set(self.options["debug"])
            self.transport = make_transport(self.options)

@@ -377,7 +390,10 @@

        self.metrics_batcher = None
        if has_metrics_enabled(self.options):
-            self.metrics_batcher = MetricsBatcher(capture_func=_capture_envelope)
+            self.metrics_batcher = MetricsBatcher(
+                capture_func=_capture_envelope,
+                record_lost_func=_record_lost_event,
+            )

        max_request_body_size = ("always", "never", "small", "medium")
        if self.options["max_request_body_size"] not in max_request_body_size:
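A rough sketch of the wiring in this file (dummy classes with hypothetical names, not sentry_sdk code): _record_lost_event is a closure over the client, so it looks up self.transport at call time; until a transport exists the report is silently skipped, and afterwards it forwards to the transport's record_lost_event with the same reason/data_category/quantity keywords used above.

class DummyTransport:
    # Stand-in transport that just records what it was told was lost (illustration only).
    def __init__(self):
        self.lost = []

    def record_lost_event(self, reason, data_category, quantity=1):
        self.lost.append((reason, data_category, quantity))


class DummyClient:
    def __init__(self):
        self.transport = None  # assigned later, mirroring how the real client sets it up

        def _record_lost_event(reason, data_category, quantity=1):
            # Late binding: the closure consults whatever transport the client
            # holds at call time, so a missing transport is simply a no-op.
            if self.transport is not None:
                self.transport.record_lost_event(
                    reason=reason,
                    data_category=data_category,
                    quantity=quantity,
                )

        self.record_lost = _record_lost_event  # the PR hands this callback to MetricsBatcher


client = DummyClient()
client.record_lost("queue_overflow", "trace_metric")     # no transport yet: ignored
client.transport = DummyTransport()
client.record_lost("queue_overflow", "trace_metric", 3)  # forwarded to the transport
print(client.transport.lost)  # -> [('queue_overflow', 'trace_metric', 3)]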
24 changes: 24 additions & 0 deletions tests/test_metrics.py
@@ -204,3 +204,27 @@ def _before_metric(record, hint):
    assert len(metrics) == 1
    assert metrics[0]["name"] == "test.keep"
    assert before_metric_called
+
+
+def test_batcher_drops_metrics(sentry_init, monkeypatch):
+    sentry_init()
+    client = sentry_sdk.get_client()
+
+    def no_op_flush():
+        pass
+
+    monkeypatch.setattr(client.metrics_batcher, "_flush", no_op_flush)
+
+    lost_event_calls = []
+
+    def record_lost_event(reason, data_category, quantity):
+        lost_event_calls.append((reason, data_category, quantity))
+
+    monkeypatch.setattr(client.metrics_batcher, "_record_lost_func", record_lost_event)
+
+    for i in range(10_005):  # 5 metrics over the hard limit
+        sentry_sdk.metrics.count("test.counter", 1)
+
+    assert len(lost_event_calls) == 5
+    for lost_event_call in lost_event_calls:
+        assert lost_event_call == ("queue_overflow", "trace_metric", 1)