Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions relay-server/src/metrics_extraction/event.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,9 @@ fn extract_span_metrics_for_event(
}
}

// This function assumes it is only called when span metrics should be extracted, hence we
// extract the span root counter unconditionally.
// We unconditionally run metric extraction for spans. The count per root is technically
// only required for configurations that have dynamic sampling enabled, but for the
// sake of simplicity we always add it here.
let transaction = transactions::get_transaction_name(event);
let bucket = create_span_root_counter(
event,
Expand Down
1 change: 0 additions & 1 deletion relay-server/src/services/processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1427,7 +1427,6 @@ impl EnvelopeProcessorService {

// If spans were already extracted for an event, we rely on span processing to extract metrics.
let extract_spans = !spans_extracted.0
&& project_info.config.features.produces_spans()
&& utils::sample(global.options.span_extraction_sample_rate.unwrap_or(1.0)).is_keep();

let metrics = crate::metrics_extraction::event::extract_metrics(
Expand Down
2 changes: 1 addition & 1 deletion relay-server/src/services/processor/metrics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ fn is_metric_namespace_valid(state: &ProjectInfo, namespace: MetricNamespace) ->
match namespace {
MetricNamespace::Sessions => true,
MetricNamespace::Transactions => true,
MetricNamespace::Spans => state.config.features.produces_spans(),
MetricNamespace::Spans => true,
MetricNamespace::Custom => state.has_feature(Feature::CustomMetrics),
MetricNamespace::Stats => true,
MetricNamespace::Unsupported => false,
Expand Down
20 changes: 11 additions & 9 deletions tests/integration/test_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -1024,7 +1024,7 @@ def test_transaction_metrics_extraction_external_relays(
assert len(metrics_envelope.items) == 1

payload = json.loads(metrics_envelope.items[0].get_bytes().decode())
assert len(payload) == 4
assert len(payload) == 6

by_name = {m["name"]: m for m in payload}
light_metric = by_name["d:transactions/duration_light@millisecond"]
Expand Down Expand Up @@ -1087,7 +1087,7 @@ def test_transaction_metrics_extraction_processing_relays(
tx_consumer.assert_empty()

if expect_metrics_extraction:
metrics = metrics_by_name(metrics_consumer, 4, timeout=3)
metrics = metrics_by_name(metrics_consumer, 6, timeout=3)
metric_usage = metrics["c:transactions/usage@none"]
assert metric_usage["tags"] == {}
assert metric_usage["value"] == 1.0
Expand Down Expand Up @@ -1332,20 +1332,22 @@ def test_limit_custom_measurements(
event, _ = transactions_consumer.get_event()
assert len(event["measurements"]) == 2

# Expect exactly 5 metrics:
# (transaction.duration, transaction.duration_light, transactions.count_per_root_project, 1 builtin, 1 custom)
metrics = metrics_by_name(metrics_consumer, 6)
metrics.pop("headers")

assert metrics.keys() == {
expected_metrics = {
"c:transactions/usage@none",
"d:transactions/duration@millisecond",
"d:transactions/duration_light@millisecond",
"c:transactions/count_per_root_project@none",
"d:transactions/measurements.foo@none",
"d:transactions/measurements.bar@none",
"c:spans/usage@none",
"c:spans/count_per_root_project@none",
}

metrics = metrics_by_name(metrics_consumer, len(expected_metrics))
metrics.pop("headers")

assert metrics.keys() == expected_metrics


@pytest.mark.parametrize("has_measurements_config", [True, False])
def test_do_not_drop_custom_measurements_in_static(
Expand Down Expand Up @@ -1856,7 +1858,7 @@ def test_metrics_extraction_with_computed_context_filters(
]

# Verify that all three metrics were extracted
metrics = metrics_by_name(metrics_consumer, 7)
metrics = metrics_by_name(metrics_consumer, 9)

# Check each extracted metric
for metric_name in metric_names:
Expand Down
8 changes: 4 additions & 4 deletions tests/integration/test_outcome.py
Original file line number Diff line number Diff line change
Expand Up @@ -1452,11 +1452,11 @@ def make_envelope():
"source": "pop-relay",
"timestamp": time_within_delta(),
}
for category in [6, 11] # Profile, ProfileIndexed
for category in [DataCategory.PROFILE.value, DataCategory.PROFILE_INDEXED.value]
]

# Make sure the profile will not be counted as accepted:
metrics = metrics_by_name(metrics_consumer, 4)
metrics = metrics_by_name(metrics_consumer, 7)
assert "has_profile" not in metrics["d:transactions/duration@millisecond"]["tags"]
assert "has_profile" not in metrics["c:transactions/usage@none"]["tags"]

Expand Down Expand Up @@ -1621,10 +1621,10 @@ def make_envelope():
"source": "processing-relay",
"timestamp": time_within_delta(),
}
for category in [6, 11] # Profile, ProfileIndexed
for category in [DataCategory.PROFILE.value, DataCategory.PROFILE_INDEXED.value]
]

metrics = metrics_by_name(metrics_consumer, 4)
metrics = metrics_by_name(metrics_consumer, 7)
assert "has_profile" not in metrics["d:transactions/duration@millisecond"]["tags"]
assert "has_profile" not in metrics["c:transactions/usage@none"]["tags"]

Expand Down
8 changes: 8 additions & 0 deletions tests/integration/test_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -916,6 +916,14 @@ def test_processing_quota_transaction_indexing(
timeout=3,
)

# Ignore span metrics: they may be emitted because rate limits from transactions are not
# currently enforced for spans, though they should be. See: https://github.com/getsentry/relay/issues/4961.
metrics = {metric["name"] for (metric, _) in metrics_consumer.get_metrics()}
assert metrics == {
"c:spans/count_per_root_project@none",
"c:spans/usage@none",
}


def test_events_buffered_before_auth(relay, mini_sentry):
evt = threading.Event()
Expand Down
Loading