7 changes: 7 additions & 0 deletions src/sentry/options/defaults.py
@@ -483,6 +483,13 @@
default=False,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
# Whether Relay's replay-event publishing to Snuba is disabled.
register(
"replay.relay-snuba-publishing-disabled",
type=Bool,
default=False,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
# Billing skip for mobile replay orgs.
register(
"replay.replay-video.billing-skip-org-ids",
1 change: 1 addition & 0 deletions src/sentry/relay/globalconfig.py
@@ -25,6 +25,7 @@
"relay.span-extraction.sample-rate",
"relay.span-normalization.allowed_hosts",
"relay.drop-transaction-attachments",
"replay.relay-snuba-publishing-disabled",
]
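For context on why the option name is added to this list: a rough sketch of the mechanism, assuming the list above is the module's RELAY_OPTIONS and that each named option is read from the option store into the global config payload Relay polls. The function name below is illustrative, not taken from this diff.

# Hypothetical sketch of how options listed above reach Relay.
from sentry import options

def relay_option_values() -> dict:
    # Read every exposed option and embed it in the global config served to Relay.
    return {name: options.get(name) for name in RELAY_OPTIONS}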


10 changes: 10 additions & 0 deletions src/sentry/replays/consumers/recording.py
@@ -137,6 +137,15 @@ def parse_recording_event(message: bytes) -> Event:
else:
replay_video = None

relay_snuba_publish_disabled = recording.get("relay_snuba_publish_disabled", False)

# No matter what value we receive, "True" is the only value that can influence our behavior.
# Otherwise we default to "False", which means our consumer does nothing. It's only when Relay
# reports that it has disabled itself that we publish to the Snuba consumer. Any other value
# is invalid and means we should _not_ publish to Snuba.
if relay_snuba_publish_disabled is not True:
relay_snuba_publish_disabled = False

return {
"context": {
"key_id": recording.get("key_id"),
@@ -146,6 +155,7 @@ def parse_recording_event(message: bytes) -> Event:
"replay_id": recording["replay_id"],
"retention_days": recording["retention_days"],
"segment_id": segment_id,
"should_publish_replay_event": relay_snuba_publish_disabled,
},
"payload_compressed": compressed,
"payload": decompressed,
9 changes: 9 additions & 0 deletions src/sentry/replays/usecases/ingest/__init__.py
@@ -11,6 +11,7 @@
from sentry.constants import DataCategory
from sentry.logging.handlers import SamplingFilter
from sentry.models.project import Project
from sentry.replays.lib.kafka import publish_replay_event
from sentry.replays.lib.storage import _make_recording_filename, storage_kv
from sentry.replays.usecases.ingest.event_logger import (
emit_click_events,
@@ -47,6 +48,7 @@ class EventContext(TypedDict):
replay_id: str
retention_days: int
segment_id: int
should_publish_replay_event: bool | None


class Event(TypedDict):
@@ -176,6 +178,13 @@ def commit_recording_message(recording: ProcessedEvent) -> None:
recording.context["received"],
)

metrics.incr(
"replays.should_publish_replay_event",
tags={"value": recording.context["should_publish_replay_event"]},
)
if recording.context["should_publish_replay_event"] and recording.replay_event:
publish_replay_event(json.dumps(recording.replay_event))
Comment on lines +185 to +186
Potential bug: The feature to publish replay events to Snuba is non-functional due to a key mismatch. The code looks for relay_snuba_publish_disabled, but the option is named differently.
  • Description: There is a mismatch between the configuration option name replay.relay-snuba-publishing-disabled and the key the consumer code attempts to retrieve from the recording payload, relay_snuba_publish_disabled. Because recording.get("relay_snuba_publish_disabled") in src/sentry/replays/consumers/recording.py will always return None, the should_publish_replay_event context value is always None. This causes the conditional check in src/sentry/replays/usecases/ingest/__init__.py to always evaluate to false. As a consequence, the publish_replay_event function is never called, rendering the feature to publish replay events to Snuba completely non-functional regardless of its configuration.

  • Suggested fix: Correct the key used in src/sentry/replays/consumers/recording.py from relay_snuba_publish_disabled to match the key sent by Relay. Additionally, since the flag name implies disabling publishing, the conditional logic in src/sentry/replays/usecases/ingest/__init__.py may need to be inverted to correctly handle the flag's intent.
    severity: 0.8, confidence: 0.95
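A minimal sketch of the direction the comment suggests, assuming Relay forwards a boolean field mirroring the replay.relay-snuba-publishing-disabled option. The message key name below is an assumption; this diff does not show which key Relay actually sends, so it would need to be confirmed against Relay's payload.

# consumers/recording.py -- coerce whatever Relay sends (key name assumed) to a strict boolean.
# True means Relay skipped its own Snuba publish, so this consumer publishes instead.
relay_disabled = recording.get("relay_snuba_publish_disabled") is True

# usecases/ingest/__init__.py -- publish from the consumer only when Relay disabled itself
# and a replay event is present (mirrors the conditional added in this diff).
if recording.context["should_publish_replay_event"] and recording.replay_event:
    publish_replay_event(json.dumps(recording.replay_event))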

# Write to replay-event consumer.
if recording.actions_event:
emit_replay_events(
1 change: 1 addition & 0 deletions tests/sentry/api/endpoints/test_relay_globalconfig_v3.py
@@ -42,6 +42,7 @@ def inner(version, global_):
"profiling.profile_metrics.unsampled_profiles.sample_rate": 1.0,
"relay.span-usage-metric": True,
"relay.cardinality-limiter.mode": "passive",
"replay.relay-snuba-publishing-disabled": True,
Bug: New Option Not Recognized in Config Normalization

The new replay.relay-snuba-publishing-disabled option added to override_options is not yet recognized by sentry_relay's normalize_global_config. This leads to the option being stripped during normalization, causing the test's config comparison to fail, similar to how relay.drop-transaction-attachments is handled.
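One hedged way to keep the comparison green until sentry_relay's normalizer recognizes the new option, assuming the test diffs the expected dict against the normalized config. The set and helper below are illustrative, not existing repo code.

# Hypothetical: options that normalize_global_config does not recognize yet are stripped
# during normalization, so exclude them from both sides of the comparison.
UNRECOGNIZED_BY_NORMALIZER = {
    "relay.drop-transaction-attachments",
    "replay.relay-snuba-publishing-disabled",
}

def strip_unrecognized(config_options: dict) -> dict:
    return {k: v for k, v in config_options.items() if k not in UNRECOGNIZED_BY_NORMALIZER}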


"relay.metric-bucket-distribution-encodings": {
"custom": "array",
"metric_stats": "array",
6 changes: 6 additions & 0 deletions tests/sentry/replays/unit/consumers/test_recording.py
@@ -159,6 +159,7 @@ def test_parse_recording_event_success() -> None:
"replay_id": "test-replay-id",
"retention_days": 30,
"segment_id": 42,
"should_publish_replay_event": False,
},
"payload_compressed": compressed_payload,
"payload": original_payload,
@@ -206,6 +207,7 @@ def test_parse_recording_event_with_replay_event() -> None:
"replay_id": "test-replay-id",
"retention_days": 30,
"segment_id": 42,
"should_publish_replay_event": False,
},
"payload_compressed": compressed_payload,
"payload": original_payload,
@@ -303,6 +305,7 @@ def test_process_message_compressed() -> None:
"replay_id": "1",
"retention_days": 30,
"segment_id": 42,
"should_publish_replay_event": False,
},
filedata=b"x\x9c\x8b\xaeV*\xa9,HU\xb2RP*I-.Q\xd2QPJI,I\x04\xf1\x8b\xf3sS\x15R\xcbR\xf3J\x14\xc0B\xb5\xb1\x00F\x9f\x0e\x8d",
filename="30/4/1/42",
@@ -354,6 +357,7 @@ def test_process_message_uncompressed() -> None:
"replay_id": "1",
"retention_days": 30,
"segment_id": 42,
"should_publish_replay_event": False,
},
filedata=b"x\x9c\x8b\xaeV*\xa9,HU\xb2RP*I-.Q\xd2QPJI,I\x04\xf1\x8b\xf3sS\x15R\xcbR\xf3J\x14\xc0B\xb5\xb1\x00F\x9f\x0e\x8d",
filename="30/4/1/42",
@@ -405,6 +409,7 @@ def test_process_message_compressed_with_video() -> None:
"replay_id": "1",
"retention_days": 30,
"segment_id": 42,
"should_publish_replay_event": False,
},
filedata=zlib.compress(pack(original_payload, b"hello")),
filename="30/4/1/42",
@@ -566,6 +571,7 @@ def make_valid_processed_event() -> ProcessedEvent:
"replay_id": "1",
"retention_days": 30,
"segment_id": 42,
"should_publish_replay_event": False,
},
filedata=compressed_payload,
filename="30/4/1/42",
4 changes: 4 additions & 0 deletions tests/sentry/replays/unit/test_ingest.py
@@ -29,6 +29,7 @@ def test_process_recording_event_without_video() -> None:
"replay_id": "test-replay-id",
"retention_days": 30,
"segment_id": 42,
"should_publish_replay_event": False,
},
"payload": payload,
"payload_compressed": payload_compressed,
@@ -64,6 +65,7 @@ def test_process_recording_event_with_video() -> None:
"replay_id": "video-replay-id",
"retention_days": 90,
"segment_id": 1,
"should_publish_replay_event": False,
},
"payload": payload,
"payload_compressed": payload_compressed,
@@ -99,6 +101,7 @@ def test_parse_replay_events_empty() -> None:
"replay_id": "1",
"retention_days": 1,
"segment_id": 1,
"should_publish_replay_event": False,
},
"payload": b"[]",
"payload_compressed": b"",
@@ -121,6 +124,7 @@ def test_parse_replay_events_invalid_json() -> None:
"replay_id": "1",
"retention_days": 1,
"segment_id": 1,
"should_publish_replay_event": False,
},
"payload": b"hello, world!",
"payload_compressed": b"",