feat(replay): remove organizations:session-replay-event-linking feature flag (#66257)

Removes the flag declaration and all references to it.
getsentry/team-replay#387
aliu3ntry committed Mar 6, 2024
1 parent f0f614a commit 4dc60ad
Showing 4 changed files with 76 additions and 90 deletions.
2 changes: 0 additions & 2 deletions src/sentry/conf/server.py
@@ -1847,8 +1847,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"organizations:session-replay-enable-canvas": False,
# Enable canvas replaying
"organizations:session-replay-enable-canvas-replayer": False,
# Enable replay event linking in event processing
"organizations:session-replay-event-linking": False,
# Enable linking from 'new issue' email notifs to the issue replay list
"organizations:session-replay-issue-emails": False,
# Enable the new event linking columns to be queried
1 change: 0 additions & 1 deletion src/sentry/features/__init__.py
@@ -240,7 +240,6 @@
default_manager.add("organizations:session-replay-count-query-optimize", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:session-replay-enable-canvas-replayer", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:session-replay-enable-canvas", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:session-replay-event-linking", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
default_manager.add("organizations:session-replay-issue-emails", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
default_manager.add("organizations:session-replay-new-event-counts", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:session-replay-recording-scrubbing", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
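For orientation: a Sentry feature flag is defined in two places, a default value in SENTRY_FEATURES (src/sentry/conf/server.py, above) and a registration on the feature manager with a handler strategy (this file), so removing a flag means deleting both entries plus every call site. A loose, hypothetical sketch of the registration pattern, simplified well past Sentry's real FeatureManager:

from enum import Enum, auto


class FeatureHandlerStrategy(Enum):
    INTERNAL = auto()  # resolved in-process (options, defaults)
    REMOTE = auto()    # resolved by an external flag service


class FeatureManager:
    """Toy stand-in for sentry.features.default_manager; the real class
    also takes a feature type (e.g. OrganizationFeature) and consults
    registered handlers."""

    def __init__(self, defaults: dict[str, bool]) -> None:
        self._defaults = defaults
        self._strategies: dict[str, FeatureHandlerStrategy] = {}

    def add(self, name: str, strategy: FeatureHandlerStrategy) -> None:
        # Registration makes the flag a legal argument to has().
        self._strategies[name] = strategy

    def has(self, name: str) -> bool:
        # The real manager errors on unknown flags, which is why a flag's
        # registration and its call sites must be removed together, as
        # this commit does.
        if name not in self._strategies:
            raise LookupError(f"unknown feature: {name}")
        return self._defaults.get(name, False)


default_manager = FeatureManager({"organizations:session-replay-issue-emails": False})
default_manager.add("organizations:session-replay-issue-emails", FeatureHandlerStrategy.INTERNAL)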
6 changes: 0 additions & 6 deletions src/sentry/tasks/post_process.py
@@ -1008,12 +1008,6 @@ def _get_replay_id(event):
if job["is_reprocessed"]:
return

if not features.has(
"organizations:session-replay-event-linking", job["event"].project.organization
):
metrics.incr("post_process.process_replay_link.feature_not_enabled")
return

metrics.incr("post_process.process_replay_link.id_sampled")

group_event = job["event"]
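With the guard gone, every non-reprocessed event now reaches the replay-linking logic, and the post_process.process_replay_link.feature_not_enabled counter stops being emitted (worth knowing if any dashboard charts it). A condensed sketch of the resulting control flow, assuming the rest of the function is unchanged from the diff above:

from sentry.utils import metrics  # as imported in the real module


def process_replay_link(job) -> None:
    # Condensed sketch of the post-change flow; the body below the metric
    # call is elided (see the diff above for the real code).
    if job["is_reprocessed"]:
        return

    # The features.has() gate that used to sit here (and its
    # feature_not_enabled counter) is gone, so every non-reprocessed
    # event reaches this point.
    metrics.incr("post_process.process_replay_link.id_sampled")

    group_event = job["event"]
    ...  # look up the replay ID and publish an event_link to Kafka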
157 changes: 76 additions & 81 deletions tests/sentry/tasks/test_post_process.py
@@ -1907,38 +1907,37 @@ def test_replay_linkage(self, incr, kafka_producer, kafka_publisher):
             project_id=self.project.id,
         )

-        with self.feature({"organizations:session-replay-event-linking": True}):
-            self.call_post_process_group(
-                is_new=True,
-                is_regression=False,
-                is_new_group_environment=True,
-                event=event,
-            )
-            assert kafka_producer.return_value.publish.call_count == 1
-            assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"
+        self.call_post_process_group(
+            is_new=True,
+            is_regression=False,
+            is_new_group_environment=True,
+            event=event,
+        )
+        assert kafka_producer.return_value.publish.call_count == 1
+        assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"

-            ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])
+        ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])

-            assert ret_value["type"] == "replay_event"
-            assert ret_value["start_time"]
-            assert ret_value["replay_id"] == replay_id
-            assert ret_value["project_id"] == self.project.id
-            assert ret_value["segment_id"] is None
-            assert ret_value["retention_days"] == 90
+        assert ret_value["type"] == "replay_event"
+        assert ret_value["start_time"]
+        assert ret_value["replay_id"] == replay_id
+        assert ret_value["project_id"] == self.project.id
+        assert ret_value["segment_id"] is None
+        assert ret_value["retention_days"] == 90

-            # convert ret_value_payload which is a list of bytes to a string
-            ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8"))
+        # convert ret_value_payload which is a list of bytes to a string
+        ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8"))

-            assert ret_value_payload == {
-                "type": "event_link",
-                "replay_id": replay_id,
-                "error_id": event.event_id,
-                "timestamp": int(event.datetime.timestamp()),
-                "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
-            }
+        assert ret_value_payload == {
+            "type": "event_link",
+            "replay_id": replay_id,
+            "error_id": event.event_id,
+            "timestamp": int(event.datetime.timestamp()),
+            "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
+        }

-            incr.assert_any_call("post_process.process_replay_link.id_sampled")
-            incr.assert_any_call("post_process.process_replay_link.id_exists")
+        incr.assert_any_call("post_process.process_replay_link.id_sampled")
+        incr.assert_any_call("post_process.process_replay_link.id_exists")

     def test_replay_linkage_with_tag(self, incr, kafka_producer, kafka_publisher):
         replay_id = uuid.uuid4().hex
@@ -1947,86 +1946,82 @@ def test_replay_linkage_with_tag(self, incr, kafka_producer, kafka_publisher):
             project_id=self.project.id,
         )

-        with self.feature({"organizations:session-replay-event-linking": True}):
-            self.call_post_process_group(
-                is_new=True,
-                is_regression=False,
-                is_new_group_environment=True,
-                event=event,
-            )
-            assert kafka_producer.return_value.publish.call_count == 1
-            assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"
+        self.call_post_process_group(
+            is_new=True,
+            is_regression=False,
+            is_new_group_environment=True,
+            event=event,
+        )
+        assert kafka_producer.return_value.publish.call_count == 1
+        assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"

-            ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])
+        ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])

-            assert ret_value["type"] == "replay_event"
-            assert ret_value["start_time"]
-            assert ret_value["replay_id"] == replay_id
-            assert ret_value["project_id"] == self.project.id
-            assert ret_value["segment_id"] is None
-            assert ret_value["retention_days"] == 90
+        assert ret_value["type"] == "replay_event"
+        assert ret_value["start_time"]
+        assert ret_value["replay_id"] == replay_id
+        assert ret_value["project_id"] == self.project.id
+        assert ret_value["segment_id"] is None
+        assert ret_value["retention_days"] == 90

-            # convert ret_value_payload which is a list of bytes to a string
-            ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8"))
+        # convert ret_value_payload which is a list of bytes to a string
+        ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8"))

-            assert ret_value_payload == {
-                "type": "event_link",
-                "replay_id": replay_id,
-                "error_id": event.event_id,
-                "timestamp": int(event.datetime.timestamp()),
-                "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
-            }
+        assert ret_value_payload == {
+            "type": "event_link",
+            "replay_id": replay_id,
+            "error_id": event.event_id,
+            "timestamp": int(event.datetime.timestamp()),
+            "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
+        }

-            incr.assert_any_call("post_process.process_replay_link.id_sampled")
-            incr.assert_any_call("post_process.process_replay_link.id_exists")
+        incr.assert_any_call("post_process.process_replay_link.id_sampled")
+        incr.assert_any_call("post_process.process_replay_link.id_exists")

     def test_replay_linkage_with_tag_pii_scrubbed(self, incr, kafka_producer, kafka_publisher):
         event = self.create_event(
             data={"message": "testing", "tags": {"replayId": "***"}},
             project_id=self.project.id,
         )

-        with self.feature({"organizations:session-replay-event-linking": True}):
-            self.call_post_process_group(
-                is_new=True,
-                is_regression=False,
-                is_new_group_environment=True,
-                event=event,
-            )
-            assert kafka_producer.return_value.publish.call_count == 0
+        self.call_post_process_group(
+            is_new=True,
+            is_regression=False,
+            is_new_group_environment=True,
+            event=event,
+        )
+        assert kafka_producer.return_value.publish.call_count == 0

     def test_no_replay(self, incr, kafka_producer, kafka_publisher):
         event = self.create_event(
             data={"message": "testing"},
             project_id=self.project.id,
         )

-        with self.feature({"organizations:session-replay-event-linking": True}):
-            self.call_post_process_group(
-                is_new=True,
-                is_regression=False,
-                is_new_group_environment=True,
-                event=event,
-            )
-            assert kafka_producer.return_value.publish.call_count == 0
-            incr.assert_any_call("post_process.process_replay_link.id_sampled")
+        self.call_post_process_group(
+            is_new=True,
+            is_regression=False,
+            is_new_group_environment=True,
+            event=event,
+        )
+        assert kafka_producer.return_value.publish.call_count == 0
+        incr.assert_any_call("post_process.process_replay_link.id_sampled")

     def test_0_sample_rate_replays(self, incr, kafka_producer, kafka_publisher):
         event = self.create_event(
             data={"message": "testing"},
             project_id=self.project.id,
         )

-        with self.feature({"organizations:session-replay-event-linking": False}):
-            self.call_post_process_group(
-                is_new=True,
-                is_regression=False,
-                is_new_group_environment=True,
-                event=event,
-            )
-            assert kafka_producer.return_value.publish.call_count == 0
-            for args, _ in incr.call_args_list:
-                self.assertNotEqual(args, ("post_process.process_replay_link.id_sampled"))
+        self.call_post_process_group(
+            is_new=True,
+            is_regression=False,
+            is_new_group_environment=True,
+            event=event,
+        )
+        assert kafka_producer.return_value.publish.call_count == 0
+        for args, _ in incr.call_args_list:
+            self.assertNotEqual(args, ("post_process.process_replay_link.id_sampled"))


 class DetectNewEscalationTestMixin(BasePostProgressGroupMixin):
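On the test side the change is mechanical: each self.feature({...}) block is unwrapped and its body dedented, since a deleted flag can no longer be overridden. self.feature is Sentry's test helper for forcing flag values inside a with block; a rough, hypothetical sketch of what such a helper does (not Sentry's actual implementation):

from contextlib import contextmanager
from unittest import mock

from sentry import features  # the helper targets this module's has()


@contextmanager
def feature_override(overrides: dict[str, bool]):
    """Toy stand-in for the self.feature(...) test helper."""
    real_has = features.has

    def fake_has(name, *args, **kwargs):
        # Forced flags short-circuit; everything else behaves normally.
        if name in overrides:
            return overrides[name]
        return real_has(name, *args, **kwargs)

    with mock.patch.object(features, "has", fake_has):
        yield

After the removal, each test body runs against the always-on code path, so the assertions themselves are unchanged.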
