From 5ad31df5229687f6acdb1e519848dd5ffa583bb1 Mon Sep 17 00:00:00 2001 From: Edward Gou Date: Wed, 12 Nov 2025 17:45:54 -0500 Subject: [PATCH 1/6] Consolidates fcp lcp and ttfb into a single vital issue --- .../tasks/web_vitals_issue_detection.py | 44 +++- .../web_vitals/issue_platform_adapter.py | 39 ++- src/sentry/web_vitals/types.py | 7 +- .../tasks/test_web_vitals_issue_detection.py | 244 ++++++++++++++++-- 4 files changed, 286 insertions(+), 48 deletions(-) diff --git a/src/sentry/tasks/web_vitals_issue_detection.py b/src/sentry/tasks/web_vitals_issue_detection.py index ec61d56fd866e6..0c8fd36c14e6fa 100644 --- a/src/sentry/tasks/web_vitals_issue_detection.py +++ b/src/sentry/tasks/web_vitals_issue_detection.py @@ -17,7 +17,11 @@ from sentry.taskworker.namespaces import issues_tasks from sentry.web_vitals.issue_platform_adapter import send_web_vitals_issue_to_platform from sentry.web_vitals.query import get_trace_by_web_vital_measurement -from sentry.web_vitals.types import WebVitalIssueDetectionType, WebVitalIssueGroupData +from sentry.web_vitals.types import ( + WebVitalIssueDetectionGroupingType, + WebVitalIssueDetectionType, + WebVitalIssueGroupData, +) logger = logging.getLogger("sentry.tasks.web_vitals_issue_detection") @@ -26,6 +30,13 @@ SCORE_THRESHOLD = 0.9 # Scores below this threshold will create web vital issues SAMPLES_COUNT_THRESHOLD = 10 # TODO: Use project config threshold setting. Web Vitals require at least this amount of samples to create an issue VITALS: list[WebVitalIssueDetectionType] = ["lcp", "fcp", "cls", "ttfb", "inp"] +VITAL_GROUPING_MAP: dict[WebVitalIssueDetectionType, WebVitalIssueDetectionGroupingType] = { + "lcp": "rendering", + "fcp": "rendering", + "ttfb": "rendering", + "cls": "cls", + "inp": "inp", +} def get_enabled_project_ids() -> list[int]: @@ -79,11 +90,18 @@ def detect_web_vitals_issues_for_project(project_id: int) -> None: project_id, limit=TRANSACTIONS_PER_PROJECT_LIMIT ) for web_vital_issue_group in web_vital_issue_groups: - p75_vital_value = web_vital_issue_group["value"] + scores = web_vital_issue_group["scores"] + values = web_vital_issue_group["values"] + + # We can only use a single trace sample for an issue event + # Use the p75 of the worst performing vital + vital = sorted(scores.items(), key=lambda item: item[1])[0][0] + p75_vital_value = values[vital] + trace = get_trace_by_web_vital_measurement( web_vital_issue_group["transaction"], project_id, - web_vital_issue_group["vital"], + vital, p75_vital_value, start_time_delta=DEFAULT_START_TIME_DELTA, ) @@ -155,7 +173,7 @@ def get_highest_opportunity_page_vitals_for_project( sampling_mode="NORMAL", ) - web_vital_issue_groups: list[WebVitalIssueGroupData] = [] + web_vital_issue_groups: dict[WebVitalIssueDetectionGroupingType, WebVitalIssueGroupData] = {} seen_names = set() for row in result.get("data", []): name = row.get("transaction") @@ -166,6 +184,8 @@ def get_highest_opportunity_page_vitals_for_project( if normalized_name in seen_names: continue seen_names.add(normalized_name) + + # Collect all vital scores and values for vital in VITALS: score = row.get(f"performance_score(measurements.score.{vital})") p75_value = row.get(f"p75(measurements.{vital})") @@ -178,17 +198,19 @@ def get_highest_opportunity_page_vitals_for_project( and enough_samples and p75_value is not None ): - web_vital_issue_groups.append( - { + if VITAL_GROUPING_MAP[vital] not in web_vital_issue_groups: + web_vital_issue_groups[VITAL_GROUPING_MAP[vital]] = { "transaction": name, - "vital": vital, - "score": score, 
"project": project, - "value": p75_value, + "vital_grouping": VITAL_GROUPING_MAP[vital], + "scores": {vital: score}, + "values": {vital: p75_value}, } - ) + else: + web_vital_issue_groups[VITAL_GROUPING_MAP[vital]]["scores"][vital] = score + web_vital_issue_groups[VITAL_GROUPING_MAP[vital]]["values"][vital] = p75_value - return web_vital_issue_groups + return list(web_vital_issue_groups.values()) def check_seer_setup_for_project(project: Project) -> bool: diff --git a/src/sentry/web_vitals/issue_platform_adapter.py b/src/sentry/web_vitals/issue_platform_adapter.py index 066402193d3d59..655250937b57ff 100644 --- a/src/sentry/web_vitals/issue_platform_adapter.py +++ b/src/sentry/web_vitals/issue_platform_adapter.py @@ -7,11 +7,11 @@ from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka from sentry.models.group import Group, GroupStatus -from sentry.web_vitals.types import WebVitalIssueDetectionType, WebVitalIssueGroupData +from sentry.web_vitals.types import WebVitalIssueDetectionGroupingType, WebVitalIssueGroupData -def create_fingerprint(vital: WebVitalIssueDetectionType, transaction: str) -> str: - prehashed_fingerprint = f"insights-web-vitals-{vital}-{transaction}" +def create_fingerprint(vital_grouping: WebVitalIssueDetectionGroupingType, transaction: str) -> str: + prehashed_fingerprint = f"insights-web-vitals-{vital_grouping}-{transaction}" fingerprint = hashlib.sha1((prehashed_fingerprint).encode()).hexdigest() return fingerprint @@ -24,15 +24,19 @@ def send_web_vitals_issue_to_platform(data: WebVitalIssueGroupData, trace_id: st event_id = uuid4().hex now = datetime.now(UTC) transaction = data["transaction"] - vital = data["vital"] + scores = data["scores"] + values = data["values"] tags = { "transaction": data["transaction"], - "web_vital": vital, - "score": f"{data['score']:.2g}", - vital: f"{data['value']}", } + # These should already match, but use the intersection to be safe + vitals = scores.keys() & values.keys() + for vital in vitals: + tags[f"{vital}_score"] = f"{scores[vital]:.2g}" + tags[vital] = f"{values[vital]}" + event_data = { "event_id": event_id, "project_id": data["project"].id, @@ -63,10 +67,21 @@ def send_web_vitals_issue_to_platform(data: WebVitalIssueGroupData, trace_id: st ] # TODO: Add better titles and subtitles - title = f"{data['vital'].upper()} score needs improvement" - subtitle = f"{transaction} has a {data['vital'].upper()} score of {data['score']:.2g}" - - fingerprint = create_fingerprint(data["vital"], transaction) + if data["vital_grouping"] == "rendering": + title = "Rendering Web Vital scores needs improvement" + else: + title = f"{data['vital_grouping'].upper()} score needs improvement" + subtitle_parts = [] + for vital in data["scores"]: + a_or_an = "an" if vital in ("lcp", "fcp", "inp") else "a" + subtitle_parts.append(f"{a_or_an} {vital.upper()} score of {data['scores'][vital]:.2g}") + if len(subtitle_parts) > 1: + scores_text = ", ".join(subtitle_parts[:-1]) + " and " + subtitle_parts[-1] + else: + scores_text = subtitle_parts[0] + subtitle = f"{transaction} has {scores_text}" + + fingerprint = create_fingerprint(data["vital_grouping"], transaction) occurence = IssueOccurrence( id=uuid4().hex, @@ -90,7 +105,7 @@ def send_web_vitals_issue_to_platform(data: WebVitalIssueGroupData, trace_id: st def check_unresolved_web_vitals_issue_exists(data: WebVitalIssueGroupData) -> bool: - fingerprint = create_fingerprint(data["vital"], data["transaction"]) + 
fingerprint = create_fingerprint(data["vital_grouping"], data["transaction"]) fingerprint_hash = hash_fingerprint([fingerprint])[0] return Group.objects.filter( diff --git a/src/sentry/web_vitals/types.py b/src/sentry/web_vitals/types.py index e3487bbacfe35a..8d997c6d353a23 100644 --- a/src/sentry/web_vitals/types.py +++ b/src/sentry/web_vitals/types.py @@ -3,11 +3,12 @@ from sentry.models.project import Project WebVitalIssueDetectionType = Literal["lcp", "fcp", "cls", "ttfb", "inp"] +WebVitalIssueDetectionGroupingType = Literal["rendering", "cls", "inp"] class WebVitalIssueGroupData(TypedDict): transaction: str - vital: WebVitalIssueDetectionType - score: float project: Project - value: float + vital_grouping: WebVitalIssueDetectionGroupingType + scores: dict[WebVitalIssueDetectionType, float] + values: dict[WebVitalIssueDetectionType, float] diff --git a/tests/sentry/tasks/test_web_vitals_issue_detection.py b/tests/sentry/tasks/test_web_vitals_issue_detection.py index 77293333993e24..08fc864ffec6b3 100644 --- a/tests/sentry/tasks/test_web_vitals_issue_detection.py +++ b/tests/sentry/tasks/test_web_vitals_issue_detection.py @@ -208,7 +208,7 @@ def test_run_detection_produces_occurrences(self, mock_produce_occurrence_to_kaf ): run_web_vitals_issue_detection() - assert mock_produce_occurrence_to_kafka.call_count == 3 + assert mock_produce_occurrence_to_kafka.call_count == 2 call_args_list = mock_produce_occurrence_to_kafka.call_args_list # Common attributes @@ -230,34 +230,122 @@ def test_run_detection_produces_occurrences(self, mock_produce_occurrence_to_kaf lcp_call = call_args_list[0] lcp_occurrence = lcp_call.kwargs["occurrence"] - assert lcp_occurrence.fingerprint == ["b004a0dcf761775c1724e1a8d7cf0f1e0403978e"] - assert lcp_occurrence.issue_title == "LCP score needs improvement" - assert lcp_occurrence.subtitle == "/home has a LCP score of 0.5" + assert lcp_occurrence.fingerprint == ["d94185e6d794589212c74476702515734b703f86"] + assert lcp_occurrence.issue_title == "Rendering score needs improvement" + assert ( + lcp_occurrence.subtitle == "/home has an LCP score of 0.5 and an FCP score of 0.8" + ) lcp_event_data = lcp_call.kwargs["event_data"] - assert lcp_event_data["tags"]["web_vital"] == "lcp" - assert lcp_event_data["tags"]["score"] == "0.5" + assert lcp_event_data["tags"]["lcp_score"] == "0.5" assert lcp_event_data["tags"]["lcp"] == "3500.0" - fcp_call = call_args_list[1] - fcp_occurrence = fcp_call.kwargs["occurrence"] - assert fcp_occurrence.fingerprint == ["0f101df7dfe2a5249a430b5088528a28e8e07aac"] - assert fcp_occurrence.issue_title == "FCP score needs improvement" - assert fcp_occurrence.subtitle == "/home has a FCP score of 0.8" - fcp_event_data = fcp_call.kwargs["event_data"] - assert fcp_event_data["tags"]["web_vital"] == "fcp" - assert fcp_event_data["tags"]["score"] == "0.8" - assert fcp_event_data["tags"]["fcp"] == "1800.0" - - inp_call = call_args_list[2] + inp_call = call_args_list[1] inp_occurrence = inp_call.kwargs["occurrence"] assert inp_occurrence.fingerprint == ["d8b421cb6e5476121654d1383e80f4515a7f58b9"] assert inp_occurrence.issue_title == "INP score needs improvement" - assert inp_occurrence.subtitle == "/home has a INP score of 0.85" + assert inp_occurrence.subtitle == "/home has an INP score of 0.85" inp_event_data = inp_call.kwargs["event_data"] - assert inp_event_data["tags"]["web_vital"] == "inp" - assert inp_event_data["tags"]["score"] == "0.85" + assert inp_event_data["tags"]["inp_score"] == "0.85" assert inp_event_data["tags"]["inp"] == "200.0" + 
@pytest.mark.snuba + @patch("sentry.web_vitals.issue_platform_adapter.produce_occurrence_to_kafka") + def test_run_detection_groups_rendering_vitals(self, mock_produce_occurrence_to_kafka): + project = self.create_project() + + spans = [] + # web vital issue detection requires at least 10 samples per vital to create an issue + for _ in range(10): + spans.extend( + [ + self.create_span( + project=project, + extra_data={ + "sentry_tags": { + "op": "ui.webvitals.lcp", + "transaction": "/home", + }, + }, + start_ts=self.ten_mins_ago, + duration=100, + measurements={ + "score.ratio.lcp": {"value": 0.5}, + "lcp": {"value": 3500}, + }, + ), + self.create_span( + project=project, + extra_data={ + "description": "pageload", + "sentry_tags": { + "op": "pageload", + "transaction": "/home", + }, + }, + start_ts=self.ten_mins_ago, + duration=3000, + measurements={ + "score.ratio.fcp": {"value": 0.8}, + "score.ratio.ttfb": {"value": 0.6}, + "fcp": {"value": 1800}, + "ttfb": {"value": 2000}, + }, + ), + ] + ) + + self.store_spans(spans, is_eap=True) + + with ( + self.mock_seer_ack(), + self.mock_code_mapping(), + self.options( + { + "issue-detection.web-vitals-detection.enabled": True, + "issue-detection.web-vitals-detection.projects-allowlist": [project.id], + } + ), + self.feature("organizations:gen-ai-features"), + TaskRunner(), + ): + run_web_vitals_issue_detection() + + assert mock_produce_occurrence_to_kafka.call_count == 1 + call_args_list = mock_produce_occurrence_to_kafka.call_args_list + + # Common attributes + for call in call_args_list: + call_kwargs = call.kwargs + occurrence = call_kwargs["occurrence"] + event_data = call_kwargs["event_data"] + assert occurrence.type == WebVitalsGroup + assert occurrence.project_id == project.id + assert occurrence.evidence_data == {"transaction": "/home"} + assert len(occurrence.evidence_display) == 1 + assert occurrence.evidence_display[0].name == "Transaction" + assert occurrence.evidence_display[0].value == "/home" + assert occurrence.level == "info" + assert occurrence.culprit == "/home" + assert event_data["project_id"] == project.id + assert event_data["tags"]["transaction"] == "/home" + assert "trace" in event_data["contexts"] + + lcp_call = call_args_list[0] + lcp_occurrence = lcp_call.kwargs["occurrence"] + assert lcp_occurrence.fingerprint == ["d94185e6d794589212c74476702515734b703f86"] + assert lcp_occurrence.issue_title == "Rendering score needs improvement" + assert ( + lcp_occurrence.subtitle + == "/home has an LCP score of 0.5, an FCP score of 0.8 and a TTFB score of 0.6" + ) + lcp_event_data = lcp_call.kwargs["event_data"] + assert lcp_event_data["tags"]["lcp_score"] == "0.5" + assert lcp_event_data["tags"]["fcp_score"] == "0.8" + assert lcp_event_data["tags"]["ttfb_score"] == "0.6" + assert lcp_event_data["tags"]["lcp"] == "3500.0" + assert lcp_event_data["tags"]["fcp"] == "1800.0" + assert lcp_event_data["tags"]["ttfb"] == "2000.0" + @pytest.mark.snuba @patch("sentry.web_vitals.issue_platform_adapter.produce_occurrence_to_kafka") def test_run_detection_does_not_produce_occurrences_for_existing_issues( @@ -288,8 +376,8 @@ def test_run_detection_does_not_produce_occurrences_for_existing_issues( # Create an existing issue group so that the web vital issue detection does not produce a new occurrence group = self.create_group(project=project) - lcp_fingerprint = "b004a0dcf761775c1724e1a8d7cf0f1e0403978e" - hashed_fingerprint = hash_fingerprint([lcp_fingerprint]) + rendering_fingerprint = "d94185e6d794589212c74476702515734b703f86" + 
hashed_fingerprint = hash_fingerprint([rendering_fingerprint]) GroupHash.objects.create( project=project, group=group, @@ -444,3 +532,115 @@ def test_run_detection_selects_trace_closest_to_p75_web_vital_value( call_args_list[0].kwargs["event_data"]["contexts"]["trace"]["trace_id"] == p75_span["trace_id"] ) + + @pytest.mark.snuba + @patch("sentry.web_vitals.issue_platform_adapter.produce_occurrence_to_kafka") + def test_run_detection_selects_trace_from_worst_score(self, mock_produce_occurrence_to_kafka): + project = self.create_project() + + spans = [ + self.create_span( + project=project, + extra_data={ + "sentry_tags": { + "op": "ui.webvitals.lcp", + "transaction": "/home", + }, + }, + start_ts=self.ten_mins_ago, + duration=100, + measurements={ + "score.ratio.lcp": {"value": 0.1}, + "lcp": {"value": 100}, + }, + ) + for _ in range(7) + ] + + p75_span = self.create_span( + project=project, + extra_data={ + "sentry_tags": { + "op": "ui.webvitals.lcp", + "transaction": "/home", + }, + }, + start_ts=self.ten_mins_ago, + duration=100, + measurements={ + "score.ratio.lcp": {"value": 0.5}, + "lcp": {"value": 2000}, + }, + ) + spans.append(p75_span) + + spans.extend( + [ + self.create_span( + project=project, + extra_data={ + "sentry_tags": { + "op": "ui.webvitals.lcp", + "transaction": "/home", + }, + }, + start_ts=self.ten_mins_ago, + duration=100, + measurements={ + "score.ratio.lcp": {"value": 0.2}, + "lcp": {"value": 3500}, + }, + ) + for _ in range(2) + ] + ) + + for _ in range(10): + spans.extend( + [ + self.create_span( + project=project, + extra_data={ + "description": "pageload", + "sentry_tags": { + "op": "pageload", + "transaction": "/home", + }, + }, + start_ts=self.ten_mins_ago, + duration=3000, + measurements={ + "score.ratio.fcp": {"value": 0.8}, + "score.ratio.ttfb": {"value": 0.6}, + "fcp": {"value": 1800}, + "ttfb": {"value": 2000}, + }, + ), + ] + ) + + self.store_spans(spans, is_eap=True) + + with ( + self.mock_seer_ack(), + self.mock_code_mapping(), + self.options( + { + "issue-detection.web-vitals-detection.enabled": True, + "issue-detection.web-vitals-detection.projects-allowlist": [project.id], + } + ), + self.feature("organizations:gen-ai-features"), + TaskRunner(), + ): + run_web_vitals_issue_detection() + + assert mock_produce_occurrence_to_kafka.call_count == 1 + call_args_list = mock_produce_occurrence_to_kafka.call_args_list + assert call_args_list[0].kwargs["event_data"]["tags"]["lcp"] == "2000.0" + assert call_args_list[0].kwargs["event_data"]["tags"]["fcp"] == "1800.0" + assert call_args_list[0].kwargs["event_data"]["tags"]["ttfb"] == "2000.0" + assert ( + call_args_list[0].kwargs["event_data"]["contexts"]["trace"]["trace_id"] + == p75_span["trace_id"] + ) From 1645d12ac4f20baa2b93b0904c86d730c9330a73 Mon Sep 17 00:00:00 2001 From: Edward Gou Date: Wed, 12 Nov 2025 17:48:49 -0500 Subject: [PATCH 2/6] remove comment --- src/sentry/tasks/web_vitals_issue_detection.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/sentry/tasks/web_vitals_issue_detection.py b/src/sentry/tasks/web_vitals_issue_detection.py index 0c8fd36c14e6fa..6e617114a1a039 100644 --- a/src/sentry/tasks/web_vitals_issue_detection.py +++ b/src/sentry/tasks/web_vitals_issue_detection.py @@ -185,7 +185,6 @@ def get_highest_opportunity_page_vitals_for_project( continue seen_names.add(normalized_name) - # Collect all vital scores and values for vital in VITALS: score = row.get(f"performance_score(measurements.score.{vital})") p75_value = row.get(f"p75(measurements.{vital})") From 
b267c8695530cc61f38a1efa4869ab8e3fe4f4c6 Mon Sep 17 00:00:00 2001 From: Edward Gou Date: Wed, 12 Nov 2025 18:56:47 -0500 Subject: [PATCH 3/6] fix titles --- src/sentry/web_vitals/issue_platform_adapter.py | 2 +- tests/sentry/tasks/test_web_vitals_issue_detection.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sentry/web_vitals/issue_platform_adapter.py b/src/sentry/web_vitals/issue_platform_adapter.py index 655250937b57ff..fc420e62d39b57 100644 --- a/src/sentry/web_vitals/issue_platform_adapter.py +++ b/src/sentry/web_vitals/issue_platform_adapter.py @@ -68,7 +68,7 @@ def send_web_vitals_issue_to_platform(data: WebVitalIssueGroupData, trace_id: st # TODO: Add better titles and subtitles if data["vital_grouping"] == "rendering": - title = "Rendering Web Vital scores needs improvement" + title = "Render time Web Vital scores need improvement" else: title = f"{data['vital_grouping'].upper()} score needs improvement" subtitle_parts = [] diff --git a/tests/sentry/tasks/test_web_vitals_issue_detection.py b/tests/sentry/tasks/test_web_vitals_issue_detection.py index 08fc864ffec6b3..320f82d4627c8d 100644 --- a/tests/sentry/tasks/test_web_vitals_issue_detection.py +++ b/tests/sentry/tasks/test_web_vitals_issue_detection.py @@ -231,7 +231,7 @@ def test_run_detection_produces_occurrences(self, mock_produce_occurrence_to_kaf lcp_call = call_args_list[0] lcp_occurrence = lcp_call.kwargs["occurrence"] assert lcp_occurrence.fingerprint == ["d94185e6d794589212c74476702515734b703f86"] - assert lcp_occurrence.issue_title == "Rendering score needs improvement" + assert lcp_occurrence.issue_title == "Render time Web Vital scores need improvement" assert ( lcp_occurrence.subtitle == "/home has an LCP score of 0.5 and an FCP score of 0.8" ) From e8a53a18f421f8be03dbf2275e8b6be3b19e1c24 Mon Sep 17 00:00:00 2001 From: Edward Gou Date: Wed, 12 Nov 2025 21:32:59 -0500 Subject: [PATCH 4/6] fix test --- tests/sentry/tasks/test_web_vitals_issue_detection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/sentry/tasks/test_web_vitals_issue_detection.py b/tests/sentry/tasks/test_web_vitals_issue_detection.py index 320f82d4627c8d..0151760583d131 100644 --- a/tests/sentry/tasks/test_web_vitals_issue_detection.py +++ b/tests/sentry/tasks/test_web_vitals_issue_detection.py @@ -333,7 +333,7 @@ def test_run_detection_groups_rendering_vitals(self, mock_produce_occurrence_to_ lcp_call = call_args_list[0] lcp_occurrence = lcp_call.kwargs["occurrence"] assert lcp_occurrence.fingerprint == ["d94185e6d794589212c74476702515734b703f86"] - assert lcp_occurrence.issue_title == "Rendering score needs improvement" + assert lcp_occurrence.issue_title == "Render time Web Vital scores need improvement" assert ( lcp_occurrence.subtitle == "/home has an LCP score of 0.5, an FCP score of 0.8 and a TTFB score of 0.6" From 05a0d67c2134b1f9770c07170e4f4b45ec7b1245 Mon Sep 17 00:00:00 2001 From: Edward Gou Date: Thu, 13 Nov 2025 18:23:29 -0500 Subject: [PATCH 5/6] fix collision --- .../tasks/web_vitals_issue_detection.py | 16 +++++++++---- .../tasks/test_web_vitals_issue_detection.py | 24 +++++++++++++++---- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/src/sentry/tasks/web_vitals_issue_detection.py b/src/sentry/tasks/web_vitals_issue_detection.py index 6e617114a1a039..c98ff13be82d81 100644 --- a/src/sentry/tasks/web_vitals_issue_detection.py +++ b/src/sentry/tasks/web_vitals_issue_detection.py @@ -173,7 +173,9 @@ def get_highest_opportunity_page_vitals_for_project( 
sampling_mode="NORMAL", ) - web_vital_issue_groups: dict[WebVitalIssueDetectionGroupingType, WebVitalIssueGroupData] = {} + web_vital_issue_groups: dict[ + (WebVitalIssueDetectionGroupingType, str), WebVitalIssueGroupData + ] = {} seen_names = set() for row in result.get("data", []): name = row.get("transaction") @@ -197,8 +199,8 @@ def get_highest_opportunity_page_vitals_for_project( and enough_samples and p75_value is not None ): - if VITAL_GROUPING_MAP[vital] not in web_vital_issue_groups: - web_vital_issue_groups[VITAL_GROUPING_MAP[vital]] = { + if (VITAL_GROUPING_MAP[vital], name) not in web_vital_issue_groups: + web_vital_issue_groups[(VITAL_GROUPING_MAP[vital], name)] = { "transaction": name, "project": project, "vital_grouping": VITAL_GROUPING_MAP[vital], @@ -206,8 +208,12 @@ def get_highest_opportunity_page_vitals_for_project( "values": {vital: p75_value}, } else: - web_vital_issue_groups[VITAL_GROUPING_MAP[vital]]["scores"][vital] = score - web_vital_issue_groups[VITAL_GROUPING_MAP[vital]]["values"][vital] = p75_value + web_vital_issue_groups[(VITAL_GROUPING_MAP[vital], name)]["scores"][ + vital + ] = score + web_vital_issue_groups[(VITAL_GROUPING_MAP[vital], name)]["values"][ + vital + ] = p75_value return list(web_vital_issue_groups.values()) diff --git a/tests/sentry/tasks/test_web_vitals_issue_detection.py b/tests/sentry/tasks/test_web_vitals_issue_detection.py index 0151760583d131..b09392eb78d3ec 100644 --- a/tests/sentry/tasks/test_web_vitals_issue_detection.py +++ b/tests/sentry/tasks/test_web_vitals_issue_detection.py @@ -291,6 +291,21 @@ def test_run_detection_groups_rendering_vitals(self, mock_produce_occurrence_to_ "ttfb": {"value": 2000}, }, ), + self.create_span( + project=project, + extra_data={ + "sentry_tags": { + "op": "ui.webvitals.lcp", + "transaction": "/settings", + }, + }, + start_ts=self.ten_mins_ago, + duration=100, + measurements={ + "score.ratio.lcp": {"value": 0.5}, + "lcp": {"value": 3500}, + }, + ), ] ) @@ -310,7 +325,7 @@ def test_run_detection_groups_rendering_vitals(self, mock_produce_occurrence_to_ ): run_web_vitals_issue_detection() - assert mock_produce_occurrence_to_kafka.call_count == 1 + assert mock_produce_occurrence_to_kafka.call_count == 2 call_args_list = mock_produce_occurrence_to_kafka.call_args_list # Common attributes @@ -320,16 +335,15 @@ def test_run_detection_groups_rendering_vitals(self, mock_produce_occurrence_to_ event_data = call_kwargs["event_data"] assert occurrence.type == WebVitalsGroup assert occurrence.project_id == project.id - assert occurrence.evidence_data == {"transaction": "/home"} assert len(occurrence.evidence_display) == 1 assert occurrence.evidence_display[0].name == "Transaction" - assert occurrence.evidence_display[0].value == "/home" assert occurrence.level == "info" - assert occurrence.culprit == "/home" assert event_data["project_id"] == project.id - assert event_data["tags"]["transaction"] == "/home" assert "trace" in event_data["contexts"] + assert call_args_list[0].kwargs["event_data"]["tags"]["transaction"] == "/home" + assert call_args_list[1].kwargs["event_data"]["tags"]["transaction"] == "/settings" + lcp_call = call_args_list[0] lcp_occurrence = lcp_call.kwargs["occurrence"] assert lcp_occurrence.fingerprint == ["d94185e6d794589212c74476702515734b703f86"] From 631310ae3be763ea00d02eb94f12948398274397 Mon Sep 17 00:00:00 2001 From: Edward Gou Date: Thu, 13 Nov 2025 19:10:05 -0500 Subject: [PATCH 6/6] type --- src/sentry/tasks/web_vitals_issue_detection.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/src/sentry/tasks/web_vitals_issue_detection.py b/src/sentry/tasks/web_vitals_issue_detection.py index c98ff13be82d81..e67fdad5856a5a 100644 --- a/src/sentry/tasks/web_vitals_issue_detection.py +++ b/src/sentry/tasks/web_vitals_issue_detection.py @@ -174,7 +174,7 @@ def get_highest_opportunity_page_vitals_for_project( ) web_vital_issue_groups: dict[ - (WebVitalIssueDetectionGroupingType, str), WebVitalIssueGroupData + tuple[WebVitalIssueDetectionGroupingType, str], WebVitalIssueGroupData ] = {} seen_names = set() for row in result.get("data", []):
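
Note on the consolidation this series lands: issue fingerprints are now keyed by (vital grouping, transaction) instead of by individual vital, so LCP, FCP, and TTFB regressions for the same page fold into a single "rendering" issue while CLS and INP keep their own groups. A minimal sketch of that derivation, mirroring create_fingerprint and VITAL_GROUPING_MAP from the patches above (the transaction name "/home" is illustrative only, not a value asserted anywhere in the tests):

    import hashlib

    # Mirrors VITAL_GROUPING_MAP from web_vitals_issue_detection.py:
    # the three render-time vitals share one grouping key.
    VITAL_GROUPING_MAP = {
        "lcp": "rendering",
        "fcp": "rendering",
        "ttfb": "rendering",
        "cls": "cls",
        "inp": "inp",
    }

    # Mirrors create_fingerprint from issue_platform_adapter.py:
    # one SHA-1 per (grouping, transaction) pair.
    def create_fingerprint(vital_grouping: str, transaction: str) -> str:
        prehashed = f"insights-web-vitals-{vital_grouping}-{transaction}"
        return hashlib.sha1(prehashed.encode()).hexdigest()

    # lcp, fcp, and ttfb collapse to the same fingerprint for a given
    # transaction, so they land in one unresolved group; cls and inp do not.
    rendering_fps = {
        create_fingerprint(VITAL_GROUPING_MAP[v], "/home")
        for v in ("lcp", "fcp", "ttfb")
    }
    assert len(rendering_fps) == 1
    assert create_fingerprint("cls", "/home") not in rendering_fps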