Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 39 additions & 12 deletions src/sentry/tasks/web_vitals_issue_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,11 @@
from sentry.taskworker.namespaces import issues_tasks
from sentry.web_vitals.issue_platform_adapter import send_web_vitals_issue_to_platform
from sentry.web_vitals.query import get_trace_by_web_vital_measurement
from sentry.web_vitals.types import WebVitalIssueDetectionType, WebVitalIssueGroupData
from sentry.web_vitals.types import (
WebVitalIssueDetectionGroupingType,
WebVitalIssueDetectionType,
WebVitalIssueGroupData,
)

logger = logging.getLogger("sentry.tasks.web_vitals_issue_detection")

Expand All @@ -27,6 +31,13 @@
SCORE_THRESHOLD = 0.9  # Scores below this threshold will create web vital issues
# Minimum sample count required before a vital is considered for detection
# (used via the `enough_samples` check in the project query).
DEFAULT_SAMPLES_COUNT_THRESHOLD = 10
# All individual web vitals evaluated per transaction.
VITALS: list[WebVitalIssueDetectionType] = ["lcp", "fcp", "cls", "ttfb", "inp"]
# Maps each vital to the bucket used for issue grouping/fingerprinting:
# render-time vitals (LCP/FCP/TTFB) share one "rendering" group, while
# CLS and INP each form their own group.
VITAL_GROUPING_MAP: dict[WebVitalIssueDetectionType, WebVitalIssueDetectionGroupingType] = {
    "lcp": "rendering",
    "fcp": "rendering",
    "ttfb": "rendering",
    "cls": "cls",
    "inp": "inp",
}


def get_enabled_project_ids() -> list[int]:
Expand Down Expand Up @@ -85,11 +96,18 @@ def detect_web_vitals_issues_for_project(project_id: int) -> None:
project_id, limit=TRANSACTIONS_PER_PROJECT_LIMIT
)
for web_vital_issue_group in web_vital_issue_groups:
p75_vital_value = web_vital_issue_group["value"]
scores = web_vital_issue_group["scores"]
values = web_vital_issue_group["values"]

# We can only use a single trace sample for an issue event
# Use the p75 of the worst performing vital
vital = sorted(scores.items(), key=lambda item: item[1])[0][0]
p75_vital_value = values[vital]

trace = get_trace_by_web_vital_measurement(
web_vital_issue_group["transaction"],
project_id,
web_vital_issue_group["vital"],
vital,
p75_vital_value,
start_time_delta=DEFAULT_START_TIME_DELTA,
)
Expand Down Expand Up @@ -167,7 +185,9 @@ def get_highest_opportunity_page_vitals_for_project(
sampling_mode="NORMAL",
)

web_vital_issue_groups: list[WebVitalIssueGroupData] = []
web_vital_issue_groups: dict[
tuple[WebVitalIssueDetectionGroupingType, str], WebVitalIssueGroupData
] = {}
seen_names = set()
for row in result.get("data", []):
name = row.get("transaction")
Expand All @@ -178,6 +198,7 @@ def get_highest_opportunity_page_vitals_for_project(
if normalized_name in seen_names:
continue
seen_names.add(normalized_name)

for vital in VITALS:
score = row.get(f"performance_score(measurements.score.{vital})")
p75_value = row.get(f"p75(measurements.{vital})")
Expand All @@ -190,17 +211,23 @@ def get_highest_opportunity_page_vitals_for_project(
and enough_samples
and p75_value is not None
):
web_vital_issue_groups.append(
{
if (VITAL_GROUPING_MAP[vital], name) not in web_vital_issue_groups:
web_vital_issue_groups[(VITAL_GROUPING_MAP[vital], name)] = {
"transaction": name,
"vital": vital,
"score": score,
"project": project,
"value": p75_value,
"vital_grouping": VITAL_GROUPING_MAP[vital],
"scores": {vital: score},
"values": {vital: p75_value},
}
)

return web_vital_issue_groups
else:
web_vital_issue_groups[(VITAL_GROUPING_MAP[vital], name)]["scores"][
vital
] = score
web_vital_issue_groups[(VITAL_GROUPING_MAP[vital], name)]["values"][
vital
] = p75_value

return list(web_vital_issue_groups.values())


def check_seer_setup_for_project(project: Project) -> bool:
Expand Down
39 changes: 27 additions & 12 deletions src/sentry/web_vitals/issue_platform_adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@
from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence
from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka
from sentry.models.group import Group, GroupStatus
from sentry.web_vitals.types import WebVitalIssueDetectionType, WebVitalIssueGroupData
from sentry.web_vitals.types import WebVitalIssueDetectionGroupingType, WebVitalIssueGroupData


def create_fingerprint(
    vital_grouping: "WebVitalIssueDetectionGroupingType", transaction: str
) -> str:
    """Build a stable fingerprint for a web vitals issue group.

    The fingerprint is derived from the grouping bucket (e.g. "rendering",
    "cls", "inp") and the transaction name, so repeated detections of the
    same (grouping, transaction) pair resolve to the same issue group.

    Args:
        vital_grouping: Grouping bucket the vital belongs to.
        transaction: Normalized transaction (page) name.

    Returns:
        Hex digest usable as an issue-platform fingerprint.
    """
    prehashed_fingerprint = f"insights-web-vitals-{vital_grouping}-{transaction}"
    # SHA-1 is used only for stable grouping, not for any security purpose.
    return hashlib.sha1(prehashed_fingerprint.encode()).hexdigest()

Expand All @@ -24,15 +24,19 @@ def send_web_vitals_issue_to_platform(data: WebVitalIssueGroupData, trace_id: st
event_id = uuid4().hex
now = datetime.now(UTC)
transaction = data["transaction"]
vital = data["vital"]
scores = data["scores"]
values = data["values"]

tags = {
"transaction": data["transaction"],
"web_vital": vital,
"score": f"{data['score']:.2g}",
vital: f"{data['value']}",
}

# These should already match, but use the intersection to be safe
vitals = scores.keys() & values.keys()
for vital in vitals:
tags[f"{vital}_score"] = f"{scores[vital]:.2g}"
tags[vital] = f"{values[vital]}"

event_data = {
"event_id": event_id,
"project_id": data["project"].id,
Expand Down Expand Up @@ -63,10 +67,21 @@ def send_web_vitals_issue_to_platform(data: WebVitalIssueGroupData, trace_id: st
]

# TODO: Add better titles and subtitles
title = f"{data['vital'].upper()} score needs improvement"
subtitle = f"{transaction} has a {data['vital'].upper()} score of {data['score']:.2g}"

fingerprint = create_fingerprint(data["vital"], transaction)
if data["vital_grouping"] == "rendering":
title = "Render time Web Vital scores need improvement"
else:
title = f"{data['vital_grouping'].upper()} score needs improvement"
subtitle_parts = []
for vital in data["scores"]:
a_or_an = "an" if vital in ("lcp", "fcp", "inp") else "a"
subtitle_parts.append(f"{a_or_an} {vital.upper()} score of {data['scores'][vital]:.2g}")
if len(subtitle_parts) > 1:
scores_text = ", ".join(subtitle_parts[:-1]) + " and " + subtitle_parts[-1]
else:
scores_text = subtitle_parts[0]
subtitle = f"{transaction} has {scores_text}"

fingerprint = create_fingerprint(data["vital_grouping"], transaction)

occurence = IssueOccurrence(
id=uuid4().hex,
Expand All @@ -90,7 +105,7 @@ def send_web_vitals_issue_to_platform(data: WebVitalIssueGroupData, trace_id: st


def check_unresolved_web_vitals_issue_exists(data: WebVitalIssueGroupData) -> bool:
fingerprint = create_fingerprint(data["vital"], data["transaction"])
fingerprint = create_fingerprint(data["vital_grouping"], data["transaction"])
fingerprint_hash = hash_fingerprint([fingerprint])[0]

return Group.objects.filter(
Expand Down
7 changes: 4 additions & 3 deletions src/sentry/web_vitals/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@
from sentry.models.project import Project

# Individual web vitals that can trigger issue detection.
WebVitalIssueDetectionType = Literal["lcp", "fcp", "cls", "ttfb", "inp"]
# Buckets used for issue grouping: LCP/FCP/TTFB are grouped together as
# "rendering"; CLS and INP each group on their own.
WebVitalIssueDetectionGroupingType = Literal["rendering", "cls", "inp"]


class WebVitalIssueGroupData(TypedDict):
    """Data for one detected web-vitals issue group on a single page.

    A group covers one (vital_grouping, transaction) pair and carries the
    per-vital p75 scores/values that contributed to it.
    """

    # Normalized transaction (page) name the group was detected on.
    transaction: str
    # Project the detection ran against.
    project: Project
    # Grouping bucket ("rendering", "cls" or "inp") used for fingerprinting.
    vital_grouping: WebVitalIssueDetectionGroupingType
    # Per-vital p75 performance scores (0-1) that fell below the threshold.
    scores: dict[WebVitalIssueDetectionType, float]
    # Per-vital p75 measurement values matching the keys of `scores`.
    values: dict[WebVitalIssueDetectionType, float]
Loading
Loading