8 changes: 7 additions & 1 deletion src/sentry/event_manager.py
@@ -146,7 +146,10 @@
from sentry.utils.safe import get_path, safe_execute, setdefault_path, trim
from sentry.utils.sdk import set_span_attribute
from sentry.utils.tag_normalization import normalized_sdk_tag_from_event
from sentry.workflow_engine.processors.detector import associate_new_group_with_detector
from sentry.workflow_engine.processors.detector import (
associate_new_group_with_detector,
ensure_association_with_detector,
)

from .utils.event_tracker import TransactionStageStatus, track_sampled_event

@@ -1434,6 +1437,9 @@ def handle_existing_grouphash(
release=job["release"],
)

# Ensure the group has a DetectorGroup association for existing groups
ensure_association_with_detector(group)

return GroupInfo(group=group, is_new=False, is_regression=is_regression)


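On this path no detector_id is passed, so the new helper resolves the project's error detector itself. A minimal sketch (not part of the diff) of what that fallback amounts to, using the same calls the helper makes further down; the import path is assumed from how these models are used elsewhere in the PR:

from sentry.workflow_engine.models import Detector, DetectorGroup

# Hedged illustration only; this mirrors the ErrorGroupType branch inside
# ensure_association_with_detector and adds no new behavior. `group` stands for
# the existing Group resolved above in handle_existing_grouphash.
error_detector = Detector.get_error_detector_for_project(group.project.id)
DetectorGroup.objects.get_or_create(
    group_id=group.id,
    defaults={"detector_id": error_detector.id},
)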
10 changes: 9 additions & 1 deletion src/sentry/issues/ingest.py
@@ -36,7 +36,10 @@
from sentry.utils.strings import truncatechars
from sentry.utils.tag_normalization import normalized_sdk_tag_from_event
from sentry.workflow_engine.models import IncidentGroupOpenPeriod
from sentry.workflow_engine.processors.detector import associate_new_group_with_detector
from sentry.workflow_engine.processors.detector import (
associate_new_group_with_detector,
ensure_association_with_detector,
)

issue_rate_limiter = RedisSlidingWindowRateLimiter(
**settings.SENTRY_ISSUE_PLATFORM_RATE_LIMITER_OPTIONS
@@ -320,6 +323,11 @@ def save_issue_from_occurrence(
is_regression = _process_existing_aggregate(group, group_event, issue_kwargs, release)
group_info = GroupInfo(group=group, is_new=False, is_regression=is_regression)

detector_id = None
if occurrence.evidence_data:
detector_id = occurrence.evidence_data.get("detector_id")
ensure_association_with_detector(group, detector_id)

# if it's a regression and the priority changed, we should update the existing GroupOpenPeriodActivity
# row if applicable. Otherwise, we should record a new row if applicable.
if (
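For context on the evidence_data lookup above, a minimal sketch (not part of the diff); the payload shape is hypothetical, and only the "detector_id" key is actually read by save_issue_from_occurrence:

# Hypothetical evidence_data carried by an IssueOccurrence for a detector-created
# issue; every field except "detector_id" is a placeholder for illustration.
evidence_data = {"detector_id": 42, "value": 105.0, "alert_threshold": 100.0}

detector_id = None
if evidence_data:
    detector_id = evidence_data.get("detector_id")  # 42, or None if the key is absent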
7 changes: 7 additions & 0 deletions src/sentry/options/defaults.py
@@ -3177,6 +3177,13 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)

register(
"workflow_engine.ensure_detector_association",
type=Bool,
default=True,
flags=FLAG_AUTOMATOR_MODIFIABLE,
)

register(
"grouping.grouphash_metadata.ingestion_writes_enabled",
type=Bool,
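A minimal sketch (not part of the diff) of how this kill switch is consumed and toggled; options.get is exactly the call the new helper makes, while options.set is shown only to illustrate a manual override, since FLAG_AUTOMATOR_MODIFIABLE options are normally driven through the options automator:

from sentry import options

# Read the gate used by ensure_association_with_detector (defaults to True
# per the registration above).
if options.get("workflow_engine.ensure_detector_association"):
    pass  # association backfill is active

# Illustrative manual toggle; in practice this flag is managed by the
# options automator.
options.set("workflow_engine.ensure_detector_association", False)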
66 changes: 66 additions & 0 deletions src/sentry/workflow_engine/processors/detector.py
@@ -413,3 +413,69 @@ def associate_new_group_with_detector(group: Group, detector_id: int | None = No
tags={"group_type": group.type, "result": "success"},
)
return True


def ensure_association_with_detector(group: Group, detector_id: int | None = None) -> bool:
"""
Ensure a Group has a DetectorGroup association, creating it if missing.
Backdates date_added to group.first_seen for gradual backfill of existing groups.
"""
if not options.get("workflow_engine.ensure_detector_association"):
return False

# Common case: it exists, we verify and move on.
if DetectorGroup.objects.filter(group_id=group.id).exists():
return True

# Association is missing, determine the detector_id if not provided
if detector_id is None:
# For error Groups, we know there is a Detector and we can find it by project.
if group.type == ErrorGroupType.type_id:
try:
detector_id = Detector.get_error_detector_for_project(group.project.id).id
except Detector.DoesNotExist:
logger.warning(
"ensure_association_with_detector_detector_not_found",
extra={
"group_id": group.id,
"group_type": group.type,
"project_id": group.project.id,
},
)
return False
else:
return False
else:
# Check if the explicitly provided detector exists. If not, create DetectorGroup
# with null detector_id to make it clear that we were associated with a detector
# that no longer exists.
if not Detector.objects.filter(id=detector_id).exists():
detector_group, created = DetectorGroup.objects.get_or_create(
group_id=group.id,
defaults={"detector_id": None},
)
if created:
# Backdate the date_added to match the group's first_seen
DetectorGroup.objects.filter(id=detector_group.id).update(
date_added=group.first_seen
)
metrics.incr(
"workflow_engine.ensure_association_with_detector.created",
tags={"group_type": group.type},
)
return True

detector_group, created = DetectorGroup.objects.get_or_create(
group_id=group.id,
defaults={"detector_id": detector_id},
)

if created:
# Backdate the date_added to match the group's first_seen
DetectorGroup.objects.filter(id=detector_group.id).update(date_added=group.first_seen)
metrics.incr(
"workflow_engine.ensure_association_with_detector.created",
tags={"group_type": group.type},
)

return True
83 changes: 83 additions & 0 deletions tests/sentry/workflow_engine/processors/test_detector.py
@@ -28,6 +28,7 @@
from sentry.workflow_engine.models.detector_group import DetectorGroup
from sentry.workflow_engine.processors.detector import (
associate_new_group_with_detector,
ensure_association_with_detector,
get_detector_by_event,
get_detectors_by_groupevents_bulk,
process_detectors,
@@ -1070,3 +1071,85 @@ def test_deleted_detector_creates_null_association(self) -> None:
detector_group = DetectorGroup.objects.get(group_id=group.id)
assert detector_group.detector_id is None
assert detector_group.group_id == group.id


class TestEnsureAssociationWithDetector(TestCase):
def setUp(self) -> None:
super().setUp()
self.metric_detector = self.create_detector(project=self.project, type="metric_issue")
self.error_detector = self.create_detector(project=self.project, type="error")
self.options_context = self.options({"workflow_engine.ensure_detector_association": True})
self.options_context.__enter__()

def tearDown(self) -> None:
self.options_context.__exit__(None, None, None)
super().tearDown()

def test_feature_disabled_returns_false(self) -> None:
group = self.create_group(project=self.project, type=ErrorGroupType.type_id)

with self.options({"workflow_engine.ensure_detector_association": False}):
assert not ensure_association_with_detector(group)
assert not DetectorGroup.objects.filter(group_id=group.id).exists()

def test_already_exists_returns_true(self) -> None:
group = self.create_group(project=self.project, type=ErrorGroupType.type_id)
DetectorGroup.objects.create(detector=self.error_detector, group=group)

assert ensure_association_with_detector(group)
assert DetectorGroup.objects.filter(group_id=group.id).count() == 1

def test_error_group_creates_association(self) -> None:
group = self.create_group(project=self.project, type=ErrorGroupType.type_id)

assert ensure_association_with_detector(group)
detector_group = DetectorGroup.objects.get(group_id=group.id)
assert detector_group.detector_id == self.error_detector.id
assert detector_group.group_id == group.id

def test_metric_group_with_detector_id(self) -> None:
group = self.create_group(project=self.project, type=MetricIssue.type_id)

assert ensure_association_with_detector(group, self.metric_detector.id)
detector_group = DetectorGroup.objects.get(group_id=group.id)
assert detector_group.detector_id == self.metric_detector.id
assert detector_group.group_id == group.id

def test_feedback_group_returns_false(self) -> None:
group = self.create_group(project=self.project, type=FeedbackGroup.type_id)

assert not ensure_association_with_detector(group)
assert not DetectorGroup.objects.filter(group_id=group.id).exists()

def test_deleted_detector_creates_null_association(self) -> None:
group = self.create_group(project=self.project, type=MetricIssue.type_id)
deleted_detector_id = self.metric_detector.id

self.metric_detector.delete()

assert ensure_association_with_detector(group, deleted_detector_id)

detector_group = DetectorGroup.objects.get(group_id=group.id)
assert detector_group.detector_id is None
assert detector_group.group_id == group.id

def test_backdates_date_added_to_group_first_seen(self) -> None:
group = self.create_group(project=self.project, type=ErrorGroupType.type_id)

assert ensure_association_with_detector(group)
detector_group = DetectorGroup.objects.get(group_id=group.id)
assert detector_group.date_added == group.first_seen

def test_race_condition_handled(self) -> None:
group = self.create_group(project=self.project, type=ErrorGroupType.type_id)

assert ensure_association_with_detector(group)
assert ensure_association_with_detector(group)
assert DetectorGroup.objects.filter(group_id=group.id).count() == 1

def test_detector_not_found(self) -> None:
group = self.create_group(project=self.project, type=ErrorGroupType.type_id)
self.error_detector.delete()

assert not ensure_association_with_detector(group)
assert not DetectorGroup.objects.filter(group_id=group.id).exists()