src/sentry/workflow_engine/handlers/detector/base.py (1 addition, 0 deletions)
@@ -34,6 +34,7 @@ class EvidenceData(Generic[DataPacketEvaluationType]):
     detector_id: int
     data_packet_source_id: int
     conditions: list[dict[str, Any]]
+    data_sources: list[dict[str, Any]] = dataclasses.field(default_factory=list, kw_only=True)
 
 
 @dataclasses.dataclass(frozen=True, kw_only=True)
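Note on the new EvidenceData.data_sources field: declaring it with default_factory=list and kw_only=True keeps existing call sites working unchanged, gives each instance its own list (no shared mutable default), and forces new callers to pass it by keyword. A standalone sketch of that pattern, not Sentry code; the class and values are illustrative:

import dataclasses
from typing import Any

@dataclasses.dataclass(frozen=True)
class Example:
    detector_id: int
    # kw_only + default_factory: optional for existing callers, fresh list per instance
    data_sources: list[dict[str, Any]] = dataclasses.field(default_factory=list, kw_only=True)

a = Example(1)                              # data_sources defaults to []
b = Example(2, data_sources=[{"id": "5"}])  # must be passed by keyword
assert a.data_sources == [] and a.data_sources is not b.data_sources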
src/sentry/workflow_engine/handlers/detector/stateful.py (29 additions, 2 deletions)
@@ -9,6 +9,7 @@
 from django.db.models import Q
 from sentry_redis_tools.retrying_cluster import RetryingRedisCluster
 
+from sentry.api.serializers import serialize
 from sentry.issues.issue_occurrence import IssueOccurrence
 from sentry.issues.status_change_message import StatusChangeMessage
 from sentry.models.group import GroupStatus
@@ -21,7 +22,7 @@
     EventData,
     GroupedDetectorEvaluationResult,
 )
-from sentry.workflow_engine.models import DataPacket, Detector, DetectorState
+from sentry.workflow_engine.models import DataPacket, DataSource, Detector, DetectorState
 from sentry.workflow_engine.processors.data_condition_group import (
     ProcessedDataConditionGroup,
     process_data_condition_group,
@@ -353,6 +354,29 @@ def build_detector_evidence_data(
"""
return {}

def _build_evidence_data_sources(
self, data_packet: DataPacket[DataPacketType]
) -> list[dict[str, Any]]:
try:
data_sources = list(
DataSource.objects.filter(detectors=self.detector, source_id=data_packet.source_id)
)
if not data_sources:
logger.warning(
"Matching data source not found for detector while generating occurrence evidence data",
extra={
"detector_id": self.detector.id,
"data_packet_source_id": data_packet.source_id,
},
)
return []
return serialize(data_sources)
except Exception:
logger.exception(
"Failed to serialize data source definition when building workflow engine evidence data"
)
return []

     def _build_workflow_engine_evidence_data(
         self,
         evaluation_result: ProcessedDataConditionGroup,
@@ -363,15 +387,18 @@ def _build_workflow_engine_evidence_data(
         Build the workflow engine specific evidence data.
         This is data that is common to all detectors.
         """
-        return {
+        base: dict[str, Any] = {
             "detector_id": self.detector.id,
             "value": evaluation_value,
             "data_packet_source_id": str(data_packet.source_id),
             "conditions": [
                 result.condition.get_snapshot() for result in evaluation_result.condition_results
             ],
+            "data_sources": self._build_evidence_data_sources(data_packet),
         }
+
+        return base
 
     def evaluate_impl(
         self, data_packet: DataPacket[DataPacketType]
     ) -> GroupedDetectorEvaluationResult:
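The shape of _build_evidence_data_sources is fail-open enrichment: a missing DataSource row logs a warning, a serialization error logs an exception, and both degrade to an empty list so evidence building never breaks detector evaluation. A minimal sketch of that pattern with stand-in names (fetch_rows and to_dicts are hypothetical, not Sentry APIs):

import logging
from typing import Any, Callable

logger = logging.getLogger(__name__)

def safe_enrich(
    fetch_rows: Callable[[], list[Any]],
    to_dicts: Callable[[list[Any]], list[dict[str, Any]]],
) -> list[dict[str, Any]]:
    try:
        rows = fetch_rows()
        if not rows:
            # missing data is logged but non-fatal
            logger.warning("no matching rows for evidence enrichment")
            return []
        return to_dicts(rows)
    except Exception:
        # serialization failures degrade to an empty list instead of raising
        logger.exception("evidence enrichment failed; continuing without it")
        return []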
tests/sentry/incidents/test_metric_issue_detector_handler.py (24 additions, 0 deletions)
@@ -24,6 +24,7 @@ def generate_evidence_data(
         detector_trigger: DataCondition,
         extra_trigger: DataCondition | None = None,
     ):
+        self.query_subscription.refresh_from_db()
 
         conditions = [
             {
@@ -50,6 +51,29 @@
"alert_id": self.alert_rule.id,
"data_packet_source_id": str(self.query_subscription.id),
"conditions": conditions,
"data_sources": [
{
"id": str(self.data_source.id),
"organizationId": str(self.organization.id),
"type": self.data_source.type,
"sourceId": str(self.query_subscription.id),
"queryObj": {
"id": str(self.query_subscription.id),
"status": self.query_subscription.status,
"subscription": self.query_subscription.subscription_id,
"snubaQuery": {
"id": str(self.snuba_query.id),
"dataset": self.snuba_query.dataset,
"query": self.snuba_query.query,
"aggregate": self.snuba_query.aggregate,
"timeWindow": self.snuba_query.time_window,
"environment": self.environment.name,
"eventTypes": ["error"],
"extrapolationMode": "unknown",
},
},
}
],
}

return evidence_data
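The expected payload above follows Sentry's API serializer conventions as exercised by this test: camelCase keys and ids rendered as strings. Comparing the whole nested dict is brittle if serializer output later grows new keys; a hedged alternative (hypothetical helper, not part of this PR) would assert only the stable identifiers:

def assert_data_source_evidence(evidence: dict, data_source, subscription) -> None:
    # exactly one serialized data source expected for this detector
    (entry,) = evidence["data_sources"]
    assert entry["id"] == str(data_source.id)
    assert entry["sourceId"] == str(subscription.id)
    assert entry["queryObj"]["id"] == str(subscription.id)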
(additional test file, name not shown: 2 additions, 0 deletions)
@@ -101,6 +101,7 @@ def create_models(self):
"condition_result": DetectorPriorityLevel.OK.value,
},
],
data_sources=[],
alert_id=self.alert_rule.id,
)

@@ -126,6 +127,7 @@ def create_models(self):
"condition_result": DetectorPriorityLevel.HIGH.value,
},
],
data_sources=[],
alert_id=self.alert_rule.id,
)
self.group, self.event, self.group_event = self.create_group_event(