From 8248b9dcd9340b7d1d87004d114307e1f01f3a7c Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Thu, 13 Nov 2025 14:25:48 -0800
Subject: [PATCH 1/3] feat(ACI): Send updated data to Seer on all snuba query changes

---
 src/sentry/incidents/metric_issue_detector.py | 27 ++++++------
 .../store_data_workflow_engine.py             |  2 +-
 .../endpoints/validators/test_validators.py   | 42 +++++++++++++++++++
 3 files changed, 58 insertions(+), 13 deletions(-)

diff --git a/src/sentry/incidents/metric_issue_detector.py b/src/sentry/incidents/metric_issue_detector.py
index 2fd1b7082469ed..e62f474304cd37 100644
--- a/src/sentry/incidents/metric_issue_detector.py
+++ b/src/sentry/incidents/metric_issue_detector.py
@@ -241,7 +241,9 @@ def is_editing_transaction_dataset(
                 return True
         return False

-    def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSourceType):
+    def update_data_source(
+        self, instance: Detector, data_source: SnubaQueryDataSourceType, seer_updated: bool = False
+    ):
         try:
             source_instance = DataSource.objects.get(detector=instance)
         except DataSource.DoesNotExist:
@@ -276,17 +278,15 @@ def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSour

         # Handle a dynamic detector's snuba query changing
         if instance.config.get("detection_type") == AlertRuleDetectionType.DYNAMIC:
-            if snuba_query.query != data_source.get(
-                "query"
-            ) or snuba_query.aggregate != data_source.get("aggregate"):
-                try:
-                    validated_data_source = cast(dict[str, Any], data_source)
+            try:
+                validated_data_source = cast(dict[str, Any], data_source)
+                if not seer_updated:
                     update_detector_data(instance, validated_data_source)
-                except Exception:
-                    # don't update the snuba query if we failed to send data to Seer
-                    raise serializers.ValidationError(
-                        "Failed to send data to Seer, cannot update detector"
-                    )
+            except Exception:
+                # don't update the snuba query if we failed to send data to Seer
+                raise serializers.ValidationError(
+                    "Failed to send data to Seer, cannot update detector"
+                )

         update_snuba_query(
             snuba_query=snuba_query,
@@ -304,6 +304,7 @@ def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSour
         )

     def update(self, instance: Detector, validated_data: dict[str, Any]):
+        seer_updated = False
         # Handle anomaly detection changes first in case we need to exit before saving
         if (
             not instance.config.get("detection_type") == AlertRuleDetectionType.DYNAMIC
@@ -313,6 +314,8 @@ def update(self, instance: Detector, validated_data: dict[str, Any]):
             # Detector has been changed to become a dynamic detector
             try:
                 update_detector_data(instance, validated_data)
+                seer_updated = True
+
             except Exception:
                 # Don't update if we failed to send data to Seer
                 raise serializers.ValidationError(
@@ -339,7 +342,7 @@ def update(self, instance: Detector, validated_data: dict[str, Any]):
             data_source = validated_data.pop("data_sources")[0]

             if data_source is not None:
-                self.update_data_source(instance, data_source)
+                self.update_data_source(instance, data_source, seer_updated)

         instance.save()

diff --git a/src/sentry/seer/anomaly_detection/store_data_workflow_engine.py b/src/sentry/seer/anomaly_detection/store_data_workflow_engine.py
index eb5d08d4c79462..0e91727c7ee8b5 100644
--- a/src/sentry/seer/anomaly_detection/store_data_workflow_engine.py
+++ b/src/sentry/seer/anomaly_detection/store_data_workflow_engine.py
@@ -100,7 +100,7 @@ def update_detector_data(
     updated_data_source_data = updated_fields.get("data_sources")
     if updated_data_source_data:
         data_source_data = updated_data_source_data[0]
-        event_types = data_source_data.get("eventTypes")
+        event_types = data_source_data.get("event_types")

         for k, v in data_source_data.items():
             if k == "dataset":

diff --git a/tests/sentry/incidents/endpoints/validators/test_validators.py b/tests/sentry/incidents/endpoints/validators/test_validators.py
index f323594d7ab929..5e613c5492b138 100644
--- a/tests/sentry/incidents/endpoints/validators/test_validators.py
+++ b/tests/sentry/incidents/endpoints/validators/test_validators.py
@@ -814,6 +814,48 @@ def test_update_anomaly_detection_snuba_query(
             event=audit_log.get_event_id("DETECTOR_EDIT"),
             data=dynamic_detector.get_audit_log_data(),
         )
+        mock_audit.reset_mock()
+        mock_seer_request.reset_mock()
+
+        # Change the dataset, queryType, and aggregate to perf stuff
+        update_data = {
+            **self.valid_anomaly_detection_data,
+            "dataSources": [
+                {
+                    "queryType": SnubaQuery.Type.PERFORMANCE.value,
+                    "dataset": Dataset.EventsAnalyticsPlatform.value,
+                    "query": "updated_query",
+                    "aggregate": "count()",
+                    "timeWindow": 3600,
+                    "environment": self.environment.name,
+                    "eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
+                }
+            ],
+        }
+        update_validator = MetricIssueDetectorValidator(
+            instance=detector, data=update_data, context=self.context, partial=True
+        )
+        assert update_validator.is_valid(), update_validator.errors
+        dynamic_detector = update_validator.save()
+
+        assert mock_seer_request.call_count == 1
+
+        # Verify snuba query changes
+        data_source = DataSource.objects.get(detector=dynamic_detector)
+        query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
+        snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
+        assert snuba_query.aggregate == "count()"
+        assert snuba_query.type == SnubaQuery.Type.PERFORMANCE.value
+        assert snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
+        assert snuba_query.query == "updated_query"
+
+        mock_audit.assert_called_once_with(
+            request=self.context["request"],
+            organization=self.project.organization,
+            target_object=dynamic_detector.id,
+            event=audit_log.get_event_id("DETECTOR_EDIT"),
+            data=dynamic_detector.get_audit_log_data(),
+        )

     @mock.patch(
         "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"

From daea021501be448a3c65b0fc60a14ad682a0f38f Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Thu, 13 Nov 2025 14:46:51 -0800
Subject: [PATCH 2/3] ai actually caught a bug

---
 src/sentry/incidents/metric_issue_detector.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sentry/incidents/metric_issue_detector.py b/src/sentry/incidents/metric_issue_detector.py
index e62f474304cd37..7941704cc4910a 100644
--- a/src/sentry/incidents/metric_issue_detector.py
+++ b/src/sentry/incidents/metric_issue_detector.py
@@ -1,5 +1,5 @@
 from datetime import timedelta
-from typing import Any, cast
+from typing import Any

 from rest_framework import serializers

@@ -279,7 +279,7 @@ def update_data_source(
         # Handle a dynamic detector's snuba query changing
         if instance.config.get("detection_type") == AlertRuleDetectionType.DYNAMIC:
             try:
-                validated_data_source = cast(dict[str, Any], data_source)
+                validated_data_source: dict[str, Any] = {"data_sources": [data_source]}
                 if not seer_updated:
                     update_detector_data(instance, validated_data_source)
             except Exception:

From d799b245dff22d4bf50e51990df82c6a7e6f8aeb Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Fri, 14 Nov 2025 11:50:47 -0800
Subject: [PATCH 3/3] break out tests, add one for eventTypes/event_types

---
 .../endpoints/validators/test_validators.py | 110 +++++++++++++++++-
 1 file changed, 106 insertions(+), 4 deletions(-)

diff --git a/tests/sentry/incidents/endpoints/validators/test_validators.py b/tests/sentry/incidents/endpoints/validators/test_validators.py
index 5e613c5492b138..447ec64384859f 100644
--- a/tests/sentry/incidents/endpoints/validators/test_validators.py
+++ b/tests/sentry/incidents/endpoints/validators/test_validators.py
@@ -709,12 +709,11 @@ def test_update_anomaly_detection_from_static(
         "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
     )
     @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
-    def test_update_anomaly_detection_snuba_query(
+    def test_update_anomaly_detection_snuba_query_query(
         self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
     ) -> None:
         """
-        Test that when we update the snuba query for a dynamic detector
-        we make a call to Seer with the changes
+        Test that when we update the snuba query query for a dynamic detector we make a call to Seer with the changes
         """
         seer_return_value: StoreDataResponse = {"success": True}
         mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
@@ -775,6 +774,36 @@ def test_update_anomaly_detection_snuba_query(
             event=audit_log.get_event_id("DETECTOR_EDIT"),
             data=dynamic_detector.get_audit_log_data(),
         )
+
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
+    def test_update_anomaly_detection_snuba_query_aggregate(
+        self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
+        """
+        Test that when we update the snuba query aggregate for a dynamic detector we make a call to Seer with the changes
+        """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+
+        # Verify detector in DB
+        self.assert_validated(detector)
+
+        assert mock_seer_request.call_count == 1
+        mock_seer_request.reset_mock()
+
+        # Verify audit log
+        mock_audit.assert_called_once_with(
+            request=self.context["request"],
+            organization=self.project.organization,
+            target_object=detector.id,
+            event=audit_log.get_event_id("DETECTOR_ADD"),
+            data=detector.get_audit_log_data(),
+        )
         mock_audit.reset_mock()

         # Change the aggregate which should call Seer
@@ -814,8 +843,25 @@ def test_update_anomaly_detection_snuba_query(
             event=audit_log.get_event_id("DETECTOR_EDIT"),
             data=dynamic_detector.get_audit_log_data(),
         )
-        mock_audit.reset_mock()
+
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
+    def test_update_anomaly_detection_snuba_query_to_perf(
+        self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
+        """
+        Test that when we update the snuba query for a dynamic detector
+        to become a performance query we make a call to Seer with the changes
+        """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+        assert mock_seer_request.call_count == 1
         mock_seer_request.reset_mock()
+        mock_audit.reset_mock()

         # Change the dataset, queryType, and aggregate to perf stuff
         update_data = {
@@ -857,6 +903,62 @@ def test_update_anomaly_detection_snuba_query(
             data=dynamic_detector.get_audit_log_data(),
         )

+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.handle_send_historical_data_to_seer"
+    )
+    def test_update_anomaly_detection_event_types(
+        self, mock_send_historical_data: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
+        """
+        Test that when we update the eventTypes for a dynamic detector it gets sent through as expected
+        """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+        assert mock_send_historical_data.call_count == 1
+        mock_send_historical_data.reset_mock()
+
+        # Change the dataset, queryType, aggregate, and eventTypes to performance data
+        data_source_data = {
+            "queryType": SnubaQuery.Type.PERFORMANCE.value,
+            "dataset": Dataset.EventsAnalyticsPlatform.value,
+            "query": "updated_query",
+            "aggregate": "count()",
+            "timeWindow": 3600,
+            "environment": self.environment.name,
+            "eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
+        }
+        update_data = {
+            **self.valid_anomaly_detection_data,
+            "dataSources": [data_source_data],
+        }
+        update_validator = MetricIssueDetectorValidator(
+            instance=detector, data=update_data, context=self.context, partial=True
+        )
+        assert update_validator.is_valid(), update_validator.errors
+        detector = update_validator.save()
+
+        # Verify snuba query changes
+        data_source = DataSource.objects.get(detector=detector)
+        query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
+        snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
+        condition_group = DataConditionGroup.objects.get(id=detector.workflow_condition_group_id)
+        data_condition = DataCondition.objects.get(condition_group=condition_group)
+
+        mock_send_historical_data.assert_called_once_with(
+            detector,
+            data_source,
+            data_condition,
+            snuba_query,
+            detector.project,
+            "update",
+            [SnubaQueryEventType.EventType.TRANSACTION],
+        )
+
     @mock.patch(
         "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
     )