29 changes: 16 additions & 13 deletions src/sentry/incidents/metric_issue_detector.py
@@ -1,5 +1,5 @@
from datetime import timedelta
from typing import Any, cast
from typing import Any

from rest_framework import serializers

@@ -241,7 +241,9 @@ def is_editing_transaction_dataset(
return True
return False

def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSourceType):
def update_data_source(
self, instance: Detector, data_source: SnubaQueryDataSourceType, seer_updated: bool = False
):
try:
source_instance = DataSource.objects.get(detector=instance)
except DataSource.DoesNotExist:
@@ -276,17 +278,15 @@ def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSour

# Handle a dynamic detector's snuba query changing
if instance.config.get("detection_type") == AlertRuleDetectionType.DYNAMIC:
if snuba_query.query != data_source.get(
"query"
) or snuba_query.aggregate != data_source.get("aggregate"):
try:
validated_data_source = cast(dict[str, Any], data_source)
try:
Contributor

Bug: Incorrect Enum Comparison Always Fails

The comparison checks if instance.config.get("detection_type") equals AlertRuleDetectionType.DYNAMIC, but the config stores string values while the enum comparison uses the enum member itself. This comparison will always be False since it's comparing a string ("dynamic") against an enum member. The check should use AlertRuleDetectionType.DYNAMIC.value to compare with the stored string value, as done elsewhere in the codebase at line 357.

Member Author

I don't think cursor understands string enums. My tests would fail if this were true.
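
For readers following the exchange: if AlertRuleDetectionType is a str-mixin enum (an assumption here; this diff does not show its definition), its members compare equal to the plain strings stored in the detector config, so the check can hold. A minimal sketch with a hypothetical stand-in enum:

from enum import Enum

# Hypothetical stand-in for AlertRuleDetectionType; the real enum lives
# elsewhere in the Sentry codebase and may be defined differently.
class DetectionType(str, Enum):
    DYNAMIC = "dynamic"
    STATIC = "static"

config = {"detection_type": "dynamic"}

# A str-mixin enum member is itself a string, so it compares equal to the
# raw value stored in a JSON/dict config blob.
assert config.get("detection_type") == DetectionType.DYNAMIC

# A plain Enum (no str mixin) behaves the way the automated comment assumes:
# the member never equals the bare string.
class PlainDetectionType(Enum):
    DYNAMIC = "dynamic"

assert config.get("detection_type") != PlainDetectionType.DYNAMIC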

validated_data_source: dict[str, Any] = {"data_sources": [data_source]}
if not seer_updated:
update_detector_data(instance, validated_data_source)
except Exception:
# don't update the snuba query if we failed to send data to Seer
raise serializers.ValidationError(
"Failed to send data to Seer, cannot update detector"
)
except Exception:
# don't update the snuba query if we failed to send data to Seer
raise serializers.ValidationError(
"Failed to send data to Seer, cannot update detector"
)

update_snuba_query(
snuba_query=snuba_query,
@@ -304,6 +304,7 @@ def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSour
)

def update(self, instance: Detector, validated_data: dict[str, Any]):
seer_updated = False
# Handle anomaly detection changes first in case we need to exit before saving
if (
not instance.config.get("detection_type") == AlertRuleDetectionType.DYNAMIC
@@ -313,6 +314,8 @@ def update(self, instance: Detector, validated_data: dict[str, Any]):
# Detector has been changed to become a dynamic detector
try:
update_detector_data(instance, validated_data)
seer_updated = True

except Exception:
# Don't update if we failed to send data to Seer
raise serializers.ValidationError(
@@ -339,7 +342,7 @@ def update(self, instance: Detector, validated_data: dict[str, Any]):
data_source = validated_data.pop("data_sources")[0]

if data_source is not None:
self.update_data_source(instance, data_source)
self.update_data_source(instance, data_source, seer_updated)
Contributor

Bug: Transactional Flaw Leaves Detector Inconsistent

The detector update lacks transactional consistency. The parent's update at line 325 commits detector config changes (like detection type) in a transaction, but update_data_source at line 345 runs after that transaction completes. If update_data_source raises a ValidationError (which can happen at multiple points including Seer communication failures), the detector config will already be persisted while the snuba query changes won't be applied. This leaves the detector in an inconsistent state where its config (e.g., detection type) doesn't match its underlying query configuration.
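
If this ordering does need tightening, one possible direction (a sketch only, written as the validator's update method and assuming the parent update and the data-source update can safely share a transaction, which this diff does not confirm) is to run both inside a single atomic block so a ValidationError rolls back the config change as well:

from django.db import transaction

def update(self, instance, validated_data):
    seer_updated = False
    # ... anomaly-detection handling elided ...
    data_source = None
    if "data_sources" in validated_data:
        data_source = validated_data.pop("data_sources")[0]

    with transaction.atomic():
        # Persist the config changes and the snuba query changes together so
        # a failure in either (e.g. a Seer error surfaced as a
        # ValidationError) leaves the detector untouched.
        instance = super().update(instance, validated_data)
        if data_source is not None:
            self.update_data_source(instance, data_source, seer_updated)
        instance.save()
    return instance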

instance.save()

@@ -100,7 +100,7 @@ def update_detector_data(
updated_data_source_data = updated_fields.get("data_sources")
if updated_data_source_data:
data_source_data = updated_data_source_data[0]
event_types = data_source_data.get("eventTypes")
event_types = data_source_data.get("event_types")
Member Author

this was just incorrect before, oops

Contributor

Mind adding a test for this to ensure everything is working as expected with this change?

Member Author

added 👍

Contributor

Bug: Inconsistent Data: Mixing Stale and Current Values

When data_source_data.get("event_types") returns None (field not being updated), event_types is set to None and later falls back to snuba_query.event_types at line 162. However, snuba_query.event_types is a property that queries the database and returns the OLD values, while other fields like dataset, query, and aggregate may have been modified in-memory via setattr at lines 105-113. This creates an inconsistency where Seer receives a mix of new and old values - the updated fields reflect the changes, but event_types reflects the stale database state rather than what's actually being saved.
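
To illustrate the mechanism this comment describes (purely a sketch with a mock object, assuming event_types really is a DB-backed property as stated): setattr changes are visible on the instance, but a property that re-reads persisted rows still returns the old values, so a payload built from both mixes new and old state.

# A tiny mock, not the real SnubaQuery model.
class MockSnubaQuery:
    def __init__(self):
        self.aggregate = "count_unique(user)"
        self.query = "old_query"
        self._db_event_types = ["error"]  # stands in for rows persisted in the database

    @property
    def event_types(self):
        # Like the property described above, this re-reads persisted state
        # rather than anything set in memory on the instance.
        return list(self._db_event_types)

snuba_query = MockSnubaQuery()
snuba_query.aggregate = "count()"        # updated in memory only
snuba_query.query = "updated_query"      # updated in memory only

payload = {
    "aggregate": snuba_query.aggregate,      # new value
    "query": snuba_query.query,              # new value
    "event_types": snuba_query.event_types,  # still the old, persisted rows
}
print(payload)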

for k, v in data_source_data.items():
if k == "dataset":
150 changes: 147 additions & 3 deletions tests/sentry/incidents/endpoints/validators/test_validators.py
@@ -709,12 +709,11 @@ def test_update_anomaly_detection_from_static(
"sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
)
@mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
def test_update_anomaly_detection_snuba_query(
def test_update_anomaly_detection_snuba_query_query(
self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
) -> None:
"""
Test that when we update the snuba query for a dynamic detector
we make a call to Seer with the changes
Test that when we update the snuba query query for a dynamic detector we make a call to Seer with the changes
"""
seer_return_value: StoreDataResponse = {"success": True}
mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
@@ -775,6 +774,36 @@ def test_update_anomaly_detection_snuba_query(
event=audit_log.get_event_id("DETECTOR_EDIT"),
data=dynamic_detector.get_audit_log_data(),
)

@mock.patch(
"sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
)
@mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
def test_update_anomaly_detection_snuba_query_aggregate(
self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
) -> None:
"""
Test that when we update the snuba query aggregate for a dynamic detector we make a call to Seer with the changes
"""
seer_return_value: StoreDataResponse = {"success": True}
mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)

detector = self.create_dynamic_detector()

# Verify detector in DB
self.assert_validated(detector)

assert mock_seer_request.call_count == 1
mock_seer_request.reset_mock()

# Verify audit log
mock_audit.assert_called_once_with(
request=self.context["request"],
organization=self.project.organization,
target_object=detector.id,
event=audit_log.get_event_id("DETECTOR_ADD"),
data=detector.get_audit_log_data(),
)
mock_audit.reset_mock()

# Change the aggregate which should call Seer
@@ -815,6 +844,121 @@ def test_update_anomaly_detection_snuba_query(
data=dynamic_detector.get_audit_log_data(),
)

@mock.patch(
"sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
)
@mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
def test_update_anomaly_detection_snuba_query_to_perf(
self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
) -> None:
"""
Test that when we update the snuba query for a dynamic detector
to become a performance query we make a call to Seer with the changes
"""
seer_return_value: StoreDataResponse = {"success": True}
mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)

detector = self.create_dynamic_detector()
assert mock_seer_request.call_count == 1
mock_seer_request.reset_mock()
mock_audit.reset_mock()

# Change the dataset, queryType, and aggregate to perf stuff
update_data = {
**self.valid_anomaly_detection_data,
"dataSources": [
{
"queryType": SnubaQuery.Type.PERFORMANCE.value,
"dataset": Dataset.EventsAnalyticsPlatform.value,
"query": "updated_query",
"aggregate": "count()",
"timeWindow": 3600,
"environment": self.environment.name,
"eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
}
],
}
update_validator = MetricIssueDetectorValidator(
instance=detector, data=update_data, context=self.context, partial=True
)
assert update_validator.is_valid(), update_validator.errors
dynamic_detector = update_validator.save()

assert mock_seer_request.call_count == 1

# Verify snuba query changes
data_source = DataSource.objects.get(detector=dynamic_detector)
query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
assert snuba_query.aggregate == "count()"
assert snuba_query.type == SnubaQuery.Type.PERFORMANCE.value
assert snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
assert snuba_query.query == "updated_query"

mock_audit.assert_called_once_with(
request=self.context["request"],
organization=self.project.organization,
target_object=dynamic_detector.id,
event=audit_log.get_event_id("DETECTOR_EDIT"),
data=dynamic_detector.get_audit_log_data(),
)

@mock.patch(
"sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
)
@mock.patch(
"sentry.seer.anomaly_detection.store_data_workflow_engine.handle_send_historical_data_to_seer"
)
def test_update_anomaly_detection_event_types(
self, mock_send_historical_data: mock.MagicMock, mock_seer_request: mock.MagicMock
) -> None:
"""
Test that when we update the eventTypes for a dynamic detector it gets sent through as expected
"""
seer_return_value: StoreDataResponse = {"success": True}
mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)

detector = self.create_dynamic_detector()
assert mock_send_historical_data.call_count == 1
mock_send_historical_data.reset_mock()

# Change the dataset, queryType, aggregate, and eventTypes to performance data
data_source_data = {
"queryType": SnubaQuery.Type.PERFORMANCE.value,
"dataset": Dataset.EventsAnalyticsPlatform.value,
"query": "updated_query",
"aggregate": "count()",
"timeWindow": 3600,
"environment": self.environment.name,
"eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
}
update_data = {
**self.valid_anomaly_detection_data,
"dataSources": [data_source_data],
}
update_validator = MetricIssueDetectorValidator(
instance=detector, data=update_data, context=self.context, partial=True
)
assert update_validator.is_valid(), update_validator.errors
detector = update_validator.save()

# Verify snuba query changes
data_source = DataSource.objects.get(detector=detector)
query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
condition_group = DataConditionGroup.objects.get(id=detector.workflow_condition_group_id)
data_condition = DataCondition.objects.get(condition_group=condition_group)

mock_send_historical_data.assert_called_once_with(
detector,
data_source,
data_condition,
snuba_query,
detector.project,
"update",
[SnubaQueryEventType.EventType.TRANSACTION],
)

@mock.patch(
"sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
)