21 changes: 17 additions & 4 deletions src/sentry/explore/translation/alerts_translation.py
@@ -8,6 +8,7 @@
from sentry.incidents.models.alert_rule import AlertRuleDetectionType
from sentry.incidents.subscription_processor import MetricIssueDetectorConfig
from sentry.incidents.utils.types import DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION
from sentry.search.events.fields import parse_function
from sentry.seer.anomaly_detection.store_data import SeerMethod
from sentry.seer.anomaly_detection.store_data_workflow_engine import (
handle_send_historical_data_to_seer,
@@ -26,6 +27,14 @@

logger = logging.getLogger(__name__)

COUNT_BASED_ALERT_AGGREAGTES = [
"count",
"failure_count",
"sum",
"count_if",
"count_unique",
]


def snapshot_snuba_query(snuba_query: SnubaQuery):
if snuba_query.dataset in [Dataset.PerformanceMetrics.value, Dataset.Transactions.value]:
@@ -93,10 +102,14 @@ def translate_detector_and_update_subscription_in_snuba(snuba_query: SnubaQuery)
snuba_query.query = translated_query
snuba_query.dataset = Dataset.EventsAnalyticsPlatform.value

-if snapshot["dataset"] == Dataset.PerformanceMetrics.value:
-    snuba_query.extrapolation_mode = ExtrapolationMode.SERVER_WEIGHTED.value
-elif snapshot["dataset"] == Dataset.Transactions.value:
-    snuba_query.extrapolation_mode = ExtrapolationMode.NONE.value
+function_name, _, _ = parse_function(old_aggregate)
+if function_name in COUNT_BASED_ALERT_AGGREAGTES:
+    if snapshot["dataset"] == Dataset.PerformanceMetrics.value:
+        snuba_query.extrapolation_mode = ExtrapolationMode.SERVER_WEIGHTED.value
+    elif snapshot["dataset"] == Dataset.Transactions.value:
+        snuba_query.extrapolation_mode = ExtrapolationMode.NONE.value
+else:
+    snuba_query.extrapolation_mode = ExtrapolationMode.CLIENT_AND_SERVER_WEIGHTED.value

with atomic_transaction(
using=(
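For illustration only, here is a minimal, self-contained sketch of the extrapolation-mode selection this hunk introduces. The Dataset and ExtrapolationMode stand-ins, the simplified parse_function, and the pick_extrapolation_mode helper are assumptions made for this sketch, not Sentry's actual modules; the real parse_function lives in sentry.search.events.fields and the real logic mutates snuba_query in place rather than returning a value. The asserts at the end mirror the new tests: count-based aggregates keep dataset-specific weighting, while percentiles and other aggregates get client-and-server weighting.

# Illustrative sketch; enum values and parse_function behaviour are assumptions.
import re
from enum import Enum


class Dataset(Enum):
    # Stand-ins for the sentry.snuba.dataset.Dataset members referenced above;
    # the real string values may differ.
    PerformanceMetrics = "performance_metrics"
    Transactions = "transactions"


class ExtrapolationMode(Enum):
    NONE = 0
    SERVER_WEIGHTED = 1
    CLIENT_AND_SERVER_WEIGHTED = 2


# Mirrors the constant added in the hunk above.
COUNT_BASED_ALERT_AGGREAGTES = ["count", "failure_count", "sum", "count_if", "count_unique"]


def parse_function(aggregate: str) -> tuple[str, list[str], None]:
    # Simplified stand-in for sentry.search.events.fields.parse_function:
    # "count_if(transaction.duration,greater,100)" -> ("count_if", [...], None)
    match = re.match(r"^(\w+)\((.*)\)$", aggregate)
    if not match:
        raise ValueError(f"unparseable aggregate: {aggregate}")
    args = [a.strip() for a in match.group(2).split(",") if a.strip()]
    return match.group(1), args, None


def pick_extrapolation_mode(old_aggregate: str, old_dataset: str) -> ExtrapolationMode | None:
    """Return the mode the hunk would assign, or None when it would leave
    snuba_query.extrapolation_mode untouched."""
    function_name, _, _ = parse_function(old_aggregate)
    if function_name in COUNT_BASED_ALERT_AGGREAGTES:
        if old_dataset == Dataset.PerformanceMetrics.value:
            return ExtrapolationMode.SERVER_WEIGHTED
        elif old_dataset == Dataset.Transactions.value:
            return ExtrapolationMode.NONE
        return None
    return ExtrapolationMode.CLIENT_AND_SERVER_WEIGHTED


# Cases matching the new tests below: sum() on PerformanceMetrics stays
# server-weighted, while a percentile such as p50() is client-and-server weighted.
assert pick_extrapolation_mode("sum(transaction.duration)", "performance_metrics") is ExtrapolationMode.SERVER_WEIGHTED
assert pick_extrapolation_mode("p50(transaction.duration)", "transactions") is ExtrapolationMode.CLIENT_AND_SERVER_WEIGHTED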
242 changes: 241 additions & 1 deletion tests/sentry/explore/translation/test_alerts_translation.py
@@ -264,7 +264,7 @@ def test_translate_alert_rule_p95(self, mock_create_rpc) -> None:
assert snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
assert snuba_query.aggregate == "p95(span.duration)"
assert snuba_query.query == "(transaction.method:GET) AND is_transaction:1"
-assert snuba_query.extrapolation_mode == ExtrapolationMode.NONE.value
+assert snuba_query.extrapolation_mode == ExtrapolationMode.CLIENT_AND_SERVER_WEIGHTED.value

event_types = list(
SnubaQueryEventType.objects.filter(snuba_query=snuba_query).values_list(
@@ -744,3 +744,243 @@ def test_rollback_anomaly_detection_alert(
assert project_arg.id == self.project.id
assert seer_method_arg == SeerMethod.UPDATE
assert event_types_arg == [SnubaQueryEventType.EventType.TRANSACTION]

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_sum_performance_metrics(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.PerformanceMetrics,
query="",
aggregate="sum(transaction.duration)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.SERVER_WEIGHTED.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_sum_transactions(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.Transactions,
query="",
aggregate="sum(transaction.duration)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.NONE.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_count_if_performance_metrics(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.PerformanceMetrics,
query="",
aggregate="count_if(transaction.duration,greater,100)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.SERVER_WEIGHTED.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_count_if_transactions(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.Transactions,
query="",
aggregate="count_if(transaction.duration,greater,100)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.NONE.value

@with_feature("organizations:migrate-transaction-alerts-to-spans")
@patch("sentry.snuba.tasks._create_rpc_in_snuba")
def test_extrapolation_mode_p50_transactions(self, mock_create_rpc) -> None:
mock_create_rpc.return_value = "test-subscription-id"

snuba_query = create_snuba_query(
query_type=SnubaQuery.Type.PERFORMANCE,
dataset=Dataset.Transactions,
query="",
aggregate="p50(transaction.duration)",
time_window=timedelta(minutes=10),
environment=None,
event_types=[SnubaQueryEventType.EventType.TRANSACTION],
resolution=timedelta(minutes=1),
)

create_snuba_subscription(
project=self.project,
subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
snuba_query=snuba_query,
)

data_source = self.create_data_source(
organization=self.org,
source_id=str(snuba_query.id),
type=DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
)

detector_data_condition_group = self.create_data_condition_group(
organization=self.org,
)

detector = self.create_detector(
name="Test Detector",
type=MetricIssue.slug,
project=self.project,
config={"detection_type": AlertRuleDetectionType.STATIC.value},
workflow_condition_group=detector_data_condition_group,
)

data_source.detectors.add(detector)

with self.tasks():
translate_detector_and_update_subscription_in_snuba(snuba_query)
snuba_query.refresh_from_db()

assert snuba_query.extrapolation_mode == ExtrapolationMode.CLIENT_AND_SERVER_WEIGHTED.value