From 8ee3aed5032653447ca77ebc8a94c5e36c12ca1b Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 12:33:59 -0800 Subject: [PATCH 1/8] remove the name from the repl for a workflow, these will show up in logs --- src/sentry/workflow_engine/models/workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py index c55bff06579289..612853cbf2c6dc 100644 --- a/src/sentry/workflow_engine/models/workflow.py +++ b/src/sentry/workflow_engine/models/workflow.py @@ -83,7 +83,7 @@ class Workflow(DefaultFieldsModel, OwnerModel, JSONConfigBase): "additionalProperties": False, } - __repr__ = sane_repr("name", "organization_id") + __repr__ = sane_repr("organization_id") class Meta: app_label = "workflow_engine" From bf637620dcbbb6cd8a0215ee314e091b4a6dc993 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 12:34:27 -0800 Subject: [PATCH 2/8] use the workflow context logger to log out the tagged event handler info to help investigate why this isn't working --- .../handlers/condition/tagged_event_handler.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py b/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py index 7614a3fe225308..787963d07148d6 100644 --- a/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py +++ b/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py @@ -6,6 +6,9 @@ from sentry.workflow_engine.models.data_condition import Condition from sentry.workflow_engine.registry import condition_handler_registry from sentry.workflow_engine.types import DataConditionHandler, WorkflowEventData +from sentry.workflow_engine.utils import log_context + +logger = log_context.get_logger(__name__) @condition_handler_registry.register(Condition.TAGGED_EVENT) @@ -90,4 +93,17 @@ def evaluate_value(event_data: WorkflowEventData, comparison: Any) -> bool: if k.lower() == key or tagstore.backend.get_standardized_key(k) == key ) - return match_values(group_values=tag_values, match_value=value, match_type=match) + result = match_values(group_values=tag_values, match_value=value, match_type=match) + + logger.debug( + "workflow_engine.handlers.tagged_event_handler", + extra={ + "evaluation_result": result, + "event": event, + "event_tags": event.tags, + "processed_values": tag_values, + "comparison_type": match, + }, + ) + + return result From 627b781e4ef256d7ccd3a74004ecd8f73fdc2c98 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 12:50:10 -0800 Subject: [PATCH 3/8] change group_event to event, add more logging data into the extra --- src/sentry/workflow_engine/processors/workflow.py | 2 +- src/sentry/workflow_engine/types.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/sentry/workflow_engine/processors/workflow.py b/src/sentry/workflow_engine/processors/workflow.py index 831be818cdd373..bd3184e2950cc3 100644 --- a/src/sentry/workflow_engine/processors/workflow.py +++ b/src/sentry/workflow_engine/processors/workflow.py @@ -482,7 +482,7 @@ def process_workflows( fire_actions, ) - workflow_evaluation_data = WorkflowEvaluationData(group_event=event_data.event) + workflow_evaluation_data = 
WorkflowEvaluationData(event=event_data.event) try: if detector is None and isinstance(event_data.event, GroupEvent): diff --git a/src/sentry/workflow_engine/types.py b/src/sentry/workflow_engine/types.py index a3b19363a68c2f..139b83bd555258 100644 --- a/src/sentry/workflow_engine/types.py +++ b/src/sentry/workflow_engine/types.py @@ -89,7 +89,7 @@ class WorkflowEventData: @dataclass class WorkflowEvaluationData: - group_event: GroupEvent | Activity + event: GroupEvent | Activity action_groups: set[DataConditionGroup] | None = None workflows: set[Workflow] | None = None triggered_actions: set[Action] | None = None @@ -134,7 +134,15 @@ def to_log(self, logger: Logger) -> None: else: log_str = f"{log_str}.actions.triggered" - logger.info(log_str, extra={**asdict(self.data), "debug_msg": self.msg}) + logger.info( + log_str, + extra={ + **asdict(self.data), + "debug_msg": self.msg, + "group": self.data.event.group, + "data": self.data.event.data, + }, + ) class ConfigTransformer(ABC): From b93ac294e19162473174f5109abbc98a024f6502 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 16:41:39 -0800 Subject: [PATCH 4/8] started to add get_snapshot to the models, this will simplify the layers of trying to get debuggable output. --- src/sentry/workflow_engine/models/action.py | 8 ++- .../models/data_condition_group.py | 13 ++++- src/sentry/workflow_engine/models/detector.py | 12 +++++ src/sentry/workflow_engine/models/workflow.py | 17 +++++++ src/sentry/workflow_engine/types.py | 49 +++++++++++++++---- tests/sentry/workflow_engine/test_task.py | 45 +++++++++++++++++ 6 files changed, 132 insertions(+), 12 deletions(-) diff --git a/src/sentry/workflow_engine/models/action.py b/src/sentry/workflow_engine/models/action.py index 01f776126b4872..a0ec5c42d58259 100644 --- a/src/sentry/workflow_engine/models/action.py +++ b/src/sentry/workflow_engine/models/action.py @@ -3,7 +3,7 @@ import builtins import logging from enum import StrEnum -from typing import TYPE_CHECKING, ClassVar +from typing import TYPE_CHECKING, Any, ClassVar from django.db import models from django.db.models import Q @@ -112,6 +112,12 @@ class Meta: ), ] + def get_snapshot(self) -> dict[str, Any]: + return { + "id": self.id, + "type": self.type, + } + def get_handler(self) -> builtins.type[ActionHandler]: action_type = Action.Type(self.type) return action_handler_registry.get(action_type) diff --git a/src/sentry/workflow_engine/models/data_condition_group.py b/src/sentry/workflow_engine/models/data_condition_group.py index bb09ed2c9695f6..e50ceb6aabf386 100644 --- a/src/sentry/workflow_engine/models/data_condition_group.py +++ b/src/sentry/workflow_engine/models/data_condition_group.py @@ -1,5 +1,5 @@ from enum import StrEnum -from typing import ClassVar, Self +from typing import Any, ClassVar, Self from django.db import models @@ -36,3 +36,14 @@ class Type(StrEnum): max_length=200, choices=[(t.value, t.value) for t in Type], default=Type.ANY ) organization = models.ForeignKey("sentry.Organization", on_delete=models.CASCADE) + + def get_snapshot(self) -> dict[str, Any]: + conditions = [] + if hasattr(self, "conditions"): + conditions = [cond.get_snapshot() for cond in self.conditions.all()] + + return { + "id": self.id, + "logic_type": self.logic_type, + "conditions": conditions, + } diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py index 3ffefe92f3a033..032db26919da07 100644 --- 
a/src/sentry/workflow_engine/models/detector.py +++ b/src/sentry/workflow_engine/models/detector.py @@ -141,6 +141,18 @@ def settings(self) -> DetectorSettings: return settings + def get_snapshot(self) -> dict[str, Any]: + trigger_conditions = None + if self.workflow_condition_group: + trigger_conditions = self.workflow_condition_group.get_snapshot() + + return { + "id": self.id, + "enabled": self.enabled, + "status": self.status, + "trigger_conditions": trigger_conditions, + } + def get_audit_log_data(self) -> dict[str, Any]: return {"name": self.name} diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py index 612853cbf2c6dc..7fc39b4049f5a7 100644 --- a/src/sentry/workflow_engine/models/workflow.py +++ b/src/sentry/workflow_engine/models/workflow.py @@ -92,6 +92,23 @@ class Meta: def get_audit_log_data(self) -> dict[str, Any]: return {"name": self.name} + def get_snapshot(self) -> dict[str, Any]: + when_condition_group = None + if self.when_condition_group: + when_condition_group = self.when_condition_group.get_snapshot() + + environment_id = None + if self.environment: + environment_id = self.environment.id + + return { + "id": self.id, + "enabled": self.enabled, + "environment_id": environment_id, + "status": self.status, + "triggers": when_condition_group, + } + def evaluate_trigger_conditions( self, event_data: WorkflowEventData, when_data_conditions: list[DataCondition] | None = None ) -> tuple[TriggerResult, list[DataCondition]]: diff --git a/src/sentry/workflow_engine/types.py b/src/sentry/workflow_engine/types.py index 139b83bd555258..2be1a1dfd649a2 100644 --- a/src/sentry/workflow_engine/types.py +++ b/src/sentry/workflow_engine/types.py @@ -1,7 +1,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from dataclasses import asdict, dataclass, field +from dataclasses import dataclass, field from enum import IntEnum, StrEnum from logging import Logger from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypedDict, TypeVar @@ -96,6 +96,43 @@ class WorkflowEvaluationData: triggered_workflows: set[Workflow] | None = None associated_detector: Detector | None = None + def get_snapshot(self) -> dict[str, Any]: + """ + This method will take the complex data structures, like models / list of models, + and turn them into the critical attributes of a model or lists of IDs. 
+ """ + + associated_detector = None + if self.associated_detector: + associated_detector = self.associated_detector.get_snapshot() + + workflow_ids = [] + if self.workflows: + workflow_ids = [workflow.id for workflow in self.workflows] + + triggered_workflows = [] + if self.triggered_workflows: + triggered_workflows = [workflow.get_snapshot() for workflow in self.triggered_workflows] + + action_filter_conditions = [] + if self.action_groups: + action_filter_conditions = [group.get_snapshot() for group in self.action_groups] + + triggered_actions = [] + if self.triggered_actions: + triggered_actions = [action.get_snapshot() for action in self.triggered_actions] + + return { + "workflow_ids": workflow_ids, + "associated_detector": associated_detector, + "event": self.event, + "group": self.event.group, + "event_data": self.event.data, + "action_filter_conditions": action_filter_conditions, + "triggered_actions": triggered_actions, + "triggered_workflows": triggered_workflows, + } + @dataclass(frozen=True) class WorkflowEvaluation: @@ -134,15 +171,7 @@ def to_log(self, logger: Logger) -> None: else: log_str = f"{log_str}.actions.triggered" - logger.info( - log_str, - extra={ - **asdict(self.data), - "debug_msg": self.msg, - "group": self.data.event.group, - "data": self.data.event.data, - }, - ) + logger.info(log_str, extra={**self.data.get_snapshot(), "debug_msg": self.msg}) class ConfigTransformer(ABC): diff --git a/tests/sentry/workflow_engine/test_task.py b/tests/sentry/workflow_engine/test_task.py index c633741dcc245d..656e75ff511d77 100644 --- a/tests/sentry/workflow_engine/test_task.py +++ b/tests/sentry/workflow_engine/test_task.py @@ -254,6 +254,51 @@ def test_process_workflow_activity( }, ) + @mock.patch("sentry.workflow_engine.processors.workflow.evaluate_workflow_triggers") + @mock.patch("sentry.workflow_engine.tasks.workflows.logger") + def test_process_workflow_activity__success_logs( + self, mock_logger, mock_evaluate_workflow_triggers + ) -> None: + self.workflow = self.create_workflow(organization=self.organization) + self.workflow.when_condition_group = self.create_data_condition_group() + self.create_data_condition(condition_group=self.workflow.when_condition_group) + self.workflow.save() + + self.action_group = self.create_data_condition_group(logic_type="any-short") + self.action = self.create_action() + self.create_data_condition_group_action( + condition_group=self.action_group, + action=self.action, + ) + self.create_workflow_data_condition_group(self.workflow, self.action_group) + + self.create_detector_workflow( + detector=self.detector, + workflow=self.workflow, + ) + + mock_evaluate_workflow_triggers.return_value = ({self.workflow}, {}) + process_workflow_activity( + activity_id=self.activity.id, + group_id=self.group.id, + detector_id=self.detector.id, + ) + + mock_logger.info.assert_called_once_with( + "workflow_engine.process_workflows.evaluation.actions.triggered", + extra={ + "workflow_ids": [self.workflow.id], + "associated_detector": self.detector.get_snapshot(), + "event": self.activity, + "group": self.activity.group, + "event_data": self.activity.data, + "action_filter_conditions": [self.action_group.get_snapshot()], + "triggered_actions": [self.action.get_snapshot()], + "triggered_workflows": [self.workflow.get_snapshot()], + "debug_msg": None, + }, + ) + @mock.patch( "sentry.workflow_engine.models.incident_groupopenperiod.update_incident_based_on_open_period_status_change" ) # rollout code that is independently tested From 
dd7b24c6c98ec26348eaae501280143432747c57 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 17:22:48 -0800 Subject: [PATCH 5/8] add tests, change empty lists to None --- src/sentry/workflow_engine/types.py | 8 ++--- tests/sentry/workflow_engine/test_task.py | 40 ++++++++++------------- 2 files changed, 21 insertions(+), 27 deletions(-) diff --git a/src/sentry/workflow_engine/types.py b/src/sentry/workflow_engine/types.py index 2be1a1dfd649a2..2e151899eaf52d 100644 --- a/src/sentry/workflow_engine/types.py +++ b/src/sentry/workflow_engine/types.py @@ -106,19 +106,19 @@ def get_snapshot(self) -> dict[str, Any]: if self.associated_detector: associated_detector = self.associated_detector.get_snapshot() - workflow_ids = [] + workflow_ids = None if self.workflows: workflow_ids = [workflow.id for workflow in self.workflows] - triggered_workflows = [] + triggered_workflows = None if self.triggered_workflows: triggered_workflows = [workflow.get_snapshot() for workflow in self.triggered_workflows] - action_filter_conditions = [] + action_filter_conditions = None if self.action_groups: action_filter_conditions = [group.get_snapshot() for group in self.action_groups] - triggered_actions = [] + triggered_actions = None if self.triggered_actions: triggered_actions = [action.get_snapshot() for action in self.triggered_actions] diff --git a/tests/sentry/workflow_engine/test_task.py b/tests/sentry/workflow_engine/test_task.py index 656e75ff511d77..3cd89acc56d8e2 100644 --- a/tests/sentry/workflow_engine/test_task.py +++ b/tests/sentry/workflow_engine/test_task.py @@ -154,13 +154,15 @@ def test_process_workflow_activity__no_workflows(self, mock_logger) -> None: mock_logger.info.assert_called_once_with( "workflow_engine.process_workflows.evaluation.workflows.not_triggered", extra={ - "debug_msg": "No workflows are associated with the detector in the event", - "group_event": self.activity, - "action_groups": None, + "workflow_ids": None, + "associated_detector": self.detector.get_snapshot(), + "event": self.activity, + "group": self.activity.group, + "event_data": self.activity.data, + "action_filter_conditions": None, "triggered_actions": None, - "workflows": set(), "triggered_workflows": None, - "associated_detector": self.detector, + "debug_msg": "No workflows are associated with the detector in the event", }, ) @@ -199,13 +201,15 @@ def test_process_workflow_activity__workflows__no_actions( mock_logger.info.assert_called_once_with( "workflow_engine.process_workflows.evaluation.workflows.triggered", extra={ - "debug_msg": "No items were triggered or queued for slow evaluation", - "group_event": self.activity, - "action_groups": None, + "workflow_ids": [self.workflow.id], + "associated_detector": self.detector.get_snapshot(), + "event": self.activity, + "group": self.activity.group, + "event_data": self.activity.data, + "action_filter_conditions": None, "triggered_actions": None, - "workflows": {self.workflow}, - "triggered_workflows": set(), # from the mock - "associated_detector": self.detector, + "triggered_workflows": None, + "debug_msg": "No items were triggered or queued for slow evaluation", }, ) @@ -241,18 +245,6 @@ def test_process_workflow_activity( ) mock_filter_actions.assert_called_once_with({self.action_group}, expected_event_data) - mock_logger.info.assert_called_once_with( - "workflow_engine.process_workflows.evaluation.actions.triggered", - extra={ - "debug_msg": None, - "group_event": self.activity, - "action_groups": 
{self.action_group}, - "triggered_actions": set(), - "workflows": {self.workflow}, - "triggered_workflows": {self.workflow}, - "associated_detector": self.detector, - }, - ) @mock.patch("sentry.workflow_engine.processors.workflow.evaluate_workflow_triggers") @mock.patch("sentry.workflow_engine.tasks.workflows.logger") @@ -260,6 +252,8 @@ def test_process_workflow_activity__success_logs( self, mock_logger, mock_evaluate_workflow_triggers ) -> None: self.workflow = self.create_workflow(organization=self.organization) + + # Add additional data to ensure logs work as expected self.workflow.when_condition_group = self.create_data_condition_group() self.create_data_condition(condition_group=self.workflow.when_condition_group) self.workflow.save() From 541012c9177c7f78f0e7acf562d66bc1fd11acdb Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 21:38:49 -0800 Subject: [PATCH 6/8] use TypedDicts to ensure values are correct --- .../workflow_engine/models/data_condition.py | 11 +++++++++-- .../models/data_condition_group.py | 15 ++++++++++++--- src/sentry/workflow_engine/models/detector.py | 18 +++++++++++++----- src/sentry/workflow_engine/models/workflow.py | 17 ++++++++++++++--- .../workflow_engine/processors/workflow.py | 2 +- 5 files changed, 49 insertions(+), 14 deletions(-) diff --git a/src/sentry/workflow_engine/models/data_condition.py b/src/sentry/workflow_engine/models/data_condition.py index 8b1f6d4b6f8f26..84c52f55aa5aa5 100644 --- a/src/sentry/workflow_engine/models/data_condition.py +++ b/src/sentry/workflow_engine/models/data_condition.py @@ -3,7 +3,7 @@ import time from datetime import timedelta from enum import StrEnum -from typing import Any, TypeVar, cast +from typing import Any, TypedDict, TypeVar, cast from django.db import models from django.db.models.signals import pre_save @@ -111,6 +111,13 @@ class Condition(StrEnum): FAST_CONDITION_TOO_SLOW_THRESHOLD = timedelta(milliseconds=500) +class DataConditionSnapshot(TypedDict): + id: int + type: str + comparison: str + condition_result: DataConditionResult + + @region_silo_model class DataCondition(DefaultFieldsModel): """ @@ -137,7 +144,7 @@ class DataCondition(DefaultFieldsModel): on_delete=models.CASCADE, ) - def get_snapshot(self) -> dict[str, Any]: + def get_snapshot(self) -> DataConditionSnapshot: return { "id": self.id, "type": self.type, diff --git a/src/sentry/workflow_engine/models/data_condition_group.py b/src/sentry/workflow_engine/models/data_condition_group.py index e50ceb6aabf386..b236e294dde920 100644 --- a/src/sentry/workflow_engine/models/data_condition_group.py +++ b/src/sentry/workflow_engine/models/data_condition_group.py @@ -1,11 +1,20 @@ +from __future__ import annotations + from enum import StrEnum -from typing import Any, ClassVar, Self +from typing import ClassVar, Self, TypedDict from django.db import models from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr from sentry.db.models.manager.base import BaseManager +from sentry.workflow_engine.models.data_condition import DataConditionSnapshot + + +class DataConditionGroupSnapshot(TypedDict): + id: int + logic_type: DataConditionGroup.Type + conditions: list[DataConditionSnapshot] @region_silo_model @@ -37,13 +46,13 @@ class Type(StrEnum): ) organization = models.ForeignKey("sentry.Organization", on_delete=models.CASCADE) - def get_snapshot(self) -> dict[str, Any]: + def get_snapshot(self) -> DataConditionGroupSnapshot: 
conditions = [] if hasattr(self, "conditions"): conditions = [cond.get_snapshot() for cond in self.conditions.all()] return { "id": self.id, - "logic_type": self.logic_type, + "logic_type": DataConditionGroup.Type(self.logic_type), "conditions": conditions, } diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py index 032db26919da07..26d993d1bb967d 100644 --- a/src/sentry/workflow_engine/models/detector.py +++ b/src/sentry/workflow_engine/models/detector.py @@ -3,7 +3,7 @@ import builtins import logging from collections.abc import Callable -from typing import TYPE_CHECKING, Any, ClassVar +from typing import TYPE_CHECKING, Any, ClassVar, TypedDict from django.conf import settings from django.db import models @@ -29,10 +29,18 @@ if TYPE_CHECKING: from sentry.workflow_engine.handlers.detector import DetectorHandler + from sentry.workflow_engine.models.data_condition_group import DataConditionGroupSnapshot logger = logging.getLogger(__name__) +class DetectorSnapshot(TypedDict): + id: int + enabled: bool + status: int + trigger_condition: DataConditionGroupSnapshot | None + + class DetectorManager(BaseManager["Detector"]): def get_queryset(self) -> BaseQuerySet[Detector]: return ( @@ -141,16 +149,16 @@ def settings(self) -> DetectorSettings: return settings - def get_snapshot(self) -> dict[str, Any]: - trigger_conditions = None + def get_snapshot(self) -> DetectorSnapshot: + trigger_condition = None if self.workflow_condition_group: - trigger_conditions = self.workflow_condition_group.get_snapshot() + trigger_condition = self.workflow_condition_group.get_snapshot() return { "id": self.id, "enabled": self.enabled, "status": self.status, - "trigger_conditions": trigger_conditions, + "trigger_condition": trigger_condition, } def get_audit_log_data(self) -> dict[str, Any]: diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py index 7fc39b4049f5a7..07cfc53202b60d 100644 --- a/src/sentry/workflow_engine/models/workflow.py +++ b/src/sentry/workflow_engine/models/workflow.py @@ -2,7 +2,7 @@ import logging from dataclasses import replace -from typing import Any, ClassVar +from typing import Any, ClassVar, TypedDict from django.conf import settings from django.db import models @@ -17,7 +17,10 @@ from sentry.db.models.manager.base_query_set import BaseQuerySet from sentry.models.owner_base import OwnerModel from sentry.workflow_engine.models.data_condition import DataCondition, is_slow_condition -from sentry.workflow_engine.models.data_condition_group import DataConditionGroup +from sentry.workflow_engine.models.data_condition_group import ( + DataConditionGroup, + DataConditionGroupSnapshot, +) from sentry.workflow_engine.processors.data_condition_group import TriggerResult from sentry.workflow_engine.types import ConditionError, WorkflowEventData @@ -26,6 +29,14 @@ logger = logging.getLogger(__name__) +class WorkflowSnapshot(TypedDict): + id: int + enabled: bool + environment_id: int | None + status: int + triggers: DataConditionGroupSnapshot | None + + class WorkflowManager(BaseManager["Workflow"]): def get_queryset(self) -> BaseQuerySet[Workflow]: return ( @@ -92,7 +103,7 @@ class Meta: def get_audit_log_data(self) -> dict[str, Any]: return {"name": self.name} - def get_snapshot(self) -> dict[str, Any]: + def get_snapshot(self) -> WorkflowSnapshot: when_condition_group = None if self.when_condition_group: when_condition_group = self.when_condition_group.get_snapshot() diff --git 
a/src/sentry/workflow_engine/processors/workflow.py b/src/sentry/workflow_engine/processors/workflow.py index bd3184e2950cc3..21d418a574ea02 100644 --- a/src/sentry/workflow_engine/processors/workflow.py +++ b/src/sentry/workflow_engine/processors/workflow.py @@ -117,7 +117,7 @@ def enqueue_workflows( for queue_item in items_by_workflow.values(): if not queue_item.delayed_if_group_ids and not queue_item.passing_if_group_ids: # Skip because there are no IF groups we could possibly fire actions for if - # the WHEN/IF delayed condtions are met + # the WHEN/IF delayed conditions are met continue project_id = queue_item.event.project_id items_by_project_id[project_id].append(queue_item) From ea244c9f414f09777336b965e2d3d144ea54a7a7 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 21:41:26 -0800 Subject: [PATCH 7/8] use the is_model_attr_cached method rather than just checking for the attribute --- src/sentry/workflow_engine/models/data_condition_group.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sentry/workflow_engine/models/data_condition_group.py b/src/sentry/workflow_engine/models/data_condition_group.py index b236e294dde920..dddeac54217745 100644 --- a/src/sentry/workflow_engine/models/data_condition_group.py +++ b/src/sentry/workflow_engine/models/data_condition_group.py @@ -8,6 +8,7 @@ from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr from sentry.db.models.manager.base import BaseManager +from sentry.db.models.utils import is_model_attr_cached from sentry.workflow_engine.models.data_condition import DataConditionSnapshot @@ -48,7 +49,7 @@ class Type(StrEnum): def get_snapshot(self) -> DataConditionGroupSnapshot: conditions = [] - if hasattr(self, "conditions"): + if is_model_attr_cached(self, "conditions"): conditions = [cond.get_snapshot() for cond in self.conditions.all()] return { From 45508c931334481fda278d1af43289ecf9748013 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 13 Nov 2025 21:44:48 -0800 Subject: [PATCH 8/8] whoops, missed a snapshot type --- src/sentry/workflow_engine/models/action.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/sentry/workflow_engine/models/action.py b/src/sentry/workflow_engine/models/action.py index a0ec5c42d58259..5831bf92faa413 100644 --- a/src/sentry/workflow_engine/models/action.py +++ b/src/sentry/workflow_engine/models/action.py @@ -3,7 +3,7 @@ import builtins import logging from enum import StrEnum -from typing import TYPE_CHECKING, Any, ClassVar +from typing import TYPE_CHECKING, ClassVar, TypedDict from django.db import models from django.db.models import Q @@ -30,6 +30,11 @@ logger = logging.getLogger(__name__) +class ActionSnapshot(TypedDict): + id: int + type: Action.Type + + class ActionManager(BaseManager["Action"]): def get_queryset(self) -> BaseQuerySet[Action]: return ( @@ -112,10 +117,10 @@ class Meta: ), ] - def get_snapshot(self) -> dict[str, Any]: + def get_snapshot(self) -> ActionSnapshot: return { "id": self.id, - "type": self.type, + "type": Action.Type(self.type), } def get_handler(self) -> builtins.type[ActionHandler]:
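
A rough, standalone sketch of the log shape the snapshot methods above compose into (not part of the patch series itself): the TypedDict names mirror the ones added in patches 6-8, but the enum-typed fields are simplified to plain str to keep the sketch self-contained, and every value here (ids, "slack", "any-short", the comparison string, the detector fields) is illustrative rather than captured Sentry output.

from typing import TypedDict


class ActionSnapshot(TypedDict):
    id: int
    type: str  # Action.Type in the real model; str keeps the sketch standalone


class DataConditionSnapshot(TypedDict):
    id: int
    type: str
    comparison: str
    condition_result: bool | None  # DataConditionResult in the real model


class DataConditionGroupSnapshot(TypedDict):
    id: int
    logic_type: str  # DataConditionGroup.Type in the real model
    conditions: list[DataConditionSnapshot]


class WorkflowSnapshot(TypedDict):
    id: int
    enabled: bool
    environment_id: int | None
    status: int
    triggers: DataConditionGroupSnapshot | None


# Roughly the kind of extra that WorkflowEvaluationData.get_snapshot() builds
# for the "...actions.triggered" log line once every model contributes a typed
# snapshot instead of being dumped wholesale via asdict(). Values are made up.
triggers: DataConditionGroupSnapshot = {
    "id": 10,
    "logic_type": "any-short",
    "conditions": [
        {
            "id": 11,
            "type": "tagged_event",
            "comparison": "environment",
            "condition_result": True,
        }
    ],
}

extra = {
    "workflow_ids": [1],
    "associated_detector": {"id": 5, "enabled": True, "status": 1, "trigger_condition": None},
    "triggered_workflows": [
        WorkflowSnapshot(id=1, enabled=True, environment_id=None, status=1, triggers=triggers)
    ],
    "action_filter_conditions": [triggers],
    "triggered_actions": [ActionSnapshot(id=20, type="slack")],
    "debug_msg": None,
}

print(extra)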