Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
from sentry.workflow_engine.models.data_condition import Condition
from sentry.workflow_engine.registry import condition_handler_registry
from sentry.workflow_engine.types import DataConditionHandler, WorkflowEventData
from sentry.workflow_engine.utils import log_context

logger = log_context.get_logger(__name__)


@condition_handler_registry.register(Condition.TAGGED_EVENT)
Expand Down Expand Up @@ -90,4 +93,17 @@ def evaluate_value(event_data: WorkflowEventData, comparison: Any) -> bool:
if k.lower() == key or tagstore.backend.get_standardized_key(k) == key
)

return match_values(group_values=tag_values, match_value=value, match_type=match)
result = match_values(group_values=tag_values, match_value=value, match_type=match)

logger.debug(
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is just in case the top-level logs don't show me everything; I want to be able to handle the user case(s) around this specific handler.

"workflow_engine.handlers.tagged_event_handler",
extra={
"evaluation_result": result,
"event": event,
"event_tags": event.tags,
"processed_values": tag_values,
"comparison_type": match,
},
)

return result
13 changes: 12 additions & 1 deletion src/sentry/workflow_engine/models/action.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import builtins
import logging
from enum import StrEnum
from typing import TYPE_CHECKING, ClassVar
from typing import TYPE_CHECKING, ClassVar, TypedDict

from django.db import models
from django.db.models import Q
Expand All @@ -30,6 +30,11 @@
logger = logging.getLogger(__name__)


class ActionSnapshot(TypedDict):
    """Log-safe snapshot of an Action; produced by Action.get_snapshot()."""

    id: int  # primary key of the Action row
    # NOTE(review): forward reference to Action.Type (defined later in this
    # module) — relies on lazy annotation evaluation; confirm the file has
    # `from __future__ import annotations` at the top.
    type: Action.Type


class ActionManager(BaseManager["Action"]):
def get_queryset(self) -> BaseQuerySet[Action]:
return (
Expand Down Expand Up @@ -112,6 +117,12 @@ class Meta:
),
]

def get_snapshot(self) -> ActionSnapshot:
return {
"id": self.id,
"type": Action.Type(self.type),
}

def get_handler(self) -> builtins.type[ActionHandler]:
action_type = Action.Type(self.type)
return action_handler_registry.get(action_type)
Expand Down
11 changes: 9 additions & 2 deletions src/sentry/workflow_engine/models/data_condition.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import time
from datetime import timedelta
from enum import StrEnum
from typing import Any, TypeVar, cast
from typing import Any, TypedDict, TypeVar, cast

from django.db import models
from django.db.models.signals import pre_save
Expand Down Expand Up @@ -111,6 +111,13 @@ class Condition(StrEnum):
FAST_CONDITION_TOO_SLOW_THRESHOLD = timedelta(milliseconds=500)


class DataConditionSnapshot(TypedDict):
    """Log-safe snapshot of a DataCondition; produced by DataCondition.get_snapshot()."""

    id: int  # primary key of the DataCondition row
    type: str  # Condition (StrEnum) value stored on the model
    # NOTE(review): comparisons on the model can hold non-string JSON values —
    # confirm that `str` is accurate here rather than a broader type.
    comparison: str
    condition_result: DataConditionResult


@region_silo_model
class DataCondition(DefaultFieldsModel):
"""
Expand All @@ -137,7 +144,7 @@ class DataCondition(DefaultFieldsModel):
on_delete=models.CASCADE,
)

def get_snapshot(self) -> dict[str, Any]:
def get_snapshot(self) -> DataConditionSnapshot:
return {
"id": self.id,
"type": self.type,
Expand Down
23 changes: 22 additions & 1 deletion src/sentry/workflow_engine/models/data_condition_group.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,21 @@
from __future__ import annotations

from enum import StrEnum
from typing import ClassVar, Self
from typing import ClassVar, Self, TypedDict

from django.db import models

from sentry.backup.scopes import RelocationScope
from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr
from sentry.db.models.manager.base import BaseManager
from sentry.db.models.utils import is_model_attr_cached
from sentry.workflow_engine.models.data_condition import DataConditionSnapshot


class DataConditionGroupSnapshot(TypedDict):
    """Log-safe snapshot of a DataConditionGroup and, optionally, its conditions."""

    id: int  # primary key of the DataConditionGroup row
    # Forward reference to the enclosing model's Type enum; resolved lazily
    # because this module uses `from __future__ import annotations`.
    logic_type: DataConditionGroup.Type
    # Empty unless the `conditions` relation was already cached when the
    # snapshot was taken (see DataConditionGroup.get_snapshot).
    conditions: list[DataConditionSnapshot]


@region_silo_model
Expand Down Expand Up @@ -36,3 +46,14 @@ class Type(StrEnum):
max_length=200, choices=[(t.value, t.value) for t in Type], default=Type.ANY
)
organization = models.ForeignKey("sentry.Organization", on_delete=models.CASCADE)

def get_snapshot(self) -> DataConditionGroupSnapshot:
conditions = []
if is_model_attr_cached(self, "conditions"):
conditions = [cond.get_snapshot() for cond in self.conditions.all()]

return {
"id": self.id,
"logic_type": DataConditionGroup.Type(self.logic_type),
"conditions": conditions,
}
22 changes: 21 additions & 1 deletion src/sentry/workflow_engine/models/detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import builtins
import logging
from collections.abc import Callable
from typing import TYPE_CHECKING, Any, ClassVar
from typing import TYPE_CHECKING, Any, ClassVar, TypedDict

from django.conf import settings
from django.db import models
Expand All @@ -29,10 +29,18 @@

if TYPE_CHECKING:
from sentry.workflow_engine.handlers.detector import DetectorHandler
from sentry.workflow_engine.models.data_condition_group import DataConditionGroupSnapshot

logger = logging.getLogger(__name__)


class DetectorSnapshot(TypedDict):
    """Log-safe snapshot of a Detector; produced by Detector.get_snapshot()."""

    id: int  # primary key of the Detector row
    enabled: bool
    status: int
    # Snapshot of workflow_condition_group, or None when the detector has none.
    # DataConditionGroupSnapshot is imported under TYPE_CHECKING only, so this
    # annotation must never be evaluated at runtime.
    trigger_condition: DataConditionGroupSnapshot | None


class DetectorManager(BaseManager["Detector"]):
def get_queryset(self) -> BaseQuerySet[Detector]:
return (
Expand Down Expand Up @@ -141,6 +149,18 @@ def settings(self) -> DetectorSettings:

return settings

def get_snapshot(self) -> DetectorSnapshot:
trigger_condition = None
if self.workflow_condition_group:
trigger_condition = self.workflow_condition_group.get_snapshot()

return {
"id": self.id,
"enabled": self.enabled,
"status": self.status,
"trigger_condition": trigger_condition,
}

def get_audit_log_data(self) -> dict[str, Any]:
return {"name": self.name}

Expand Down
34 changes: 31 additions & 3 deletions src/sentry/workflow_engine/models/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import logging
from dataclasses import replace
from typing import Any, ClassVar
from typing import Any, ClassVar, TypedDict

from django.conf import settings
from django.db import models
Expand All @@ -17,7 +17,10 @@
from sentry.db.models.manager.base_query_set import BaseQuerySet
from sentry.models.owner_base import OwnerModel
from sentry.workflow_engine.models.data_condition import DataCondition, is_slow_condition
from sentry.workflow_engine.models.data_condition_group import DataConditionGroup
from sentry.workflow_engine.models.data_condition_group import (
DataConditionGroup,
DataConditionGroupSnapshot,
)
from sentry.workflow_engine.processors.data_condition_group import TriggerResult
from sentry.workflow_engine.types import ConditionError, WorkflowEventData

Expand All @@ -26,6 +29,14 @@
logger = logging.getLogger(__name__)


class WorkflowSnapshot(TypedDict):
    """Log-safe snapshot of a Workflow; produced by Workflow.get_snapshot()."""

    id: int  # primary key of the Workflow row
    enabled: bool
    environment_id: int | None  # None when the workflow has no environment
    status: int
    # Snapshot of when_condition_group (the WHEN triggers), or None if unset.
    triggers: DataConditionGroupSnapshot | None


class WorkflowManager(BaseManager["Workflow"]):
def get_queryset(self) -> BaseQuerySet[Workflow]:
return (
Expand Down Expand Up @@ -83,7 +94,7 @@ class Workflow(DefaultFieldsModel, OwnerModel, JSONConfigBase):
"additionalProperties": False,
}

__repr__ = sane_repr("name", "organization_id")
__repr__ = sane_repr("organization_id")

class Meta:
app_label = "workflow_engine"
Expand All @@ -92,6 +103,23 @@ class Meta:
def get_audit_log_data(self) -> dict[str, Any]:
return {"name": self.name}

def get_snapshot(self) -> WorkflowSnapshot:
when_condition_group = None
if self.when_condition_group:
when_condition_group = self.when_condition_group.get_snapshot()

environment_id = None
if self.environment:
environment_id = self.environment.id

return {
"id": self.id,
"enabled": self.enabled,
"environment_id": environment_id,
"status": self.status,
"triggers": when_condition_group,
}

def evaluate_trigger_conditions(
self, event_data: WorkflowEventData, when_data_conditions: list[DataCondition] | None = None
) -> tuple[TriggerResult, list[DataCondition]]:
Expand Down
4 changes: 2 additions & 2 deletions src/sentry/workflow_engine/processors/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def enqueue_workflows(
for queue_item in items_by_workflow.values():
if not queue_item.delayed_if_group_ids and not queue_item.passing_if_group_ids:
# Skip because there are no IF groups we could possibly fire actions for if
# the WHEN/IF delayed condtions are met
# the WHEN/IF delayed conditions are met
continue
project_id = queue_item.event.project_id
items_by_project_id[project_id].append(queue_item)
Expand Down Expand Up @@ -482,7 +482,7 @@ def process_workflows(
fire_actions,
)

workflow_evaluation_data = WorkflowEvaluationData(group_event=event_data.event)
workflow_evaluation_data = WorkflowEvaluationData(event=event_data.event)

try:
if detector is None and isinstance(event_data.event, GroupEvent):
Expand Down
43 changes: 40 additions & 3 deletions src/sentry/workflow_engine/types.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from dataclasses import asdict, dataclass, field
from dataclasses import dataclass, field
from enum import IntEnum, StrEnum
from logging import Logger
from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypedDict, TypeVar
Expand Down Expand Up @@ -89,13 +89,50 @@ class WorkflowEventData:

@dataclass
class WorkflowEvaluationData:
    """Everything observed while evaluating workflows for a single event.

    Model-valued fields are reduced to log-safe snapshots / ID lists by
    get_snapshot() before being attached to a log record.
    """

    event: GroupEvent | Activity
    action_groups: set[DataConditionGroup] | None = None
    workflows: set[Workflow] | None = None
    triggered_actions: set[Action] | None = None
    triggered_workflows: set[Workflow] | None = None
    associated_detector: Detector | None = None

    def get_snapshot(self) -> dict[str, Any]:
        """Flatten model references into primitive, log-friendly values.

        Complex structures (models, sets of models) become either snapshot
        dicts or plain ID lists; unset fields stay None.
        """
        detector_snapshot = (
            self.associated_detector.get_snapshot() if self.associated_detector else None
        )
        workflow_ids = [workflow.id for workflow in self.workflows] if self.workflows else None
        fired_workflows = (
            [workflow.get_snapshot() for workflow in self.triggered_workflows]
            if self.triggered_workflows
            else None
        )
        filter_groups = (
            [group.get_snapshot() for group in self.action_groups] if self.action_groups else None
        )
        fired_actions = (
            [action.get_snapshot() for action in self.triggered_actions]
            if self.triggered_actions
            else None
        )
        return {
            "workflow_ids": workflow_ids,
            "associated_detector": detector_snapshot,
            "event": self.event,
            # assumes both GroupEvent and Activity expose .group / .data — TODO confirm
            "group": self.event.group,
            "event_data": self.event.data,
            "action_filter_conditions": filter_groups,
            "triggered_actions": fired_actions,
            "triggered_workflows": fired_workflows,
        }


@dataclass(frozen=True)
class WorkflowEvaluation:
Expand Down Expand Up @@ -134,7 +171,7 @@ def to_log(self, logger: Logger) -> None:
else:
log_str = f"{log_str}.actions.triggered"

logger.info(log_str, extra={**asdict(self.data), "debug_msg": self.msg})
logger.info(log_str, extra={**self.data.get_snapshot(), "debug_msg": self.msg})


class ConfigTransformer(ABC):
Expand Down
Loading
Loading