Skip to content

Commit

Permalink
ref(escalating-issues): Move functions to centralize helper functions (
Browse files Browse the repository at this point in the history
…#47586)

Move escalating issue forecast helper functions into centralized
location
  • Loading branch information
jangjodi committed Apr 19, 2023
1 parent af1ad9b commit 5b1edbc
Show file tree
Hide file tree
Showing 4 changed files with 76 additions and 61 deletions.
27 changes: 26 additions & 1 deletion src/sentry/issues/escalating.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from datetime import datetime, timedelta
from typing import List, Tuple, TypedDict
from typing import Dict, List, Tuple, TypedDict

from snuba_sdk import (
Column,
Expand All @@ -15,6 +15,7 @@
Request,
)

from sentry.issues.escalating_issues_alg import GroupCount
from sentry.models import Group
from sentry.utils.snuba import raw_snql_query

Expand All @@ -27,6 +28,8 @@
{"group_id": int, "hourBucket": str, "count()": int},
)

ParsedGroupsCount = Dict[int, GroupCount]


def query_groups_past_counts(groups: List[Group]) -> List[GroupsCountResponse]:
"""Query Snuba for the counts for every group bucketed into hours"""
Expand All @@ -48,6 +51,28 @@ def query_groups_past_counts(groups: List[Group]) -> List[GroupsCountResponse]:
return all_results


def parse_groups_past_counts(response: List[GroupsCountResponse]) -> ParsedGroupsCount:
    """
    Return the parsed snuba response for groups past counts to be used in generate_issue_forecast.
    ParsedGroupsCount is of the form {<group_id>: {"intervals": [str], "data": [int]}}.

    `response`: Snuba response for group event counts
    """
    group_counts: ParsedGroupsCount = {}
    for data in response:
        group_id = data["group_id"]
        # Test membership directly on the dict; the previous keys()-view alias
        # (`group_ids_list = group_counts.keys()`) added indirection for no gain.
        if group_id in group_counts:
            group_counts[group_id]["intervals"].append(data["hourBucket"])
            group_counts[group_id]["data"].append(data["count()"])
        else:
            group_counts[group_id] = {
                "intervals": [data["hourBucket"]],
                "data": [data["count()"]],
            }
    return group_counts


def _generate_query(
group_ids: List[int],
project_ids: List[int],
Expand Down
45 changes: 45 additions & 0 deletions src/sentry/issues/forecasts.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""
This module is for helper functions for escalating issues forecasts.
"""

from datetime import datetime
from typing import List

from sentry.issues.escalating import (
ParsedGroupsCount,
parse_groups_past_counts,
query_groups_past_counts,
)
from sentry.issues.escalating_group_forecast import EscalatingGroupForecast
from sentry.issues.escalating_issues_alg import generate_issue_forecast
from sentry.models import Group


def save_forecast_per_group(
    until_escalating_groups: List[Group], group_counts: ParsedGroupsCount
) -> None:
    """
    Save the list of forecasted values for each group in nodestore.

    `until_escalating_groups`: List of archived-until-escalating groups to be forecasted
    `group_counts`: Parsed snuba response of group counts
    """
    forecast_time = datetime.now()
    groups_by_id = {g.id: g for g in until_escalating_groups}
    for group_id, counts in group_counts.items():
        forecasted_values = [
            f["forecasted_value"] for f in generate_issue_forecast(counts, forecast_time)
        ]
        EscalatingGroupForecast(
            groups_by_id[group_id].project.id, group_id, forecasted_values, datetime.now()
        ).save()


def get_forecasts(groups: List[Group]) -> None:
    """
    Generate and save a forecast for each of the given groups.

    NOTE: nothing is returned (return type is None) — forecasts are persisted
    as a side effect via save_forecast_per_group, not returned to the caller.
    `groups`: List of groups to be forecasted
    """
    past_counts = query_groups_past_counts(groups)
    group_counts = parse_groups_past_counts(past_counts)
    save_forecast_per_group(groups, group_counts)
57 changes: 1 addition & 56 deletions src/sentry/tasks/weekly_escalating_forecast.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,9 @@
import logging
from datetime import datetime
from typing import Dict, List, TypedDict

from sentry_sdk.crons.decorator import monitor

from sentry.issues.escalating import GroupsCountResponse, query_groups_past_counts
from sentry.issues.escalating_group_forecast import EscalatingGroupForecast
from sentry.issues.escalating_issues_alg import generate_issue_forecast
from sentry.issues.forecasts import get_forecasts
from sentry.models import Group, GroupStatus
from sentry.tasks.base import instrumented_task
from sentry.types.group import GroupSubStatus
Expand Down Expand Up @@ -50,55 +47,3 @@ def run_escalating_forecast() -> None:
return

get_forecasts(until_escalating_groups)


def parse_groups_past_counts(response: List[GroupsCountResponse]) -> ParsedGroupsCount:
    """
    Return the parsed snuba response for groups past counts to be used in generate_issue_forecast.
    ParsedGroupsCount is of the form {<group_id>: {"intervals": [str], "data": [int]}}.

    `response`: Snuba response for group event counts
    """
    group_counts: ParsedGroupsCount = {}
    for row in response:
        # setdefault creates the per-group bucket on first sight of a group_id,
        # replacing the keys()-view membership check with a single lookup.
        bucket = group_counts.setdefault(row["group_id"], {"intervals": [], "data": []})
        bucket["intervals"].append(row["hourBucket"])
        bucket["data"].append(row["count()"])
    return group_counts


def save_forecast_per_group(
    until_escalating_groups: List[Group], group_counts: ParsedGroupsCount
) -> None:
    """
    Persist the list of forecasted values for each group to nodestore.

    `until_escalating_groups`: List of archived-until-escalating groups to be forecasted
    `group_counts`: Parsed snuba response of group counts
    """
    now = datetime.now()
    id_to_group = {group.id: group for group in until_escalating_groups}
    for gid, counts in group_counts.items():
        issue_forecasts = generate_issue_forecast(counts, now)
        values = [item["forecasted_value"] for item in issue_forecasts]
        forecast_record = EscalatingGroupForecast(
            id_to_group[gid].project.id, gid, values, datetime.now()
        )
        forecast_record.save()


def get_forecasts(groups: List[Group]) -> None:
    """
    Generate and save a forecast for each of the given groups.

    NOTE: despite the earlier "Returns a list" wording, this returns None;
    the forecasts are saved as a side effect by save_forecast_per_group.
    `groups`: List of groups to be forecasted
    """
    past_counts = query_groups_past_counts(groups)
    group_counts = parse_groups_past_counts(past_counts)
    save_forecast_per_group(groups, group_counts)
8 changes: 4 additions & 4 deletions tests/sentry/tasks/test_weekly_escalating_forecast.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def create_archived_until_escalating_groups(self, num_groups: int) -> List[Group
group_list.append(group)
return group_list

@patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts")
@patch("sentry.issues.escalating.query_groups_past_counts")
def test_empty_escalating_forecast(self, mock_query_groups_past_counts):
group_list = self.create_archived_until_escalating_groups(num_groups=1)

Expand All @@ -67,7 +67,7 @@ def test_empty_escalating_forecast(self, mock_query_groups_past_counts):
fetched_forecast = EscalatingGroupForecast.fetch(group_list[0].project.id, group_list[0].id)
assert fetched_forecast is None

@patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts")
@patch("sentry.issues.forecasts.query_groups_past_counts")
def test_single_group_escalating_forecast(self, mock_query_groups_past_counts):
group_list = self.create_archived_until_escalating_groups(num_groups=1)

Expand All @@ -86,7 +86,7 @@ def test_single_group_escalating_forecast(self, mock_query_groups_past_counts):
) == approximate_date_added.replace(second=0, microsecond=0)
assert fetched_forecast.date_added < approximate_date_added

@patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts")
@patch("sentry.issues.forecasts.query_groups_past_counts")
def test_multiple_groups_escalating_forecast(self, mock_query_groups_past_counts):
group_list = self.create_archived_until_escalating_groups(num_groups=3)

Expand All @@ -108,7 +108,7 @@ def test_multiple_groups_escalating_forecast(self, mock_query_groups_past_counts
) == approximate_date_added.replace(second=0, microsecond=0)
assert fetched_forecast.date_added < approximate_date_added

@patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts")
@patch("sentry.issues.forecasts.query_groups_past_counts")
def test_update_group_escalating_forecast(self, mock_query_groups_past_counts):
group_list = self.create_archived_until_escalating_groups(num_groups=1)

Expand Down

0 comments on commit 5b1edbc

Please sign in to comment.