From 5b1edbc90022f04d9c3a6723802ae44c35d03201 Mon Sep 17 00:00:00 2001
From: Jodi Jang <116035587+jangjodi@users.noreply.github.com>
Date: Wed, 19 Apr 2023 08:32:37 -0700
Subject: [PATCH] ref(escalating-issues): Move functions to centralize helper functions (#47586)

Move escalating issue forecast helper functions into a centralized location
---
 src/sentry/issues/escalating.py               | 27 ++++++++-
 src/sentry/issues/forecasts.py                | 45 +++++++++++++++
 .../tasks/weekly_escalating_forecast.py       | 57 +------------------
 .../tasks/test_weekly_escalating_forecast.py  |  8 +--
 4 files changed, 76 insertions(+), 61 deletions(-)
 create mode 100644 src/sentry/issues/forecasts.py

diff --git a/src/sentry/issues/escalating.py b/src/sentry/issues/escalating.py
index a731eed4f18c6a..cbebee5b77e692 100644
--- a/src/sentry/issues/escalating.py
+++ b/src/sentry/issues/escalating.py
@@ -1,5 +1,5 @@
 from datetime import datetime, timedelta
-from typing import List, Tuple, TypedDict
+from typing import Dict, List, Tuple, TypedDict
 
 from snuba_sdk import (
     Column,
@@ -15,6 +15,7 @@
     Request,
 )
 
+from sentry.issues.escalating_issues_alg import GroupCount
 from sentry.models import Group
 from sentry.utils.snuba import raw_snql_query
 
@@ -27,6 +28,8 @@
     {"group_id": int, "hourBucket": str, "count()": int},
 )
 
+ParsedGroupsCount = Dict[int, GroupCount]
+
 
 def query_groups_past_counts(groups: List[Group]) -> List[GroupsCountResponse]:
     """Query Snuba for the counts for every group bucketed into hours"""
@@ -48,6 +51,28 @@ def query_groups_past_counts(groups: List[Group]) -> List[GroupsCountResponse]:
     return all_results
 
 
+def parse_groups_past_counts(response: List[GroupsCountResponse]) -> ParsedGroupsCount:
+    """
+    Return the parsed Snuba response for groups past counts to be used in generate_issue_forecast.
+    ParsedGroupsCount is of the form {<group_id>: {"intervals": [str], "data": [int]}}.
+
+    `response`: Snuba response for group event counts
+    """
+    group_counts: ParsedGroupsCount = {}
+    group_ids_list = group_counts.keys()
+    for data in response:
+        group_id = data["group_id"]
+        if group_id not in group_ids_list:
+            group_counts[group_id] = {
+                "intervals": [data["hourBucket"]],
+                "data": [data["count()"]],
+            }
+        else:
+            group_counts[group_id]["intervals"].append(data["hourBucket"])
+            group_counts[group_id]["data"].append(data["count()"])
+    return group_counts
+
+
 def _generate_query(
     group_ids: List[int],
     project_ids: List[int],
diff --git a/src/sentry/issues/forecasts.py b/src/sentry/issues/forecasts.py
new file mode 100644
index 00000000000000..c8e9557240e369
--- /dev/null
+++ b/src/sentry/issues/forecasts.py
@@ -0,0 +1,45 @@
+"""
+This module contains helper functions for escalating issues forecasts.
+"""
+
+from datetime import datetime
+from typing import List
+
+from sentry.issues.escalating import (
+    ParsedGroupsCount,
+    parse_groups_past_counts,
+    query_groups_past_counts,
+)
+from sentry.issues.escalating_group_forecast import EscalatingGroupForecast
+from sentry.issues.escalating_issues_alg import generate_issue_forecast
+from sentry.models import Group
+
+
+def save_forecast_per_group(
+    until_escalating_groups: List[Group], group_counts: ParsedGroupsCount
+) -> None:
+    """
+    Saves the list of forecasted values for each group in nodestore.
+
+    `until_escalating_groups`: List of archived until escalating groups to be forecasted
+    `group_counts`: Parsed Snuba response of group counts
+    """
+    time = datetime.now()
+    group_dict = {group.id: group for group in until_escalating_groups}
+    for group_id in group_counts.keys():
+        forecasts = generate_issue_forecast(group_counts[group_id], time)
+        forecasts_list = [forecast["forecasted_value"] for forecast in forecasts]
+        escalating_group_forecast = EscalatingGroupForecast(
+            group_dict[group_id].project.id, group_id, forecasts_list, datetime.now()
+        )
+        escalating_group_forecast.save()
+
+
+def get_forecasts(groups: List[Group]) -> None:
+    """
+    Generates and saves forecasted values for each group.
+    `groups`: List of groups to be forecasted
+    """
+    past_counts = query_groups_past_counts(groups)
+    group_counts = parse_groups_past_counts(past_counts)
+    save_forecast_per_group(groups, group_counts)
diff --git a/src/sentry/tasks/weekly_escalating_forecast.py b/src/sentry/tasks/weekly_escalating_forecast.py
index 904a00b09731f3..7e770d881533d9 100644
--- a/src/sentry/tasks/weekly_escalating_forecast.py
+++ b/src/sentry/tasks/weekly_escalating_forecast.py
@@ -1,12 +1,9 @@
 import logging
-from datetime import datetime
 from typing import Dict, List, TypedDict
 
 from sentry_sdk.crons.decorator import monitor
 
-from sentry.issues.escalating import GroupsCountResponse, query_groups_past_counts
-from sentry.issues.escalating_group_forecast import EscalatingGroupForecast
-from sentry.issues.escalating_issues_alg import generate_issue_forecast
+from sentry.issues.forecasts import get_forecasts
 from sentry.models import Group, GroupStatus
 from sentry.tasks.base import instrumented_task
 from sentry.types.group import GroupSubStatus
@@ -50,55 +47,3 @@ def run_escalating_forecast() -> None:
         return
 
     get_forecasts(until_escalating_groups)
-
-
-def parse_groups_past_counts(response: List[GroupsCountResponse]) -> ParsedGroupsCount:
-    """
-    Return the parsed snuba response for groups past counts to be used in generate_issue_forecast.
-    ParsedGroupsCount is of the form {<group_id>: {"intervals": [str], "data": [int]}}.
-
-    `response`: Snuba response for group event counts
-    """
-    group_counts: ParsedGroupsCount = {}
-    group_ids_list = group_counts.keys()
-    for data in response:
-        group_id = data["group_id"]
-        if group_id not in group_ids_list:
-            group_counts[group_id] = {
-                "intervals": [data["hourBucket"]],
-                "data": [data["count()"]],
-            }
-        else:
-            group_counts[group_id]["intervals"].append(data["hourBucket"])
-            group_counts[group_id]["data"].append(data["count()"])
-    return group_counts
-
-
-def save_forecast_per_group(
-    until_escalating_groups: List[Group], group_counts: ParsedGroupsCount
-) -> None:
-    """
-    Saves the list of forecasted values for each group in nodestore.
-
-    `until_escalating_groups`: List of archived until escalating groups to be forecasted
-    `group_counts`: Parsed snuba response of group counts
-    """
-    time = datetime.now()
-    group_dict = {group.id: group for group in until_escalating_groups}
-    for group_id in group_counts.keys():
-        forecasts = generate_issue_forecast(group_counts[group_id], time)
-        forecasts_list = [forecast["forecasted_value"] for forecast in forecasts]
-        escalating_group_forecast = EscalatingGroupForecast(
-            group_dict[group_id].project.id, group_id, forecasts_list, datetime.now()
-        )
-        escalating_group_forecast.save()
-
-
-def get_forecasts(groups: List[Group]) -> None:
-    """
-    Returns a list of forecasted values for each group.
- `groups`: List of groups to be forecasted - """ - past_counts = query_groups_past_counts(groups) - group_counts = parse_groups_past_counts(past_counts) - save_forecast_per_group(groups, group_counts) diff --git a/tests/sentry/tasks/test_weekly_escalating_forecast.py b/tests/sentry/tasks/test_weekly_escalating_forecast.py index 2b3779060172f8..2bf47722829cd6 100644 --- a/tests/sentry/tasks/test_weekly_escalating_forecast.py +++ b/tests/sentry/tasks/test_weekly_escalating_forecast.py @@ -57,7 +57,7 @@ def create_archived_until_escalating_groups(self, num_groups: int) -> List[Group group_list.append(group) return group_list - @patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts") + @patch("sentry.issues.escalating.query_groups_past_counts") def test_empty_escalating_forecast(self, mock_query_groups_past_counts): group_list = self.create_archived_until_escalating_groups(num_groups=1) @@ -67,7 +67,7 @@ def test_empty_escalating_forecast(self, mock_query_groups_past_counts): fetched_forecast = EscalatingGroupForecast.fetch(group_list[0].project.id, group_list[0].id) assert fetched_forecast is None - @patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts") + @patch("sentry.issues.forecasts.query_groups_past_counts") def test_single_group_escalating_forecast(self, mock_query_groups_past_counts): group_list = self.create_archived_until_escalating_groups(num_groups=1) @@ -86,7 +86,7 @@ def test_single_group_escalating_forecast(self, mock_query_groups_past_counts): ) == approximate_date_added.replace(second=0, microsecond=0) assert fetched_forecast.date_added < approximate_date_added - @patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts") + @patch("sentry.issues.forecasts.query_groups_past_counts") def test_multiple_groups_escalating_forecast(self, mock_query_groups_past_counts): group_list = self.create_archived_until_escalating_groups(num_groups=3) @@ -108,7 +108,7 @@ def test_multiple_groups_escalating_forecast(self, mock_query_groups_past_counts ) == approximate_date_added.replace(second=0, microsecond=0) assert fetched_forecast.date_added < approximate_date_added - @patch("sentry.tasks.weekly_escalating_forecast.query_groups_past_counts") + @patch("sentry.issues.forecasts.query_groups_past_counts") def test_update_group_escalating_forecast(self, mock_query_groups_past_counts): group_list = self.create_archived_until_escalating_groups(num_groups=1)
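
Note (reviewer commentary, not part of the patch): after this change the forecast pipeline reads as
query_groups_past_counts (Snuba) -> parse_groups_past_counts -> save_forecast_per_group, with
get_forecasts as the single entry point the weekly task calls. The sketch below restates the parsing
step as a self-contained, runnable Python example so the data shapes are visible. It is a minimal
sketch under stated assumptions: the GroupsCountResponse TypedDict is copied from
src/sentry/issues/escalating.py, the local GroupCount class is an assumed stand-in for
sentry.issues.escalating_issues_alg.GroupCount, and the sample rows are invented.

    from typing import Dict, List, TypedDict

    # Copied from src/sentry/issues/escalating.py.
    GroupsCountResponse = TypedDict(
        "GroupsCountResponse",
        {"group_id": int, "hourBucket": str, "count()": int},
    )

    # Assumed stand-in for sentry.issues.escalating_issues_alg.GroupCount.
    class GroupCount(TypedDict):
        intervals: List[str]
        data: List[int]

    ParsedGroupsCount = Dict[int, GroupCount]

    def parse_groups_past_counts(response: List[GroupsCountResponse]) -> ParsedGroupsCount:
        """Collapse per-hour Snuba rows into parallel interval/count lists per group."""
        group_counts: ParsedGroupsCount = {}
        for row in response:
            # Equivalent to the patch's if/else: create the bucket on first
            # sight of a group_id, then append to its parallel lists.
            bucket = group_counts.setdefault(row["group_id"], {"intervals": [], "data": []})
            bucket["intervals"].append(row["hourBucket"])
            bucket["data"].append(row["count()"])
        return group_counts

    # Invented sample rows, shaped like a Snuba response.
    rows: List[GroupsCountResponse] = [
        {"group_id": 1, "hourBucket": "2023-04-18T00:00:00+00:00", "count()": 5},
        {"group_id": 1, "hourBucket": "2023-04-18T01:00:00+00:00", "count()": 7},
        {"group_id": 2, "hourBucket": "2023-04-18T00:00:00+00:00", "count()": 2},
    ]
    assert parse_groups_past_counts(rows) == {
        1: {"intervals": ["2023-04-18T00:00:00+00:00", "2023-04-18T01:00:00+00:00"], "data": [5, 7]},
        2: {"intervals": ["2023-04-18T00:00:00+00:00"], "data": [2]},
    }

One practical consequence of the move, visible in the test diff above: unittest.mock.patch targets
must name the module where the function is looked up, so the forecast tests now patch
sentry.issues.forecasts.query_groups_past_counts (or sentry.issues.escalating.query_groups_past_counts
for the empty case) instead of sentry.tasks.weekly_escalating_forecast.query_groups_past_counts.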