From f6c62870302359884d396f0f7f9908f2238c356f Mon Sep 17 00:00:00 2001 From: William Mak Date: Wed, 19 Nov 2025 14:42:47 -0500 Subject: [PATCH] feat(events): Add rpc request even on error - Currently we only return the rpc request when there's no error, but it would be (more) helpful to get it when there is an error --- src/sentry/api/utils.py | 8 +++ src/sentry/snuba/ourlogs.py | 3 +- src/sentry/snuba/profile_functions.py | 3 +- src/sentry/snuba/rpc_dataset_common.py | 21 +++++++- src/sentry/snuba/spans_rpc.py | 3 +- src/sentry/snuba/trace_metrics.py | 3 +- .../api/endpoints/test_organization_events.py | 41 ++++++++++++++ ..._organization_events_stats_span_indexed.py | 54 +++++++++++++++++++ 8 files changed, 128 insertions(+), 8 deletions(-) diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index 30dc469ad07fe8..0a45ead73756bd 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -44,6 +44,7 @@ from sentry.search.utils import InvalidQuery, parse_datetime_string from sentry.silo.base import SiloMode from sentry.types.region import get_local_region +from sentry.utils import json from sentry.utils.dates import parse_stats_period from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_span_attribute from sentry.utils.snuba import ( @@ -384,6 +385,13 @@ def handle_query_errors() -> Generator[None]: sentry_sdk.set_tag("query.error_reason", "Timeout") raise TimeoutException(detail=TIMEOUT_RPC_ERROR_MESSAGE) sentry_sdk.capture_exception(error) + if hasattr(error, "debug"): + raise APIException( + detail={ + "detail": message, + "meta": {"debug_info": {"query": json.loads(error.debug)}}, + } + ) raise APIException(detail=message) except SnubaError as error: message = "Internal error. Please try again." 
diff --git a/src/sentry/snuba/ourlogs.py b/src/sentry/snuba/ourlogs.py index b984f2f79b23f0..f19b534893fbfb 100644 --- a/src/sentry/snuba/ourlogs.py +++ b/src/sentry/snuba/ourlogs.py @@ -12,7 +12,6 @@ from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaParams from sentry.snuba import rpc_dataset_common from sentry.snuba.discover import zerofill -from sentry.utils import snuba_rpc from sentry.utils.snuba import SnubaTSResult logger = logging.getLogger("sentry.snuba.ourlogs") @@ -108,7 +107,7 @@ def run_timeseries_query( ) """Run the query""" - rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0] + rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request) """Process the results""" result = rpc_dataset_common.ProcessedTimeseries() diff --git a/src/sentry/snuba/profile_functions.py b/src/sentry/snuba/profile_functions.py index d7f7ba99ead307..aeb6f03e65b802 100644 --- a/src/sentry/snuba/profile_functions.py +++ b/src/sentry/snuba/profile_functions.py @@ -11,7 +11,6 @@ from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaParams from sentry.snuba import rpc_dataset_common from sentry.snuba.discover import zerofill -from sentry.utils import snuba_rpc from sentry.utils.snuba import SnubaTSResult logger = logging.getLogger("sentry.snuba.profile_functions") @@ -87,7 +86,7 @@ def run_timeseries_query( ) """Run the query""" - rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0] + rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request) """Process the results""" result = rpc_dataset_common.ProcessedTimeseries() diff --git a/src/sentry/snuba/rpc_dataset_common.py b/src/sentry/snuba/rpc_dataset_common.py index 7897a4c18002d5..a5485e7770ea2f 100644 --- a/src/sentry/snuba/rpc_dataset_common.py +++ b/src/sentry/snuba/rpc_dataset_common.py @@ -12,6 +12,7 @@ Expression, TimeSeries, TimeSeriesRequest, + TimeSeriesResponse, ) from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import ( Column, @@ -318,7 +319,13 @@ 
def _run_table_query( """Run the query""" table_request = cls.get_table_rpc_request(query) rpc_request = table_request.rpc_request - rpc_response = snuba_rpc.table_rpc([rpc_request])[0] + try: + rpc_response = snuba_rpc.table_rpc([rpc_request])[0] + except Exception as e: + # add the rpc to the error so we can include it in the response + if debug: + setattr(e, "debug", MessageToJson(rpc_request)) + raise sentry_sdk.set_tag( "query.storage_meta.tier", rpc_response.meta.downsampled_storage_meta.tier ) @@ -517,6 +524,18 @@ def update_timestamps( else: raise InvalidSearchQuery("start, end and interval are required") + @classmethod + def _run_timeseries_rpc( + cls, debug: bool, rpc_request: TimeSeriesRequest + ) -> TimeSeriesResponse: + try: + return snuba_rpc.timeseries_rpc([rpc_request])[0] + except Exception as e: + # add the rpc to the error so we can include it in the response + if debug: + setattr(e, "debug", MessageToJson(rpc_request)) + raise + @classmethod + def process_timeseries_list(cls, timeseries_list: list[TimeSeries]) -> ProcessedTimeseries: result = ProcessedTimeseries() diff --git a/src/sentry/snuba/spans_rpc.py b/src/sentry/snuba/spans_rpc.py index 8138616463b659..09bc4fe8bf5fda 100644 --- a/src/sentry/snuba/spans_rpc.py +++ b/src/sentry/snuba/spans_rpc.py @@ -100,7 +100,8 @@ def run_timeseries_query( ) """Run the query""" - rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0] + rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request) + """Process the results""" result = rpc_dataset_common.ProcessedTimeseries() final_meta: EventsMeta = events_meta_from_rpc_request_meta(rpc_response.meta) diff --git a/src/sentry/snuba/trace_metrics.py b/src/sentry/snuba/trace_metrics.py index 0356b98ff3fa79..e20216918141ab 100644 --- a/src/sentry/snuba/trace_metrics.py +++ b/src/sentry/snuba/trace_metrics.py @@ -12,7 +12,6 @@ from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaParams from sentry.snuba import rpc_dataset_common from
sentry.snuba.discover import zerofill -from sentry.utils import snuba_rpc from sentry.utils.snuba import SnubaTSResult logger = logging.getLogger("sentry.snuba.trace_metrics") @@ -99,7 +98,7 @@ def run_timeseries_query( ) """Run the query""" - rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0] + rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request) """Process the results""" result = rpc_dataset_common.ProcessedTimeseries() diff --git a/tests/snuba/api/endpoints/test_organization_events.py b/tests/snuba/api/endpoints/test_organization_events.py index e29ef3b259c27a..d623e236f7c7f6 100644 --- a/tests/snuba/api/endpoints/test_organization_events.py +++ b/tests/snuba/api/endpoints/test_organization_events.py @@ -44,6 +44,7 @@ from sentry.types.group import GroupSubStatus from sentry.utils import json from sentry.utils.samples import load_data +from sentry.utils.snuba_rpc import SnubaRPCError from tests.sentry.issues.test_utils import SearchIssueTestMixin MAX_QUERYABLE_TRANSACTION_THRESHOLDS = 1 @@ -5975,6 +5976,46 @@ def test_debug_param(self) -> None: # We should get the snql query back in the query key assert "MATCH" in response.data["meta"]["debug_info"]["query"] + @mock.patch("sentry.utils.snuba_rpc.table_rpc") + def test_debug_param_with_error(self, mock_query) -> None: + mock_query.side_effect = SnubaRPCError("test") + self.user = self.create_user("superuser@example.com", is_superuser=True) + self.create_team(organization=self.organization, members=[self.user]) + + response = self.do_request( + { + "field": ["spans.http"], + "project": [self.project.id], + "query": "event.type:transaction", + "dataset": "spans", + "debug": True, + }, + { + "organizations:discover-basic": True, + }, + ) + assert response.status_code == 500, response.content + assert "debug_info" in response.data["meta"] + # We should get the snql query back in the query key + assert "virtualColumnContexts" in response.data["meta"]["debug_info"]["query"] + + # Need to reset the 
mock, otherwise previous query is still attached + mock_query.side_effect = SnubaRPCError("test") + response = self.do_request( + { + "field": ["spans.http"], + "project": [self.project.id], + "query": "event.type:transaction", + "dataset": "spans", + }, + { + "organizations:discover-basic": True, + }, + ) + assert response.status_code == 500, response.content + assert "meta" not in response.data + assert "debug_info" not in response.data + class OrganizationEventsProfilesDatasetEndpointTest(OrganizationEventsEndpointTestBase): @mock.patch("sentry.search.events.builder.base.raw_snql_query") diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py b/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py index 77de0f8179f488..cec0b200a0de6f 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py @@ -1,10 +1,12 @@ from datetime import timedelta +from unittest.mock import patch import pytest from django.urls import reverse from sentry.search.utils import DEVICE_CLASS from sentry.testutils.helpers.datetime import before_now +from sentry.utils.snuba_rpc import SnubaRPCError from tests.snuba.api.endpoints.test_organization_events import OrganizationEventsEndpointTestBase from tests.snuba.api.endpoints.test_organization_events_span_indexed import KNOWN_PREFLIGHT_ID @@ -2440,6 +2442,58 @@ def test_debug_param(self) -> None: ] ) + @patch("sentry.utils.snuba_rpc.timeseries_rpc") + def test_debug_param_with_error(self, mock_query) -> None: + self.user = self.create_user("superuser@example.com", is_superuser=True) + self.create_team(organization=self.organization, members=[self.user]) + self.login_as(user=self.user) + mock_query.side_effect = SnubaRPCError("test") + + response = self._do_request( + data={ + "start": self.day_ago, + "end": self.day_ago + timedelta(minutes=4), + "interval": "1m", + "query": "", + "yAxis": 
["count()"], + "project": self.project.id, + "dataset": "spans", + "debug": True, + }, + ) + + assert response.status_code == 500, response.content + assert response.data["detail"] == "Internal error. Please try again." + assert "meta" in response.data + assert "debug_info" in response.data["meta"] + + assert ( + "FUNCTION_COUNT" + == response.data["meta"]["debug_info"]["query"]["expressions"][0]["aggregation"][ + "aggregate" + ] + ) + + # Need to reset the mock, otherwise previous query is still attached + mock_query.side_effect = SnubaRPCError("test") + + response = self._do_request( + data={ + "start": self.day_ago, + "end": self.day_ago + timedelta(minutes=4), + "interval": "1m", + "query": "", + "yAxis": ["count()"], + "project": self.project.id, + "dataset": "spans", + }, + ) + + assert response.status_code == 500, response.content + assert response.data["detail"] == "Internal error. Please try again." + assert "meta" not in response.data + assert "debug_info" not in response.data + def test_groupby_non_existent_attribute(self): self.store_spans( [