Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions src/sentry/api/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
from sentry.search.utils import InvalidQuery, parse_datetime_string
from sentry.silo.base import SiloMode
from sentry.types.region import get_local_region
from sentry.utils import json
from sentry.utils.dates import parse_stats_period
from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_span_attribute
from sentry.utils.snuba import (
Expand Down Expand Up @@ -384,6 +385,13 @@ def handle_query_errors() -> Generator[None]:
sentry_sdk.set_tag("query.error_reason", "Timeout")
raise TimeoutException(detail=TIMEOUT_RPC_ERROR_MESSAGE)
sentry_sdk.capture_exception(error)
if hasattr(error, "debug"):
raise APIException(
detail={
"detail": message,
"meta": {"debug_info": {"query": json.loads(error.debug)}},
}
)
raise APIException(detail=message)
except SnubaError as error:
message = "Internal error. Please try again."
Expand Down
3 changes: 1 addition & 2 deletions src/sentry/snuba/ourlogs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaParams
from sentry.snuba import rpc_dataset_common
from sentry.snuba.discover import zerofill
from sentry.utils import snuba_rpc
from sentry.utils.snuba import SnubaTSResult

logger = logging.getLogger("sentry.snuba.ourlogs")
Expand Down Expand Up @@ -108,7 +107,7 @@ def run_timeseries_query(
)

"""Run the query"""
rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0]
rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request)

"""Process the results"""
result = rpc_dataset_common.ProcessedTimeseries()
Expand Down
3 changes: 1 addition & 2 deletions src/sentry/snuba/profile_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaParams
from sentry.snuba import rpc_dataset_common
from sentry.snuba.discover import zerofill
from sentry.utils import snuba_rpc
from sentry.utils.snuba import SnubaTSResult

logger = logging.getLogger("sentry.snuba.profile_functions")
Expand Down Expand Up @@ -87,7 +86,7 @@ def run_timeseries_query(
)

"""Run the query"""
rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0]
rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request)

"""Process the results"""
result = rpc_dataset_common.ProcessedTimeseries()
Expand Down
21 changes: 20 additions & 1 deletion src/sentry/snuba/rpc_dataset_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
Expression,
TimeSeries,
TimeSeriesRequest,
TimeSeriesResponse,
)
from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import (
Column,
Expand Down Expand Up @@ -318,7 +319,13 @@ def _run_table_query(
"""Run the query"""
table_request = cls.get_table_rpc_request(query)
rpc_request = table_request.rpc_request
rpc_response = snuba_rpc.table_rpc([rpc_request])[0]
try:
rpc_response = snuba_rpc.table_rpc([rpc_request])[0]
except Exception as e:
# add the rpc to the error so we can include it in the response
if debug:
setattr(e, "debug", MessageToJson(rpc_request))
raise
sentry_sdk.set_tag(
"query.storage_meta.tier", rpc_response.meta.downsampled_storage_meta.tier
)
Expand Down Expand Up @@ -517,6 +524,18 @@ def update_timestamps(
else:
raise InvalidSearchQuery("start, end and interval are required")

@classmethod
def _run_timeseries_rpc(
    cls, debug: bool, rpc_request: TimeSeriesRequest
) -> TimeSeriesResponse:
    """Execute a single timeseries RPC request and return its response.

    On failure, when ``debug`` is True, the JSON-serialized RPC request is
    attached to the exception as a ``debug`` attribute so the API error
    handler (``handle_query_errors``) can surface it to superusers in the
    error response under ``meta.debug_info.query``.

    :param debug: whether to attach the serialized request to any raised error
    :param rpc_request: the protobuf TimeSeriesRequest to execute
    :raises Exception: re-raises whatever ``snuba_rpc.timeseries_rpc`` raises
    """
    try:
        return snuba_rpc.timeseries_rpc([rpc_request])[0]
    except Exception as e:
        # Attach the rpc request to the error so callers can include it in
        # the response; only in debug mode to avoid leaking query internals.
        if debug:
            e.debug = MessageToJson(rpc_request)
        raise

@classmethod
def process_timeseries_list(cls, timeseries_list: list[TimeSeries]) -> ProcessedTimeseries:
result = ProcessedTimeseries()
Expand Down
3 changes: 2 additions & 1 deletion src/sentry/snuba/spans_rpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,8 @@ def run_timeseries_query(
)

"""Run the query"""
rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0]
rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request)

"""Process the results"""
result = rpc_dataset_common.ProcessedTimeseries()
final_meta: EventsMeta = events_meta_from_rpc_request_meta(rpc_response.meta)
Expand Down
3 changes: 1 addition & 2 deletions src/sentry/snuba/trace_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaParams
from sentry.snuba import rpc_dataset_common
from sentry.snuba.discover import zerofill
from sentry.utils import snuba_rpc
from sentry.utils.snuba import SnubaTSResult

logger = logging.getLogger("sentry.snuba.trace_metrics")
Expand Down Expand Up @@ -99,7 +98,7 @@ def run_timeseries_query(
)

"""Run the query"""
rpc_response = snuba_rpc.timeseries_rpc([rpc_request])[0]
rpc_response = cls._run_timeseries_rpc(params.debug, rpc_request)

"""Process the results"""
result = rpc_dataset_common.ProcessedTimeseries()
Expand Down
41 changes: 41 additions & 0 deletions tests/snuba/api/endpoints/test_organization_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
from sentry.types.group import GroupSubStatus
from sentry.utils import json
from sentry.utils.samples import load_data
from sentry.utils.snuba_rpc import SnubaRPCError
from tests.sentry.issues.test_utils import SearchIssueTestMixin

MAX_QUERYABLE_TRANSACTION_THRESHOLDS = 1
Expand Down Expand Up @@ -5975,6 +5976,46 @@ def test_debug_param(self) -> None:
# We should get the snql query back in the query key
assert "MATCH" in response.data["meta"]["debug_info"]["query"]

# Regression test for the debug error path: when the spans table RPC fails,
# a superuser request with debug=True should get the serialized RPC request
# back in the 500 response under meta.debug_info.query; a request without
# debug should get a plain 500 with no meta attached.
@mock.patch("sentry.utils.snuba_rpc.table_rpc")
def test_debug_param_with_error(self, mock_query) -> None:
# Force the RPC to fail so the error-handling path runs.
mock_query.side_effect = SnubaRPCError("test")
# debug output is only exposed to superusers.
self.user = self.create_user("superuser@example.com", is_superuser=True)
self.create_team(organization=self.organization, members=[self.user])

response = self.do_request(
{
"field": ["spans.http"],
"project": [self.project.id],
"query": "event.type:transaction",
"dataset": "spans",
"debug": True,
},
{
"organizations:discover-basic": True,
},
)
assert response.status_code == 500, response.content
assert "debug_info" in response.data["meta"]
# We should get the snql query back in the query key
assert "virtualColumnContexts" in response.data["meta"]["debug_info"]["query"]

# Need to reset the mock, otherwise previous query is still attached
mock_query.side_effect = SnubaRPCError("test")
# Same request without debug: the error must NOT carry debug info.
response = self.do_request(
{
"field": ["spans.http"],
"project": [self.project.id],
"query": "event.type:transaction",
"dataset": "spans",
},
{
"organizations:discover-basic": True,
},
)
assert response.status_code == 500, response.content
assert "meta" not in response.data
assert "debug_info" not in response.data


class OrganizationEventsProfilesDatasetEndpointTest(OrganizationEventsEndpointTestBase):
@mock.patch("sentry.search.events.builder.base.raw_snql_query")
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
from datetime import timedelta
from unittest.mock import patch

import pytest
from django.urls import reverse

from sentry.search.utils import DEVICE_CLASS
from sentry.testutils.helpers.datetime import before_now
from sentry.utils.snuba_rpc import SnubaRPCError
from tests.snuba.api.endpoints.test_organization_events import OrganizationEventsEndpointTestBase
from tests.snuba.api.endpoints.test_organization_events_span_indexed import KNOWN_PREFLIGHT_ID

Expand Down Expand Up @@ -2440,6 +2442,58 @@ def test_debug_param(self) -> None:
]
)

# Regression test for the timeseries debug error path: when the timeseries
# RPC fails, a superuser request with debug=True should receive the
# serialized RPC request in the 500 response under meta.debug_info.query;
# without debug, no meta/debug_info is attached.
@patch("sentry.utils.snuba_rpc.timeseries_rpc")
def test_debug_param_with_error(self, mock_query) -> None:
# debug output is only exposed to superusers.
self.user = self.create_user("superuser@example.com", is_superuser=True)
self.create_team(organization=self.organization, members=[self.user])
self.login_as(user=self.user)
# Force the RPC to fail so the error-handling path runs.
mock_query.side_effect = SnubaRPCError("test")

response = self._do_request(
data={
"start": self.day_ago,
"end": self.day_ago + timedelta(minutes=4),
"interval": "1m",
"query": "",
"yAxis": ["count()"],
"project": self.project.id,
"dataset": "spans",
"debug": True,
},
)

assert response.status_code == 500, response.content
assert response.data["detail"] == "Internal error. Please try again."
assert "meta" in response.data
assert "debug_info" in response.data["meta"]

# The debug payload should be the JSON-serialized TimeSeriesRequest;
# spot-check the count() aggregation it was built from.
assert (
"FUNCTION_COUNT"
== response.data["meta"]["debug_info"]["query"]["expressions"][0]["aggregation"][
"aggregate"
]
)

# Need to reset the mock, otherwise previous query is still attached
mock_query.side_effect = SnubaRPCError("test")

# Same request without debug: the error must NOT carry debug info.
response = self._do_request(
data={
"start": self.day_ago,
"end": self.day_ago + timedelta(minutes=4),
"interval": "1m",
"query": "",
"yAxis": ["count()"],
"project": self.project.id,
"dataset": "spans",
},
)

assert response.status_code == 500, response.content
assert response.data["detail"] == "Internal error. Please try again."
assert "meta" not in response.data
assert "debug_info" not in response.data

def test_groupby_non_existent_attribute(self):
self.store_spans(
[
Expand Down
Loading