-
-
Notifications
You must be signed in to change notification settings - Fork 4.5k
feat(explorer): support generic table and timeseries rpcs and migrate existing tools to them #103413
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
+168
−30
Merged
feat(explorer): support generic table and timeseries rpcs and migrate existing tools to them #103413
Changes from all commits
Commits
Show all changes
4 commits
Select commit
Hold shift + click to select a range
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Some comments aren't visible on the classic Files Changed page.
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -5,11 +5,12 @@ | |
|
|
||
| from sentry import eventstore, features | ||
| from sentry.api import client | ||
| from sentry.api.endpoints.organization_events_timeseries import TOP_EVENTS_DATASETS | ||
| from sentry.api.serializers.base import serialize | ||
| from sentry.api.serializers.models.event import EventSerializer, IssueEventSerializerResponse | ||
| from sentry.api.serializers.models.group import GroupSerializer | ||
| from sentry.api.utils import default_start_end_dates | ||
| from sentry.constants import ObjectStatus | ||
| from sentry.constants import ALL_ACCESS_PROJECT_ID, ObjectStatus | ||
| from sentry.models.apikey import ApiKey | ||
| from sentry.models.group import Group | ||
| from sentry.models.organization import Organization | ||
|
|
@@ -18,14 +19,15 @@ | |
| from sentry.replays.post_process import process_raw_response | ||
| from sentry.replays.query import query_replay_id_by_prefix, query_replay_instance | ||
| from sentry.search.eap.types import SearchResolverConfig | ||
| from sentry.search.events.types import SnubaParams | ||
| from sentry.search.events.types import SAMPLING_MODES, SnubaParams | ||
| from sentry.seer.autofix.autofix import get_all_tags_overview | ||
| from sentry.seer.constants import SEER_SUPPORTED_SCM_PROVIDERS | ||
| from sentry.seer.sentry_data_models import EAPTrace | ||
| from sentry.services.eventstore.models import Event, GroupEvent | ||
| from sentry.snuba.referrer import Referrer | ||
| from sentry.snuba.spans_rpc import Spans | ||
| from sentry.snuba.trace import query_trace_data | ||
| from sentry.snuba.utils import get_dataset | ||
| from sentry.utils.dates import parse_stats_period | ||
|
|
||
| logger = logging.getLogger(__name__) | ||
|
|
@@ -175,6 +177,151 @@ def execute_trace_query_table( | |
| return resp.data | ||
|
|
||
|
|
||
def execute_table_query(
    *,
    org_id: int,
    dataset: str,
    fields: list[str],
    query: str,
    sort: str,
    per_page: int,
    stats_period: str,
    project_ids: list[int] | None = None,
    project_slugs: list[str] | None = None,
    sampling_mode: SAMPLING_MODES = "NORMAL",
) -> dict[str, Any] | None:
    """
    Fetch table data for an organization by proxying to the events endpoint.

    Returns the endpoint's response payload, or None when the organization
    does not exist. Non-2xx/3xx responses from the API client raise.

    Arg notes:
        project_ids: The IDs of the projects to query. Cannot be provided with project_slugs.
        project_slugs: The slugs of the projects to query. Cannot be provided with project_ids.
        If neither project_ids nor project_slugs are provided, all active projects will be queried.
    """
    try:
        organization = Organization.objects.get(id=org_id)
    except Organization.DoesNotExist:
        logger.warning("Organization not found", extra={"org_id": org_id})
        return None

    if not project_ids and not project_slugs:
        project_ids = [ALL_ACCESS_PROJECT_ID]
    # Note if both project_ids and project_slugs are provided, the API request will 400.

    # Default to newest-first only when the caller selected a timestamp column.
    default_sort = "-timestamp" if "timestamp" in fields else None
    raw_params: dict[str, Any] = {
        "dataset": dataset,
        "field": fields,
        "query": query,
        "sort": sort or default_sort,
        "per_page": per_page,
        "statsPeriod": stats_period,
        "project": project_ids,
        "projectSlug": project_slugs,
        "sampling": sampling_mode,
        "referrer": Referrer.SEER_RPC,
    }

    # Drop unset entries so they are omitted from the request entirely.
    request_params = {key: value for key, value in raw_params.items() if value is not None}

    # Call sentry API client. This will raise API errors for non-2xx / 3xx status.
    response = client.get(
        auth=ApiKey(organization_id=organization.id, scope_list=["org:read", "project:read"]),
        user=None,
        path=f"/organizations/{organization.slug}/events/",
        params=request_params,
    )
    return response.data
|
|
||
|
|
||
def execute_timeseries_query(
    *,
    org_id: int,
    dataset: str,
    y_axes: list[str],
    group_by: list[str] | None = None,
    query: str,
    stats_period: str,
    interval: str | None = None,
    project_ids: list[int] | None = None,
    project_slugs: list[str] | None = None,
    sampling_mode: SAMPLING_MODES = "NORMAL",
    partial: Literal["0", "1"] | None = None,
) -> dict[str, Any] | None:
    """
    Fetch chart/timeseries data for an organization via the events-stats endpoint.

    Returns the endpoint's payload, normalized so that single-metric results
    are always nested as {"metric": {"data": [...]}}. Returns None when the
    organization does not exist; API errors from the client raise.

    Arg notes:
        interval: The interval of each bucket. Valid stats period format, e.g. '3h'.
        partial: Whether to allow partial buckets if the last bucket does not align with rollup.
        project_ids: The IDs of the projects to query. Cannot be provided with project_slugs.
        project_slugs: The slugs of the projects to query. Cannot be provided with project_ids.
        If neither project_ids nor project_slugs are provided, all active projects will be queried.
    """
    try:
        organization = Organization.objects.get(id=org_id)
    except Organization.DoesNotExist:
        logger.warning("Organization not found", extra={"org_id": org_id})
        return None

    group_by = group_by or []
    if not project_ids and not project_slugs:
        project_ids = [ALL_ACCESS_PROJECT_ID]
    # Note if both project_ids and project_slugs are provided, the API request will 400.

    raw_params: dict[str, Any] = {
        "dataset": dataset,
        "yAxis": y_axes,
        "field": y_axes + group_by,
        "query": query,
        "statsPeriod": stats_period,
        "interval": interval,
        "project": project_ids,
        "projectSlug": project_slugs,
        "sampling": sampling_mode,
        "referrer": Referrer.SEER_RPC,
        "partial": partial,
        "excludeOther": "0",  # Always include "Other" series
    }

    # Grouped queries on top-events-capable datasets are limited to the top 5 series.
    if group_by and get_dataset(dataset) in TOP_EVENTS_DATASETS:
        raw_params["topEvents"] = 5

    # Drop unset entries so they are omitted from the request entirely.
    request_params = {key: value for key, value in raw_params.items() if value is not None}

    # Call sentry API client. This will raise API errors for non-2xx / 3xx status.
    response = client.get(
        auth=ApiKey(organization_id=organization.id, scope_list=["org:read", "project:read"]),
        user=None,
        path=f"/organizations/{organization.slug}/events-stats/",
        params=request_params,
    )
    data = response.data

    # Always normalize to the nested {"metric": {"data": [...]}} format for consistency.
    # Multi-metric (or unnamed-metric) responses are already in that shape.
    if len(y_axes) != 1:
        return data
    metric_name = y_axes[0]
    if not metric_name:
        return data

    if group_by:
        # Grouped data with a single metric: wrap each group's series in the metric name.
        normalized: dict[str, Any] = {}
        for group_value, group_data in data.items():
            if isinstance(group_data, dict) and "data" in group_data:
                normalized[group_value] = {metric_name: group_data}
            else:
                normalized[group_value] = group_data
        return normalized

    # Non-grouped data with a single metric: wrap the series in the metric name.
    if isinstance(data, dict) and "data" in data:
        return {metric_name: data}

    return data
aliu39 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
|
|
||
| def get_trace_waterfall(trace_id: str, organization_id: int) -> EAPTrace | None: | ||
| """ | ||
| Get the full span waterfall and connected errors for a trace. | ||
|
|
@@ -352,7 +499,8 @@ def _get_issue_event_timeseries( | |
| first_seen_delta: timedelta, | ||
| ) -> tuple[dict[str, Any], str, str] | None: | ||
| """ | ||
| Get event counts over time for an issue by calling the events-stats endpoint. | ||
| Get event counts over time for an issue (no group by) by calling the events-stats endpoint. Dynamically picks | ||
| a stats period and interval based on the issue's first seen date and EVENT_TIMESERIES_RESOLUTIONS. | ||
| """ | ||
|
|
||
| stats_period, interval = None, None | ||
|
|
@@ -364,35 +512,21 @@ def _get_issue_event_timeseries( | |
| stats_period = stats_period or "90d" | ||
| interval = interval or "3d" | ||
|
|
||
| params: dict[str, Any] = { | ||
| "dataset": "issuePlatform", | ||
| "query": f"issue:{issue_short_id}", | ||
| "yAxis": "count()", | ||
| "partial": "1", | ||
| "statsPeriod": stats_period, | ||
| "interval": interval, | ||
| "project": project_id, | ||
| "referrer": Referrer.SEER_RPC, | ||
| } | ||
|
|
||
| resp = client.get( | ||
| auth=ApiKey(organization_id=organization.id, scope_list=["org:read", "project:read"]), | ||
| user=None, | ||
| path=f"/organizations/{organization.slug}/events-stats/", | ||
| params=params, | ||
| data = execute_timeseries_query( | ||
| org_id=organization.id, | ||
| dataset="issuePlatform", | ||
| y_axes=["count()"], | ||
| group_by=[], | ||
| query=f"issue:{issue_short_id}", | ||
| stats_period=stats_period, | ||
| interval=interval, | ||
| project_ids=[project_id], | ||
| partial="1", | ||
| ) | ||
| if resp.status_code != 200 or not (resp.data or {}).get("data"): | ||
| logger.warning( | ||
| "Failed to get event counts for issue", | ||
| extra={ | ||
| "organization_slug": organization.slug, | ||
| "project_id": project_id, | ||
| "issue_id": issue_short_id, | ||
| }, | ||
| ) | ||
| return None | ||
|
|
||
| return {"count()": {"data": resp.data["data"]}}, stats_period, interval | ||
| if data is None: | ||
| return None | ||
| return data, stats_period, interval | ||
aliu39 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
|
|
||
| def get_issue_details( | ||
|
|
||
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.