From 4f4451e697161bbc5b11127b63b3ed4599378413 Mon Sep 17 00:00:00 2001 From: Andrew Liu <159852527+aliu39@users.noreply.github.com> Date: Tue, 23 Jul 2024 12:53:40 -0700 Subject: [PATCH 001/126] fix(toolbar): hide feedback panel when submit feedback form opens (#74679) Fixes https://github.com/getsentry/sentry/issues/74156 so the toolbar will be totally hidden when submitting feedback / taking a screenshot. https://github.com/user-attachments/assets/6f733424-5520-495a-b06f-b17c6be50117 --------- Co-authored-by: Ryan Albrecht --- .../components/devtoolbar/components/app.tsx | 15 +++++++++----- .../components/feedback/feedbackPanel.tsx | 13 +++++++++++- .../devtoolbar/components/navigation.tsx | 8 ++++++-- .../devtoolbar/components/providers.tsx | 5 ++++- .../devtoolbar/hooks/useSDKFeedbackButton.tsx | 12 ++++++++--- .../devtoolbar/hooks/useVisibility.tsx | 20 +++++++++++++++++++ 6 files changed, 61 insertions(+), 12 deletions(-) create mode 100644 static/app/components/devtoolbar/hooks/useVisibility.tsx diff --git a/static/app/components/devtoolbar/components/app.tsx b/static/app/components/devtoolbar/components/app.tsx index 47c8bb7ad8d86e..2696a86a23a0b4 100644 --- a/static/app/components/devtoolbar/components/app.tsx +++ b/static/app/components/devtoolbar/components/app.tsx @@ -5,6 +5,7 @@ import LoadingTriangle from 'sentry/components/loadingTriangle'; import {useSessionStorage} from 'sentry/utils/useSessionStorage'; import usePlacementCss from '../hooks/usePlacementCss'; +import useVisibility from '../hooks/useVisibility'; import {fixedContainerBaseCss} from '../styles/fixedContainer'; import {avatarCss, globalCss, loadingIndicatorCss} from '../styles/global'; import {resetFlexColumnCss} from '../styles/reset'; @@ -15,8 +16,12 @@ import PanelRouter from './panelRouter'; export default function App() { const placement = usePlacementCss(); - const [isHidden, setIsHidden] = useSessionStorage('hide_employee_devtoolbar', false); - if (isHidden) { + const [visibility] = useVisibility(); + const [isDisabled, setIsDisabled] = useSessionStorage( + 'hide_employee_devtoolbar', + false + ); + if (isDisabled) { return null; } @@ -25,10 +30,10 @@ export default function App() { -
- {isHidden ? null : ( +
+ {isDisabled ? null : ( - + }> diff --git a/static/app/components/devtoolbar/components/feedback/feedbackPanel.tsx b/static/app/components/devtoolbar/components/feedback/feedbackPanel.tsx index 807eaa376f818c..15f907c707734d 100644 --- a/static/app/components/devtoolbar/components/feedback/feedbackPanel.tsx +++ b/static/app/components/devtoolbar/components/feedback/feedbackPanel.tsx @@ -12,6 +12,7 @@ import useReplayCount from 'sentry/utils/replayCount/useReplayCount'; import useConfiguration from '../../hooks/useConfiguration'; import useCurrentTransactionName from '../../hooks/useCurrentTransactionName'; import {useSDKFeedbackButton} from '../../hooks/useSDKFeedbackButton'; +import useVisibility from '../../hooks/useVisibility'; import { badgeWithLabelCss, gridFlexEndCss, @@ -34,7 +35,17 @@ import SentryAppLink from '../sentryAppLink'; import useInfiniteFeedbackList from './useInfiniteFeedbackList'; export default function FeedbackPanel() { - const buttonRef = useSDKFeedbackButton(); + const [, setVisible] = useVisibility(); + + const buttonRef = useSDKFeedbackButton( + useMemo( + () => ({ + onFormOpen: () => setVisible('hidden'), + onFormClose: () => setVisible('visible'), + }), + [setVisible] + ) + ); const transactionName = useCurrentTransactionName(); const queryResult = useInfiniteFeedbackList({ query: `url:*${transactionName}`, diff --git a/static/app/components/devtoolbar/components/navigation.tsx b/static/app/components/devtoolbar/components/navigation.tsx index f7c3b5ac290d4e..f81d131fbe9dcb 100644 --- a/static/app/components/devtoolbar/components/navigation.tsx +++ b/static/app/components/devtoolbar/components/navigation.tsx @@ -9,7 +9,11 @@ import useToolbarRoute from '../hooks/useToolbarRoute'; import {navigationButtonCss, navigationCss} from '../styles/navigation'; import {resetButtonCss, resetDialogCss} from '../styles/reset'; -export default function Navigation({setIsHidden}: {setIsHidden: (val: boolean) => void}) { +export default function Navigation({ + setIsDisabled, +}: { + setIsDisabled: (val: boolean) => void; +}) { const {trackAnalytics} = useConfiguration(); const placement = usePlacementCss(); @@ -28,7 +32,7 @@ export default function Navigation({setIsHidden}: {setIsHidden: (val: boolean) = } /> { - setIsHidden(true); + setIsDisabled(true); trackAnalytics?.({ eventKey: `devtoolbar.nav.hide.click`, eventName: `devtoolbar: Hide devtoolbar`, diff --git a/static/app/components/devtoolbar/components/providers.tsx b/static/app/components/devtoolbar/components/providers.tsx index 37fdbcdb4079e5..3ba29e36ad0512 100644 --- a/static/app/components/devtoolbar/components/providers.tsx +++ b/static/app/components/devtoolbar/components/providers.tsx @@ -7,6 +7,7 @@ import {lightTheme} from 'sentry/utils/theme'; import {ConfigurationContextProvider} from '../hooks/useConfiguration'; import {ToolbarRouterContextProvider} from '../hooks/useToolbarRoute'; +import {VisibilityContextProvider} from '../hooks/useVisibility'; import type {Configuration} from '../types'; interface Props { @@ -36,7 +37,9 @@ export default function Providers({children, config, container}: Props) { - {children} + + {children} + diff --git a/static/app/components/devtoolbar/hooks/useSDKFeedbackButton.tsx b/static/app/components/devtoolbar/hooks/useSDKFeedbackButton.tsx index 1024f5826be957..314e840e9cc724 100644 --- a/static/app/components/devtoolbar/hooks/useSDKFeedbackButton.tsx +++ b/static/app/components/devtoolbar/hooks/useSDKFeedbackButton.tsx @@ -2,17 +2,23 @@ import {useEffect, useRef} from 'react'; 
import useConfiguration from './useConfiguration'; -export function useSDKFeedbackButton() { +export function useSDKFeedbackButton({ + onFormClose, + onFormOpen, +}: { + onFormClose?: () => void; + onFormOpen?: () => void; +}) { const buttonRef = useRef(null); const {SentrySDK} = useConfiguration(); const feedback = SentrySDK && 'getFeedback' in SentrySDK && SentrySDK.getFeedback(); useEffect(() => { if (feedback && buttonRef.current) { - return feedback.attachTo(buttonRef.current, {}); + return feedback.attachTo(buttonRef.current, {onFormOpen, onFormClose}); } return () => {}; - }, [feedback]); + }, [feedback, onFormOpen, onFormClose]); return feedback ? buttonRef : undefined; } diff --git a/static/app/components/devtoolbar/hooks/useVisibility.tsx b/static/app/components/devtoolbar/hooks/useVisibility.tsx new file mode 100644 index 00000000000000..5f9a31f809beff --- /dev/null +++ b/static/app/components/devtoolbar/hooks/useVisibility.tsx @@ -0,0 +1,20 @@ +import type {CSSProperties, Dispatch, ReactNode, SetStateAction} from 'react'; +import {createContext, useContext, useState} from 'react'; + +type State = CSSProperties['visibility']; + +const VisibilityContext = createContext<[State, Dispatch>]>([ + 'visible', + () => {}, +]); + +export function VisibilityContextProvider({children}: {children: ReactNode}) { + const state = useState('visible'); + return ( + {children} + ); +} + +export default function useVisibility() { + return useContext(VisibilityContext); +} From 078c9f046f4075a5e7e98efaab43815a0ea93a1a Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 23 Jul 2024 13:13:08 -0700 Subject: [PATCH 002/126] feat(query-builder): Add config for disabling wildcard tokens (#74678) Adds `disallowWildcard` config option. Will mark invalid tokens and free text as red and display a tooltip explaining the issue. --- .../app/components/compactSelect/control.tsx | 7 + .../searchQueryBuilder/index.spec.tsx | 41 +++ .../searchQueryBuilder/index.stories.tsx | 42 ++- .../components/searchQueryBuilder/index.tsx | 14 +- .../searchQueryBuilder/tokenizedQueryGrid.tsx | 1 - .../tokens/deletableToken.tsx | 14 +- .../tokens/filter/filter.tsx | 70 +++-- .../tokens/filter/filterKeyOperator.tsx | 9 +- .../searchQueryBuilder/tokens/freeText.tsx | 267 +++++++++++------- .../tokens/invalidTokenTooltip.tsx | 61 ++++ .../components/searchQueryBuilder/utils.tsx | 20 +- 11 files changed, 405 insertions(+), 141 deletions(-) create mode 100644 static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx diff --git a/static/app/components/compactSelect/control.tsx b/static/app/components/compactSelect/control.tsx index 20e243a0b85fe3..8b63cb81284816 100644 --- a/static/app/components/compactSelect/control.tsx +++ b/static/app/components/compactSelect/control.tsx @@ -171,6 +171,10 @@ export interface ControlProps * true). */ onClear?: () => void; + /** + * Called when the menu is opened or closed. + */ + onOpenChange?: (newOpenState: boolean) => void; /** * Called when the search input's value changes (applicable only when `searchable` * is true). 
@@ -233,6 +237,7 @@ export function Control({ menuHeaderTrailingItems, menuBody, menuFooter, + onOpenChange, // Select props size = 'md', @@ -327,6 +332,8 @@ export function Control({ preventOverflowOptions, flipOptions, onOpenChange: open => { + onOpenChange?.(open); + nextFrameCallback(() => { if (open) { // Focus on search box if present diff --git a/static/app/components/searchQueryBuilder/index.spec.tsx b/static/app/components/searchQueryBuilder/index.spec.tsx index 6cd1116dc81de5..69cdfbdcbaa00a 100644 --- a/static/app/components/searchQueryBuilder/index.spec.tsx +++ b/static/app/components/searchQueryBuilder/index.spec.tsx @@ -1969,4 +1969,45 @@ describe('SearchQueryBuilder', function () { ).toBeInTheDocument(); }); }); + + describe('disallowWildcard', function () { + it('should mark tokens with wildcards invalid', async function () { + render( + + ); + + expect(screen.getByRole('row', {name: 'browser.name:Firefox*'})).toHaveAttribute( + 'aria-invalid', + 'true' + ); + + // Put focus into token, should show error message + await userEvent.click(getLastInput()); + await userEvent.keyboard('{ArrowLeft}'); + + expect( + await screen.findByText('Wildcards not supported in search') + ).toBeInTheDocument(); + }); + + it('should mark free text with wildcards invalid', async function () { + render( + + ); + + expect(screen.getByRole('row', {name: 'foo*'})).toHaveAttribute( + 'aria-invalid', + 'true' + ); + + await userEvent.click(getLastInput()); + expect( + await screen.findByText('Wildcards not supported in search') + ).toBeInTheDocument(); + }); + }); }); diff --git a/static/app/components/searchQueryBuilder/index.stories.tsx b/static/app/components/searchQueryBuilder/index.stories.tsx index c00e9960198e7b..1eb1e4604afb88 100644 --- a/static/app/components/searchQueryBuilder/index.stories.tsx +++ b/static/app/components/searchQueryBuilder/index.stories.tsx @@ -1,7 +1,8 @@ -import {Fragment} from 'react'; +import {Fragment, useState} from 'react'; import styled from '@emotion/styled'; import Alert from 'sentry/components/alert'; +import MultipleCheckbox from 'sentry/components/forms/controls/multipleCheckbox'; import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder'; import type {FilterKeySection} from 'sentry/components/searchQueryBuilder/types'; import SizingWindow from 'sentry/components/stories/sizingWindow'; @@ -111,6 +112,45 @@ export default storyBook(SearchQueryBuilder, story => { ); }); + + story('Config Options', () => { + const configs = ['disallowLogicalOperators', 'disallowWildcard']; + + const [enabledConfigs, setEnabledConfigs] = useState([...configs]); + const queryBuilderOptions = enabledConfigs.reduce((acc, config) => { + acc[config] = true; + return acc; + }, {}); + + return ( + +

+ There are some config options which allow you to customize which types of syntax + are considered valid. This should be used when the search backend does not + support certain operators like boolean logic or wildcards. +

+ + {configs.map(config => ( + + {config} + + ))} + + +
+ ); + }); }); const MinHeightSizingWindow = styled(SizingWindow)` diff --git a/static/app/components/searchQueryBuilder/index.tsx b/static/app/components/searchQueryBuilder/index.tsx index 7083b9acf74126..0779b22011039c 100644 --- a/static/app/components/searchQueryBuilder/index.tsx +++ b/static/app/components/searchQueryBuilder/index.tsx @@ -44,6 +44,10 @@ export interface SearchQueryBuilderProps { * When true, parens and logical operators (AND, OR) will be marked as invalid. */ disallowLogicalOperators?: boolean; + /** + * When true, the wildcard (*) in filter values or free text will be marked as invalid. + */ + disallowWildcard?: boolean; /** * The lookup strategy for field definitions. * Each SearchQueryBuilder instance can support a different list of fields and @@ -92,6 +96,7 @@ function ActionButtons() { export function SearchQueryBuilder({ className, disallowLogicalOperators, + disallowWildcard, label, initialQuery, fieldDefinitionGetter = getFieldDefinition, @@ -113,9 +118,16 @@ export function SearchQueryBuilder({ () => parseQueryBuilderValue(state.query, fieldDefinitionGetter, { disallowLogicalOperators, + disallowWildcard, filterKeys, }), - [disallowLogicalOperators, fieldDefinitionGetter, filterKeys, state.query] + [ + disallowLogicalOperators, + disallowWildcard, + fieldDefinitionGetter, + filterKeys, + state.query, + ] ); useEffectAfterFirstRender(() => { diff --git a/static/app/components/searchQueryBuilder/tokenizedQueryGrid.tsx b/static/app/components/searchQueryBuilder/tokenizedQueryGrid.tsx index e04438f008ba63..c7f4c9393d0913 100644 --- a/static/app/components/searchQueryBuilder/tokenizedQueryGrid.tsx +++ b/static/app/components/searchQueryBuilder/tokenizedQueryGrid.tsx @@ -86,7 +86,6 @@ function Grid(props: GridProps) { /> ); case Token.FREE_TEXT: - case Token.SPACES: return ( {children} - + - + ); } diff --git a/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx b/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx index b1b8e60e46c7de..de2c813eb342fc 100644 --- a/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx +++ b/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx @@ -13,6 +13,7 @@ import {FilterKeyOperator} from 'sentry/components/searchQueryBuilder/tokens/fil import {useFilterButtonProps} from 'sentry/components/searchQueryBuilder/tokens/filter/useFilterButtonProps'; import {formatFilterValue} from 'sentry/components/searchQueryBuilder/tokens/filter/utils'; import {SearchQueryBuilderValueCombobox} from 'sentry/components/searchQueryBuilder/tokens/filter/valueCombobox'; +import {InvalidTokenTooltip} from 'sentry/components/searchQueryBuilder/tokens/invalidTokenTooltip'; import { type ParseResultToken, Token, @@ -31,6 +32,7 @@ interface SearchQueryTokenProps { interface FilterValueProps extends SearchQueryTokenProps { filterRef: React.RefObject; + onActiveChange: (active: boolean) => void; } function FilterValueText({token}: {token: TokenResult}) { @@ -81,7 +83,7 @@ function FilterValueText({token}: {token: TokenResult}) { } } -function FilterValue({token, state, item, filterRef}: FilterValueProps) { +function FilterValue({token, state, item, filterRef, onActiveChange}: FilterValueProps) { const ref = useRef(null); const {dispatch, focusOverride} = useSearchQueryBuilder(); @@ -94,9 +96,10 @@ function FilterValue({token, state, item, filterRef}: FilterValueProps) { focusOverride.part === 'value' ) { setIsEditing(true); + onActiveChange(true); dispatch({type: 'RESET_FOCUS_OVERRIDE'}); } - }, [dispatch, 
focusOverride, isEditing, item.key]); + }, [dispatch, focusOverride, isEditing, item.key, onActiveChange]); const {focusWithinProps} = useFocusWithin({ onBlurWithin: () => { @@ -116,9 +119,11 @@ function FilterValue({token, state, item, filterRef}: FilterValueProps) { filterRef.current?.focus(); state.selectionManager.setFocusedKey(item.key); setIsEditing(false); + onActiveChange(false); }} onCommit={() => { setIsEditing(false); + onActiveChange(false); if (state.collection.getKeyAfter(item.key)) { state.selectionManager.setFocusedKey( state.collection.getKeyAfter(item.key) @@ -133,7 +138,10 @@ function FilterValue({token, state, item, filterRef}: FilterValueProps) { return ( setIsEditing(true)} + onClick={() => { + setIsEditing(true); + onActiveChange(true); + }} {...filterButtonProps} > @@ -160,6 +168,7 @@ function FilterDelete({token, state, item}: SearchQueryTokenProps) { export function SearchQueryBuilderFilter({item, state, token}: SearchQueryTokenProps) { const ref = useRef(null); + const [filterMenuOpen, setFilterMenuOpen] = useState(false); const isFocused = item.key === state.selectionManager.focusedKey; @@ -185,38 +194,52 @@ export function SearchQueryBuilderFilter({item, state, token}: SearchQueryTokenP onKeyDown, }); - // TODO(malwilley): Add better error messaging const tokenHasError = 'invalid' in token && defined(token.invalid); return ( - - - - - - - - - + + + + + + + + + + + ); } const FilterWrapper = styled('div')` position: relative; - display: grid; - grid-template-columns: auto auto auto auto; - align-items: stretch; border: 1px solid ${p => p.theme.innerBorder}; border-radius: ${p => p.theme.borderRadius}; height: 24px; - /* Ensures that filters do not grow outside of the container */ min-width: 0; @@ -228,6 +251,17 @@ const FilterWrapper = styled('div')` &[aria-selected='true'] { background-color: ${p => p.theme.gray100}; } + + &[aria-invalid='true'] { + border-color: ${p => p.theme.red400}; + } +`; + +const GridInvalidTokenTooltip = styled(InvalidTokenTooltip)` + display: grid; + grid-template-columns: auto auto auto auto; + align-items: stretch; + height: 22px; `; const BaseGridCell = styled('div')` diff --git a/static/app/components/searchQueryBuilder/tokens/filter/filterKeyOperator.tsx b/static/app/components/searchQueryBuilder/tokens/filter/filterKeyOperator.tsx index 00ce337586689c..cd7d284140bbfc 100644 --- a/static/app/components/searchQueryBuilder/tokens/filter/filterKeyOperator.tsx +++ b/static/app/components/searchQueryBuilder/tokens/filter/filterKeyOperator.tsx @@ -24,6 +24,7 @@ import useOrganization from 'sentry/utils/useOrganization'; type FilterOperatorProps = { item: Node; + onOpenChange: (isOpen: boolean) => void; state: ListState; token: TokenResult; }; @@ -185,7 +186,12 @@ function getOperatorInfo(token: TokenResult): { }; } -export function FilterKeyOperator({token, state, item}: FilterOperatorProps) { +export function FilterKeyOperator({ + token, + state, + item, + onOpenChange, +}: FilterOperatorProps) { const organization = useOrganization(); const {dispatch, searchSource, query, savedSearchType} = useSearchQueryBuilder(); const filterButtonProps = useFilterButtonProps({state, item}); @@ -206,6 +212,7 @@ export function FilterKeyOperator({token, state, item}: FilterOperatorProps) { size="sm" options={options} value={operator} + onOpenChange={onOpenChange} onChange={option => { trackAnalytics('search.operator_autocompleted', { organization, diff --git a/static/app/components/searchQueryBuilder/tokens/freeText.tsx 
b/static/app/components/searchQueryBuilder/tokens/freeText.tsx index 52f72f17f34652..2f5b4f591b63e0 100644 --- a/static/app/components/searchQueryBuilder/tokens/freeText.tsx +++ b/static/app/components/searchQueryBuilder/tokens/freeText.tsx @@ -11,6 +11,7 @@ import {useSearchQueryBuilder} from 'sentry/components/searchQueryBuilder/contex import {useQueryBuilderGridItem} from 'sentry/components/searchQueryBuilder/hooks/useQueryBuilderGridItem'; import {replaceTokensWithPadding} from 'sentry/components/searchQueryBuilder/hooks/useQueryBuilderState'; import {SearchQueryBuilderCombobox} from 'sentry/components/searchQueryBuilder/tokens/combobox'; +import {InvalidTokenTooltip} from 'sentry/components/searchQueryBuilder/tokens/invalidTokenTooltip'; import { getDefaultFilterValue, useShiftFocusToChild, @@ -38,15 +39,14 @@ import useOrganization from 'sentry/utils/useOrganization'; type SearchQueryBuilderInputProps = { item: Node; state: ListState; - token: TokenResult | TokenResult; + token: TokenResult; }; type SearchQueryBuilderInputInternalProps = { item: Node; rowRef: React.RefObject; state: ListState; - tabIndex: number; - token: TokenResult | TokenResult; + token: TokenResult; }; type KeyItem = { @@ -261,10 +261,36 @@ function KeyDescription({tag}: {tag: Tag}) { ); } +function InvalidText({ + token, + state, + item, + inputValue, +}: { + inputValue: string; + item: Node; + state: ListState; + token: TokenResult; +}) { + // Because the text input may be larger than the actual text, we use a div + // with the same text contents to determine where the tooltip should be + // positioned. + return ( + + {inputValue} + + ); +} + function SearchQueryBuilderInputInternal({ item, token, - tabIndex, state, rowRef, }: SearchQueryBuilderInputInternalProps) { @@ -273,6 +299,8 @@ function SearchQueryBuilderInputInternal({ const trimmedTokenValue = token.text.trim(); const [inputValue, setInputValue] = useState(trimmedTokenValue); const [selectionIndex, setSelectionIndex] = useState(0); + const isFocused = + state.selectionManager.isFocused && item.key === state.selectionManager.focusedKey; const updateSelectionIndex = useCallback(() => { setSelectionIndex(inputRef.current?.selectionStart ?? 0); @@ -379,112 +407,116 @@ function SearchQueryBuilderInputInternal({ }, [updateSelectionIndex]); return ( - { - dispatch({ - type: 'UPDATE_FREE_TEXT', - tokens: [token], - text: replaceFocusedWordWithFilter( - inputValue, - selectionIndex, - value, - getFieldDefinition - ), - focusOverride: calculateNextFocusForFilter(state), - }); - resetInputValue(); - const selectedKey = filterKeys[value]; - trackAnalytics('search.key_autocompleted', { - organization, - search_type: savedSearchType === 0 ? 'issues' : 'events', - search_source: searchSource, - item_name: value, - item_kind: selectedKey?.kind ?? FieldKind.FIELD, - item_value_type: getFieldDefinition(value)?.valueType ?? 
FieldValueType.STRING, - filtered: Boolean(filterValue), - new_experience: true, - }); - }} - onCustomValueBlurred={value => { - dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: value}); - resetInputValue(); - }} - onCustomValueCommitted={value => { - dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: value}); - resetInputValue(); - - // Because the query does not change until a subsequent render, - // we need to do the replacement that is does in the reducer here - handleSearch(replaceTokensWithPadding(query, [token], value)); - }} - onExit={() => { - if (inputValue !== token.value.trim()) { - dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: inputValue}); - resetInputValue(); - } - }} - inputValue={inputValue} - token={token} - inputLabel={t('Add a search term')} - onInputChange={e => { - if (e.target.value.includes('(') || e.target.value.includes(')')) { + + { dispatch({ type: 'UPDATE_FREE_TEXT', tokens: [token], - text: e.target.value, - focusOverride: calculateNextFocusForParen(item), + text: replaceFocusedWordWithFilter( + inputValue, + selectionIndex, + value, + getFieldDefinition + ), + focusOverride: calculateNextFocusForFilter(state), }); resetInputValue(); - return; - } - - if (e.target.value.includes(':')) { - dispatch({ - type: 'UPDATE_FREE_TEXT', - tokens: [token], - text: e.target.value, - focusOverride: calculateNextFocusForFilter(state), + const selectedKey = filterKeys[value]; + trackAnalytics('search.key_autocompleted', { + organization, + search_type: savedSearchType === 0 ? 'issues' : 'events', + search_source: searchSource, + item_name: value, + item_kind: selectedKey?.kind ?? FieldKind.FIELD, + item_value_type: + getFieldDefinition(value)?.valueType ?? FieldValueType.STRING, + filtered: Boolean(filterValue), + new_experience: true, }); + }} + onCustomValueBlurred={value => { + dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: value}); resetInputValue(); - return; - } + }} + onCustomValueCommitted={value => { + dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: value}); + resetInputValue(); + + // Because the query does not change until a subsequent render, + // we need to do the replacement that is does in the reducer here + handleSearch(replaceTokensWithPadding(query, [token], value)); + }} + onExit={() => { + if (inputValue !== token.value.trim()) { + dispatch({type: 'UPDATE_FREE_TEXT', tokens: [token], text: inputValue}); + resetInputValue(); + } + }} + inputValue={inputValue} + token={token} + inputLabel={t('Add a search term')} + onInputChange={e => { + if (e.target.value.includes('(') || e.target.value.includes(')')) { + dispatch({ + type: 'UPDATE_FREE_TEXT', + tokens: [token], + text: e.target.value, + focusOverride: calculateNextFocusForParen(item), + }); + resetInputValue(); + return; + } - setInputValue(e.target.value); - setSelectionIndex(e.target.selectionStart ?? 0); - }} - onKeyDown={onKeyDown} - tabIndex={tabIndex} - maxOptions={50} - onPaste={onPaste} - displayTabbedMenu={inputValue.length === 0 && filterKeySections.length > 0} - shouldFilterResults={false} - shouldCloseOnInteractOutside={el => { - if (rowRef.current?.contains(el)) { - return false; + if (e.target.value.includes(':')) { + dispatch({ + type: 'UPDATE_FREE_TEXT', + tokens: [token], + text: e.target.value, + focusOverride: calculateNextFocusForFilter(state), + }); + resetInputValue(); + return; + } + + setInputValue(e.target.value); + setSelectionIndex(e.target.selectionStart ?? 0); + }} + onKeyDown={onKeyDown} + tabIndex={isFocused ? 
0 : -1} + maxOptions={50} + onPaste={onPaste} + displayTabbedMenu={inputValue.length === 0 && filterKeySections.length > 0} + shouldFilterResults={false} + shouldCloseOnInteractOutside={el => { + if (rowRef.current?.contains(el)) { + return false; + } + return true; + }} + onClick={onClick} + > + {keyItem => + isSection(keyItem) ? ( +
+ {keyItem.options.map(child => ( + + {child.label} + + ))} +
+ ) : ( + + {keyItem.label} + + ) } - return true; - }} - onClick={onClick} - > - {keyItem => - isSection(keyItem) ? ( -
- {keyItem.options.map(child => ( - - {child.label} - - ))} -
- ) : ( - - {keyItem.label} - - ) - } -
+
+ + ); } @@ -502,16 +534,20 @@ export function SearchQueryBuilderFreeText({ const {rowProps, gridCellProps} = useQueryBuilderGridItem(item, state, ref); const {shiftFocusProps} = useShiftFocusToChild(item, state); - const isFocused = item.key === state.selectionManager.focusedKey; + const isInvalid = Boolean(token.invalid); return ( - + e.stopPropagation()}> @@ -530,6 +566,12 @@ const Row = styled('div')` flex-grow: 1; } + &[aria-invalid='true'] { + input { + color: ${p => p.theme.red400}; + } + } + &[aria-selected='true'] { &::before { content: ''; @@ -550,6 +592,7 @@ const Row = styled('div')` `; const GridCell = styled('div')` + position: relative; display: flex; align-items: stretch; height: 100%; @@ -585,3 +628,19 @@ const Term = styled('dt')` `; const Details = styled('dd')``; + +const PositionedTooltip = styled(InvalidTokenTooltip)` + position: absolute; + z-index: -1; + top: 0; + left: 0; + height: 100%; +`; + +const InvisibleText = styled('div')` + color: transparent; + visibility: hidden; + padding: 0 ${space(0.5)}; + min-width: 9px; + height: 100%; +`; diff --git a/static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx b/static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx new file mode 100644 index 00000000000000..219a92ef15dba2 --- /dev/null +++ b/static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx @@ -0,0 +1,61 @@ +import type {ReactNode} from 'react'; +import type {ListState} from '@react-stately/list'; +import type {Node} from '@react-types/shared'; + +import type {ParseResultToken} from 'sentry/components/searchSyntax/parser'; +import {Tooltip, type TooltipProps} from 'sentry/components/tooltip'; +import {t} from 'sentry/locale'; +import {defined} from 'sentry/utils'; + +interface InvalidTokenTooltipProps extends Omit { + children: ReactNode; + item: Node; + state: ListState; + token: ParseResultToken; +} + +function getForceVisible({ + isFocused, + isInvalid, + forceVisible, +}: { + isFocused: boolean; + isInvalid: boolean; + forceVisible?: boolean; +}) { + if (!isInvalid) { + return false; + } + + if (defined(forceVisible)) { + return forceVisible; + } + + return isFocused ? true : undefined; +} + +export function InvalidTokenTooltip({ + children, + token, + state, + item, + forceVisible, + ...tooltipProps +}: InvalidTokenTooltipProps) { + const invalid = 'invalid' in token ? token.invalid : null; + const isInvalid = Boolean(invalid); + const isFocused = + state.selectionManager.isFocused && state.selectionManager.focusedKey === item.key; + + return ( + + {children} + + ); +} diff --git a/static/app/components/searchQueryBuilder/utils.tsx b/static/app/components/searchQueryBuilder/utils.tsx index d3dfaf692da8e3..3314e464d49956 100644 --- a/static/app/components/searchQueryBuilder/utils.tsx +++ b/static/app/components/searchQueryBuilder/utils.tsx @@ -63,11 +63,16 @@ function getSearchConfigFromKeys( export function parseQueryBuilderValue( value: string, getFieldDefinition: FieldDefinitionGetter, - options?: {filterKeys: TagCollection; disallowLogicalOperators?: boolean} + options?: { + filterKeys: TagCollection; + disallowLogicalOperators?: boolean; + disallowWildcard?: boolean; + } ): ParseResult | null { return collapseTextTokens( parseSearch(value || ' ', { flattenParenGroups: true, + disallowWildcard: options?.disallowWildcard, disallowedLogicalOperators: options?.disallowLogicalOperators ? 
new Set([BooleanOperator.AND, BooleanOperator.OR]) : undefined, @@ -124,9 +129,16 @@ function collapseTextTokens(tokens: ParseResult | null) { const lastToken = acc[acc.length - 1]; if (isSimpleTextToken(token) && isSimpleTextToken(lastToken)) { - lastToken.value += token.value; - lastToken.text += token.text; - lastToken.location.end = token.location.end; + const freeTextToken = lastToken as TokenResult; + freeTextToken.value += token.value; + freeTextToken.text += token.text; + freeTextToken.location.end = token.location.end; + + if (token.type === Token.FREE_TEXT) { + freeTextToken.quoted = freeTextToken.quoted || token.quoted; + freeTextToken.invalid = freeTextToken.invalid ?? token.invalid; + } + return acc; } From bff71c496c503a7334359a624437bbfb4491ef99 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Tue, 23 Jul 2024 13:20:42 -0700 Subject: [PATCH 003/126] feat(seer-priority): Add a temporary feature flag to control seer-based priority (#74758) Adding a new org-level flag to determine if priority should factor in calculations from Seer and the severity microservice. This flag will need to be moved to `features/permanent.py` and out of flagpole before we release. Fixes https://github.com/getsentry/sentry/issues/74757 --- src/sentry/event_manager.py | 6 ++++++ src/sentry/features/temporary.py | 2 ++ tests/sentry/event_manager/test_priority.py | 1 + tests/sentry/event_manager/test_severity.py | 20 ++++++++++++++++++++ 4 files changed, 29 insertions(+) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 7847eb8d1ab323..1b6c386cb748db 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -2344,6 +2344,12 @@ def _get_severity_metadata_for_group( """ from sentry.receivers.rules import PLATFORMS_WITH_PRIORITY_ALERTS + organization_supports_severity = features.has( + "organizations:seer-based-priority", event.project.organization, actor=None + ) + if not organization_supports_severity: + return {} + if killswitch_matches_context("issues.skip-seer-requests", {"project_id": event.project_id}): logger.warning( "get_severity_metadata_for_group.seer_killswitch_enabled", diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 13af8c1b476a83..3d1e7c9eae1d6e 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -421,6 +421,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:insights-browser-webvitals-optional-components", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Add default browser performance score profile for fallback when no or unknown browser name is provided manager.add("organizations:insights-default-performance-score-profiles", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) + # Enable priority alerts using the Seer calculations. This flag will move to a permanent flag before we release. 
+ manager.add("organizations:seer-based-priority", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable standalone span ingestion manager.add("organizations:standalone-span-ingestion", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable the aggregate span waterfall view diff --git a/tests/sentry/event_manager/test_priority.py b/tests/sentry/event_manager/test_priority.py index 14a2c9903f4200..01ea2fb58a16ce 100644 --- a/tests/sentry/event_manager/test_priority.py +++ b/tests/sentry/event_manager/test_priority.py @@ -17,6 +17,7 @@ @region_silo_test @apply_feature_flag_on_cls("projects:first-event-severity-calculation") +@apply_feature_flag_on_cls("organizations:seer-based-priority") class TestEventManagerPriority(TestCase): @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) def test_flag_on(self, mock_get_severity_score: MagicMock): diff --git a/tests/sentry/event_manager/test_severity.py b/tests/sentry/event_manager/test_severity.py index 84555c0c96a15b..26e4addd0c925f 100644 --- a/tests/sentry/event_manager/test_severity.py +++ b/tests/sentry/event_manager/test_severity.py @@ -36,6 +36,7 @@ def make_event(**kwargs) -> dict[str, Any]: return result +@apply_feature_flag_on_cls("organizations:seer-based-priority") class TestGetEventSeverity(TestCase): @patch( "sentry.event_manager.severity_connection_pool.urlopen", @@ -323,6 +324,7 @@ def test_other_exception( assert cache.get(SEER_ERROR_COUNT_KEY) == 1 +@apply_feature_flag_on_cls("organizations:seer-based-priority") @apply_feature_flag_on_cls("projects:first-event-severity-calculation") class TestEventManagerSeverity(TestCase): @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) @@ -360,6 +362,24 @@ def test_flag_off(self, mock_get_severity_score: MagicMock): and "severity.reason" not in event.group.get_event_metadata() ) + @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) + def test_permanent_flag_off(self, mock_get_severity_score: MagicMock): + with self.feature({"organizations:seer-based-priority": False}): + manager = EventManager( + make_event( + exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]}, + platform="python", + ) + ) + event = manager.save(self.project.id) + + mock_get_severity_score.assert_not_called() + assert ( + event.group + and "severity" not in event.group.get_event_metadata() + and "severity.reason" not in event.group.get_event_metadata() + ) + @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) def test_get_severity_score_not_called_on_second_event( self, mock_get_severity_score: MagicMock From 568c329b6aa7998de6b193f0badc965cfe22d44b Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Tue, 23 Jul 2024 13:23:13 -0700 Subject: [PATCH 004/126] feat(data-secrecy): Migration to Add `prevent_superuser_access` Bit Flag (#74700) Created migration to add bit flag to Organization. Will need to follow this up with a migration to add the flag so Hybrid Cloud services can handle syncing. **Glossary** **Data secrecy mode:** Disallows any kind of superuser access into an organization **Enable/Disable Data secrecy mode:** Persistently enable/disable data secrecy for an organization **Waive Data secrecy mode:** Temporarily disable data secrecy for an organizations **Reinstate Data secrecy mode:** Re-enable data secrecy after a temporary waiver This flag handles the enable/disable function. 
[spec](https://www.notion.so/sentry/Superuser-Data-Secrecy-Mode-b9f7fdfd8b564615ae1f91d3d981bc1a) --- migrations_lockfile.txt | 2 +- .../api/endpoints/organization_details.py | 4 ++ .../api/serializers/models/organization.py | 1 + ...45_add_prevent_superuser_access_bitflag.py | 48 +++++++++++++++++++ src/sentry/models/organization.py | 5 +- .../endpoints/test_organization_details.py | 5 ++ tests/sentry/models/test_organization.py | 34 ++++++++----- 7 files changed, 85 insertions(+), 14 deletions(-) create mode 100644 src/sentry/migrations/0745_add_prevent_superuser_access_bitflag.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index cf9def7ff9bc5a..9987c215a17b24 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -10,6 +10,6 @@ hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield remote_subscriptions: 0003_drop_remote_subscription replays: 0004_index_together -sentry: 0744_add_dataset_source_field_to_dashboards +sentry: 0745_add_prevent_superuser_access_bitflag social_auth: 0002_default_auto_field uptime: 0006_projectuptimesubscription_name_owner diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py index 64575ce5798f84..7b16fde820ca89 100644 --- a/src/sentry/api/endpoints/organization_details.py +++ b/src/sentry/api/endpoints/organization_details.py @@ -246,6 +246,7 @@ class OrganizationSerializer(BaseOrganizationSerializer): openMembership = serializers.BooleanField(required=False) allowSharedIssues = serializers.BooleanField(required=False) allowMemberProjectCreation = serializers.BooleanField(required=False) + allowSuperuserAccess = serializers.BooleanField(required=False) enhancedPrivacy = serializers.BooleanField(required=False) dataScrubber = serializers.BooleanField(required=False) dataScrubberDefaults = serializers.BooleanField(required=False) @@ -511,6 +512,8 @@ def save(self): org.flags.require_email_verification = data["requireEmailVerification"] if "allowMemberProjectCreation" in data: org.flags.disable_member_project_creation = not data["allowMemberProjectCreation"] + if "allowSuperuserAccess" in data: + org.flags.prevent_superuser_access = not data["allowSuperuserAccess"] if "name" in data: org.name = data["name"] if "slug" in data: @@ -528,6 +531,7 @@ def save(self): "require_2fa": org.flags.require_2fa.is_set, "codecov_access": org.flags.codecov_access.is_set, "disable_member_project_creation": org.flags.disable_member_project_creation.is_set, + "prevent_superuser_access": org.flags.prevent_superuser_access.is_set, }, } diff --git a/src/sentry/api/serializers/models/organization.py b/src/sentry/api/serializers/models/organization.py index cc8166c1b0451b..32dc0fb53cc093 100644 --- a/src/sentry/api/serializers/models/organization.py +++ b/src/sentry/api/serializers/models/organization.py @@ -353,6 +353,7 @@ def serialize( ), "avatar": avatar, "allowMemberProjectCreation": not obj.flags.disable_member_project_creation, + "allowSuperuserAccess": not obj.flags.prevent_superuser_access, "links": { "organizationUrl": generate_organization_url(obj.slug), "regionUrl": generate_region_url(), diff --git a/src/sentry/migrations/0745_add_prevent_superuser_access_bitflag.py b/src/sentry/migrations/0745_add_prevent_superuser_access_bitflag.py new file mode 100644 index 00000000000000..cd3ccc40971738 --- /dev/null +++ b/src/sentry/migrations/0745_add_prevent_superuser_access_bitflag.py @@ -0,0 +1,48 @@ +# Generated by Django 5.0.6 on 2024-07-23 17:37 + +from 
django.db import migrations + +import bitfield.models +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0744_add_dataset_source_field_to_dashboards"), + ] + + operations = [ + migrations.AlterField( + model_name="organization", + name="flags", + field=bitfield.models.BitField( + [ + "allow_joinleave", + "enhanced_privacy", + "disable_shared_issues", + "early_adopter", + "require_2fa", + "disable_new_visibility_features", + "require_email_verification", + "codecov_access", + "disable_member_project_creation", + "prevent_superuser_access", + ], + default=1, + ), + ), + ] diff --git a/src/sentry/models/organization.py b/src/sentry/models/organization.py index fc188fe108f46f..222803594e2bbc 100644 --- a/src/sentry/models/organization.py +++ b/src/sentry/models/organization.py @@ -198,6 +198,9 @@ class flags(TypedClassBitField): # Disable org-members from creating new projects disable_member_project_creation: bool + # Prevent superuser access to an organization + prevent_superuser_access: bool + bitfield_default = 1 objects: ClassVar[OrganizationManager] = OrganizationManager(cache_fields=("pk", "slug")) @@ -354,7 +357,7 @@ def _get_bulk_owner_ids(cls, organizations: Collection[Organization]) -> dict[in organization_id__in=org_ids_to_query, role=roles.get_top_dog().id ).values_list("organization_id", "user_id") - for (org_id, user_id) in queried_owner_ids: + for org_id, user_id in queried_owner_ids: # An org may have multiple owners. Here we mimic the behavior of # `get_default_owner`, which is to use the first one in the query # result's iteration order. 
diff --git a/tests/sentry/api/endpoints/test_organization_details.py b/tests/sentry/api/endpoints/test_organization_details.py index ba1e51ac3c17b9..2e1e73d54b61f2 100644 --- a/tests/sentry/api/endpoints/test_organization_details.py +++ b/tests/sentry/api/endpoints/test_organization_details.py @@ -428,6 +428,7 @@ def test_various_options(self, mock_get_repositories): "openMembership": False, "isEarlyAdopter": True, "codecovAccess": True, + "allowSuperuserAccess": False, "aiSuggestedSolution": False, "githubOpenPRBot": False, "githubNudgeInvite": False, @@ -470,6 +471,7 @@ def test_various_options(self, mock_get_repositories): assert org.flags.early_adopter assert org.flags.codecov_access + assert org.flags.prevent_superuser_access assert not org.flags.allow_joinleave assert org.flags.disable_shared_issues assert org.flags.enhanced_privacy @@ -501,6 +503,9 @@ def test_various_options(self, mock_get_repositories): assert "to {}".format(data["openMembership"]) in log.data["allow_joinleave"] assert "to {}".format(data["isEarlyAdopter"]) in log.data["early_adopter"] assert "to {}".format(data["codecovAccess"]) in log.data["codecov_access"] + assert ( + "to {}".format(not data["allowSuperuserAccess"]) in log.data["prevent_superuser_access"] + ) assert "to {}".format(data["enhancedPrivacy"]) in log.data["enhanced_privacy"] assert "to {}".format(not data["allowSharedIssues"]) in log.data["disable_shared_issues"] assert "to {}".format(data["require2FA"]) in log.data["require_2fa"] diff --git a/tests/sentry/models/test_organization.py b/tests/sentry/models/test_organization.py index 82a22ee1fe77c4..eca01a672ef61c 100644 --- a/tests/sentry/models/test_organization.py +++ b/tests/sentry/models/test_organization.py @@ -95,11 +95,13 @@ def test_flags_have_changed(self): org.flags.codecov_access = True org.flags.require_2fa = True org.flags.disable_member_project_creation = True + org.flags.prevent_superuser_access = True assert flag_has_changed(org, "allow_joinleave") is False assert flag_has_changed(org, "early_adopter") assert flag_has_changed(org, "codecov_access") assert flag_has_changed(org, "require_2fa") assert flag_has_changed(org, "disable_member_project_creation") + assert flag_has_changed(org, "prevent_superuser_access") def test_has_changed(self): org = self.create_organization() @@ -228,9 +230,11 @@ def test_handle_2fa_required__compliant_and_non_compliant_members(self): self.assert_org_member_mapping(org_member=compliant_member) self.assert_org_member_mapping(org_member=non_compliant_member) - with self.options( - {"system.url-prefix": "http://example.com"} - ), self.tasks(), outbox_runner(): + with ( + self.options({"system.url-prefix": "http://example.com"}), + self.tasks(), + outbox_runner(), + ): self.org.handle_2fa_required(self.request) self.is_organization_member(compliant_user.id, compliant_member.id) @@ -280,9 +284,11 @@ def test_handle_2fa_required__non_compliant_members(self): self.assert_org_member_mapping(org_member=member) non_compliant.append((user, member)) - with self.options( - {"system.url-prefix": "http://example.com"} - ), self.tasks(), outbox_runner(): + with ( + self.options({"system.url-prefix": "http://example.com"}), + self.tasks(), + outbox_runner(), + ): self.org.handle_2fa_required(self.request) for user, member in non_compliant: @@ -340,9 +346,11 @@ def test_handle_2fa_required__no_actor_and_api_key__ok(self, auth_log): self.assert_org_member_mapping(org_member=member) - with self.options( - {"system.url-prefix": "http://example.com"} - ), self.tasks(), 
outbox_runner(): + with ( + self.options({"system.url-prefix": "http://example.com"}), + self.tasks(), + outbox_runner(), + ): with assume_test_silo_mode(SiloMode.CONTROL): api_key = ApiKey.objects.create( organization_id=self.org.id, @@ -373,9 +381,11 @@ def test_handle_2fa_required__no_ip_address__ok(self, auth_log): user, member = self._create_user_and_member() self.assert_org_member_mapping(org_member=member) - with self.options( - {"system.url-prefix": "http://example.com"} - ), self.tasks(), outbox_runner(): + with ( + self.options({"system.url-prefix": "http://example.com"}), + self.tasks(), + outbox_runner(), + ): request = copy.deepcopy(self.request) request.META["REMOTE_ADDR"] = None self.org.handle_2fa_required(request) From 2f0a9385e56a9f16355c343dfa0860750518c455 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Tue, 23 Jul 2024 13:24:01 -0700 Subject: [PATCH 005/126] chore(similarity): Add re-queue for backfill task when worker is killed (#74771) Add reject_on_worker_lost flag to re-queue the message if the worker is killed --- .../backfill_seer_grouping_records_for_project.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py index 9a62146e02a309..6d5fe8bd170f13 100644 --- a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py +++ b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py @@ -43,6 +43,7 @@ soft_time_limit=60 * 15, time_limit=60 * 15 + 5, acks_late=True, + reject_on_worker_lost=True, ) def backfill_seer_grouping_records_for_project( current_project_id: int, From 1c3a348618967f2249d7dd1b4d54dc6e27d882b9 Mon Sep 17 00:00:00 2001 From: Danny Lee Date: Tue, 23 Jul 2024 13:24:26 -0700 Subject: [PATCH 006/126] feat(saml2): Implement SP-initiated Single Logout (#74711) --- src/sentry/api/endpoints/auth_index.py | 3 +++ src/sentry/auth/providers/saml2/provider.py | 29 ++++++++++++++++++++- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/sentry/api/endpoints/auth_index.py b/src/sentry/api/endpoints/auth_index.py index ec3b5a2abd36e8..725d0a55feaee3 100644 --- a/src/sentry/api/endpoints/auth_index.py +++ b/src/sentry/api/endpoints/auth_index.py @@ -18,6 +18,7 @@ from sentry.api.validators import AuthVerifyValidator from sentry.api.validators.auth import MISSING_PASSWORD_OR_U2F_CODE from sentry.auth.authenticators.u2f import U2fInterface +from sentry.auth.providers.saml2.provider import handle_saml_single_logout from sentry.auth.services.auth.impl import promote_request_rpc_user from sentry.auth.superuser import SUPERUSER_ORG_ID from sentry.models.authenticator import Authenticator @@ -293,6 +294,8 @@ def delete(self, request: Request, *args, **kwargs) -> Response: Deauthenticate all active sessions for this user. 
""" + handle_saml_single_logout(request) + # For signals to work here, we must promote the request.user to a full user object logout(request._request) request.user = AnonymousUser() diff --git a/src/sentry/auth/providers/saml2/provider.py b/src/sentry/auth/providers/saml2/provider.py index d836ea4742c2db..41ef66f4d171fe 100644 --- a/src/sentry/auth/providers/saml2/provider.py +++ b/src/sentry/auth/providers/saml2/provider.py @@ -1,6 +1,7 @@ import abc from urllib.parse import urlparse +import sentry_sdk from django.contrib import messages from django.contrib.auth import logout from django.http import HttpResponse, HttpResponseServerError @@ -12,7 +13,7 @@ from onelogin.saml2.constants import OneLogin_Saml2_Constants from rest_framework.request import Request -from sentry import options +from sentry import features, options from sentry.auth.exceptions import IdentityNotValid from sentry.auth.provider import Provider from sentry.auth.view import AuthView @@ -20,6 +21,7 @@ from sentry.models.organization import OrganizationStatus from sentry.models.organizationmapping import OrganizationMapping from sentry.organizations.services.organization import organization_service +from sentry.users.services.user.service import user_service from sentry.utils.auth import get_login_url from sentry.utils.http import absolute_uri from sentry.web.frontend.base import BaseView, control_silo_view @@ -387,3 +389,28 @@ def build_auth(request, saml_config): } return OneLogin_Saml2_Auth(saml_request, saml_config) + + +def handle_saml_single_logout(request): + # Do not handle SLO if a user is in more than 1 organization + # Propagating it to multiple IdPs results in confusion for the user + organizations = user_service.get_organizations(user_id=request.user.id) + if not len(organizations) == 1: + return + + org = organizations[0] + if not features.has("organizations:sso-saml2-slo", org): + return + + provider = get_provider(org.slug) + if not provider or not provider.is_saml: + return + + # Try/catch is needed because IdP may not support SLO (e.g. Okta) and + # will return an error + try: + saml_config = build_saml_config(provider.config, org) + idp_auth = build_auth(request, saml_config) + idp_auth.logout() + except Exception as e: + sentry_sdk.capture_exception(e) From 56946876d07c14fcf819a0e5577c0923a3c2d613 Mon Sep 17 00:00:00 2001 From: colin-sentry <161344340+colin-sentry@users.noreply.github.com> Date: Tue, 23 Jul 2024 16:31:46 -0400 Subject: [PATCH 007/126] Still display tokens used if no token cost metric has been sent (#74742) If you have never sent a span with ai.model_id, the span metric ai.total_cost will never be extracted, and then the indexer will throw an error if you try to look for it. 
This separates the "ai.total_cost" and "ai.total_tokens.used" table entries --------- Co-authored-by: George Gritsouk <989898+gggritso@users.noreply.github.com> Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- .../components/tables/pipelinesTable.tsx | 29 +++++++++++++++---- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx b/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx index 3538763cc92c6e..4ec71e2455dafc 100644 --- a/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx +++ b/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx @@ -129,11 +129,21 @@ export function PipelinesTable() { search: new MutableSearch( `span.category:ai span.ai.pipeline.group:[${(data as Row[])?.map(x => x['span.group']).join(',')}]` ), - fields: [ - 'span.ai.pipeline.group', - 'sum(ai.total_tokens.used)', - 'sum(ai.total_cost)', - ], + fields: ['span.ai.pipeline.group', 'sum(ai.total_tokens.used)'], + }, + 'api.performance.ai-analytics.token-usage-chart' + ); + + const { + data: tokenCostData, + isLoading: tokenCostLoading, + error: tokenCostError, + } = useSpanMetrics( + { + search: new MutableSearch( + `span.category:ai span.ai.pipeline.group:[${(data as Row[])?.map(x => x['span.group']).join(',')}]` + ), + fields: ['span.ai.pipeline.group', 'sum(ai.total_cost)'], }, 'api.performance.ai-analytics.token-usage-chart' ); @@ -151,7 +161,14 @@ export function PipelinesTable() { if (tokenUsedDataPoint) { row['sum(ai.total_tokens.used)'] = tokenUsedDataPoint['sum(ai.total_tokens.used)']; - row['sum(ai.total_cost)'] = tokenUsedDataPoint['sum(ai.total_cost)']; + } + } + if (!tokenCostLoading && !tokenCostError) { + const tokenCostDataPoint = tokenCostData.find( + tokenRow => tokenRow['span.ai.pipeline.group'] === row['span.group'] + ); + if (tokenCostDataPoint) { + row['sum(ai.total_cost)'] = tokenCostDataPoint['sum(ai.total_cost)']; } } return row; From 11e8424a4a91ee95cade8157590db60913ba1393 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Tue, 23 Jul 2024 13:39:05 -0700 Subject: [PATCH 008/126] ref(replay): refactor replayerStepper calls to be memoized (#74606) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - closes https://github.com/getsentry/team-replay/issues/450 - this is the branch off from https://github.com/getsentry/sentry/pull/74540, the experimental refactor of `replayerStepper` - TLDR: memoize the `replayerStepper` calls so that we remember the results and don't have to reload the data every time. **current state of things**: we have distinct `replayerStepper` calls in the breadcrumbs + memory tabs. in total, we could be calling it 3 times because it's used by (1) the breadcrumbs tab to render the HTML snippet, (2) for hydration error text diffs, (3) and the DOM nodes chart. **the plan**: improve things by calling `replayerStepper` once (or at least less) and memoize the results. side note: it was hard to combine the hydration error text diff `replayerStepper` call with the others due to the types, so we had to keep that call separate. **the question**: is it faster to run one `replayerStepper` instance, and collect all the data at one time (iterate over the frames exactly once)? 
this means the speed of any tab will always be limited by the slower call, since we're going through ALL the data at once, even some that we might not need yet. like this: https://github.com/getsentry/sentry/blob/70c90544f39dcc3d87dea8c85c0a7669c9585403/static/app/utils/replays/replayReader.tsx#L363-L366 or is it better to continue to have 2 stepper instances that iterate over 2 different sets of frames (breadcrumbs loops over hundreds of frames, and DOM node count iterates over thousands, which means the speeds could be drastically different). in this situation, each tab would handle their own array of necessary frames. **the experiments**: the first PR (https://github.com/getsentry/sentry/pull/74540) tried to explore a refactor where `replayerStepper` is called once for the two DOM node actions (counting, for the memory chart, and extracting, for the breadcrumbs HTML). changes were made to the stepper so it could accept a list of callbacks, and return all types of data in one shot, so we only need one hidden `replayer` instance on the page for the DOM node functions. unscientifically, it seemed like loading breadcrumbs + memory took the same amount of time as loading the memory chart. (see video below -- as soon as the breadcrumbs tab is done loading, the memory tab is already done). https://github.com/user-attachments/assets/2c882bcb-c8df-409c-8e2a-69d75c279735 this makes sense because we’re iterating over thousands of frames for the DOM nodes chart, so that’s the bottleneck. this means the breadcrumbs tab loads slower. **this PR**: compared to that is the approach where we have 1 stepper instance for breadcrumbs, and one for DOM node count, each iterating over their own lists (which is what i'm doing in this PR). this approach showed breadcrumb data on the screen about 2x as fast as the approach above. therefore 2 stepper instances on the screen is better for users, especially since breadcrumbs tab is more popular than the memory tab. the video below demonstrates the memoization in action. once the breadcrumbs or memory tabs are loaded, switching back to them does not cause re-loading, because the results are cached. notice that the breadcrumbs tab loading is not dependent on the loading of the DOM tab, unlike the video above where the breadcrumbs tab has to "wait" for the memory tab. 
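for reference, the per-tab caching described above boils down to a pattern like this (illustrative sketch only -- the hook and method names here are assumptions, not the exact code in this PR):

```tsx
// illustrative sketch: memoize the expensive replayerStepper pass per replay,
// so switching tabs re-uses the extracted data instead of re-running the stepper
import {useQuery} from '@tanstack/react-query';

import type ReplayReader from 'sentry/utils/replays/replayReader';

export default function useExtractDomNodes({replay}: {replay: null | ReplayReader}) {
  return useQuery({
    // keying on the replay instance keeps results cached for the lifetime of the reader
    queryKey: ['extract-dom-nodes', replay],
    // getExtractDomNodes() is assumed to wrap the replayerStepper call on ReplayReader
    queryFn: () => replay?.getExtractDomNodes(),
    enabled: Boolean(replay),
    staleTime: Infinity,
  });
}
```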
(i'm also on slower wifi in this clip below) https://github.com/user-attachments/assets/be71add1-63b5-4308-9d26-356d544980ef --- .../replays/breadcrumbs/breadcrumbItem.tsx | 2 +- .../replays/diff/replayTextDiff.tsx | 6 +- static/app/utils/replays/countDomNodes.tsx | 54 ------------- .../{extractDomNodes.tsx => extractHtml.tsx} | 52 +----------- .../utils/replays/hooks/useCountDomNodes.tsx | 26 ++++++ .../replays/hooks/useExtractDomNodes.tsx | 18 +++++ .../useExtractPageHtml.tsx} | 26 +++++- .../replays/hooks/useExtractedDomNodes.tsx | 16 ---- .../replays/hooks/useExtractedPageHtml.tsx | 21 ----- static/app/utils/replays/replayReader.tsx | 80 +++++++++++++++++++ static/app/utils/replays/replayerStepper.tsx | 14 ++-- .../detail/breadcrumbs/breadcrumbRow.tsx | 2 +- .../replays/detail/breadcrumbs/index.tsx | 8 +- .../detail/memoryPanel/domNodesChart.tsx | 2 +- .../replays/detail/memoryPanel/index.tsx | 23 ++---- 15 files changed, 172 insertions(+), 178 deletions(-) delete mode 100644 static/app/utils/replays/countDomNodes.tsx rename static/app/utils/replays/{extractDomNodes.tsx => extractHtml.tsx} (54%) create mode 100644 static/app/utils/replays/hooks/useCountDomNodes.tsx create mode 100644 static/app/utils/replays/hooks/useExtractDomNodes.tsx rename static/app/utils/replays/{extractPageHtml.tsx => hooks/useExtractPageHtml.tsx} (65%) delete mode 100644 static/app/utils/replays/hooks/useExtractedDomNodes.tsx delete mode 100644 static/app/utils/replays/hooks/useExtractedPageHtml.tsx diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx index 143108b9ffa1f3..5a9f00cecf9dee 100644 --- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx +++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx @@ -18,7 +18,7 @@ import {useHasNewTimelineUI} from 'sentry/components/timeline/utils'; import {Tooltip} from 'sentry/components/tooltip'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Extraction} from 'sentry/utils/replays/extractDomNodes'; +import type {Extraction} from 'sentry/utils/replays/extractHtml'; import {getReplayDiffOffsetsFromFrame} from 'sentry/utils/replays/getDiffTimestamps'; import getFrameDetails from 'sentry/utils/replays/getFrameDetails'; import type ReplayReader from 'sentry/utils/replays/replayReader'; diff --git a/static/app/components/replays/diff/replayTextDiff.tsx b/static/app/components/replays/diff/replayTextDiff.tsx index 50fd76f2e745c7..2bafcd5458ef40 100644 --- a/static/app/components/replays/diff/replayTextDiff.tsx +++ b/static/app/components/replays/diff/replayTextDiff.tsx @@ -7,7 +7,7 @@ import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton'; import SplitDiff from 'sentry/components/splitDiff'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import useExtractedPageHtml from 'sentry/utils/replays/hooks/useExtractedPageHtml'; +import useExtractPageHtml from 'sentry/utils/replays/hooks/useExtractPageHtml'; import type ReplayReader from 'sentry/utils/replays/replayReader'; interface Props { @@ -16,8 +16,8 @@ interface Props { rightOffsetMs: number; } -export function ReplayTextDiff({leftOffsetMs, replay, rightOffsetMs}: Props) { - const {data} = useExtractedPageHtml({ +export function ReplayTextDiff({replay, leftOffsetMs, rightOffsetMs}: Props) { + const {data} = useExtractPageHtml({ replay, offsetMsToStopAt: [leftOffsetMs, rightOffsetMs], }); diff --git 
a/static/app/utils/replays/countDomNodes.tsx b/static/app/utils/replays/countDomNodes.tsx deleted file mode 100644 index f219f504e0258c..00000000000000 --- a/static/app/utils/replays/countDomNodes.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import replayerStepper from 'sentry/utils/replays/replayerStepper'; -import type {RecordingFrame} from 'sentry/utils/replays/types'; - -export type DomNodeChartDatapoint = { - added: number; - count: number; - endTimestampMs: number; - removed: number; - startTimestampMs: number; - timestampMs: number; -}; - -type Args = { - frames: RecordingFrame[] | undefined; - rrwebEvents: RecordingFrame[] | undefined; - startTimestampMs: number; -}; - -export default function countDomNodes({ - frames, - rrwebEvents, - startTimestampMs, -}: Args): Promise> { - let frameCount = 0; - const length = frames?.length ?? 0; - const frameStep = Math.max(Math.round(length * 0.007), 1); - - let prevIds: number[] = []; - - return replayerStepper({ - frames, - rrwebEvents, - startTimestampMs, - shouldVisitFrame: () => { - frameCount++; - return frameCount % frameStep === 0; - }, - onVisitFrame: (frame, collection, replayer) => { - const ids = replayer.getMirror().getIds(); // gets list of DOM nodes present - const count = ids.length; - const added = ids.filter(id => !prevIds.includes(id)).length; - const removed = prevIds.filter(id => !ids.includes(id)).length; - collection.set(frame as RecordingFrame, { - count, - added, - removed, - timestampMs: frame.timestamp, - startTimestampMs: frame.timestamp, - endTimestampMs: frame.timestamp, - }); - prevIds = ids; - }, - }); -} diff --git a/static/app/utils/replays/extractDomNodes.tsx b/static/app/utils/replays/extractHtml.tsx similarity index 54% rename from static/app/utils/replays/extractDomNodes.tsx rename to static/app/utils/replays/extractHtml.tsx index c225aa1f6f2d58..cb6333177d5396 100644 --- a/static/app/utils/replays/extractDomNodes.tsx +++ b/static/app/utils/replays/extractHtml.tsx @@ -1,11 +1,6 @@ import type {Mirror} from '@sentry-internal/rrweb-snapshot'; -import replayerStepper from 'sentry/utils/replays/replayerStepper'; -import { - getNodeId, - type RecordingFrame, - type ReplayFrame, -} from 'sentry/utils/replays/types'; +import type {ReplayFrame} from 'sentry/utils/replays/types'; export type Extraction = { frame: ReplayFrame; @@ -13,50 +8,7 @@ export type Extraction = { timestamp: number; }; -type Args = { - /** - * Frames where we should stop and extract html for a given dom node - */ - frames: ReplayFrame[] | undefined; - - /** - * The rrweb events that constitute the replay - */ - rrwebEvents: RecordingFrame[] | undefined; - - /** - * The replay start time, in ms - */ - startTimestampMs: number; -}; - -export default function extractDomNodes({ - frames, - rrwebEvents, - startTimestampMs, -}: Args): Promise> { - return replayerStepper({ - frames, - rrwebEvents, - startTimestampMs, - shouldVisitFrame: frame => { - const nodeId = getNodeId(frame); - return nodeId !== undefined && nodeId !== -1; - }, - onVisitFrame: (frame, collection, replayer) => { - const mirror = replayer.getMirror(); - const nodeId = getNodeId(frame); - const html = extractHtml(nodeId as number, mirror); - collection.set(frame as ReplayFrame, { - frame, - html, - timestamp: frame.timestampMs, - }); - }, - }); -} - -function extractHtml(nodeId: number, mirror: Mirror): string | null { +export default function extractHtml(nodeId: number, mirror: Mirror): string | null { const node = mirror.getNode(nodeId); const html = diff --git 
a/static/app/utils/replays/hooks/useCountDomNodes.tsx b/static/app/utils/replays/hooks/useCountDomNodes.tsx new file mode 100644 index 00000000000000..af11197e056fd1 --- /dev/null +++ b/static/app/utils/replays/hooks/useCountDomNodes.tsx @@ -0,0 +1,26 @@ +import {useQuery, type UseQueryResult} from 'sentry/utils/queryClient'; +import type ReplayReader from 'sentry/utils/replays/replayReader'; +import type {RecordingFrame} from 'sentry/utils/replays/types'; + +export type DomNodeChartDatapoint = { + added: number; + count: number; + endTimestampMs: number; + removed: number; + startTimestampMs: number; + timestampMs: number; +}; + +export default function useCountDomNodes({ + replay, +}: { + replay: null | ReplayReader; +}): UseQueryResult> { + return useQuery( + ['countDomNodes', replay], + () => { + return replay?.getCountDomNodes(); + }, + {enabled: Boolean(replay), cacheTime: Infinity} + ); +} diff --git a/static/app/utils/replays/hooks/useExtractDomNodes.tsx b/static/app/utils/replays/hooks/useExtractDomNodes.tsx new file mode 100644 index 00000000000000..29dc0609cc97cd --- /dev/null +++ b/static/app/utils/replays/hooks/useExtractDomNodes.tsx @@ -0,0 +1,18 @@ +import {useQuery, type UseQueryResult} from 'sentry/utils/queryClient'; +import type {Extraction} from 'sentry/utils/replays/extractHtml'; +import type ReplayReader from 'sentry/utils/replays/replayReader'; +import type {ReplayFrame} from 'sentry/utils/replays/types'; + +export default function useExtractDomNodes({ + replay, +}: { + replay: null | ReplayReader; +}): UseQueryResult> { + return useQuery( + ['getDomNodes', replay], + () => { + return replay?.getExtractDomNodes(); + }, + {enabled: Boolean(replay), cacheTime: Infinity} + ); +} diff --git a/static/app/utils/replays/extractPageHtml.tsx b/static/app/utils/replays/hooks/useExtractPageHtml.tsx similarity index 65% rename from static/app/utils/replays/extractPageHtml.tsx rename to static/app/utils/replays/hooks/useExtractPageHtml.tsx index 00d5d97bca4786..8cb4dff0d1b908 100644 --- a/static/app/utils/replays/extractPageHtml.tsx +++ b/static/app/utils/replays/hooks/useExtractPageHtml.tsx @@ -1,4 +1,6 @@ +import {useQuery} from 'sentry/utils/queryClient'; import replayerStepper from 'sentry/utils/replays/replayerStepper'; +import type ReplayReader from 'sentry/utils/replays/replayReader'; import type {RecordingFrame, ReplayFrame} from 'sentry/utils/replays/types'; type Args = { @@ -18,7 +20,7 @@ type Args = { startTimestampMs: number; }; -export default async function extactPageHtml({ +async function extractPageHtml({ offsetMsToStopAt, rrwebEvents, startTimestampMs, @@ -32,11 +34,11 @@ export default async function extactPageHtml({ frames, rrwebEvents, startTimestampMs, - shouldVisitFrame(_frame) { + shouldVisitFrame: () => { // Visit all the timestamps (converted to frames) that were passed in above return true; }, - onVisitFrame(frame, collection, replayer) { + onVisitFrame: (frame, collection, replayer) => { const doc = replayer.getMirror().getNode(1); const html = (doc as Document)?.body.outerHTML ?? 
''; collection.set(frame, html); @@ -46,3 +48,21 @@ export default async function extactPageHtml({ return [frame.offsetMs, html]; }); } + +interface Props { + offsetMsToStopAt: number[]; + replay: ReplayReader | null; +} + +export default function useExtractPageHtml({replay, offsetMsToStopAt}: Props) { + return useQuery( + ['extactPageHtml', replay, offsetMsToStopAt], + () => + extractPageHtml({ + offsetMsToStopAt, + rrwebEvents: replay?.getRRWebFrames(), + startTimestampMs: replay?.getReplay().started_at.getTime() ?? 0, + }), + {enabled: Boolean(replay), cacheTime: Infinity} + ); +} diff --git a/static/app/utils/replays/hooks/useExtractedDomNodes.tsx b/static/app/utils/replays/hooks/useExtractedDomNodes.tsx deleted file mode 100644 index c5eb4c7566c217..00000000000000 --- a/static/app/utils/replays/hooks/useExtractedDomNodes.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import {useQuery} from 'sentry/utils/queryClient'; -import extractDomNodes from 'sentry/utils/replays/extractDomNodes'; -import type ReplayReader from 'sentry/utils/replays/replayReader'; - -export default function useExtractedDomNodes({replay}: {replay: null | ReplayReader}) { - return useQuery( - ['getDomNodes', replay], - () => - extractDomNodes({ - frames: replay?.getDOMFrames(), - rrwebEvents: replay?.getRRWebFrames(), - startTimestampMs: replay?.getReplay().started_at.getTime() ?? 0, - }), - {enabled: Boolean(replay), cacheTime: Infinity} - ); -} diff --git a/static/app/utils/replays/hooks/useExtractedPageHtml.tsx b/static/app/utils/replays/hooks/useExtractedPageHtml.tsx deleted file mode 100644 index 2906cff4395fbd..00000000000000 --- a/static/app/utils/replays/hooks/useExtractedPageHtml.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import {useQuery} from 'sentry/utils/queryClient'; -import extractPageHtml from 'sentry/utils/replays/extractPageHtml'; -import type ReplayReader from 'sentry/utils/replays/replayReader'; - -interface Props { - offsetMsToStopAt: number[]; - replay: ReplayReader | null; -} - -export default function useExtractedPageHtml({replay, offsetMsToStopAt}: Props) { - return useQuery( - ['extactPageHtml', replay, offsetMsToStopAt], - () => - extractPageHtml({ - offsetMsToStopAt, - rrwebEvents: replay?.getRRWebFrames(), - startTimestampMs: replay?.getReplay().started_at.getTime() ?? 
0, - }), - {enabled: Boolean(replay), cacheTime: Infinity} - ); -} diff --git a/static/app/utils/replays/replayReader.tsx b/static/app/utils/replays/replayReader.tsx index f5d8b2d12830cd..f612447fef2632 100644 --- a/static/app/utils/replays/replayReader.tsx +++ b/static/app/utils/replays/replayReader.tsx @@ -1,4 +1,5 @@ import * as Sentry from '@sentry/react'; +import type {eventWithTime} from '@sentry-internal/rrweb'; import memoize from 'lodash/memoize'; import {type Duration, duration} from 'moment-timezone'; @@ -6,6 +7,7 @@ import {defined} from 'sentry/utils'; import domId from 'sentry/utils/domId'; import localStorageWrapper from 'sentry/utils/localStorage'; import clamp from 'sentry/utils/number/clamp'; +import extractHtml from 'sentry/utils/replays/extractHtml'; import hydrateBreadcrumbs, { replayInitBreadcrumb, } from 'sentry/utils/replays/hydrateBreadcrumbs'; @@ -17,6 +19,7 @@ import { } from 'sentry/utils/replays/hydrateRRWebRecordingFrames'; import hydrateSpans from 'sentry/utils/replays/hydrateSpans'; import {replayTimestamps} from 'sentry/utils/replays/replayDataUtils'; +import replayerStepper from 'sentry/utils/replays/replayerStepper'; import type { BreadcrumbFrame, ClipWindow, @@ -26,6 +29,7 @@ import type { MemoryFrame, OptionFrame, RecordingFrame, + ReplayFrame, serializedNodeWithId, SlowClickFrame, SpanFrame, @@ -34,6 +38,7 @@ import type { import { BreadcrumbCategories, EventType, + getNodeId, IncrementalSource, isDeadClick, isDeadRageClick, @@ -137,6 +142,53 @@ function removeDuplicateNavCrumbs( return otherBreadcrumbFrames.concat(uniqueNavCrumbs); } +const extractDomNodes = { + shouldVisitFrame: frame => { + const nodeId = getNodeId(frame); + return nodeId !== undefined && nodeId !== -1; + }, + onVisitFrame: (frame, collection, replayer) => { + const mirror = replayer.getMirror(); + const nodeId = getNodeId(frame); + const html = extractHtml(nodeId as number, mirror); + collection.set(frame as ReplayFrame, { + frame, + html, + timestamp: frame.timestampMs, + }); + }, +}; + +const countDomNodes = function (frames: eventWithTime[]) { + let frameCount = 0; + const length = frames?.length ?? 0; + const frameStep = Math.max(Math.round(length * 0.007), 1); + + let prevIds: number[] = []; + + return { + shouldVisitFrame() { + frameCount++; + return frameCount % frameStep === 0; + }, + onVisitFrame(frame, collection, replayer) { + const ids = replayer.getMirror().getIds(); // gets list of DOM nodes present + const count = ids.length; + const added = ids.filter(id => !prevIds.includes(id)).length; + const removed = prevIds.filter(id => !ids.includes(id)).length; + collection.set(frame as RecordingFrame, { + count, + added, + removed, + timestampMs: frame.timestamp, + startTimestampMs: frame.timestamp, + endTimestampMs: frame.timestamp, + }); + prevIds = ids; + }, + }; +}; + export default class ReplayReader { static factory({ attachments, @@ -412,6 +464,34 @@ export default class ReplayReader { return this.processingErrors().length; }; + getCountDomNodes = memoize(async () => { + const {onVisitFrame, shouldVisitFrame} = countDomNodes(this.getRRWebMutations()); + + const results = await replayerStepper({ + frames: this.getRRWebMutations(), + rrwebEvents: this.getRRWebFrames(), + startTimestampMs: this.getReplay().started_at.getTime() ?? 
0, + onVisitFrame, + shouldVisitFrame, + }); + + return results; + }); + + getExtractDomNodes = memoize(async () => { + const {onVisitFrame, shouldVisitFrame} = extractDomNodes; + + const results = await replayerStepper({ + frames: this.getDOMFrames(), + rrwebEvents: this.getRRWebFrames(), + startTimestampMs: this.getReplay().started_at.getTime() ?? 0, + onVisitFrame, + shouldVisitFrame, + }); + + return results; + }); + getClipWindow = () => this._clipWindow; /** diff --git a/static/app/utils/replays/replayerStepper.tsx b/static/app/utils/replays/replayerStepper.tsx index 77dae4de4f7238..ad9cf47b31f23a 100644 --- a/static/app/utils/replays/replayerStepper.tsx +++ b/static/app/utils/replays/replayerStepper.tsx @@ -17,7 +17,7 @@ interface Args { } type FrameRef = { - frame: Frame | undefined; + current: Frame | undefined; }; export default function replayerStepper< @@ -62,25 +62,25 @@ export default function replayerStepper< }; const frameRef: FrameRef = { - frame: undefined, + current: undefined, }; const considerFrame = (frame: Frame) => { if (shouldVisitFrame(frame, replayer)) { - frameRef.frame = frame; - window.setTimeout(() => { + frameRef.current = frame; + window.requestAnimationFrame(() => { const timestamp = 'offsetMs' in frame ? frame.offsetMs : frame.timestamp - startTimestampMs; replayer.pause(timestamp); - }, 0); + }); } else { - frameRef.frame = undefined; + frameRef.current = undefined; nextOrDone(); } }; const handlePause = () => { - onVisitFrame(frameRef.frame!, collection, replayer); + onVisitFrame(frameRef.current!, collection, replayer); nextOrDone(); }; diff --git a/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx b/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx index 33f9eac5a6bf2c..f0a0a818ef1bcf 100644 --- a/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx +++ b/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx @@ -4,7 +4,7 @@ import classNames from 'classnames'; import BreadcrumbItem from 'sentry/components/replays/breadcrumbs/breadcrumbItem'; import {useReplayContext} from 'sentry/components/replays/replayContext'; -import type {Extraction} from 'sentry/utils/replays/extractDomNodes'; +import type {Extraction} from 'sentry/utils/replays/extractHtml'; import useCrumbHandlers from 'sentry/utils/replays/hooks/useCrumbHandlers'; import useCurrentHoverTime from 'sentry/utils/replays/playback/providers/useCurrentHoverTime'; import type {ReplayFrame} from 'sentry/utils/replays/types'; diff --git a/static/app/views/replays/detail/breadcrumbs/index.tsx b/static/app/views/replays/detail/breadcrumbs/index.tsx index 9423985810a2d0..5cc7c9a98d0bae 100644 --- a/static/app/views/replays/detail/breadcrumbs/index.tsx +++ b/static/app/views/replays/detail/breadcrumbs/index.tsx @@ -8,7 +8,7 @@ import {useReplayContext} from 'sentry/components/replays/replayContext'; import useJumpButtons from 'sentry/components/replays/useJumpButtons'; import {t} from 'sentry/locale'; import useCrumbHandlers from 'sentry/utils/replays/hooks/useCrumbHandlers'; -import useExtractedDomNodes from 'sentry/utils/replays/hooks/useExtractedDomNodes'; +import useExtractDomNodes from 'sentry/utils/replays/hooks/useExtractDomNodes'; import useVirtualizedInspector from 'sentry/views/replays/detail//useVirtualizedInspector'; import BreadcrumbFilters from 'sentry/views/replays/detail/breadcrumbs/breadcrumbFilters'; import BreadcrumbRow from 'sentry/views/replays/detail/breadcrumbs/breadcrumbRow'; @@ -30,8 +30,10 @@ const cellMeasurer = { function Breadcrumbs() 
{ const {currentTime, replay} = useReplayContext(); const {onClickTimestamp} = useCrumbHandlers(); - const {data: frameToExtraction, isFetching: isFetchingExtractions} = - useExtractedDomNodes({replay}); + + const {data: frameToExtraction, isFetching: isFetchingExtractions} = useExtractDomNodes( + {replay} + ); const startTimestampMs = replay?.getStartTimestampMs() ?? 0; const frames = replay?.getChapterFrames(); diff --git a/static/app/views/replays/detail/memoryPanel/domNodesChart.tsx b/static/app/views/replays/detail/memoryPanel/domNodesChart.tsx index a9ba0aa74df53c..4437e483633150 100644 --- a/static/app/views/replays/detail/memoryPanel/domNodesChart.tsx +++ b/static/app/views/replays/detail/memoryPanel/domNodesChart.tsx @@ -16,7 +16,7 @@ import {getFormattedDate} from 'sentry/utils/dates'; import {axisLabelFormatter} from 'sentry/utils/discover/charts'; import domId from 'sentry/utils/domId'; import formatReplayDuration from 'sentry/utils/duration/formatReplayDuration'; -import type {DomNodeChartDatapoint} from 'sentry/utils/replays/countDomNodes'; +import type {DomNodeChartDatapoint} from 'sentry/utils/replays/hooks/useCountDomNodes'; interface Props extends Pick, 'currentTime' | 'setCurrentTime'> { diff --git a/static/app/views/replays/detail/memoryPanel/index.tsx b/static/app/views/replays/detail/memoryPanel/index.tsx index 9358cb2d18fb74..6fc2c0d206c71a 100644 --- a/static/app/views/replays/detail/memoryPanel/index.tsx +++ b/static/app/views/replays/detail/memoryPanel/index.tsx @@ -6,33 +6,20 @@ import Placeholder from 'sentry/components/placeholder'; import {useReplayContext} from 'sentry/components/replays/replayContext'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {useQuery} from 'sentry/utils/queryClient'; -import countDomNodes from 'sentry/utils/replays/countDomNodes'; +import useCountDomNodes from 'sentry/utils/replays/hooks/useCountDomNodes'; import useCurrentHoverTime from 'sentry/utils/replays/playback/providers/useCurrentHoverTime'; -import type ReplayReader from 'sentry/utils/replays/replayReader'; import DomNodesChart from 'sentry/views/replays/detail/memoryPanel/domNodesChart'; import MemoryChart from 'sentry/views/replays/detail/memoryPanel/memoryChart'; -function useCountDomNodes({replay}: {replay: null | ReplayReader}) { - return useQuery( - ['countDomNodes', replay], - () => - countDomNodes({ - frames: replay?.getRRWebMutations(), - rrwebEvents: replay?.getRRWebFrames(), - startTimestampMs: replay?.getStartTimestampMs() ?? 0, - }), - {enabled: Boolean(replay), cacheTime: Infinity} - ); -} - export default function MemoryPanel() { const {currentTime, isFetching, replay, setCurrentTime} = useReplayContext(); const [currentHoverTime, setCurrentHoverTime] = useCurrentHoverTime(); const memoryFrames = replay?.getMemoryFrames(); - const {data: frameToCount} = useCountDomNodes({replay}); + const {data: frameToCount, isLoading: isDomNodeDataLoading} = useCountDomNodes({ + replay, + }); const domNodeData = useMemo( () => Array.from(frameToCount?.values() || []), [frameToCount] @@ -65,7 +52,7 @@ export default function MemoryPanel() { ); const domNodesChart = - !replay || isFetching ? ( + !replay || isDomNodeDataLoading ? 
( ) : ( From adfb52c925d65360911ea99df541212a35073398 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 23 Jul 2024 13:42:12 -0700 Subject: [PATCH 009/126] feat(query-builder): Add config for disallowing free text (#74752) --- .../searchQueryBuilder/index.spec.tsx | 18 +++++++++ .../searchQueryBuilder/index.stories.tsx | 4 +- .../components/searchQueryBuilder/index.tsx | 7 ++++ .../searchQueryBuilder/tokens/combobox.tsx | 6 +++ .../searchQueryBuilder/tokens/freeText.tsx | 40 ++++++++++++++++++- .../components/searchQueryBuilder/utils.tsx | 2 + 6 files changed, 73 insertions(+), 4 deletions(-) diff --git a/static/app/components/searchQueryBuilder/index.spec.tsx b/static/app/components/searchQueryBuilder/index.spec.tsx index 69cdfbdcbaa00a..af1e1828ae3723 100644 --- a/static/app/components/searchQueryBuilder/index.spec.tsx +++ b/static/app/components/searchQueryBuilder/index.spec.tsx @@ -2010,4 +2010,22 @@ describe('SearchQueryBuilder', function () { ).toBeInTheDocument(); }); }); + + describe('disallowFreeText', function () { + it('should mark free text invalid', async function () { + render( + + ); + + expect(screen.getByRole('row', {name: 'foo'})).toHaveAttribute( + 'aria-invalid', + 'true' + ); + + await userEvent.click(getLastInput()); + expect( + await screen.findByText('Free text is not supported in this search') + ).toBeInTheDocument(); + }); + }); }); diff --git a/static/app/components/searchQueryBuilder/index.stories.tsx b/static/app/components/searchQueryBuilder/index.stories.tsx index 1eb1e4604afb88..6b5ce33602da5c 100644 --- a/static/app/components/searchQueryBuilder/index.stories.tsx +++ b/static/app/components/searchQueryBuilder/index.stories.tsx @@ -114,7 +114,7 @@ export default storyBook(SearchQueryBuilder, story => { }); story('Config Options', () => { - const configs = ['disallowLogicalOperators', 'disallowWildcard']; + const configs = ['disallowFreeText', 'disallowLogicalOperators', 'disallowWildcard']; const [enabledConfigs, setEnabledConfigs] = useState([...configs]); const queryBuilderOptions = enabledConfigs.reduce((acc, config) => { @@ -141,7 +141,7 @@ export default storyBook(SearchQueryBuilder, story => { ))} parseQueryBuilderValue(state.query, fieldDefinitionGetter, { + disallowFreeText, disallowLogicalOperators, disallowWildcard, filterKeys, }), [ + disallowFreeText, disallowLogicalOperators, disallowWildcard, fieldDefinitionGetter, diff --git a/static/app/components/searchQueryBuilder/tokens/combobox.tsx b/static/app/components/searchQueryBuilder/tokens/combobox.tsx index 2978a7f11b83cd..255e46698c35f6 100644 --- a/static/app/components/searchQueryBuilder/tokens/combobox.tsx +++ b/static/app/components/searchQueryBuilder/tokens/combobox.tsx @@ -87,6 +87,7 @@ type SearchQueryBuilderComboboxProps; onKeyDown?: (e: KeyboardEvent) => void; onKeyUp?: (e: KeyboardEvent) => void; + onOpenChange?: (newOpenState: boolean) => void; onPaste?: (e: React.ClipboardEvent) => void; openOnFocus?: boolean; placeholder?: string; @@ -469,6 +470,7 @@ function SearchQueryBuilderComboboxInner { + onOpenChange?.(isOpen); + }, [onOpenChange, isOpen]); + const { overlayProps, triggerProps, diff --git a/static/app/components/searchQueryBuilder/tokens/freeText.tsx b/static/app/components/searchQueryBuilder/tokens/freeText.tsx index 2f5b4f591b63e0..f14245c1866ddc 100644 --- a/static/app/components/searchQueryBuilder/tokens/freeText.tsx +++ b/static/app/components/searchQueryBuilder/tokens/freeText.tsx @@ -22,6 +22,7 @@ import type { FocusOverride, } from 
'sentry/components/searchQueryBuilder/types'; import { + InvalidReason, type ParseResultToken, Token, type TokenResult, @@ -261,13 +262,38 @@ function KeyDescription({tag}: {tag: Tag}) { ); } +function shouldHideInvalidTooltip({ + token, + inputValue, + isOpen, +}: { + inputValue: string; + isOpen: boolean; + token: TokenResult; +}) { + if (!token.invalid || isOpen) { + return true; + } + + switch (token.invalid.type) { + case InvalidReason.FREE_TEXT_NOT_ALLOWED: + return inputValue === ''; + case InvalidReason.WILDCARD_NOT_ALLOWED: + return !inputValue.includes('*'); + default: + return false; + } +} + function InvalidText({ token, state, item, inputValue, + isOpen, }: { inputValue: string; + isOpen: boolean; item: Node; state: ListState; token: TokenResult; @@ -280,7 +306,9 @@ function InvalidText({ state={state} token={token} item={item} - forceVisible={!inputValue.includes('*') ? false : undefined} + forceVisible={ + shouldHideInvalidTooltip({token, inputValue, isOpen}) ? false : undefined + } skipWrapper={false} > {inputValue} @@ -297,6 +325,7 @@ function SearchQueryBuilderInputInternal({ const organization = useOrganization(); const inputRef = useRef(null); const trimmedTokenValue = token.text.trim(); + const [isOpen, setIsOpen] = useState(false); const [inputValue, setInputValue] = useState(trimmedTokenValue); const [selectionIndex, setSelectionIndex] = useState(0); const isFocused = @@ -486,6 +515,7 @@ function SearchQueryBuilderInputInternal({ setSelectionIndex(e.target.selectionStart ?? 0); }} onKeyDown={onKeyDown} + onOpenChange={setIsOpen} tabIndex={isFocused ? 0 : -1} maxOptions={50} onPaste={onPaste} @@ -515,7 +545,13 @@ function SearchQueryBuilderInputInternal({ ) } - + ); } diff --git a/static/app/components/searchQueryBuilder/utils.tsx b/static/app/components/searchQueryBuilder/utils.tsx index 3314e464d49956..22d4028165618c 100644 --- a/static/app/components/searchQueryBuilder/utils.tsx +++ b/static/app/components/searchQueryBuilder/utils.tsx @@ -65,6 +65,7 @@ export function parseQueryBuilderValue( getFieldDefinition: FieldDefinitionGetter, options?: { filterKeys: TagCollection; + disallowFreeText?: boolean; disallowLogicalOperators?: boolean; disallowWildcard?: boolean; } @@ -72,6 +73,7 @@ export function parseQueryBuilderValue( return collapseTextTokens( parseSearch(value || ' ', { flattenParenGroups: true, + disallowFreeText: options?.disallowFreeText, disallowWildcard: options?.disallowWildcard, disallowedLogicalOperators: options?.disallowLogicalOperators ? new Set([BooleanOperator.AND, BooleanOperator.OR]) From cff5483b15a4dbce148687d960124912096f3bc1 Mon Sep 17 00:00:00 2001 From: Alberto Leal Date: Tue, 23 Jul 2024 16:47:02 -0400 Subject: [PATCH 010/126] fix(hybrid-cloud): Fix private field for RpcOrganization model (#74748) Fields in RPC models that begin with underscores are automatically excluded: https://docs.pydantic.dev/1.10/usage/models/#automatically-excluded-attributes To avoid this, we need to mark them with [`PrivateAttr()`](https://docs.pydantic.dev/1.10/usage/models/#private-model-attributes). Otherwise, `RpcOrganization.default_owner_id` will always return `None`. This is needed for https://github.com/getsentry/getsentry/pull/14709. 
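A minimal sketch of the difference under pydantic v1 (an illustrative model, not the real `RpcOrganization`; the stand-in lookup value is made up):

```python
from pydantic import BaseModel, PrivateAttr


class RpcOrgSketch(BaseModel):
    id: int = 0

    # A plain `_default_owner_id: int | None = None` would be excluded from the
    # model because of the leading underscore; PrivateAttr() keeps it as a real
    # per-instance attribute that can be used as a cache.
    _default_owner_id: int | None = PrivateAttr(default=None)

    @property
    def default_owner_id(self) -> int | None:
        if self._default_owner_id is None:
            # stand-in for the real owner lookup
            self._default_owner_id = 42
        return self._default_owner_id


org = RpcOrgSketch()
assert org.default_owner_id == 42  # computed once, then served from the cache
```

The change below also switches the accessor from a `hasattr` check to `getattr(self, "_default_owner_id") is None`, since the private attribute now always exists with a `None` default.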
--- src/sentry/organizations/services/organization/model.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/sentry/organizations/services/organization/model.py b/src/sentry/organizations/services/organization/model.py index c6f258805cc3a0..f08ec25cdc693b 100644 --- a/src/sentry/organizations/services/organization/model.py +++ b/src/sentry/organizations/services/organization/model.py @@ -9,7 +9,7 @@ from django.dispatch import Signal from django.utils import timezone -from pydantic import Field +from pydantic import Field, PrivateAttr from typing_extensions import TypedDict from sentry import roles @@ -249,7 +249,7 @@ class RpcOrganization(RpcOrganizationSummary): default_role: str = "" date_added: datetime = Field(default_factory=timezone.now) - _default_owner_id: int | None = None + _default_owner_id: int | None = PrivateAttr(default=None) def get_audit_log_data(self) -> dict[str, Any]: return { @@ -286,7 +286,7 @@ def default_owner_id(self) -> int | None: This mirrors the method on the Organization model. """ - if not hasattr(self, "_default_owner_id"): + if getattr(self, "_default_owner_id") is None: owners = self.get_owners() if len(owners) == 0: return None From 4cc5752915b7de18d41c83c5401c7fb30c0c58aa Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 23 Jul 2024 20:47:14 +0000 Subject: [PATCH 011/126] Revert "feat(seer-priority): Add a temporary feature flag to control seer-based priority (#74758)" This reverts commit bff71c496c503a7334359a624437bbfb4491ef99. Co-authored-by: snigdhas <16563948+snigdhas@users.noreply.github.com> --- src/sentry/event_manager.py | 6 ------ src/sentry/features/temporary.py | 2 -- tests/sentry/event_manager/test_priority.py | 1 - tests/sentry/event_manager/test_severity.py | 20 -------------------- 4 files changed, 29 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 1b6c386cb748db..7847eb8d1ab323 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -2344,12 +2344,6 @@ def _get_severity_metadata_for_group( """ from sentry.receivers.rules import PLATFORMS_WITH_PRIORITY_ALERTS - organization_supports_severity = features.has( - "organizations:seer-based-priority", event.project.organization, actor=None - ) - if not organization_supports_severity: - return {} - if killswitch_matches_context("issues.skip-seer-requests", {"project_id": event.project_id}): logger.warning( "get_severity_metadata_for_group.seer_killswitch_enabled", diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 3d1e7c9eae1d6e..13af8c1b476a83 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -421,8 +421,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:insights-browser-webvitals-optional-components", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Add default browser performance score profile for fallback when no or unknown browser name is provided manager.add("organizations:insights-default-performance-score-profiles", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) - # Enable priority alerts using the Seer calculations. This flag will move to a permanent flag before we release. 
- manager.add("organizations:seer-based-priority", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable standalone span ingestion manager.add("organizations:standalone-span-ingestion", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable the aggregate span waterfall view diff --git a/tests/sentry/event_manager/test_priority.py b/tests/sentry/event_manager/test_priority.py index 01ea2fb58a16ce..14a2c9903f4200 100644 --- a/tests/sentry/event_manager/test_priority.py +++ b/tests/sentry/event_manager/test_priority.py @@ -17,7 +17,6 @@ @region_silo_test @apply_feature_flag_on_cls("projects:first-event-severity-calculation") -@apply_feature_flag_on_cls("organizations:seer-based-priority") class TestEventManagerPriority(TestCase): @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) def test_flag_on(self, mock_get_severity_score: MagicMock): diff --git a/tests/sentry/event_manager/test_severity.py b/tests/sentry/event_manager/test_severity.py index 26e4addd0c925f..84555c0c96a15b 100644 --- a/tests/sentry/event_manager/test_severity.py +++ b/tests/sentry/event_manager/test_severity.py @@ -36,7 +36,6 @@ def make_event(**kwargs) -> dict[str, Any]: return result -@apply_feature_flag_on_cls("organizations:seer-based-priority") class TestGetEventSeverity(TestCase): @patch( "sentry.event_manager.severity_connection_pool.urlopen", @@ -324,7 +323,6 @@ def test_other_exception( assert cache.get(SEER_ERROR_COUNT_KEY) == 1 -@apply_feature_flag_on_cls("organizations:seer-based-priority") @apply_feature_flag_on_cls("projects:first-event-severity-calculation") class TestEventManagerSeverity(TestCase): @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) @@ -362,24 +360,6 @@ def test_flag_off(self, mock_get_severity_score: MagicMock): and "severity.reason" not in event.group.get_event_metadata() ) - @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) - def test_permanent_flag_off(self, mock_get_severity_score: MagicMock): - with self.feature({"organizations:seer-based-priority": False}): - manager = EventManager( - make_event( - exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]}, - platform="python", - ) - ) - event = manager.save(self.project.id) - - mock_get_severity_score.assert_not_called() - assert ( - event.group - and "severity" not in event.group.get_event_metadata() - and "severity.reason" not in event.group.get_event_metadata() - ) - @patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml")) def test_get_severity_score_not_called_on_second_event( self, mock_get_severity_score: MagicMock From f1c4de5f97333bfbfa85375f0b7bcc39ac5c27e4 Mon Sep 17 00:00:00 2001 From: Abdullah Khan <60121741+Abdkhan14@users.noreply.github.com> Date: Tue, 23 Jul 2024 16:53:55 -0400 Subject: [PATCH 012/126] feat(new-trace): Fixing Typerror from cacheMetrics being null. (#74778) Metrics can be null, led to Type error from formatters. 
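A tiny sketch of the guard now applied below (simplified types, not the real component):

```tsx
// Only build a row when the aggregate actually returned a number, so the
// byte/percentage formatters never receive null.
type CacheMetricRow = {
  'avg(cache.item_size)': number | null;
  'cache_miss_rate()': number | null;
};

function buildItems(row: CacheMetricRow) {
  const items: Array<{key: string; value: number}> = [];

  const itemSize = row['avg(cache.item_size)'];
  if (itemSize !== null) {
    items.push({key: 'avg(cache.item_size)', value: itemSize});
  }

  const missRate = row['cache_miss_rate()'];
  if (missRate !== null) {
    items.push({key: 'cache_miss_rate()', value: missRate});
  }

  return items;
}
```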
Co-authored-by: Abdullah Khan --- .../transaction/sections/cacheMetrics.tsx | 28 ++++++++++++------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/cacheMetrics.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/cacheMetrics.tsx index bde04f91d8f5dd..690c3ba43b74e4 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/cacheMetrics.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/cacheMetrics.tsx @@ -11,18 +11,26 @@ export function CacheMetrics({ }: { cacheMetrics: Pick[]; }) { - const items: SectionCardKeyValueList = cacheMetrics.flatMap((metricRow, idx) => [ - { - key: `avg(cache.item_size)${idx}`, + const itemSize: number | null = cacheMetrics[0]['avg(cache.item_size)']; + const missRate: number | null = cacheMetrics[0]['cache_miss_rate()']; + + const items: SectionCardKeyValueList = []; + + if (itemSize !== null) { + items.push({ + key: 'avg(cache.item_size)', subject: DataTitles['avg(cache.item_size)'], - value: formatBytesBase2(metricRow?.['avg(cache.item_size)']), - }, - { - key: `cache_miss_rate()${idx}`, + value: formatBytesBase2(itemSize), + }); + } + + if (missRate !== null) { + items.push({ + key: 'cache_miss_rate()', subject: DataTitles['cache_miss_rate()'], - value: formatPercentage(metricRow?.['cache_miss_rate()']), - }, - ]); + value: formatPercentage(missRate), + }); + } return ; } From b7e035bb03d9eca3f95a53fed9486e7f55adcf66 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 23 Jul 2024 13:57:27 -0700 Subject: [PATCH 013/126] fix(utils): Specify redis cluster to use for circuit breaker rate limiter (#74775) This changes the new `CircuitBreaker` class to use the `"ratelimiter"` redis cluster rather than the default one, which it appears may only work locally. --- src/sentry/utils/circuit_breaker2.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/sentry/utils/circuit_breaker2.py b/src/sentry/utils/circuit_breaker2.py index cbf870d50137a4..da144a77097ec2 100644 --- a/src/sentry/utils/circuit_breaker2.py +++ b/src/sentry/utils/circuit_breaker2.py @@ -135,7 +135,9 @@ def __init__(self, key: str, config: CircuitBreakerConfig): "recovery_duration", self.window * DEFAULT_RECOVERY_WINDOW_MULTIPLIER ) - self.limiter = RedisSlidingWindowRateLimiter() + self.limiter = RedisSlidingWindowRateLimiter( + cluster=settings.SENTRY_RATE_LIMIT_REDIS_CLUSTER + ) self.redis_pipeline = self.limiter.client.pipeline() self.primary_quota = Quota( From 82af6a3f209976d8d0a2359d818aa8bf74b3cfa7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 23 Jul 2024 14:00:34 -0700 Subject: [PATCH 014/126] chore(spans): Fix typo in aggregate spans banner (#74767) Adverbs for the win! --- .../app/components/events/interfaces/spans/aggregateSpans.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/components/events/interfaces/spans/aggregateSpans.tsx b/static/app/components/events/interfaces/spans/aggregateSpans.tsx index 54d41288c398a2..465ca8afaf853d 100644 --- a/static/app/components/events/interfaces/spans/aggregateSpans.tsx +++ b/static/app/components/events/interfaces/spans/aggregateSpans.tsx @@ -106,7 +106,7 @@ export function AggregateSpans({transaction, httpMethod}: Props) { trailingItems={ setIsBannerOpen(false)} />} > {tct( - 'This is an aggregate view across [x] events. 
You can see how frequent each span appears in the aggregate and identify any outliers.', + 'This is an aggregate view across [x] events. You can see how frequently each span appears in the aggregate and identify any outliers.', {x: event.count} )} From 21289b72bd026a7c3420bc501faa41d013df2d38 Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Tue, 23 Jul 2024 17:11:45 -0400 Subject: [PATCH 015/126] fix(context): Hide profile, replay and trace buttons on issue details (#74776) The `actionButton` for the trace context was previously used to display buttons in line with the rows of KeyValueList components. With the context redesign we omitted using `actionButton` in favour of `action.link`. [A recent change](https://github.com/getsentry/sentry/pull/74405) allowed for `actionButton` to be rendered in the new design, which consequently them to be rendered for the existing context that hadn't had those button specs removed yet. This PR removes the action buttons from the context rendered on issue details so that it can continue using the new component without rendering a button alongside them. Even though the `` component design (which uses `KeyValueList`) is deprecated, until it is removed I didn't feel comfortable removing the buttons without first modifying KeyValueList to read the new `action.link` property, just in case they're still used somewhere in the app. --- .../events/contexts/profile/index.spec.tsx | 2 +- .../events/contexts/profile/index.tsx | 20 ++----------------- .../events/contexts/replay/index.spec.tsx | 2 +- .../events/contexts/replay/index.tsx | 11 +--------- .../trace/getTraceKnownDataDetails.tsx | 15 ++------------ .../events/interfaces/keyValueList/index.tsx | 10 +++++++++- static/app/components/keyValueData/index.tsx | 5 +++-- 7 files changed, 19 insertions(+), 46 deletions(-) diff --git a/static/app/components/events/contexts/profile/index.spec.tsx b/static/app/components/events/contexts/profile/index.spec.tsx index 844ddfb4ec52dc..b2160e395423c5 100644 --- a/static/app/components/events/contexts/profile/index.spec.tsx +++ b/static/app/components/events/contexts/profile/index.spec.tsx @@ -47,6 +47,6 @@ describe('profile event context', function () { expect(screen.getByText('Profile ID')).toBeInTheDocument(); expect(screen.getByText(profileId)).toBeInTheDocument(); - expect(screen.getByRole('button', {name: 'View Profile'})).toBeInTheDocument(); + expect(screen.getByRole('link', {name: profileId})).toBeInTheDocument(); }); }); diff --git a/static/app/components/events/contexts/profile/index.tsx b/static/app/components/events/contexts/profile/index.tsx index efdf8d40a611d3..1abb2393e5cec0 100644 --- a/static/app/components/events/contexts/profile/index.tsx +++ b/static/app/components/events/contexts/profile/index.tsx @@ -1,5 +1,4 @@ import Feature from 'sentry/components/acl/feature'; -import {Button} from 'sentry/components/button'; import ErrorBoundary from 'sentry/components/errorBoundary'; import KeyValueList from 'sentry/components/events/interfaces/keyValueList'; import {t} from 'sentry/locale'; @@ -7,7 +6,6 @@ import type {Event, ProfileContext} from 'sentry/types/event'; import {ProfileContextKey} from 'sentry/types/event'; import type {Organization} from 'sentry/types/organization'; import type {Project} from 'sentry/types/project'; -import {trackAnalytics} from 'sentry/utils/analytics'; import {generateProfileFlamechartRoute} from 'sentry/utils/profiling/routes'; import useOrganization from 'sentry/utils/useOrganization'; import useProjects from 
'sentry/utils/useProjects'; @@ -103,7 +101,7 @@ function getProfileKnownDataDetails({ return undefined; } - const target = project?.slug + const link = project?.slug ? generateProfileFlamechartRoute({ orgSlug: organization.slug, projectSlug: project?.slug, @@ -114,21 +112,7 @@ function getProfileKnownDataDetails({ return { subject: t('Profile ID'), value: data.profile_id, - action: {link: target}, - actionButton: target && ( - - ), + action: {link}, }; } default: diff --git a/static/app/components/events/contexts/replay/index.spec.tsx b/static/app/components/events/contexts/replay/index.spec.tsx index 81da9a9197c350..49a8529e922e23 100644 --- a/static/app/components/events/contexts/replay/index.spec.tsx +++ b/static/app/components/events/contexts/replay/index.spec.tsx @@ -19,6 +19,6 @@ describe('replay event context', function () { expect(screen.getByText('Replay ID')).toBeInTheDocument(); expect(screen.getByText(replayId)).toBeInTheDocument(); - expect(screen.getByRole('button', {name: 'View Replay'})).toBeInTheDocument(); + expect(screen.getByRole('link', {name: replayId})).toBeInTheDocument(); }); }); diff --git a/static/app/components/events/contexts/replay/index.tsx b/static/app/components/events/contexts/replay/index.tsx index 8fdfcec36a221e..f61043de35e227 100644 --- a/static/app/components/events/contexts/replay/index.tsx +++ b/static/app/components/events/contexts/replay/index.tsx @@ -1,4 +1,3 @@ -import {LinkButton} from 'sentry/components/button'; import ErrorBoundary from 'sentry/components/errorBoundary'; import KeyValueList from 'sentry/components/events/interfaces/keyValueList'; import {t} from 'sentry/locale'; @@ -90,18 +89,10 @@ function getReplayKnownDataDetails({ return undefined; } const link = `/organizations/${organization.slug}/replays/${encodeURIComponent(replayId)}/`; - return { subject: t('Replay ID'), value: replayId, - action: { - link, - }, - actionButton: link && ( - - {t('View Replay')} - - ), + action: {link}, }; } default: diff --git a/static/app/components/events/contexts/trace/getTraceKnownDataDetails.tsx b/static/app/components/events/contexts/trace/getTraceKnownDataDetails.tsx index 19a449a4512a2b..334af145703d7b 100644 --- a/static/app/components/events/contexts/trace/getTraceKnownDataDetails.tsx +++ b/static/app/components/events/contexts/trace/getTraceKnownDataDetails.tsx @@ -1,6 +1,5 @@ import type {Location} from 'history'; -import {LinkButton} from 'sentry/components/button'; import type {KnownDataDetails} from 'sentry/components/events/contexts/utils'; import {generateTraceTarget} from 'sentry/components/quickTrace/utils'; import {t} from 'sentry/locale'; @@ -46,11 +45,6 @@ export function getTraceKnownDataDetails({ subject: t('Trace ID'), value: traceId, action: {link}, - actionButton: ( - - {t('Search by Trace')} - - ), }; } @@ -99,7 +93,7 @@ export function getTraceKnownDataDetails({ }; } - const to = transactionSummaryRouteWithQuery({ + const link = transactionSummaryRouteWithQuery({ orgSlug: organization.slug, transaction: transactionName, projectID: event.projectID, @@ -109,12 +103,7 @@ export function getTraceKnownDataDetails({ return { subject: t('Transaction'), value: transactionName, - action: {link: to}, - actionButton: ( - - {t('View Summary')} - - ), + action: {link}, }; } diff --git a/static/app/components/events/interfaces/keyValueList/index.tsx b/static/app/components/events/interfaces/keyValueList/index.tsx index 51117bc8efb69c..8667d5ba5a8482 100644 --- a/static/app/components/events/interfaces/keyValueList/index.tsx +++ 
b/static/app/components/events/interfaces/keyValueList/index.tsx @@ -3,6 +3,7 @@ import styled from '@emotion/styled'; import classNames from 'classnames'; import sortBy from 'lodash/sortBy'; +import {ValueLink} from 'sentry/components/keyValueData'; import {space} from 'sentry/styles/space'; import type {KeyValueListData} from 'sentry/types'; import {defined} from 'sentry/utils'; @@ -51,6 +52,7 @@ function KeyValueList({ meta, subjectIcon, subjectDataTestId, + action, actionButton, isContextData: valueIsContextData, isMultiValue, @@ -65,11 +67,17 @@ function KeyValueList({ raw, }; + const valueItem = action?.link ? ( + {} + ) : ( + + ); + const valueContainer = isMultiValue && Array.isArray(value) ? ( ) : ( - + valueItem ); return ( diff --git a/static/app/components/keyValueData/index.tsx b/static/app/components/keyValueData/index.tsx index 919e435cacc59e..250c39efdc564d 100644 --- a/static/app/components/keyValueData/index.tsx +++ b/static/app/components/keyValueData/index.tsx @@ -19,7 +19,8 @@ export interface KeyValueDataContentProps { * Specifies the item to display. * - If set, item.subjectNode will override displaying item.subject. * - If item.subjectNode is null, the value section will span the whole card. - * - The only displayed action is item.action.link, not item.actionButton + * - If item.action.link is specified, the value will appear as a link. + * - If item.actionButton is specified, the button will be rendered inline with the value. */ item: KeyValueListDataItem; /** @@ -284,7 +285,7 @@ const CardColumn = styled('div')` grid-column: span 1; `; -const ValueLink = styled(Link)` +export const ValueLink = styled(Link)` text-decoration: ${p => p.theme.linkUnderline} underline dotted; `; From 21ace0869ab5582f4cd976ff735c940e852f8e7b Mon Sep 17 00:00:00 2001 From: colin-sentry <161344340+colin-sentry@users.noreply.github.com> Date: Tue, 23 Jul 2024 17:34:22 -0400 Subject: [PATCH 016/126] Remove extra commas from the pipelines table (#74783) This breaks if a user sends bad data which causes a category not to have a group. 
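A small illustration of the failure mode and the fix (made-up group values):

```tsx
// With bad data a row can have an empty span.group; joining without filtering
// injects stray commas into the span.ai.pipeline.group:[...] search filter.
const rows: Array<{'span.group': string}> = [
  {'span.group': 'abc123'},
  {'span.group': ''}, // category without a group
  {'span.group': 'def456'},
];

const before = rows.map(x => x['span.group']).join(',');
// => 'abc123,,def456'

const after = rows
  .map(x => x['span.group'])
  .filter(x => !!x)
  .join(',');
// => 'abc123,def456'
```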
--- .../llmMonitoring/components/tables/pipelinesTable.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx b/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx index 4ec71e2455dafc..eba56768d7bb24 100644 --- a/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx +++ b/static/app/views/insights/llmMonitoring/components/tables/pipelinesTable.tsx @@ -127,7 +127,10 @@ export function PipelinesTable() { const {data: tokensUsedData, isLoading: tokensUsedLoading} = useSpanMetrics( { search: new MutableSearch( - `span.category:ai span.ai.pipeline.group:[${(data as Row[])?.map(x => x['span.group']).join(',')}]` + `span.category:ai span.ai.pipeline.group:[${(data as Row[]) + ?.map(x => x['span.group']) + ?.filter(x => !!x) + .join(',')}]` ), fields: ['span.ai.pipeline.group', 'sum(ai.total_tokens.used)'], }, From 9213703042e810118a39730a69aecec2a94cc395 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Tue, 23 Jul 2024 14:39:34 -0700 Subject: [PATCH 017/126] feat(toolbar): add linkout to gh code search from feature flag panel (#74782) clicking on a feature flag now links to code search for `sentry-options-automator` and `sentry`: https://github.com/user-attachments/assets/914b7166-dec9-4bfd-b7a7-7512b46abbba --- .../featureFlags/featureFlagsPanel.tsx | 20 +++++++++++++++++-- .../devtoolbar/hooks/useConfiguration.tsx | 1 + static/app/components/devtoolbar/types.ts | 1 + static/app/utils/useDevToolbar.tsx | 2 ++ 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx b/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx index b08e66626a3f97..35ea1e1d753d23 100644 --- a/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx +++ b/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx @@ -6,6 +6,7 @@ import { panelScrollableCss, } from 'sentry/components/devtoolbar/styles/infiniteList'; import Input from 'sentry/components/input'; +import ExternalLink from 'sentry/components/links/externalLink'; import {PanelTable} from 'sentry/components/panels/panelTable'; import {Cell} from 'sentry/components/replays/virtualizedGrid/bodyCell'; @@ -17,7 +18,7 @@ import PanelLayout from '../panelLayout'; export default function FeatureFlagsPanel() { const featureFlags = useEnabledFeatureFlags(); - const {organizationSlug} = useConfiguration(); + const {organizationSlug, featureFlagTemplateUrl, trackAnalytics} = useConfiguration(); const [searchTerm, setSearchTerm] = useState(''); const searchInput = useRef(null); @@ -42,7 +43,22 @@ export default function FeatureFlagsPanel() { .map(flag => { return ( - {flag} + {featureFlagTemplateUrl?.(flag) ? 
( + { + trackAnalytics?.({ + eventKey: `devtoolbar.feature-flag-list.item.click`, + eventName: `devtoolbar: Click feature-flag-list item`, + }); + }} + > + {flag} + + ) : ( + {flag} + )} ); })} diff --git a/static/app/components/devtoolbar/hooks/useConfiguration.tsx b/static/app/components/devtoolbar/hooks/useConfiguration.tsx index 97de5a13155590..2aa65a58bc5e05 100644 --- a/static/app/components/devtoolbar/hooks/useConfiguration.tsx +++ b/static/app/components/devtoolbar/hooks/useConfiguration.tsx @@ -10,6 +10,7 @@ const context = createContext({ projectId: 0, projectSlug: '', featureFlags: [], + featureFlagTemplateUrl: undefined, }); export function ConfigurationContextProvider({ diff --git a/static/app/components/devtoolbar/types.ts b/static/app/components/devtoolbar/types.ts index fc0e90a9f69ad3..9f1a39f8ec0ff0 100644 --- a/static/app/components/devtoolbar/types.ts +++ b/static/app/components/devtoolbar/types.ts @@ -11,6 +11,7 @@ export type Configuration = { projectSlug: string; SentrySDK?: typeof SentrySDK; domId?: string; + featureFlagTemplateUrl?: undefined | ((flag: string) => string | undefined); featureFlags?: string[]; trackAnalytics?: (props: {eventKey: string; eventName: string}) => void; }; diff --git a/static/app/utils/useDevToolbar.tsx b/static/app/utils/useDevToolbar.tsx index 06530b743d2bdb..13c27d67bbe88e 100644 --- a/static/app/utils/useDevToolbar.tsx +++ b/static/app/utils/useDevToolbar.tsx @@ -25,6 +25,8 @@ export default function useDevToolbar({enabled}: {enabled: boolean}) { projectId: 11276, projectSlug: 'javascript', featureFlags: organization.features, + featureFlagTemplateUrl: flag => + `https://github.com/search?q=repo%3Agetsentry%2Fsentry-options-automator+OR+repo%3Agetsentry%2Fsentry+${flag}&type=code`, trackAnalytics: (props: {eventKey: string; eventName: string}) => rawTrackAnalyticsEvent({...props, organization}), From ab9e0e389d2c39ab9c94086f9d51bdb267499f31 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 23 Jul 2024 14:41:35 -0700 Subject: [PATCH 018/126] feat(query-builder): Add disallowUnsupportedFilters config option (#74760) --- .../searchQueryBuilder/index.spec.tsx | 23 +++++++++++++++++++ .../searchQueryBuilder/index.stories.tsx | 9 ++++++-- .../components/searchQueryBuilder/index.tsx | 7 ++++++ .../components/searchQueryBuilder/utils.tsx | 3 +++ 4 files changed, 40 insertions(+), 2 deletions(-) diff --git a/static/app/components/searchQueryBuilder/index.spec.tsx b/static/app/components/searchQueryBuilder/index.spec.tsx index af1e1828ae3723..795b4b670cef39 100644 --- a/static/app/components/searchQueryBuilder/index.spec.tsx +++ b/static/app/components/searchQueryBuilder/index.spec.tsx @@ -2028,4 +2028,27 @@ describe('SearchQueryBuilder', function () { ).toBeInTheDocument(); }); }); + + describe('highlightUnsupportedFilters', function () { + it('should mark unsupported filters as invalid', async function () { + render( + + ); + + expect(screen.getByRole('row', {name: 'foo:bar'})).toHaveAttribute( + 'aria-invalid', + 'true' + ); + + await userEvent.click(getLastInput()); + await userEvent.keyboard('{ArrowLeft}'); + expect( + await screen.findByText('Invalid key. 
"foo" is not a supported search key.') + ).toBeInTheDocument(); + }); + }); }); diff --git a/static/app/components/searchQueryBuilder/index.stories.tsx b/static/app/components/searchQueryBuilder/index.stories.tsx index 6b5ce33602da5c..f96f564a154d30 100644 --- a/static/app/components/searchQueryBuilder/index.stories.tsx +++ b/static/app/components/searchQueryBuilder/index.stories.tsx @@ -114,7 +114,12 @@ export default storyBook(SearchQueryBuilder, story => { }); story('Config Options', () => { - const configs = ['disallowFreeText', 'disallowLogicalOperators', 'disallowWildcard']; + const configs = [ + 'disallowFreeText', + 'disallowLogicalOperators', + 'disallowWildcard', + 'disallowUnsupportedFilters', + ]; const [enabledConfigs, setEnabledConfigs] = useState([...configs]); const queryBuilderOptions = enabledConfigs.reduce((acc, config) => { @@ -141,7 +146,7 @@ export default storyBook(SearchQueryBuilder, story => { ))} Date: Tue, 23 Jul 2024 14:50:20 -0700 Subject: [PATCH 019/126] chore(relocation): Improve CODEOWNERS (#74762) --- .github/CODEOWNERS | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2134948f8fb450..a5daafa9217312 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -85,23 +85,26 @@ Makefile @getsentry/owners-sentr /bin/react-to-product-owners-yml-changes.sh @getsentry/open-source /static/app/components/sidebar/index.tsx @getsentry/open-source +## Backup - getsentry/team-ospo#153 +/src/sentry/backup/ @getsentry/open-source +/src/sentry/runner/commands/backup.py @getsentry/open-source +/src/sentry/testutils/helpers/backups.py @getsentry/open-source +/tests/sentry/backup/ @getsentry/open-source +/tests/sentry/runner/commands/test_backup.py @getsentry/open-source + ## Relocation - getsentry/team-ospo#153 -/src/sentry/analytics/events/relocation_created.py @getsentry/open-source -/src/sentry/analytics/events/relocation_forked.py @getsentry/open-source -/src/sentry/analytics/events/relocation_organization_imported.py @getsentry/open-source +/src/sentry/analytics/events/relocation_*.py @getsentry/open-source /src/sentry/api/endpoints/organization_fork.py @getsentry/open-source /src/sentry/api/endpoints/relocation/ @getsentry/open-source -/src/sentry/backup/ @getsentry/open-source -/src/sentry/backup/services/import_export/ @getsentry/open-source -/src/sentry/backup/services/relocation_export/ @getsentry/open-source -/src/sentry/runner/commands/backup.py @getsentry/open-source +/src/sentry/api/serialiers/models/relocation/ @getsentry/open-source +/src/sentry/models/relocation/ @getsentry/open-source +/src/sentry/relocation/ @getsentry/open-source /src/sentry/tasks/relocation.py @getsentry/open-source -/src/sentry/testutils/helpers/backups.py @getsentry/open-source /src/sentry/utils/relocation.py @getsentry/open-source /tests/sentry/api/endpoints/relocation @getsentry/open-source -/tests/sentry/backup/ @getsentry/open-source +/tests/sentry/api/endpoints/test_organization_fork.py @getsentry/open-source +/tests/sentry/api/serializer/test_relocation.py @getsentry/open-source /tests/sentry/tasks/test_relocation.py @getsentry/open-source -/tests/sentry/runner/commands/test_backup.py @getsentry/open-source /tests/sentry/utils/test_relocation.py @getsentry/open-source ## Build & Releases From 55215ce33a44fdbcb57a4284f4b08686917c2d9a Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Tue, 23 Jul 2024 14:54:12 -0700 Subject: [PATCH 020/126] 
feat(issue-details): Updated event navigation (#74466) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit this pr updates the event navigation as part of the issues detail streamline project. changes to the tab design will come in a different PR Screenshot 2024-07-16 at 3 31 52 PM. All Events will still redirect to the All Events tab, until we update the functionality of that. Part of https://github.com/getsentry/sentry/issues/73758 --- .../issueDetails/eventNavigation.spec.tsx | 96 +++++++ .../views/issueDetails/eventNavigation.tsx | 234 ++++++++++++++++++ .../views/issueDetails/groupEventHeader.tsx | 9 +- 3 files changed, 338 insertions(+), 1 deletion(-) create mode 100644 static/app/views/issueDetails/eventNavigation.spec.tsx create mode 100644 static/app/views/issueDetails/eventNavigation.tsx diff --git a/static/app/views/issueDetails/eventNavigation.spec.tsx b/static/app/views/issueDetails/eventNavigation.spec.tsx new file mode 100644 index 00000000000000..b70aabfa8f1426 --- /dev/null +++ b/static/app/views/issueDetails/eventNavigation.spec.tsx @@ -0,0 +1,96 @@ +import {EventFixture} from 'sentry-fixture/event'; +import {GroupFixture} from 'sentry-fixture/group'; + +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; + +import {browserHistory} from 'sentry/utils/browserHistory'; +import * as useMedia from 'sentry/utils/useMedia'; +import EventNavigation from 'sentry/views/issueDetails/eventNavigation'; + +describe('EventNavigation', () => { + const testEvent = EventFixture({ + id: 'event-id', + size: 7, + dateCreated: '2019-03-20T00:00:00.000Z', + errors: [], + entries: [], + tags: [ + {key: 'environment', value: 'dev'}, + {key: 'replayId', value: 'replay-id'}, + ], + previousEventID: 'prev-event-id', + nextEventID: 'next-event-id', + }); + + const defaultProps = { + event: testEvent, + group: GroupFixture({id: 'group-id'}), + projectSlug: 'project-slug', + }; + + describe('recommended event tabs', () => { + it('can navigate to the oldest event', async () => { + jest.spyOn(useMedia, 'default').mockReturnValue(true); + + render(); + + await userEvent.click(screen.getByRole('tab', {name: 'First Event'})); + + expect(browserHistory.push).toHaveBeenCalledWith({ + pathname: '/organizations/org-slug/issues/group-id/events/oldest/', + query: {referrer: 'oldest-event'}, + }); + }); + + it('can navigate to the latest event', async () => { + jest.spyOn(useMedia, 'default').mockReturnValue(true); + + render(); + + await userEvent.click(screen.getByRole('tab', {name: 'Last Event'})); + + expect(browserHistory.push).toHaveBeenCalledWith({ + pathname: '/organizations/org-slug/issues/group-id/events/latest/', + query: {referrer: 'latest-event'}, + }); + }); + + it('can navigate to the recommended event', async () => { + jest.spyOn(useMedia, 'default').mockReturnValue(true); + + render(, { + router: { + params: {eventId: 'latest'}, + }, + }); + + await userEvent.click(screen.getByRole('tab', {name: 'Recommended Event'})); + + expect(browserHistory.push).toHaveBeenCalledWith({ + pathname: '/organizations/org-slug/issues/group-id/events/recommended/', + query: {referrer: 'recommended-event'}, + }); + }); + }); + + it('can navigate next/previous events', () => { + render(); + + expect(screen.getByLabelText(/Previous Event/)).toHaveAttribute( + 'href', + `/organizations/org-slug/issues/group-id/events/prev-event-id/?referrer=previous-event` + ); + expect(screen.getByLabelText(/Next Event/)).toHaveAttribute( + 'href', + 
`/organizations/org-slug/issues/group-id/events/next-event-id/?referrer=next-event` + ); + }); + + it('shows jump to sections', async () => { + render(); + + expect(await screen.findByText('Replay')).toBeInTheDocument(); + expect(await screen.findByText('Tags')).toBeInTheDocument(); + expect(await screen.findByText('Event Highlights')).toBeInTheDocument(); + }); +}); diff --git a/static/app/views/issueDetails/eventNavigation.tsx b/static/app/views/issueDetails/eventNavigation.tsx new file mode 100644 index 00000000000000..26fef4b348f0cd --- /dev/null +++ b/static/app/views/issueDetails/eventNavigation.tsx @@ -0,0 +1,234 @@ +import styled from '@emotion/styled'; +import omit from 'lodash/omit'; + +import {Button, LinkButton} from 'sentry/components/button'; +import ButtonBar from 'sentry/components/buttonBar'; +import {TabList, Tabs} from 'sentry/components/tabs'; +import TimeSince from 'sentry/components/timeSince'; +import {IconChevron} from 'sentry/icons'; +import {t, tct} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {Event} from 'sentry/types/event'; +import type {Group} from 'sentry/types/group'; +import {defined} from 'sentry/utils'; +import {getShortEventId} from 'sentry/utils/events'; +import {getReplayIdFromEvent} from 'sentry/utils/replays/getReplayIdFromEvent'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; + +type EventNavigationProps = { + event: Event; + group: Group; +}; + +type SectionDefinition = { + condition: (event: Event) => boolean; + label: string; + section: string; +}; + +enum EventNavOptions { + RECOMMENDED = 'recommended', + LATEST = 'latest', + OLDEST = 'oldest', +} + +const EventNavLabels = { + [EventNavOptions.RECOMMENDED]: t('Recommended Event'), + [EventNavOptions.LATEST]: t('Last Event'), + [EventNavOptions.OLDEST]: t('First Event'), +}; + +const eventDataSections: SectionDefinition[] = [ + {section: 'event-highlights', label: t('Event Highlights'), condition: () => true}, + { + section: 'stacktrace', + label: t('Stack Trace'), + condition: (event: Event) => event.entries.some(entry => entry.type === 'stacktrace'), + }, + { + section: 'exception', + label: t('Exception'), + condition: (event: Event) => event.entries.some(entry => entry.type === 'exception'), + }, + { + section: 'breadcrumbs', + label: t('Breadcrumbs'), + condition: (event: Event) => + event.entries.some(entry => entry.type === 'breadcrumbs'), + }, + {section: 'tags', label: t('Tags'), condition: (event: Event) => event.tags.length > 0}, + {section: 'context', label: t('Context'), condition: (event: Event) => !!event.context}, + { + section: 'user-feedback', + label: t('User Feedback'), + condition: (event: Event) => !!event.userReport, + }, + { + section: 'replay', + label: t('Replay'), + condition: (event: Event) => !!getReplayIdFromEvent(event), + }, +]; + +export default function EventNavigation({event, group}: EventNavigationProps) { + const location = useLocation(); + const organization = useOrganization(); + + const hasPreviousEvent = defined(event.previousEventID); + const hasNextEvent = defined(event.nextEventID); + + const baseEventsPath = `/organizations/${organization.slug}/issues/${group.id}/events/`; + + const jumpToSections = eventDataSections.filter(eventSection => + eventSection.condition(event) + ); + + return ( +
+ + + + {Object.keys(EventNavLabels).map(label => { + return ( + + {EventNavLabels[label]} + + ); + })} + + + + + } + disabled={!hasPreviousEvent} + to={{ + pathname: `${baseEventsPath}${event.previousEventID}/`, + query: {...location.query, referrer: 'previous-event'}, + }} + /> + } + disabled={!hasNextEvent} + to={{ + pathname: `${baseEventsPath}${event.nextEventID}/`, + query: {...location.query, referrer: 'next-event'}, + }} + /> + + + {t('View All Events')} + + + + + + + + {tct('Event [eventId]', {eventId: getShortEventId(event.id)})} + + + + +
{t('Jump to:')}
+ + {jumpToSections.map(jump => ( + { + document + .getElementById(jump.section) + ?.scrollIntoView({behavior: 'smooth'}); + }} + borderless + size="sm" + > + {jump.label} + + ))} + +
+
+
+ ); +} + +const EventNavigationWrapper = styled('div')` + display: flex; + justify-content: space-between; +`; + +const NavigationWrapper = styled('div')` + display: flex; +`; + +const Navigation = styled('div')` + display: flex; + border-right: 1px solid ${p => p.theme.gray100}; +`; + +const EventInfoJumpToWrapper = styled('div')` + display: flex; + gap: ${space(1)}; + flex-direction: row; + justify-content: space-between; + align-items: center; +`; + +const EventInfo = styled('div')` + display: flex; + gap: ${space(1)}; + flex-direction: row; + align-items: center; +`; + +const JumpTo = styled('div')` + display: flex; + gap: ${space(1)}; + flex-direction: row; + align-items: center; + color: ${p => p.theme.gray300}; +`; + +const Divider = styled('hr')` + height: 1px; + width: 100%; + background: ${p => p.theme.border}; + border: none; + margin-top: ${space(1)}; + margin-bottom: ${space(1)}; +`; + +const EventID = styled('div')` + font-weight: bold; + font-size: ${p => p.theme.fontSizeLarge}; +`; + +const StyledButton = styled(Button)` + color: ${p => p.theme.gray300}; +`; diff --git a/static/app/views/issueDetails/groupEventHeader.tsx b/static/app/views/issueDetails/groupEventHeader.tsx index 7cb7c586ba8715..ab51d5bc390c5a 100644 --- a/static/app/views/issueDetails/groupEventHeader.tsx +++ b/static/app/views/issueDetails/groupEventHeader.tsx @@ -5,7 +5,9 @@ import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types/event'; import type {Group} from 'sentry/types/group'; import type {Project} from 'sentry/types/project'; +import EventNavigation from 'sentry/views/issueDetails/eventNavigation'; import {GroupEventCarousel} from 'sentry/views/issueDetails/groupEventCarousel'; +import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; type GroupEventHeaderProps = { event: Event; @@ -14,9 +16,14 @@ type GroupEventHeaderProps = { }; function GroupEventHeader({event, group, project}: GroupEventHeaderProps) { + const hasUpdatedEventNavigation = useHasStreamlinedUI(); return ( - + {hasUpdatedEventNavigation ? ( + + ) : ( + + )} ); } From 73390a209ce767120634e00b7138b93e5cc269b1 Mon Sep 17 00:00:00 2001 From: Alex Zaslavsky Date: Tue, 23 Jul 2024 15:22:13 -0700 Subject: [PATCH 021/126] fix(relocation): Better organization fork endpoint errors (#74784) --- src/sentry/api/endpoints/organization_fork.py | 24 +++++++++---- .../api/endpoints/test_organization_fork.py | 34 ++++++++++++++++--- 2 files changed, 47 insertions(+), 11 deletions(-) diff --git a/src/sentry/api/endpoints/organization_fork.py b/src/sentry/api/endpoints/organization_fork.py index 7e3b3fb30e6841..d114b90a222e44 100644 --- a/src/sentry/api/endpoints/organization_fork.py +++ b/src/sentry/api/endpoints/organization_fork.py @@ -18,6 +18,7 @@ from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission from sentry.api.serializers import serialize from sentry.hybridcloud.services.organization_mapping import organization_mapping_service +from sentry.models.organization import OrganizationStatus from sentry.models.relocation import Relocation from sentry.organizations.services.organization import organization_service from sentry.tasks.relocation import uploading_start @@ -31,6 +32,9 @@ ERR_ORGANIZATION_MAPPING_NOT_FOUND = Template( "The target organization `$slug` has no region mapping." ) +ERR_ORGANIZATION_INACTIVE = Template( + "The target organization `$slug` has status `$status`; status can only be `ACTIVE`." 
+) ERR_CANNOT_FORK_INTO_SAME_REGION = Template( "The organization already lives in region `$region`, so it cannot be forked into that region." ) @@ -64,16 +68,11 @@ def post(self, request: Request, organization_id_or_slug) -> Response: logger.info("relocations.fork.post.start", extra={"caller": request.user.id}) - org_retrieval_args = { - "only_visible": True, - "include_projects": False, - "include_teams": False, - } org_context = ( organization_service.get_organization_by_id(id=organization_id_or_slug) if str(organization_id_or_slug).isdecimal() else organization_service.get_organization_by_slug( - slug=organization_id_or_slug, **org_retrieval_args + slug=organization_id_or_slug, only_visible=False # Check for visibility below ) ) if not org_context: @@ -88,11 +87,22 @@ def post(self, request: Request, organization_id_or_slug) -> Response: organization = org_context.organization org_slug = organization.slug + if org_context.organization.status != OrganizationStatus.ACTIVE: + return Response( + { + "detail": ERR_ORGANIZATION_INACTIVE.substitute( + slug=org_slug, + status=str(OrganizationStatus(organization.status).name), + ) + }, + status=status.HTTP_400_BAD_REQUEST, + ) + org_mapping = organization_mapping_service.get(organization_id=organization.id) if not org_mapping: return Response( { - "detail": ERR_ORGANIZATION_NOT_FOUND.substitute( + "detail": ERR_ORGANIZATION_MAPPING_NOT_FOUND.substitute( slug=org_slug, ) }, diff --git a/tests/sentry/api/endpoints/test_organization_fork.py b/tests/sentry/api/endpoints/test_organization_fork.py index 1c42e03ab0dec5..5d74f09416e5af 100644 --- a/tests/sentry/api/endpoints/test_organization_fork.py +++ b/tests/sentry/api/endpoints/test_organization_fork.py @@ -4,9 +4,12 @@ from sentry.api.endpoints.organization_fork import ( ERR_CANNOT_FORK_INTO_SAME_REGION, ERR_DUPLICATE_ORGANIZATION_FORK, + ERR_ORGANIZATION_INACTIVE, + ERR_ORGANIZATION_MAPPING_NOT_FOUND, ERR_ORGANIZATION_NOT_FOUND, ) from sentry.models.organization import OrganizationStatus +from sentry.models.organizationmapping import OrganizationMapping from sentry.models.relocation import Relocation, RelocationFile from sentry.silo.base import SiloMode from sentry.testutils.cases import APITestCase @@ -273,7 +276,6 @@ def test_bad_organization_not_found( response = response = self.get_error_response("does-not-exist", status_code=404) - assert response.data.get("detail") is not None assert response.data.get("detail") == ERR_ORGANIZATION_NOT_FOUND.substitute( pointer="does-not-exist" ) @@ -282,6 +284,29 @@ def test_bad_organization_not_found( assert Relocation.objects.count() == relocation_count assert RelocationFile.objects.count() == relocation_file_count + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_bad_organization_mapping_not_found( + self, + uploading_start_mock: Mock, + analytics_record_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + with assume_test_silo_mode(SiloMode.CONTROL): + OrganizationMapping.objects.filter(slug=self.existing_org.slug).delete() + + response = response = self.get_error_response(self.existing_org.slug, status_code=404) + + assert response.data.get("detail") == ERR_ORGANIZATION_MAPPING_NOT_FOUND.substitute( + slug=self.existing_org.slug + ) + assert uploading_start_mock.call_count == 0 + assert 
analytics_record_mock.call_count == 0 + assert Relocation.objects.count() == relocation_count + assert RelocationFile.objects.count() == relocation_file_count + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) def test_bad_cannot_fork_deleted_organization( @@ -297,11 +322,12 @@ def test_bad_cannot_fork_deleted_organization( relocation_count = Relocation.objects.count() relocation_file_count = RelocationFile.objects.count() - response = response = self.get_error_response(self.existing_org.slug, status_code=404) + response = response = self.get_error_response(self.existing_org.slug, status_code=400) assert response.data.get("detail") is not None - assert response.data.get("detail") == ERR_ORGANIZATION_NOT_FOUND.substitute( - pointer=self.existing_org.slug + assert response.data.get("detail") == ERR_ORGANIZATION_INACTIVE.substitute( + slug=self.existing_org.slug, + status="DELETION_IN_PROGRESS", ) assert uploading_start_mock.call_count == 0 assert analytics_record_mock.call_count == 0 From 8d54097ff3d6d1f1ab0800f3ecf052cd507396a4 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 23 Jul 2024 15:31:26 -0700 Subject: [PATCH 022/126] feat(toolbar): Add a badge showing the number of active alerts (#74791) The badge is looking like this: ![SCR-20240723-nkva](https://github.com/user-attachments/assets/78df774e-122e-4778-8157-58d19f765bf9) also, i'm pretty happy about getting the types in shape to be able to use `select` from useQuery. This makes things easier going forward. Fixes https://github.com/getsentry/sentry/issues/74568 --- .../components/alerts/alertCountBadge.tsx | 12 +++++++ .../components/alerts/useAlertsCount.tsx | 31 +++++++++++++++++++ .../devtoolbar/components/countBadge.tsx | 20 ++++++++++++ .../devtoolbar/components/navigation.tsx | 17 +++++++--- .../devtoolbar/hooks/useFetchApiData.tsx | 11 ++++--- .../components/devtoolbar/styles/global.ts | 20 ++++++++++++ 6 files changed, 103 insertions(+), 8 deletions(-) create mode 100644 static/app/components/devtoolbar/components/alerts/alertCountBadge.tsx create mode 100644 static/app/components/devtoolbar/components/alerts/useAlertsCount.tsx create mode 100644 static/app/components/devtoolbar/components/countBadge.tsx diff --git a/static/app/components/devtoolbar/components/alerts/alertCountBadge.tsx b/static/app/components/devtoolbar/components/alerts/alertCountBadge.tsx new file mode 100644 index 00000000000000..e1ebbe62395a42 --- /dev/null +++ b/static/app/components/devtoolbar/components/alerts/alertCountBadge.tsx @@ -0,0 +1,12 @@ +import CountBadge from '../countBadge'; + +import useAlertsCount from './useAlertsCount'; + +export default function AlertCountBadge() { + const {data: count} = useAlertsCount(); + + if (count === undefined) { + return null; + } + return ; +} diff --git a/static/app/components/devtoolbar/components/alerts/useAlertsCount.tsx b/static/app/components/devtoolbar/components/alerts/useAlertsCount.tsx new file mode 100644 index 00000000000000..48c2ddb806fbc4 --- /dev/null +++ b/static/app/components/devtoolbar/components/alerts/useAlertsCount.tsx @@ -0,0 +1,31 @@ +import {useMemo} from 'react'; + +import type {Incident} from 'sentry/views/alerts/types'; + +import useConfiguration from '../../hooks/useConfiguration'; +import useFetchApiData from '../../hooks/useFetchApiData'; +import type {ApiEndpointQueryKey} from '../../types'; + +export default function useAlertsCount() { + const {organizationSlug, 
projectId} = useConfiguration(); + + return useFetchApiData({ + queryKey: useMemo( + (): ApiEndpointQueryKey => [ + 'io.sentry.toolbar', + `/organizations/${organizationSlug}/incidents/`, + { + query: { + limit: 1, + queryReferrer: 'devtoolbar', + project: [projectId], + statusPeriod: '14d', + status: 'open', + }, + }, + ], + [organizationSlug, projectId] + ), + select: (data): number => data.json.length, + }); +} diff --git a/static/app/components/devtoolbar/components/countBadge.tsx b/static/app/components/devtoolbar/components/countBadge.tsx new file mode 100644 index 00000000000000..c1cbb7a5a7ab45 --- /dev/null +++ b/static/app/components/devtoolbar/components/countBadge.tsx @@ -0,0 +1,20 @@ +import {css} from '@emotion/react'; + +import {smallCss} from '../styles/typography'; + +export default function CountBadge({value}: {value: number}) { + return
{value}
; +} + +const counterCss = css` + background: red; + background: var(--red400); + border-radius: 50%; + color: var(--gray100); + height: 1rem; + line-height: 1rem; + position: absolute; + right: -6px; + top: -6px; + width: 1rem; +`; diff --git a/static/app/components/devtoolbar/components/navigation.tsx b/static/app/components/devtoolbar/components/navigation.tsx index f81d131fbe9dcb..d0587d29380e0c 100644 --- a/static/app/components/devtoolbar/components/navigation.tsx +++ b/static/app/components/devtoolbar/components/navigation.tsx @@ -1,13 +1,17 @@ +import type {ReactNode} from 'react'; import {css} from '@emotion/react'; -import useConfiguration from 'sentry/components/devtoolbar/hooks/useConfiguration'; import InteractionStateLayer from 'sentry/components/interactionStateLayer'; import {IconClose, IconFlag, IconIssues, IconMegaphone, IconSiren} from 'sentry/icons'; +import useConfiguration from '../hooks/useConfiguration'; import usePlacementCss from '../hooks/usePlacementCss'; import useToolbarRoute from '../hooks/useToolbarRoute'; import {navigationButtonCss, navigationCss} from '../styles/navigation'; import {resetButtonCss, resetDialogCss} from '../styles/reset'; +import {buttonCss} from '../styles/typography'; + +import AlertCountBadge from './alerts/alertCountBadge'; export default function Navigation({ setIsDisabled, @@ -28,7 +32,9 @@ export default function Navigation({ > } /> } /> - } /> + }> + + } /> { @@ -44,13 +50,15 @@ export default function Navigation({ } function NavButton({ + children, icon, label, panelName, }: { - icon: React.ReactNode; + icon: ReactNode; label: string; panelName: ReturnType['state']['activePanel']; + children?: ReactNode; }) { const {trackAnalytics} = useConfiguration(); const {state, setActivePanel} = useToolbarRoute(); @@ -73,6 +81,7 @@ function NavButton({ > {icon} + {children} ); } @@ -98,7 +107,7 @@ function HideButton({onClick}: {onClick: () => void}) { return ( void; mri: MRI}) > - {t('What are queries?')} + {t('What are filters?')} ); diff --git a/static/app/components/metrics/queryBuilder.tsx b/static/app/components/metrics/queryBuilder.tsx index a245ca718aedb0..4c0fa686d99e61 100644 --- a/static/app/components/metrics/queryBuilder.tsx +++ b/static/app/components/metrics/queryBuilder.tsx @@ -6,10 +6,7 @@ import uniqBy from 'lodash/uniqBy'; import GuideAnchor from 'sentry/components/assistant/guideAnchor'; import type {SelectOption} from 'sentry/components/compactSelect'; import {CompactSelect} from 'sentry/components/compactSelect'; -import { - CardinalityWarningIcon, - MetricQuerySelect, -} from 'sentry/components/metrics/metricQuerySelect'; +import {MetricQuerySelect} from 'sentry/components/metrics/metricQuerySelect'; import { MetricSearchBar, type MetricSearchBarProps, @@ -19,13 +16,12 @@ import {Tooltip} from 'sentry/components/tooltip'; import {IconWarning} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {MetricsExtractionCondition, MRI} from 'sentry/types/metrics'; +import type {MRI} from 'sentry/types/metrics'; import {trackAnalytics} from 'sentry/utils/analytics'; import {getDefaultAggregation, isAllowedAggregation} from 'sentry/utils/metrics'; import {hasMetricsNewInputs} from 'sentry/utils/metrics/features'; import {parseMRI} from 'sentry/utils/metrics/mri'; import type {MetricsQuery} from 'sentry/utils/metrics/types'; -import {useCardinalityLimitedMetricVolume} from 'sentry/utils/metrics/useCardinalityLimitedMetricVolume'; import {useIncrementQueryMetric} from 
'sentry/utils/metrics/useIncrementQueryMetric'; import {useVirtualizedMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta'; import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags'; @@ -52,7 +48,6 @@ export const QueryBuilder = memo(function QueryBuilder({ const pageFilters = usePageFilters(); const {getConditions, getVirtualMeta, resolveVirtualMRI, getTags} = useVirtualMetricsContext(); - const {data: cardinality} = useCardinalityLimitedMetricVolume(pageFilters.selection); const { data: meta, @@ -220,19 +215,6 @@ export const QueryBuilder = memo(function QueryBuilder({ const projectIdStrings = useMemo(() => projectIds.map(String), [projectIds]); - const isCardinalityLimited = (condition?: MetricsExtractionCondition): boolean => { - if (!cardinality || !condition) { - return false; - } - return condition.mris.some(conditionMri => cardinality[conditionMri] > 0); - }; - - const spanConditions = getConditions(metricsQuery.mri); - - const istMetricQueryCardinalityLimited = isCardinalityLimited( - spanConditions.find(c => c.id === metricsQuery.condition) - ); - return ( @@ -266,8 +248,6 @@ export const QueryBuilder = memo(function QueryBuilder({ {!hasMetricsNewInputs(organization) && (selectedMeta?.type === 'v' ? ( { @@ -370,18 +350,13 @@ export const QueryBuilder = memo(function QueryBuilder({ {hasMetricsNewInputs(organization) ? ( selectedMeta?.type === 'v' ? ( - - {istMetricQueryCardinalityLimited && } - {t('Where')} - + {t('Where')} { onChange({condition: value}); }} - isCardinalityLimited={istMetricQueryCardinalityLimited} /> {t('And')} return {children}; } -type CompactSelectProps = +type CompactSelectProps = ( | Omit, 'triggerProps'> - | Omit, 'triggerProps'>; + | Omit, 'triggerProps'> +) & { + triggerProps?: Pick; +}; // A series of TS function overloads to properly parse prop types across 2 dimensions: // option value types (number vs string), and selection mode (singular vs multiple) @@ -36,12 +39,16 @@ function CompactSelect( props: CompactSelectProps ): JSX.Element; -function CompactSelect(props: CompactSelectProps) { +function CompactSelect({ + triggerProps, + ...props +}: CompactSelectProps) { const theme = useTheme(); return ( <_CompactSelect {...props} triggerProps={{ + icon: triggerProps?.icon, className: 'tag-button', }} css={css` @@ -81,8 +88,9 @@ const StyledButton = styled(Button)` `; const ComboBox = styled(_ComboBox)` - input: { + input { border-radius: 0; + font-weight: 600; } :last-child input { border-radius: 0 ${p => p.theme.borderRadius} ${p => p.theme.borderRadius} 0; @@ -94,6 +102,10 @@ const SmartSearchBar = styled(_SmartSearchBar)` :last-child { border-radius: 0 ${p => p.theme.borderRadius} ${p => p.theme.borderRadius} 0; } + + label { + color: ${p => p.theme.gray500}; + } `; const FieldGroup = styled('div')` From e6c1468e493c414a7afa5ab150856c4d0746935d Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 24 Jul 2024 10:00:37 +0200 Subject: [PATCH 035/126] fix(metrics): Fix formula not working (#74718) --- .../metrics/equationInput/index.tsx | 2 +- .../metrics/equationSymbol.spec.tsx | 8 +++---- .../app/components/metrics/equationSymbol.tsx | 4 ++-- .../components/metrics/querySymbol.spec.tsx | 24 +++++++++---------- static/app/components/metrics/querySymbol.tsx | 3 ++- .../utils/metrics/dashboardImport.spec.tsx | 6 ++--- static/app/utils/metrics/useMetricsQuery.tsx | 5 +++- static/app/views/metrics/summaryTable.tsx | 14 ++++++++++- .../metrics/utils/useFormulaDependencies.tsx | 2 +- 9 files changed, 42 insertions(+), 26 deletions(-) diff 
--git a/static/app/components/metrics/equationInput/index.tsx b/static/app/components/metrics/equationInput/index.tsx index 3ac13233e38c9d..b343d0edea4086 100644 --- a/static/app/components/metrics/equationInput/index.tsx +++ b/static/app/components/metrics/equationInput/index.tsx @@ -56,7 +56,7 @@ export function EquationInput({ const validateVariable = useCallback( (variable: string): string | null => { - if (!availableVariables.has(variable.toLowerCase())) { + if (!availableVariables.has(variable.toUpperCase())) { return t('Unknown query "%s"', variable); } return null; diff --git a/static/app/components/metrics/equationSymbol.spec.tsx b/static/app/components/metrics/equationSymbol.spec.tsx index dfdcf74cba24b1..1fc1a998c728a3 100644 --- a/static/app/components/metrics/equationSymbol.spec.tsx +++ b/static/app/components/metrics/equationSymbol.spec.tsx @@ -8,17 +8,17 @@ import { describe('getEquationSymbol', () => { it('should return the correct symbol', () => { - expect(getEquationSymbol(0)).toBe('ƒ1'); - expect(getEquationSymbol(1)).toBe('ƒ2'); + expect(getEquationSymbol(0)).toBe('Ƒ1'); + expect(getEquationSymbol(1)).toBe('Ƒ2'); }); }); describe('EquationSymbol', () => { it('renders', () => { render(); - expect(screen.getByText(textWithMarkupMatcher('ƒ1'))).toBeInTheDocument(); + expect(screen.getByText(textWithMarkupMatcher('Ƒ1'))).toBeInTheDocument(); render(); - expect(screen.getByText(textWithMarkupMatcher('ƒ6'))).toBeInTheDocument(); + expect(screen.getByText(textWithMarkupMatcher('Ƒ6'))).toBeInTheDocument(); }); }); diff --git a/static/app/components/metrics/equationSymbol.tsx b/static/app/components/metrics/equationSymbol.tsx index c1e026ec58e49a..bd16053a7ca16b 100644 --- a/static/app/components/metrics/equationSymbol.tsx +++ b/static/app/components/metrics/equationSymbol.tsx @@ -10,7 +10,7 @@ interface EquationSymbolProps extends React.ComponentProps( @@ -20,7 +20,7 @@ export const EquationSymbol = forwardRef( return ( - ƒ{equationId + 1} + Ƒ{equationId + 1} ); diff --git a/static/app/components/metrics/querySymbol.spec.tsx b/static/app/components/metrics/querySymbol.spec.tsx index b47735bfd7a5c8..3f0c42d3d6ba48 100644 --- a/static/app/components/metrics/querySymbol.spec.tsx +++ b/static/app/components/metrics/querySymbol.spec.tsx @@ -4,26 +4,26 @@ import {getQuerySymbol, QuerySymbol} from 'sentry/components/metrics/querySymbol describe('getQuerySymbol', () => { it('should return the correct symbol', () => { - expect(getQuerySymbol(0)).toBe('a'); - expect(getQuerySymbol(1)).toBe('b'); - expect(getQuerySymbol(25)).toBe('z'); - expect(getQuerySymbol(26)).toBe('aa'); - expect(getQuerySymbol(27)).toBe('ab'); - expect(getQuerySymbol(52)).toBe('ba'); - expect(getQuerySymbol(53)).toBe('bb'); - expect(getQuerySymbol(77)).toBe('bz'); - expect(getQuerySymbol(78)).toBe('ca'); - expect(getQuerySymbol(702)).toBe('aaa'); + expect(getQuerySymbol(0)).toBe('A'); + expect(getQuerySymbol(1)).toBe('B'); + expect(getQuerySymbol(25)).toBe('Z'); + expect(getQuerySymbol(26)).toBe('AA'); + expect(getQuerySymbol(27)).toBe('AB'); + expect(getQuerySymbol(52)).toBe('BA'); + expect(getQuerySymbol(53)).toBe('BB'); + expect(getQuerySymbol(77)).toBe('BZ'); + expect(getQuerySymbol(78)).toBe('CA'); + expect(getQuerySymbol(702)).toBe('AAA'); }); }); describe('QuerySymbol', () => { it('renders', () => { render(); - expect(screen.getByText('a')).toBeInTheDocument(); + expect(screen.getByText('A')).toBeInTheDocument(); render(); - expect(screen.getByText('ab')).toBeInTheDocument(); + 
expect(screen.getByText('AB')).toBeInTheDocument(); }); it('does not render for negative query ids', () => { diff --git a/static/app/components/metrics/querySymbol.tsx b/static/app/components/metrics/querySymbol.tsx index 609fa711ffac4c..098b25b6faaf73 100644 --- a/static/app/components/metrics/querySymbol.tsx +++ b/static/app/components/metrics/querySymbol.tsx @@ -5,7 +5,7 @@ import {space} from 'sentry/styles/space'; import {hasMetricsNewInputs} from 'sentry/utils/metrics/features'; import useOrganization from 'sentry/utils/useOrganization'; -const indexToChar = 'abcdefghijklmnopqrstuvwxyz'; +const indexToChar = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'; export const getQuerySymbol = (index: number) => { let result = ''; @@ -20,6 +20,7 @@ export const getQuerySymbol = (index: number) => { export const DeprecatedSymbol = styled('span')<{ isHidden?: boolean; }>` + text-transform: lowercase; display: flex; width: 38px; height: 38px; diff --git a/static/app/utils/metrics/dashboardImport.spec.tsx b/static/app/utils/metrics/dashboardImport.spec.tsx index 4238f31673b428..128e21a83ba5ad 100644 --- a/static/app/utils/metrics/dashboardImport.spec.tsx +++ b/static/app/utils/metrics/dashboardImport.spec.tsx @@ -301,8 +301,8 @@ describe('parseDashboard', () => { const queries = widgets[0].queries; expect(queries.length).toEqual(5); - expect(queries[2].aggregates[0]).toEqual('equation|2 * $b'); - expect(queries[3].aggregates[0]).toEqual('equation|$a + $b'); - expect(queries[4].aggregates[0]).toEqual('equation|($b + $b) - $a'); + expect(queries[2].aggregates[0]).toEqual('equation|2 * $B'); + expect(queries[3].aggregates[0]).toEqual('equation|$A + $B'); + expect(queries[4].aggregates[0]).toEqual('equation|($B + $B) - $A'); }); }); diff --git a/static/app/utils/metrics/useMetricsQuery.tsx b/static/app/utils/metrics/useMetricsQuery.tsx index d175e2e0d321bc..d00764fcc965cb 100644 --- a/static/app/utils/metrics/useMetricsQuery.tsx +++ b/static/app/utils/metrics/useMetricsQuery.tsx @@ -203,7 +203,10 @@ export function useMetricsQuery( queries .map(query => { if (isMetricFormula(query)) { - return query; + return { + ...query, + formula: query.formula.toUpperCase(), + }; } if (!isVirtualMetric(query)) { return query; diff --git a/static/app/views/metrics/summaryTable.tsx b/static/app/views/metrics/summaryTable.tsx index 5d3649e59665a5..953bfa4323e8d6 100644 --- a/static/app/views/metrics/summaryTable.tsx +++ b/static/app/views/metrics/summaryTable.tsx @@ -16,6 +16,7 @@ import type {MetricAggregation} from 'sentry/types/metrics'; import {trackAnalytics} from 'sentry/utils/analytics'; import {getUtcDateString} from 'sentry/utils/dates'; import {DEFAULT_SORT_STATE} from 'sentry/utils/metrics/constants'; +import {hasMetricsNewInputs} from 'sentry/utils/metrics/features'; import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters'; import { type FocusedMetricsSeries, @@ -238,7 +239,14 @@ export const SummaryTable = memo(function SummaryTable({ delay={500} overlayStyle={{maxWidth: '80vw'}} > - {row.seriesName} + + + {row.seriesName.split(':')[0]}: + + {row.seriesName.split(':')[1]} + {totalColumns.map(aggregate => ( @@ -557,3 +565,7 @@ const Row = styled('div')` } } `; + +const SerieNamePrefix = styled('span')<{hasMetricsNewInputs: boolean}>` + text-transform: ${p => (p.hasMetricsNewInputs ? 
'uppercase' : 'lowercase')}; +`; diff --git a/static/app/views/metrics/utils/useFormulaDependencies.tsx b/static/app/views/metrics/utils/useFormulaDependencies.tsx index 350713229d8039..ed5a84cbf7e34f 100644 --- a/static/app/views/metrics/utils/useFormulaDependencies.tsx +++ b/static/app/views/metrics/utils/useFormulaDependencies.tsx @@ -47,7 +47,7 @@ export function useFormulaDependencies() { tokens.forEach(token => { if (token.type === TokenType.VARIABLE) { - const widget = queriesLookup.get(token.content); + const widget = queriesLookup.get(token.content.toUpperCase()); if (widget) { dependencies.push(widget); } else { From 6de2850d94805a02fdc323413dd57fb2c48d0779 Mon Sep 17 00:00:00 2001 From: Giancarlo Buenaflor Date: Wed, 24 Jul 2024 10:05:01 +0200 Subject: [PATCH 036/126] feat(dart): add dart raw stacktrace representation (#74715) For raw stacktraces we don't have a Dart stacktrace representation and we currently fall back to using `getPythonFrame`. Related issue: https://github.com/getsentry/sentry-dart/issues/2040 --- .../stackTrace/rawContent.spec.tsx | 39 +++++++++++++++++++ .../crashContent/stackTrace/rawContent.tsx | 38 +++++++++++++++++- 2 files changed, 75 insertions(+), 2 deletions(-) diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx index b9e3ae440ebe89..57b7e58d05ebc9 100644 --- a/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx +++ b/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.spec.tsx @@ -122,6 +122,45 @@ describe('RawStacktraceContent', () => { ); }); + it('renders dart example', () => { + const dartData: StacktraceType = { + hasSystemFrames: false, + framesOmitted: null, + registers: {}, + frames: [ + FrameFixture({ + function: 'doThing', + package: 'flutter', + lineNo: 300, + colNo: 2, + filename: 'ink_well.dart', + absPath: 'package:flutter/src/material/ink_well.dart', + platform: undefined, + }), + FrameFixture({ + function: '', + package: '', + platform: undefined, + }), + FrameFixture({ + function: 'main', + package: 'sentry_flutter', + lineNo: 778, + colNo: 5, + filename: 'main.dart', + absPath: 'package:sentry_flutter/main.dart', + platform: undefined, + }), + ], + }; + expect(displayRawContent(dartData, 'dart', exception)).toEqual( + `Error: an error occurred + #0 main (package:sentry_flutter/main.dart:778:5) + #1 + #2 doThing (package:flutter/src/material/ink_well.dart:300:2)` + ); + }); + const inAppFrame = (fnName, line) => FrameFixture({ function: fnName, diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.tsx index d1494b9f83400d..27816548da83c8 100644 --- a/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.tsx +++ b/static/app/components/events/interfaces/crashContent/stackTrace/rawContent.tsx @@ -98,6 +98,33 @@ export function getJavaFrame(frame: Frame): string { return result; } +export function getDartFrame(frame: Frame, frameIdxFromEnd: number): string { + let result = ` #${frameIdxFromEnd}`; + + if (frame.function === '') { + return `${result} ${frame.function}`; + } + + if (defined(frame.function)) { + result += ' ' + frame.function; + } + if (defined(frame.absPath)) { + result += ' ('; + + result += frame.absPath; + if (defined(frame.lineNo) && frame.lineNo >= 0) { + result += ':' + frame.lineNo; + } + if 
(defined(frame.colNo) && frame.colNo >= 0) { + result += ':' + frame.colNo; + } + + result += ')'; + } + + return result; +} + function ljust(str: string, len: number) { return str + Array(Math.max(0, len - str.length) + 1).join(' '); } @@ -138,7 +165,12 @@ function getPreamble(exception: ExceptionValue, platform: string | undefined): s } } -function getFrame(frame: Frame, frameIdx: number, platform: string | undefined): string { +function getFrame( + frame: Frame, + frameIdx: number, + frameIdxFromEnd: number, + platform: string | undefined +): string { if (frame.platform) { platform = frame.platform; } @@ -153,6 +185,8 @@ function getFrame(frame: Frame, frameIdx: number, platform: string | undefined): return getPythonFrame(frame); case 'java': return getJavaFrame(frame); + case 'dart': + return getDartFrame(frame, frameIdxFromEnd); case 'objc': // fallthrough case 'cocoa': @@ -180,7 +214,7 @@ export default function displayRawContent( : rawFrames; const frames = framesToUse.map((frame, frameIdx) => - getFrame(frame, frameIdx, platform) + getFrame(frame, frameIdx, framesToUse.length - frameIdx - 1, platform) ); if (platform !== 'python') { From 3e81cf8e27a92e1e1fe74025761b11a341918e9a Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Wed, 24 Jul 2024 11:07:43 +0200 Subject: [PATCH 037/126] feat(metrics): span metrics open in traces (#74829) --- .../utils/metrics/virtualMetricsContext.tsx | 12 +++ static/app/views/metrics/widgetDetails.tsx | 80 +++++++++++++------ 2 files changed, 66 insertions(+), 26 deletions(-) diff --git a/static/app/utils/metrics/virtualMetricsContext.tsx b/static/app/utils/metrics/virtualMetricsContext.tsx index 4aaa31aba1dec0..85d1184038590d 100644 --- a/static/app/utils/metrics/virtualMetricsContext.tsx +++ b/static/app/utils/metrics/virtualMetricsContext.tsx @@ -15,6 +15,7 @@ import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; const Context = createContext<{ + getCondition: (mri: MRI, conditionId: number) => MetricsExtractionCondition | null; getConditions: (mri: MRI) => MetricsExtractionCondition[]; getExtractionRule: (mri: MRI) => MetricsExtractionRule | null; getTags: (mri: MRI) => MetricTag[]; @@ -41,6 +42,7 @@ const Context = createContext<{ throw new Error('Not implemented'); }, getConditions: () => [], + getCondition: () => null, getExtractionRule: () => null, getTags: () => [], getVirtualMRIQuery: () => null, @@ -142,6 +144,14 @@ export function VirtualMetricsContextProvider({children}: Props) { [virtualMRIToRuleMap] ); + const getCondition = useCallback( + (mri: MRI, conditionId: number) => { + const rule = virtualMRIToRuleMap.get(mri); + return rule?.conditions.find(c => c.id === conditionId) || null; + }, + [virtualMRIToRuleMap] + ); + const getTags = useCallback( (mri: MRI): MetricTag[] => { const rule = virtualMRIToRuleMap.get(mri); @@ -229,6 +239,7 @@ export function VirtualMetricsContextProvider({children}: Props) { getVirtualMRI, getVirtualMeta, getConditions, + getCondition, getExtractionRule, getTags, getVirtualMRIQuery, @@ -240,6 +251,7 @@ export function VirtualMetricsContextProvider({children}: Props) { getVirtualMRI, getVirtualMeta, getConditions, + getCondition, getExtractionRule, getTags, getVirtualMRIQuery, diff --git a/static/app/views/metrics/widgetDetails.tsx b/static/app/views/metrics/widgetDetails.tsx index 4e14e3f894e028..12c1a657a2732f 100644 --- a/static/app/views/metrics/widgetDetails.tsx +++ 
b/static/app/views/metrics/widgetDetails.tsx @@ -15,9 +15,11 @@ import {space} from 'sentry/styles/space'; import type {PageFilters} from 'sentry/types/core'; import type {MetricAggregation, MRI} from 'sentry/types/metrics'; import {defined} from 'sentry/utils'; +import {isVirtualMetric} from 'sentry/utils/metrics'; import type {FocusedMetricsSeries, MetricsWidget} from 'sentry/utils/metrics/types'; import {isMetricsEquationWidget} from 'sentry/utils/metrics/types'; import type {MetricsSamplesResults} from 'sentry/utils/metrics/useMetricsSamples'; +import {useVirtualMetricsContext} from 'sentry/utils/metrics/virtualMetricsContext'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; import type {FocusAreaProps} from 'sentry/views/metrics/context'; @@ -92,6 +94,7 @@ export function MetricDetails({ }: MetricDetailsProps) { const {selection} = usePageFilters(); const organization = useOrganization(); + const {getCondition} = useVirtualMetricsContext(); const queryWithFocusedSeries = useMemo( () => @@ -104,32 +107,57 @@ export function MetricDetails({ ); const selectionRange = focusArea?.selection?.range; - const selectionDatetime = - defined(selectionRange) && defined(selectionRange) && defined(selectionRange) - ? ({ - start: selectionRange.start, - end: selectionRange.end, - } as PageFilters['datetime']) - : undefined; - - const tracesTarget = generateTracesRouteWithQuery({ - orgSlug: organization.slug, - metric: - aggregation && mri - ? { - max: selectionRange?.max, - min: selectionRange?.min, - op: aggregation, - query: queryWithFocusedSeries, - mri: mri, - } - : undefined, - query: { - project: selection.projects as unknown as string[], - environment: selection.environments, - ...normalizeDateTimeParams(selectionDatetime ?? selection.datetime), - }, - }); + + const tracesTarget = useMemo(() => { + const selectionDatetime = + defined(selectionRange) && defined(selectionRange) && defined(selectionRange) + ? ({ + start: selectionRange.start, + end: selectionRange.end, + } as PageFilters['datetime']) + : undefined; + + if (mri && isVirtualMetric({mri})) { + const conditionQuery = getCondition(mri, condition || -1)?.value || ''; + + return generateTracesRouteWithQuery({ + orgSlug: organization.slug, + query: { + project: selection.projects as unknown as string[], + environment: selection.environments, + ...normalizeDateTimeParams(selectionDatetime ?? selection.datetime), + query: `${conditionQuery.trim()} ${queryWithFocusedSeries?.trim()}`, + }, + }); + } + if (aggregation && mri) { + return generateTracesRouteWithQuery({ + orgSlug: organization.slug, + metric: { + max: selectionRange?.max, + min: selectionRange?.min, + op: aggregation, + query: queryWithFocusedSeries, + mri: mri, + }, + query: { + project: selection.projects as unknown as string[], + environment: selection.environments, + ...normalizeDateTimeParams(selectionDatetime ?? selection.datetime), + }, + }); + } + return ''; + }, [ + aggregation, + mri, + organization.slug, + queryWithFocusedSeries, + selection, + selectionRange, + condition, + getCondition, + ]); return ( From 397a5ad9de16a9846369bbe72acf61d68c37544c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vjeran=20Grozdani=C4=87?= Date: Wed, 24 Jul 2024 11:13:17 +0200 Subject: [PATCH 038/126] feat(lookup-field) Add custom lookup field `element_contains` for ArrayField (#74492) Current Django ORM is limited if there is a need for a check if the element of the array field contains some value. 
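For illustration, here is roughly what the new lookup lets you express and the SQL it is expected to produce; the model, app label, field name and search value below are made up for the example and are not part of this patch:

```python
# Illustrative sketch only -- model/app/field names are not part of this patch.
from django.contrib.postgres.fields import ArrayField
from django.db import models


class Article(models.Model):
    tags = ArrayField(models.TextField(), null=True)

    class Meta:
        app_label = "example"


# With the lookup registered on ArrayField, this matches every Article where
# at least one element of `tags` contains the substring "django":
Article.objects.filter(tags__element_contains="django")

# Roughly equivalent Postgres SQL:
#   SELECT ... FROM example_article
#   WHERE EXISTS (
#       SELECT * FROM UNNEST(tags) AS elem
#       WHERE elem LIKE '%' || 'django' || '%'
#   )
```
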
This custom lookup fields works in a similar way to Django ORM `contains` lookup field, but it is applied to the every element of the array field. ## Use case This is semi-pseudo code: ``` class CustomModel(models.Model): field = ArrayField() CustomModel.objects.filter(field__element_contains="") ``` --- src/sentry/db/models/fields/array.py | 5 ++ src/sentry/db/postgres/lookups/__init__.py | 0 .../lookups/array_element_contains.py | 28 ++++++++++ tests/sentry/db/postgres/lookups/__init__.py | 0 .../lookups/test_array_element_contains.py | 51 +++++++++++++++++++ 5 files changed, 84 insertions(+) create mode 100644 src/sentry/db/postgres/lookups/__init__.py create mode 100644 src/sentry/db/postgres/lookups/array_element_contains.py create mode 100644 tests/sentry/db/postgres/lookups/__init__.py create mode 100644 tests/sentry/db/postgres/lookups/test_array_element_contains.py diff --git a/src/sentry/db/models/fields/array.py b/src/sentry/db/models/fields/array.py index 7aa80341e02873..568e71b0bd14f3 100644 --- a/src/sentry/db/models/fields/array.py +++ b/src/sentry/db/models/fields/array.py @@ -2,9 +2,11 @@ import ast +from django.contrib.postgres.fields import ArrayField as DjangoArrayField from django.db import models from sentry.db.models.utils import Creator +from sentry.db.postgres.lookups.array_element_contains import ArrayElementContainsLookup from sentry.utils import json @@ -69,3 +71,6 @@ def to_python(self, value): assert "\\" not in value, "Unexpected ArrayField format" value = value[1:-1].split(",") return [self.of.to_python(x) for x in value] + + +DjangoArrayField.register_lookup(ArrayElementContainsLookup) diff --git a/src/sentry/db/postgres/lookups/__init__.py b/src/sentry/db/postgres/lookups/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/db/postgres/lookups/array_element_contains.py b/src/sentry/db/postgres/lookups/array_element_contains.py new file mode 100644 index 00000000000000..7dad2c4275ed09 --- /dev/null +++ b/src/sentry/db/postgres/lookups/array_element_contains.py @@ -0,0 +1,28 @@ +from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.models import Lookup +from django.db.models.sql.compiler import SQLCompiler + +__all__ = ("ArrayElementContainsLookup",) + + +class ArrayElementContainsLookup(Lookup): + lookup_name = "element_contains" + + def as_sql( + self, compiler: SQLCompiler, connection: BaseDatabaseWrapper + ) -> tuple[str, list[int | str]]: + """ + Custom lookup for checking if an element of the array contains a value. 
+ """ + + lhs, lhs_params = self.process_lhs(compiler, connection) + rhs, rhs_params = self.process_rhs(compiler, connection) + params = lhs_params + rhs_params + + clause = f"""\ +EXISTS ( + SELECT * FROM UNNEST({lhs}) AS elem + WHERE elem LIKE '%%' || {rhs} || '%%' +) +""" + return clause, params diff --git a/tests/sentry/db/postgres/lookups/__init__.py b/tests/sentry/db/postgres/lookups/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/db/postgres/lookups/test_array_element_contains.py b/tests/sentry/db/postgres/lookups/test_array_element_contains.py new file mode 100644 index 00000000000000..7fcc942e6a1910 --- /dev/null +++ b/tests/sentry/db/postgres/lookups/test_array_element_contains.py @@ -0,0 +1,51 @@ +import pytest +from django.contrib.postgres.fields import ArrayField as DjangoArrayField +from django.db import models + + +class ArrayElementContainsLookupTestModel(models.Model): + id = models.AutoField(primary_key=True) + array_field = DjangoArrayField(models.TextField(), null=True) + + class Meta: + app_label = "fixtures" + + +@pytest.fixture +def array_element_contains_db(): + ArrayElementContainsLookupTestModel.objects.bulk_create( + [ + ArrayElementContainsLookupTestModel(array_field=["foo", "bar", "baz"]), + ArrayElementContainsLookupTestModel(array_field=["foo", "bar"]), + ArrayElementContainsLookupTestModel(array_field=[]), + ArrayElementContainsLookupTestModel(array_field=None), + ] + ) + yield + ArrayElementContainsLookupTestModel.objects.all().delete() + + +@pytest.mark.django_db +def test_basic_usage_for_array_field(array_element_contains_db): + assert ( + ArrayElementContainsLookupTestModel.objects.filter( + array_field__element_contains="foo" + ).count() + == 2 + ) + + result = ArrayElementContainsLookupTestModel.objects.filter(array_field__element_contains="baz") + assert len(result) == 1 + assert result[0].array_field == ["foo", "bar", "baz"] + + assert ( + ArrayElementContainsLookupTestModel.objects.filter( + array_field__element_contains="qux" + ).count() + == 0 + ) + + assert ( + ArrayElementContainsLookupTestModel.objects.filter(array_field__element_contains="").count() + == 2 + ) # only non empty arrays are considered, and it's elements are checked if they contain '' From 86c6bcbbfe3cfda58cc89d727b19bad39436da2e Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 24 Jul 2024 11:29:20 +0200 Subject: [PATCH 039/126] ref(onboarding): Sort platform list alphabetically (#74832) --- static/app/components/platformPicker.spec.tsx | 39 +++++++++++++++++++ static/app/components/platformPicker.tsx | 14 ++++++- 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/static/app/components/platformPicker.spec.tsx b/static/app/components/platformPicker.spec.tsx index b4293e4d9e1723..8d75b8d3949743 100644 --- a/static/app/components/platformPicker.spec.tsx +++ b/static/app/components/platformPicker.spec.tsx @@ -67,4 +67,43 @@ describe('PlatformPicker', function () { await userEvent.click(screen.getByRole('button', {name: 'Clear'})); expect(props.setPlatform).toHaveBeenCalledWith(null); }); + + it('platforms shall be sorted alphabetically', function () { + render(); + + const alphabeticallyOrderedPlatformNames = [ + 'Android', + 'Angular', + 'ASP.NET Core', + 'Browser JavaScript', + 'Django', + 'Express', + 'FastAPI', + 'Flask', + 'Flutter', + 'Go', + 'iOS', + 'Java', + 'Laravel', + 'Nest.js', + 'Next.js', + 'Node.js', + 'PHP', + 'Python', + 'Rails', + 'React', + 'React Native', + 'Ruby', + 'Spring Boot', + 'Unity', + 
'Vue', + '.NET', + ]; + + const platformNames = screen.getAllByRole('heading', {level: 3}); + + platformNames.forEach((platform, index) => { + expect(platform).toHaveTextContent(alphabeticallyOrderedPlatformNames[index]); + }); + }); }); diff --git a/static/app/components/platformPicker.tsx b/static/app/components/platformPicker.tsx index e194bb33fb9bd5..9d70f4cbaa0a33 100644 --- a/static/app/components/platformPicker.tsx +++ b/static/app/components/platformPicker.tsx @@ -35,6 +35,10 @@ const selectablePlatforms = platforms.filter(platform => createablePlatforms.has(platform.id) ); +function startsWithPunctuation(name: string) { + return /^[\p{P}]/u.test(name); +} + export type Category = (typeof categoryList)[number]['id']; export type Platform = PlatformIntegration & { @@ -100,7 +104,15 @@ class PlatformPicker extends Component { const filtered = tempSelectablePlatforms .filter(this.state.filter ? subsetMatch : categoryMatch) - .sort((a, b) => a.id.localeCompare(b.id)); + .sort((a, b) => { + if (startsWithPunctuation(a.name) && !startsWithPunctuation(b.name)) { + return 1; + } + if (!startsWithPunctuation(a.name) && startsWithPunctuation(b.name)) { + return -1; + } + return a.name.localeCompare(b.name); + }); return this.props.showOther ? filtered : filtered.filter(({id}) => id !== 'other'); } From a4bc7c3c80663a42d1ea0ee8df99af33312bcd5e Mon Sep 17 00:00:00 2001 From: Lukas Stracke Date: Wed, 24 Jul 2024 11:44:19 +0200 Subject: [PATCH 040/126] fix(getting-started): Add missing Node SDK import to verification snippet (#74833) Add a missing `require` call to the verification snippet in the Node onboarding and shorten the errors-only verification snippet. --- static/app/gettingStartedDocs/node/node.tsx | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/static/app/gettingStartedDocs/node/node.tsx b/static/app/gettingStartedDocs/node/node.tsx index 1220fd7078e627..9a9661e9382430 100644 --- a/static/app/gettingStartedDocs/node/node.tsx +++ b/static/app/gettingStartedDocs/node/node.tsx @@ -103,6 +103,8 @@ const onboarding: OnboardingConfig = { language: 'javascript', code: isPerformanceSelected ? ` +const Sentry = require("@sentry/node"); + Sentry.startSpan({ op: "test", name: "My First Test Span", @@ -114,13 +116,13 @@ Sentry.startSpan({ } });` : ` -setTimeout(() => { - try { - foo(); - } catch (e) { - Sentry.captureException(e); - } -}, 99);`, +const Sentry = require("@sentry/node"); + +try { + foo(); +} catch (e) { + Sentry.captureException(e); +}`, }, ], }, From 6ecdb709e4066ab42de9b33ca739e046bf2b9ece Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 24 Jul 2024 12:15:21 +0200 Subject: [PATCH 041/126] fix(metrics): Only show uppercase if rows>1 (#74836) --- static/app/views/metrics/summaryTable.tsx | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/static/app/views/metrics/summaryTable.tsx b/static/app/views/metrics/summaryTable.tsx index 953bfa4323e8d6..fe774705d39605 100644 --- a/static/app/views/metrics/summaryTable.tsx +++ b/static/app/views/metrics/summaryTable.tsx @@ -241,7 +241,9 @@ export const SummaryTable = memo(function SummaryTable({ > 1 + } > {row.seriesName.split(':')[0]}: @@ -566,6 +568,6 @@ const Row = styled('div')` } `; -const SerieNamePrefix = styled('span')<{hasMetricsNewInputs: boolean}>` - text-transform: ${p => (p.hasMetricsNewInputs ? 'uppercase' : 'lowercase')}; +const SerieNamePrefix = styled('span')<{uppercaseText: boolean}>` + text-transform: ${p => (p.uppercaseText ? 
'uppercase' : 'lowercase')}; `; From cad8ea34246ded64e10150228315b969b4edc953 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 24 Jul 2024 13:38:42 +0200 Subject: [PATCH 042/126] fix(metrics): Remove colon when single value (#74837) --- static/app/views/metrics/summaryTable.tsx | 39 +++++++++++++++++------ 1 file changed, 29 insertions(+), 10 deletions(-) diff --git a/static/app/views/metrics/summaryTable.tsx b/static/app/views/metrics/summaryTable.tsx index fe774705d39605..41fd59c548e4eb 100644 --- a/static/app/views/metrics/summaryTable.tsx +++ b/static/app/views/metrics/summaryTable.tsx @@ -27,6 +27,31 @@ import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; import {transactionSummaryRouteWithQuery} from 'sentry/views/performance/transactionSummary/utils'; +function SeriesName({ + seriesName, + isSingleSeries, +}: { + isSingleSeries: boolean; + seriesName: string; +}) { + const organization = useOrganization(); + + const prefix = seriesName.split(':')[0]; + const sufix = seriesName.split(':')[1] ?? null; + + return ( + + + {prefix} + {sufix && ':'} + + {sufix} + + ); +} + export const SummaryTable = memo(function SummaryTable({ series, onRowClick, @@ -239,16 +264,10 @@ export const SummaryTable = memo(function SummaryTable({ delay={500} overlayStyle={{maxWidth: '80vw'}} > - - 1 - } - > - {row.seriesName.split(':')[0]}: - - {row.seriesName.split(':')[1]} - + {totalColumns.map(aggregate => ( From 9002fcbc22d28da52d2dcdd8f85482afbcbbe943 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 24 Jul 2024 15:14:41 +0200 Subject: [PATCH 043/126] fix(onboarding): Add 'other' platform to all platforms list (#74838) --- static/app/components/platformPicker.spec.tsx | 15 +++++++++++++++ static/app/components/platformPicker.tsx | 9 ++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/static/app/components/platformPicker.spec.tsx b/static/app/components/platformPicker.spec.tsx index 8d75b8d3949743..2f79bbb0ba6ee6 100644 --- a/static/app/components/platformPicker.spec.tsx +++ b/static/app/components/platformPicker.spec.tsx @@ -106,4 +106,19 @@ describe('PlatformPicker', function () { expect(platform).toHaveTextContent(alphabeticallyOrderedPlatformNames[index]); }); }); + + it('"other" platform shall be rendered if filter contains it', async function () { + render(); + + expect(screen.queryByTestId('platform-other')).not.toBeInTheDocument(); + + await userEvent.type(screen.getByRole('textbox'), 'Oth'); + + expect(screen.queryByTestId('platform-other')).not.toBeInTheDocument(); + + // complete the word 'other' + await userEvent.type(screen.getByRole('textbox'), 'er'); + + expect(screen.getByTestId('platform-other')).toBeInTheDocument(); + }); }); diff --git a/static/app/components/platformPicker.tsx b/static/app/components/platformPicker.tsx index 9d70f4cbaa0a33..556bda7af631fb 100644 --- a/static/app/components/platformPicker.tsx +++ b/static/app/components/platformPicker.tsx @@ -93,6 +93,7 @@ class PlatformPicker extends Component { // temporary replacement of selectablePlatforms while `nintendo-switch` is behind feature flag const tempSelectablePlatforms = selectablePlatforms; + if (this.props.organization?.features.includes('selectable-nintendo-platform')) { const nintendo = platforms.find(p => p.id === 'nintendo-switch'); if (nintendo) { @@ -114,7 +115,13 @@ class PlatformPicker extends Component { return a.name.localeCompare(b.name); }); - return this.props.showOther ? 
filtered : filtered.filter(({id}) => id !== 'other'); + if (this.props.showOther && this.state.filter.toLocaleLowerCase() === 'other') { + // We only show 'Other' if users click on the 'Other' suggestion rendered in the not found state or type this word in the search bar + return [otherPlatform]; + } + + // 'other' is not part of the createablePlatforms list, therefore it won't be included in the filtered list + return filtered; } logSearch = debounce(() => { From f94da8691979e6b61b77cbeba0058cd54fe8f868 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 09:20:31 -0400 Subject: [PATCH 044/126] ref: unify the signature of Interface.to_string (#74798) --- src/sentry/interfaces/base.py | 2 +- src/sentry/interfaces/exception.py | 4 ++-- src/sentry/interfaces/message.py | 2 +- src/sentry/interfaces/security.py | 2 +- src/sentry/interfaces/stacktrace.py | 4 ++-- src/sentry/interfaces/template.py | 2 +- tests/sentry_plugins/victorops/test_plugin.py | 2 +- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/sentry/interfaces/base.py b/src/sentry/interfaces/base.py index a580872f05ada1..1c5eeb43423371 100644 --- a/src/sentry/interfaces/base.py +++ b/src/sentry/interfaces/base.py @@ -155,7 +155,7 @@ def get_score(self) -> int: def iter_tags(self): return iter(()) - def to_string(self, event, is_public=False, **kwargs): + def to_string(self, event) -> str: return "" def to_email_html(self, event, **kwargs): diff --git a/src/sentry/interfaces/exception.py b/src/sentry/interfaces/exception.py index 6e9a4dc035b515..0ba8666e75f95d 100644 --- a/src/sentry/interfaces/exception.py +++ b/src/sentry/interfaces/exception.py @@ -454,7 +454,7 @@ def get_api_meta(self, meta, is_public=False, platform=None): return {"values": result} - def to_string(self, event, is_public=False, **kwargs): + def to_string(self, event) -> str: if not self.values: return "" @@ -471,7 +471,7 @@ def to_string(self, event, is_public=False, **kwargs): ) + "\n\n" ) - return ("".join(output)).strip() + return "".join(output).strip() def get_stacktrace(self, *args, **kwargs): exc = self.values[-1] diff --git a/src/sentry/interfaces/message.py b/src/sentry/interfaces/message.py index 8fd69bf3269803..2a838149eb620e 100644 --- a/src/sentry/interfaces/message.py +++ b/src/sentry/interfaces/message.py @@ -47,5 +47,5 @@ def to_json(self): {"message": self.message, "formatted": self.formatted, "params": self.params or None} ) - def to_string(self, event, is_public=False, **kwargs): + def to_string(self, event) -> str: return self.formatted or self.message diff --git a/src/sentry/interfaces/security.py b/src/sentry/interfaces/security.py index ec086d68ff285e..92f7b24bcdbfb3 100644 --- a/src/sentry/interfaces/security.py +++ b/src/sentry/interfaces/security.py @@ -172,7 +172,7 @@ def to_python(cls, data, **kwargs): data.setdefault("effective_directive", None) return super().to_python(data, **kwargs) - def to_string(self, is_public=False, **kwargs): + def to_string(self, event) -> str: return orjson.dumps( {"csp-report": self.get_api_context()}, option=orjson.OPT_UTC_Z | orjson.OPT_NON_STR_KEYS, diff --git a/src/sentry/interfaces/stacktrace.py b/src/sentry/interfaces/stacktrace.py index caa047f3636824..83f14310b7196c 100644 --- a/src/sentry/interfaces/stacktrace.py +++ b/src/sentry/interfaces/stacktrace.py @@ -325,7 +325,7 @@ def is_unhashable_function(self): # queries and JSON data) return self.function.startswith(("lambda$", "[Anonymous")) - def to_string(self, 
event): + def to_string(self, event) -> str: if event.platform is not None: choices = [event.platform] else: @@ -525,7 +525,7 @@ def to_json(self): } ) - def to_string(self, event, is_public=False, **kwargs): + def to_string(self, event) -> str: return self.get_stacktrace(event, system_frames=False, max_frames=10) def get_stacktrace( diff --git a/src/sentry/interfaces/template.py b/src/sentry/interfaces/template.py index 3ae0f1a30cbd8f..ab2a5ca6a36680 100644 --- a/src/sentry/interfaces/template.py +++ b/src/sentry/interfaces/template.py @@ -45,7 +45,7 @@ def to_python(cls, data, **kwargs): return super().to_python(data, **kwargs) - def to_string(self, event, is_public=False, **kwargs): + def to_string(self, event) -> str: context = get_context( lineno=self.lineno, context_line=self.context_line, diff --git a/tests/sentry_plugins/victorops/test_plugin.py b/tests/sentry_plugins/victorops/test_plugin.py index 028cf37026590f..f03ef5f5e0a580 100644 --- a/tests/sentry_plugins/victorops/test_plugin.py +++ b/tests/sentry_plugins/victorops/test_plugin.py @@ -16,7 +16,7 @@ class UnicodeTestInterface(Interface): - def to_string(self, event): + def to_string(self, event) -> str: return self.body def get_title(self): From a7451877bbdd38be62025a1427a29922e1737ec2 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 09:21:35 -0400 Subject: [PATCH 045/126] ref: use baseclass implementation of monitor creation (#74806) this fixes an error in mypy 1.11 where the baseclass implementation is not compatible --- .../test_monitor_ingest_checkin_attachment.py | 21 +------------------ 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_attachment.py b/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_attachment.py index 305812520df7fb..319cc3f1a15502 100644 --- a/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_attachment.py +++ b/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_attachment.py @@ -1,13 +1,10 @@ -from datetime import timedelta from unittest import mock from django.core.files.uploadedfile import SimpleUploadedFile from django.urls import reverse -from django.utils import timezone -from sentry.models.environment import Environment from sentry.models.files.file import File -from sentry.monitors.models import CheckInStatus, MonitorCheckIn, MonitorEnvironment +from sentry.monitors.models import CheckInStatus, MonitorCheckIn from sentry.testutils.cases import MonitorIngestTestCase @@ -17,22 +14,6 @@ class MonitorIngestCheckinAttachmentEndpointTest(MonitorIngestTestCase): def get_path(self, monitor, checkin): return reverse(self.endpoint, args=[self.organization.slug, monitor.slug, checkin.guid]) - def _create_monitor(self): - return self.create_monitor() - - def _create_monitor_environment(self, monitor, name="production", **kwargs): - environment = Environment.get_or_create(project=self.project, name=name) - - monitorenvironment_defaults = { - "status": monitor.status, - "next_checkin": timezone.now() - timedelta(minutes=1), - **kwargs, - } - - return MonitorEnvironment.objects.create( - monitor=monitor, environment_id=environment.id, **monitorenvironment_defaults - ) - def test_upload(self): monitor = self._create_monitor() monitor_environment = self._create_monitor_environment(monitor) From 9261122cb88499cae6c2a6457411aa92c16ec558 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: 
Wed, 24 Jul 2024 09:22:15 -0400 Subject: [PATCH 046/126] ref: unify get_actions signature (#74803) pointed out as inconsistent by mypy 1.11 --- src/sentry/plugins/base/v2.py | 4 ++-- tests/sentry/templatetags/test_sentry_plugins.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/sentry/plugins/base/v2.py b/src/sentry/plugins/base/v2.py index 7b1f2349252f46..062e4b13b2e6fe 100644 --- a/src/sentry/plugins/base/v2.py +++ b/src/sentry/plugins/base/v2.py @@ -316,7 +316,7 @@ def get_rules(self, **kwargs): """ return [] - def get_actions(self, request, group, **kwargs): + def get_actions(self, request, group) -> list[tuple[str, str]]: """ Return a list of available actions to append this aggregate. @@ -326,7 +326,7 @@ def get_actions(self, request, group, **kwargs): ('Action Label', '/uri/to/action/') - >>> def get_actions(self, request, group, **kwargs): + >>> def get_actions(self, request, group): >>> return [('Google', 'http://google.com')] """ return [] diff --git a/tests/sentry/templatetags/test_sentry_plugins.py b/tests/sentry/templatetags/test_sentry_plugins.py index 7b03f9ceef8659..b643074b0ba084 100644 --- a/tests/sentry/templatetags/test_sentry_plugins.py +++ b/tests/sentry/templatetags/test_sentry_plugins.py @@ -7,7 +7,7 @@ class SamplePlugin(Plugin2): - def get_actions(self, request, group): + def get_actions(self, request, group) -> list[tuple[str, str]]: return [("Example Action", f"http://example.com?id={group.id}")] def get_annotations(self, group): From 1bb6a50bb069b7a8279c2c41db678d3407140345 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 24 Jul 2024 13:31:15 +0000 Subject: [PATCH 047/126] Revert "ref(seer grouping): Use new `CircuitBreaker` class for circuit breaking (#74563)" This reverts commit e117cd090148df00e02e9f7304623fdc64d06afc. 
Co-authored-by: asottile-sentry <103459774+asottile-sentry@users.noreply.github.com> --- src/sentry/conf/server.py | 1 - src/sentry/event_manager.py | 40 +++++++--- src/sentry/grouping/ingest/seer.py | 40 ++-------- src/sentry/options/defaults.py | 15 +--- src/sentry/seer/similarity/similar_issues.py | 17 +---- .../grouping/test_seer_grouping.py | 18 +++++ tests/sentry/grouping/ingest/test_seer.py | 12 --- .../seer/similarity/test_similar_issues.py | 73 ++++--------------- 8 files changed, 75 insertions(+), 141 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index f089a823b04a46..91665cb55f6798 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3423,7 +3423,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: SEER_HASH_GROUPING_RECORDS_DELETE_URL = ( f"/{SEER_SIMILARITY_MODEL_VERSION}/issues/similar-issues/grouping-record/delete-by-hash" ) -SEER_SIMILARITY_CIRCUIT_BREAKER_KEY = "seer.similarity" SIMILARITY_BACKFILL_COHORT_MAP: dict[str, list[int]] = {} diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 7847eb8d1ab323..068de7ec47a0f3 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -129,7 +129,9 @@ from sentry.utils.circuit_breaker import ( ERROR_COUNT_CACHE_KEY, CircuitBreakerPassthrough, + CircuitBreakerTripped, circuit_breaker_activated, + with_circuit_breaker, ) from sentry.utils.dates import to_datetime from sentry.utils.event import has_event_minified_stack_trace, has_stacktrace, is_handled @@ -1528,18 +1530,13 @@ def _save_aggregate( seer_matched_group = None if should_call_seer_for_grouping(event, primary_hashes): - metrics.incr( - "grouping.similarity.did_call_seer", - # TODO: Consider lowering this (in all the spots this metric is - # collected) once we roll Seer grouping out more widely - sample_rate=1.0, - tags={"call_made": True, "blocker": "none"}, - ) try: # If the `projects:similarity-embeddings-grouping` feature is disabled, # we'll still get back result metadata, but `seer_matched_group` will be None - seer_response_data, seer_matched_group = get_seer_similar_issues( - event, primary_hashes + seer_response_data, seer_matched_group = with_circuit_breaker( + "event_manager.get_seer_similar_issues", + lambda: get_seer_similar_issues(event, primary_hashes), + options.get("seer.similarity.circuit-breaker-config"), ) event.data["seer_similarity"] = seer_response_data @@ -1550,8 +1547,33 @@ def _save_aggregate( "seer_similarity" ] = seer_response_data + metrics.incr( + "grouping.similarity.did_call_seer", + # TODO: Consider lowering this (in all the spots this metric is + # collected) once we roll Seer grouping out more widely + sample_rate=1.0, + tags={"call_made": True, "blocker": "none"}, + ) + + except CircuitBreakerTripped: + # TODO: Do we want to include all of the conditions which cause us to log a + # `grouping.similarity.seer_call_blocked` metric (here and in + # `should_call_seer_for_grouping`) under a single outcome tag on the span + # and timer metric below and in `record_calculation_metric_with_result` + # (also below)? Right now they just fall into the `new_group` bucket. 
+ metrics.incr( + "grouping.similarity.did_call_seer", + sample_rate=1.0, + tags={"call_made": False, "blocker": "circuit-breaker"}, + ) + # Insurance - in theory we shouldn't ever land here except Exception as e: + metrics.incr( + "grouping.similarity.did_call_seer", + sample_rate=1.0, + tags={"call_made": True, "blocker": "none"}, + ) sentry_sdk.capture_exception( e, tags={"event": event.event_id, "project": project.id} ) diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py index 6f07277a5780b8..c86576d6b7294a 100644 --- a/src/sentry/grouping/ingest/seer.py +++ b/src/sentry/grouping/ingest/seer.py @@ -8,10 +8,7 @@ from sentry.grouping.result import CalculatedHashes from sentry.models.group import Group from sentry.models.project import Project -from sentry.seer.similarity.similar_issues import ( - get_similarity_data_from_seer, - seer_similarity_circuit_breaker, -) +from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer from sentry.seer.similarity.types import SeerSimilarIssuesMetadata, SimilarIssuesEmbeddingsRequest from sentry.seer.similarity.utils import ( event_content_is_seer_eligible, @@ -48,11 +45,12 @@ def should_call_seer_for_grouping(event: Event, primary_hashes: CalculatedHashes # (Checking the rate limit for calling Seer also increments the counter of how many times we've # tried to call it, and if we fail any of the other checks, it shouldn't count as an attempt. # Thus we only want to run the rate limit check if every other check has already succeeded.) - if ( - killswitch_enabled(project.id, event) - or _circuit_breaker_broken(event, project) - or _ratelimiting_enabled(event, project) - ): + # + # Note: The circuit breaker check which might naturally be here alongside its killswitch + # and rate limiting friends instead happens in the `with_circuit_breaker` helper used where + # `get_seer_similar_issues` is actually called. (It has to be there in order for it to track + # errors arising from that call.) + if killswitch_enabled(project.id, event) or _ratelimiting_enabled(event, project): return False return True @@ -159,30 +157,6 @@ def _ratelimiting_enabled(event: Event, project: Project) -> bool: return False -def _circuit_breaker_broken(event: Event, project: Project) -> bool: - circuit_broken = not seer_similarity_circuit_breaker.should_allow_request() - - if circuit_broken: - logger.warning( - "should_call_seer_for_grouping.circuit_breaker_tripped", - extra={ - "event_id": event.event_id, - "project_id": project.id, - **options.get("seer.similarity.circuit-breaker-config"), - }, - ) - metrics.incr( - "grouping.similarity.circuit_breaker_tripped", - ) - metrics.incr( - "grouping.similarity.did_call_seer", - sample_rate=1.0, - tags={"call_made": False, "blocker": "circuit-breaker"}, - ) - - return circuit_broken - - def get_seer_similar_issues( event: Event, primary_hashes: CalculatedHashes, diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 65f1361143159f..4426ae5b7d5014 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -892,21 +892,12 @@ flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE, ) -# TODO: The default error limit here was estimated based on EA traffic. (In an average 10 min -# period, there are roughly 35K events without matching hashes. About 2% of orgs are EA, so for -# simplicity, assume 2% of those events are from EA orgs. 
If we're willing to tolerate up to a 95% -# failure rate, then we need 35K * 0.02 * 0.95 events to fail to trip the breaker.) -# -# When we GA, we should multiply both the limits by 50 (to remove the 2% part of the current -# calculation), and remove this TODO. register( "seer.similarity.circuit-breaker-config", type=Dict, - default={ - "error_limit": 666, - "error_limit_window": 600, # 10 min - "broken_state_duration": 300, # 5 min - }, + # TODO: For now we're using the defaults for everything but `allow_passthrough`. We may want to + # revisit that choice in the future. + default={"allow_passthrough": True}, flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE, ) diff --git a/src/sentry/seer/similarity/similar_issues.py b/src/sentry/seer/similarity/similar_issues.py index 41139dad50fa33..f7b6fbd02df1d0 100644 --- a/src/sentry/seer/similarity/similar_issues.py +++ b/src/sentry/seer/similarity/similar_issues.py @@ -3,12 +3,7 @@ from django.conf import settings from urllib3.exceptions import MaxRetryError, TimeoutError -from sentry import options -from sentry.conf.server import ( - SEER_MAX_GROUPING_DISTANCE, - SEER_SIMILAR_ISSUES_URL, - SEER_SIMILARITY_CIRCUIT_BREAKER_KEY, -) +from sentry.conf.server import SEER_MAX_GROUPING_DISTANCE, SEER_SIMILAR_ISSUES_URL from sentry.models.grouphash import GroupHash from sentry.net.http import connection_from_url from sentry.seer.signed_seer_api import make_signed_seer_api_request @@ -20,7 +15,6 @@ ) from sentry.tasks.delete_seer_grouping_records import delete_seer_grouping_records_by_hash from sentry.utils import json, metrics -from sentry.utils.circuit_breaker2 import CircuitBreaker from sentry.utils.json import JSONDecodeError, apply_key_filter logger = logging.getLogger(__name__) @@ -36,11 +30,6 @@ timeout=settings.SEER_GROUPING_TIMEOUT, ) -seer_similarity_circuit_breaker = CircuitBreaker( - SEER_SIMILARITY_CIRCUIT_BREAKER_KEY, - options.get("seer.similarity.circuit-breaker-config"), -) - def get_similarity_data_from_seer( similar_issues_request: SimilarIssuesEmbeddingsRequest, @@ -122,7 +111,6 @@ def get_similarity_data_from_seer( sample_rate=SIMILARITY_REQUEST_METRIC_SAMPLE_RATE, tags={**metric_tags, "outcome": "error", "error": type(e).__name__}, ) - seer_similarity_circuit_breaker.record_error() return [] metric_tags["response_status"] = response.status @@ -149,9 +137,6 @@ def get_similarity_data_from_seer( }, ) - if response.status >= 500: - seer_similarity_circuit_breaker.record_error() - return [] try: diff --git a/tests/sentry/event_manager/grouping/test_seer_grouping.py b/tests/sentry/event_manager/grouping/test_seer_grouping.py index aefe287aa83dff..bd9212e51ad250 100644 --- a/tests/sentry/event_manager/grouping/test_seer_grouping.py +++ b/tests/sentry/event_manager/grouping/test_seer_grouping.py @@ -10,6 +10,7 @@ from sentry.testutils.helpers.eventprocessing import save_new_event from sentry.testutils.helpers.features import with_feature from sentry.testutils.pytest.mocking import capture_results +from sentry.utils.circuit_breaker import with_circuit_breaker from sentry.utils.types import NonNone @@ -151,6 +152,23 @@ def test_obeys_seer_similarity_flags(self): assert get_seer_similar_issues_return_values[0][1] == existing_event.group assert new_event.group_id == existing_event.group_id + @patch("sentry.event_manager.should_call_seer_for_grouping", return_value=True) + @patch("sentry.event_manager.with_circuit_breaker", wraps=with_circuit_breaker) + @patch("sentry.event_manager.get_seer_similar_issues", return_value=({}, None)) + def 
test_obeys_circult_breaker( + self, mock_get_seer_similar_issues: MagicMock, mock_with_circuit_breaker: MagicMock, _ + ): + with patch("sentry.utils.circuit_breaker._should_call_callback", return_value=True): + save_new_event({"message": "Dogs are great!"}, self.project) + assert mock_with_circuit_breaker.call_count == 1 + assert mock_get_seer_similar_issues.call_count == 1 + + with patch("sentry.utils.circuit_breaker._should_call_callback", return_value=False): + save_new_event({"message": "Adopt don't shop"}, self.project) + + assert mock_with_circuit_breaker.call_count == 2 # increased + assert mock_get_seer_similar_issues.call_count == 1 # didn't increase + @patch("sentry.event_manager.should_call_seer_for_grouping", return_value=True) @patch("sentry.event_manager.get_seer_similar_issues", return_value=({}, None)) def test_calls_seer_if_no_group_found(self, mock_get_seer_similar_issues: MagicMock, _): diff --git a/tests/sentry/grouping/ingest/test_seer.py b/tests/sentry/grouping/ingest/test_seer.py index 90da60da248c7a..945ed67422d272 100644 --- a/tests/sentry/grouping/ingest/test_seer.py +++ b/tests/sentry/grouping/ingest/test_seer.py @@ -137,18 +137,6 @@ def test_obeys_project_ratelimit(self): is expected_result ) - @with_feature("projects:similarity-embeddings-grouping") - def test_obeys_circuit_breaker(self): - for request_allowed, expected_result in [(True, True), (False, False)]: - with patch( - "sentry.grouping.ingest.seer.seer_similarity_circuit_breaker.should_allow_request", - return_value=request_allowed, - ): - assert ( - should_call_seer_for_grouping(self.event, self.primary_hashes) - is expected_result - ) - @with_feature("projects:similarity-embeddings-grouping") def test_obeys_customized_fingerprint_check(self): default_fingerprint_event = Event( diff --git a/tests/sentry/seer/similarity/test_similar_issues.py b/tests/sentry/seer/similarity/test_similar_issues.py index 72fe03bdf74815..5efd172f635fde 100644 --- a/tests/sentry/seer/similarity/test_similar_issues.py +++ b/tests/sentry/seer/similarity/test_similar_issues.py @@ -96,17 +96,9 @@ def test_no_groups_found(self, mock_seer_request: MagicMock, mock_metrics_incr: tags={"response_status": 200, "outcome": "no_similar_groups"}, ) - @mock.patch( - "sentry.seer.similarity.similar_issues.seer_similarity_circuit_breaker.record_error" - ) @mock.patch("sentry.seer.similarity.similar_issues.metrics.incr") @mock.patch("sentry.seer.similarity.similar_issues.seer_grouping_connection_pool.urlopen") - def test_bad_response_data( - self, - mock_seer_request: MagicMock, - mock_metrics_incr: MagicMock, - mock_record_circuit_breaker_error: MagicMock, - ): + def test_bad_response_data(self, mock_seer_request: MagicMock, mock_metrics_incr: MagicMock): cases: list[tuple[Any, str]] = [ (None, "AttributeError"), ([], "AttributeError"), @@ -147,22 +139,14 @@ def test_bad_response_data( sample_rate=SIMILARITY_REQUEST_METRIC_SAMPLE_RATE, tags={"response_status": 200, "outcome": "error", "error": expected_error}, ) - assert mock_record_circuit_breaker_error.call_count == 0 mock_metrics_incr.reset_mock() - @mock.patch( - "sentry.seer.similarity.similar_issues.seer_similarity_circuit_breaker.record_error" - ) @mock.patch("sentry.seer.similarity.similar_issues.metrics.incr") @mock.patch("sentry.seer.similarity.similar_issues.logger") @mock.patch("sentry.seer.similarity.similar_issues.seer_grouping_connection_pool.urlopen") def test_redirect( - self, - mock_seer_request: MagicMock, - mock_logger: MagicMock, - mock_metrics_incr: MagicMock, - 
mock_record_circuit_breaker_error: MagicMock, + self, mock_seer_request: MagicMock, mock_logger: MagicMock, mock_metrics_incr: MagicMock ): mock_seer_request.return_value = HTTPResponse( status=308, headers={"location": "/new/and/improved/endpoint/"} @@ -177,20 +161,12 @@ def test_redirect( sample_rate=SIMILARITY_REQUEST_METRIC_SAMPLE_RATE, tags={"response_status": 308, "outcome": "error", "error": "Redirect"}, ) - assert mock_record_circuit_breaker_error.call_count == 0 - @mock.patch( - "sentry.seer.similarity.similar_issues.seer_similarity_circuit_breaker.record_error" - ) @mock.patch("sentry.seer.similarity.similar_issues.metrics.incr") @mock.patch("sentry.seer.similarity.similar_issues.logger") @mock.patch("sentry.seer.similarity.similar_issues.seer_grouping_connection_pool.urlopen") def test_request_error( - self, - mock_seer_request: MagicMock, - mock_logger: MagicMock, - mock_metrics_incr: MagicMock, - mock_record_circuit_breaker_error: MagicMock, + self, mock_seer_request: MagicMock, mock_logger: MagicMock, mock_metrics_incr: MagicMock ): for request_error, expected_error_tag in [ (TimeoutError, "TimeoutError"), @@ -216,44 +192,25 @@ def test_request_error( sample_rate=SIMILARITY_REQUEST_METRIC_SAMPLE_RATE, tags={"outcome": "error", "error": expected_error_tag}, ) - assert mock_record_circuit_breaker_error.call_count == 1 - - mock_logger.warning.reset_mock() - mock_metrics_incr.reset_mock() - mock_record_circuit_breaker_error.reset_mock() - @mock.patch( - "sentry.seer.similarity.similar_issues.seer_similarity_circuit_breaker.record_error" - ) @mock.patch("sentry.seer.similarity.similar_issues.metrics.incr") @mock.patch("sentry.seer.similarity.similar_issues.logger") @mock.patch("sentry.seer.similarity.similar_issues.seer_grouping_connection_pool.urlopen") def test_error_status( - self, - mock_seer_request: MagicMock, - mock_logger: MagicMock, - mock_metrics_incr: MagicMock, - mock_record_circuit_breaker_error: MagicMock, + self, mock_seer_request: MagicMock, mock_logger: MagicMock, mock_metrics_incr: MagicMock ): - for response, status, counts_for_circuit_breaker in [ - ("No soup for you", 403, False), - ("No soup, period", 500, True), - ]: - mock_seer_request.return_value = HTTPResponse(response, status=status) + mock_seer_request.return_value = HTTPResponse("No soup for you", status=403) - assert get_similarity_data_from_seer(self.request_params) == [] - mock_logger.error.assert_called_with( - f"Received {status} when calling Seer endpoint {SEER_SIMILAR_ISSUES_URL}.", - extra={"response_data": response}, - ) - mock_metrics_incr.assert_any_call( - "seer.similar_issues_request", - sample_rate=SIMILARITY_REQUEST_METRIC_SAMPLE_RATE, - tags={"response_status": status, "outcome": "error", "error": "RequestError"}, - ) - assert mock_record_circuit_breaker_error.call_count == ( - 1 if counts_for_circuit_breaker else 0 - ) + assert get_similarity_data_from_seer(self.request_params) == [] + mock_logger.error.assert_called_with( + f"Received 403 when calling Seer endpoint {SEER_SIMILAR_ISSUES_URL}.", + extra={"response_data": "No soup for you"}, + ) + mock_metrics_incr.assert_any_call( + "seer.similar_issues_request", + sample_rate=SIMILARITY_REQUEST_METRIC_SAMPLE_RATE, + tags={"response_status": 403, "outcome": "error", "error": "RequestError"}, + ) @mock.patch("sentry.seer.similarity.similar_issues.seer_grouping_connection_pool.urlopen") def test_returns_sorted_results(self, mock_seer_request: MagicMock): From 825da457715fa240c7050a6d7e60d6917ed6df8c Mon Sep 17 00:00:00 2001 From: anthony 
sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 10:03:06 -0400 Subject: [PATCH 048/126] ref: match signatures in mocked bigtable implementation (#74805) fixes an error in mypy 1.11 --- src/sentry/utils/kvstore/abstract.py | 2 +- src/sentry/utils/kvstore/cache.py | 4 ++-- src/sentry/utils/kvstore/encoding.py | 2 +- src/sentry/utils/kvstore/memory.py | 2 +- src/sentry/utils/kvstore/redis.py | 2 +- .../sentry/nodestore/bigtable/test_backend.py | 22 ++++++++++++++----- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/src/sentry/utils/kvstore/abstract.py b/src/sentry/utils/kvstore/abstract.py index d5a84fa7df22b8..dd7da19b893b43 100644 --- a/src/sentry/utils/kvstore/abstract.py +++ b/src/sentry/utils/kvstore/abstract.py @@ -68,7 +68,7 @@ def delete_many(self, keys: Sequence[K]) -> None: self.delete(key) @abstractmethod - def bootstrap(self) -> None: + def bootstrap(self, automatic_expiry: bool = True) -> None: """ Allocate the resources (create tables, etc.) required by the store to be usable. diff --git a/src/sentry/utils/kvstore/cache.py b/src/sentry/utils/kvstore/cache.py index e52cc9578522d3..827e88e38d1a7e 100644 --- a/src/sentry/utils/kvstore/cache.py +++ b/src/sentry/utils/kvstore/cache.py @@ -38,7 +38,7 @@ def set(self, key: Any, value: Any, ttl: timedelta | None = None) -> None: def delete(self, key: Any) -> None: self.backend.delete(key) - def bootstrap(self) -> None: + def bootstrap(self, automatic_expiry: bool = True) -> None: # Nothing to do in this method: the backend is expected to either not # require any explicit setup action (memcached, Redis) or that setup is # assumed to be managed elsewhere (e.g. the Django database cache is @@ -98,7 +98,7 @@ def delete(self, key: str) -> None: def delete_many(self, keys: Sequence[str]) -> None: return self.storage.delete_many([wrap_key(self.prefix, self.version, key) for key in keys]) - def bootstrap(self) -> None: + def bootstrap(self, automatic_expiry: bool = True) -> None: self.storage.bootstrap() def destroy(self) -> None: diff --git a/src/sentry/utils/kvstore/encoding.py b/src/sentry/utils/kvstore/encoding.py index 189d040b56fd29..e697fed4a1866f 100644 --- a/src/sentry/utils/kvstore/encoding.py +++ b/src/sentry/utils/kvstore/encoding.py @@ -40,7 +40,7 @@ def delete(self, key: K) -> None: def delete_many(self, keys: Sequence[K]) -> None: return self.store.delete_many(keys) - def bootstrap(self) -> None: + def bootstrap(self, automatic_expiry: bool = True) -> None: return self.store.bootstrap() def destroy(self) -> None: diff --git a/src/sentry/utils/kvstore/memory.py b/src/sentry/utils/kvstore/memory.py index 748d3fdeb85630..c4c775aa99d3ec 100644 --- a/src/sentry/utils/kvstore/memory.py +++ b/src/sentry/utils/kvstore/memory.py @@ -42,7 +42,7 @@ def delete(self, key: K) -> None: except KeyError: pass - def bootstrap(self) -> None: + def bootstrap(self, automatic_expiry: bool = True) -> None: pass def destroy(self) -> None: diff --git a/src/sentry/utils/kvstore/redis.py b/src/sentry/utils/kvstore/redis.py index 3daab1115f0d15..b8e0ac3e1e4bc1 100644 --- a/src/sentry/utils/kvstore/redis.py +++ b/src/sentry/utils/kvstore/redis.py @@ -29,7 +29,7 @@ def set(self, key: str, value: T, ttl: timedelta | None = None) -> None: def delete(self, key: str) -> None: self.client.delete(key.encode("utf8")) - def bootstrap(self) -> None: + def bootstrap(self, automatic_expiry: bool = True) -> None: pass # nothing to do def destroy(self) -> None: diff --git a/tests/sentry/nodestore/bigtable/test_backend.py 
b/tests/sentry/nodestore/bigtable/test_backend.py index 5753bdd784a7de..3434face26aac9 100644 --- a/tests/sentry/nodestore/bigtable/test_backend.py +++ b/tests/sentry/nodestore/bigtable/test_backend.py @@ -46,14 +46,23 @@ def __init__(self): def direct_row(self, key): return MockedBigtableKVStorage.Row(self, key) - def read_row(self, key): - return MockedBigtableKVStorage.Row(self, key) - - def read_rows(self, row_set): + def read_row(self, row_key, filter_=None): + return MockedBigtableKVStorage.Row(self, row_key) + + def read_rows( + self, + start_key=None, + end_key=None, + limit=None, + filter_=None, + end_inclusive=False, + row_set=None, + retry=None, + ): assert not row_set.row_ranges, "unsupported" return [self.read_row(key) for key in row_set.row_keys] - def mutate_rows(self, rows): + def mutate_rows(self, rows, retry=None, timeout=None): # commits not implemented, changes are applied immediately return [Status(code=0) for row in rows] @@ -65,7 +74,7 @@ def _get_table(self, admin: bool = False): return table - def bootstrap(self, automatic_expiry): + def bootstrap(self, automatic_expiry: bool = True) -> None: pass @@ -98,6 +107,7 @@ def ns(request): yield MockedBigtableNodeStorage(project="test") +@pytest.mark.django_db def test_cache(ns): node_1 = ("a" * 32, {"foo": "a"}) node_2 = ("b" * 32, {"foo": "b"}) From 6429634ade50baed2887d901f284aea1d31ad6d0 Mon Sep 17 00:00:00 2001 From: Jonas Date: Wed, 24 Jul 2024 10:06:21 -0400 Subject: [PATCH 049/126] fix(tracing): do not reparent under span if it was never a parent (#74842) If a txn wasnt the parent of pageload, it should not be reparented. We can remove the logic once span streaming becomes a thing or if we can ever load entire traces through a single request. --- .../traceModels/traceTree.spec.tsx | 38 +++++++++++++++++++ .../newTraceDetails/traceModels/traceTree.tsx | 11 +++++- 2 files changed, 47 insertions(+), 2 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/traceModels/traceTree.spec.tsx b/static/app/views/performance/newTraceDetails/traceModels/traceTree.spec.tsx index e02e8a41af95eb..0ded7aaa811640 100644 --- a/static/app/views/performance/newTraceDetails/traceModels/traceTree.spec.tsx +++ b/static/app/views/performance/newTraceDetails/traceModels/traceTree.spec.tsx @@ -2803,6 +2803,44 @@ describe('TraceTree', () => { assertTransactionNode(secondPageload); expect(secondPageload.value.transaction).toBe('second pageload'); }); + it('doesnt reparent http.server child txn under browser request span if it was not reparented', async () => { + const tree: TraceTree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + transaction: 'pageload', + ['transaction.op']: 'pageload', + event_id: 'pageload', + project_slug: 'js', + children: [ + makeTransaction({ + transaction: 'http.server', + ['transaction.op']: 'http.server', + }), + ], + }), + ], + }), + null + ); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/js:pageload/?averageColumn=span.self_time&averageColumn=span.duration', + method: 'GET', + body: makeEvent({}, [makeSpan({description: 'request', op: 'browser'})]), + }); + + tree.zoomIn(tree.list[1], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => tree.list.length === 4); + tree.print(); + + const pageloadTransaction = tree.list[1]; + const serverHandlerTransaction = tree.list[3]; + expect(serverHandlerTransaction.parent).toBe(pageloadTransaction); + }); describe('expanded', () => { it('server handler 
transaction becomes a child of browser request span if present', async () => { const tree: TraceTree = TraceTree.FromTrace( diff --git a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx index 47507053da11e0..ab34a6ab7e326b 100644 --- a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx +++ b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx @@ -238,9 +238,11 @@ function isBrowserRequestSpan(value: TraceTree.Span): boolean { function childParentSwap({ parent, child, + reason, }: { child: TraceTreeNode; parent: TraceTreeNode; + reason: TraceTreeNode['reparent_reason']; }) { const parentOfParent = parent.parent!; @@ -251,6 +253,9 @@ function childParentSwap({ // We need to remove the portion of the tree that was previously a child, else we will have a circular reference parent.parent = child; child.children.push(parent.filter(parent, n => n !== child)); + + child.reparent_reason = reason; + parent.reparent_reason = reason; } function measurementToTimestamp( @@ -595,7 +600,7 @@ export class TraceTree { ) { // The swap can occur at a later point when new transactions are fetched, // which means we need to invalidate the tree and re-render the UI. - childParentSwap({parent, child: node}); + childParentSwap({parent, child: node, reason: 'pageload server handler'}); parent.invalidate(parent); node.invalidate(node); } @@ -903,7 +908,7 @@ export class TraceTree { let firstTransaction: TraceTreeNode | null = null; for (const child of parent.children) { if (isTransactionNode(child)) { - firstTransaction = firstTransaction || child; + firstTransaction = firstTransaction ?? child; // keep track of the transaction nodes that should be reparented under the newly fetched spans. const key = 'parent_span_id' in child.value && @@ -934,6 +939,7 @@ export class TraceTree { // was the parent of the browser request span which likely served the document. 
if ( firstTransaction && + firstTransaction.reparent_reason === 'pageload server handler' && !childTransactions.length && isBrowserRequestSpan(spanNodeValue) && isServerRequestHandlerTransactionNode(firstTransaction) @@ -1734,6 +1740,7 @@ export class TraceTreeNode canFetch: boolean = false; fetchStatus: 'resolved' | 'error' | 'idle' | 'loading' = 'idle'; parent: TraceTreeNode | null = null; + reparent_reason: 'pageload server handler' | null = null; value: T; expanded: boolean = false; zoomedIn: boolean = false; From 815e565405cf72eec0e233851b8774a104337460 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 10:11:58 -0400 Subject: [PATCH 050/126] ref: adjust typing so rate_limits can be a callable (#74807) this fixes an error in mypy 1.11 -- it also fixes an error in a currently-ignored file which has a callable `rate_limits` --- src/sentry/api/base.py | 4 +-- .../middleware/test_ratelimit_middleware.py | 3 +- .../utils/test_get_rate_limit_value.py | 28 ------------------- 3 files changed, 4 insertions(+), 31 deletions(-) diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 80dec21aa17769..034504d1b2fd15 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -221,8 +221,8 @@ class Endpoint(APIView): owner: ApiOwner = ApiOwner.UNOWNED publish_status: dict[HTTP_METHOD_NAME, ApiPublishStatus] = {} - rate_limits: RateLimitConfig | dict[ - str, dict[RateLimitCategory, RateLimit] + rate_limits: RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] | Callable[ + ..., RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] ] = DEFAULT_RATE_LIMIT_CONFIG enforce_rate_limit: bool = settings.SENTRY_RATELIMITER_ENABLED snuba_methods: list[HTTP_METHOD_NAME] = [] diff --git a/tests/sentry/middleware/test_ratelimit_middleware.py b/tests/sentry/middleware/test_ratelimit_middleware.py index 16b2ec75949eff..05a18c59f7e603 100644 --- a/tests/sentry/middleware/test_ratelimit_middleware.py +++ b/tests/sentry/middleware/test_ratelimit_middleware.py @@ -288,7 +288,8 @@ class CallableRateLimitConfigEndpoint(Endpoint): permission_classes = (AllowAny,) enforce_rate_limit = True - def rate_limits(request): + @staticmethod + def rate_limits(*a, **k): return { "GET": { RateLimitCategory.IP: RateLimit(limit=20, window=1), diff --git a/tests/sentry/ratelimits/utils/test_get_rate_limit_value.py b/tests/sentry/ratelimits/utils/test_get_rate_limit_value.py index 9d0ae5abee4a19..8cd36a1f44f6f4 100644 --- a/tests/sentry/ratelimits/utils/test_get_rate_limit_value.py +++ b/tests/sentry/ratelimits/utils/test_get_rate_limit_value.py @@ -88,31 +88,3 @@ class ChildEndpoint(ParentEndpoint): assert get_rate_limit_value( "GET", RateLimitCategory.IP, rate_limit_config ) == get_default_rate_limits_for_group("foo", RateLimitCategory.IP) - - def test_multiple_inheritance(self): - class ParentEndpoint(Endpoint): - rate_limits: RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] - rate_limits = {"GET": {RateLimitCategory.IP: RateLimit(limit=100, window=5)}} - - class Mixin: - rate_limits: RateLimitConfig | dict[str, dict[RateLimitCategory, RateLimit]] - rate_limits = {"GET": {RateLimitCategory.IP: RateLimit(limit=2, window=4)}} - - class ChildEndpoint(ParentEndpoint, Mixin): - pass - - _child_endpoint = ChildEndpoint.as_view() - rate_limit_config = get_rate_limit_config(_child_endpoint.view_class) - - class ChildEndpointReverse(Mixin, ParentEndpoint): - pass - - _child_endpoint_reverse = 
ChildEndpointReverse.as_view() - rate_limit_config_reverse = get_rate_limit_config(_child_endpoint_reverse.view_class) - - assert get_rate_limit_value("GET", RateLimitCategory.IP, rate_limit_config) == RateLimit( - 100, 5 - ) - assert get_rate_limit_value( - "GET", RateLimitCategory.IP, rate_limit_config_reverse - ) == RateLimit(limit=2, window=4) From e0acd953f293ef4ad9f23cc5ff0c6f1c834ba57e Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 24 Jul 2024 10:13:50 -0400 Subject: [PATCH 051/126] feat(profiling): Add setting to enable continuous profiling (#74676) This adds a setting to enable continuous profiling via the environment variable `SENTRY_CONTINUOUS_PROFILING_ENABLE`. --- src/sentry/conf/server.py | 7 +++++++ src/sentry/utils/sdk.py | 6 +++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 91665cb55f6798..539431a1525b57 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -2539,6 +2539,13 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # profiler. For example, only on the web server. SENTRY_PROFILING_ENABLED = os.environ.get("SENTRY_PROFILING_ENABLED", False) +# To have finer control over which process will have continuous profiling enabled, +# this environment variable will be required to enable continuous profiling. +# +# This setting takes precedence over `SENTRY_PROFILING_ENABLED` forcing the SDK +# to operate under the continuous profiling model. +SENTRY_CONTINUOUS_PROFILING_ENABLED = os.environ.get("SENTRY_CONTINUOUS_PROFILING_ENABLED", False) + # Callable to bind additional context for the Sentry SDK # # def get_org_context(scope, organization, **kwargs): diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 39ced26f02c9bf..dde2ad2cd59c5c 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -302,7 +302,11 @@ def configure_sdk(): else: sentry_saas_transport = None - if settings.SENTRY_PROFILING_ENABLED: + if settings.SENTRY_CONTINUOUS_PROFILING_ENABLED: + sdk_options.setdefault("_experiments", {}).update( + continuous_profiling_auto_start=True, + ) + elif settings.SENTRY_PROFILING_ENABLED: sdk_options["profiles_sampler"] = profiles_sampler sdk_options["profiler_mode"] = settings.SENTRY_PROFILER_MODE From 2c4f02414bd4af0a415e276c4cb1a4b0e645ce27 Mon Sep 17 00:00:00 2001 From: Stefan Jandl Date: Wed, 24 Jul 2024 16:18:30 +0200 Subject: [PATCH 052/126] chore: disable profiling for .NET (#74654) --- static/app/components/onboarding/productSelection.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/static/app/components/onboarding/productSelection.tsx b/static/app/components/onboarding/productSelection.tsx index b66aae985b103e..3585b9d148b10f 100644 --- a/static/app/components/onboarding/productSelection.tsx +++ b/static/app/components/onboarding/productSelection.tsx @@ -98,10 +98,10 @@ export const platformProductAvailability = { 'dotnet-aspnetcore': [ProductSolution.PERFORMANCE_MONITORING], 'dotnet-awslambda': [ProductSolution.PERFORMANCE_MONITORING], 'dotnet-gcpfunctions': [ProductSolution.PERFORMANCE_MONITORING], - 'dotnet-maui': [ProductSolution.PERFORMANCE_MONITORING, ProductSolution.PROFILING], + 'dotnet-maui': [ProductSolution.PERFORMANCE_MONITORING], 'dotnet-uwp': [ProductSolution.PERFORMANCE_MONITORING], - 'dotnet-winforms': [ProductSolution.PERFORMANCE_MONITORING, ProductSolution.PROFILING], - 'dotnet-wpf': [ProductSolution.PERFORMANCE_MONITORING, ProductSolution.PROFILING], + 'dotnet-winforms': 
[ProductSolution.PERFORMANCE_MONITORING], + 'dotnet-wpf': [ProductSolution.PERFORMANCE_MONITORING], 'dotnet-xamarin': [ProductSolution.PERFORMANCE_MONITORING], flutter: [ProductSolution.PERFORMANCE_MONITORING, ProductSolution.PROFILING], kotlin: [ProductSolution.PERFORMANCE_MONITORING], From 6b84f161805a992c6f997492738f9ddf5746227e Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Wed, 24 Jul 2024 10:25:37 -0400 Subject: [PATCH 053/126] feat(dashboards): Default widget split decision to errors (#74723) In the case that neither side has data, or both sides have data, force the dataset to errors and record the reason in the source field. This way we can ensure the split occurs and also warn the user that they may need to select a dataset. Note: To avoid changing behaviour for existing on-demand users by forcing the split to use errors, I feature flagged the default scenario so without the feature flag the default is still discover. --- src/sentry/api/bases/organization_events.py | 42 +++++++++++-------- .../api/endpoints/organization_events.py | 6 ++- .../endpoints/organization_events_stats.py | 4 +- .../endpoints/test_organization_events_mep.py | 23 ++++++---- .../test_organization_events_stats_mep.py | 15 +++++-- 5 files changed, 58 insertions(+), 32 deletions(-) diff --git a/src/sentry/api/bases/organization_events.py b/src/sentry/api/bases/organization_events.py index ee9302e6c304c4..b2608373a9a7ce 100644 --- a/src/sentry/api/bases/organization_events.py +++ b/src/sentry/api/bases/organization_events.py @@ -240,33 +240,39 @@ def handle_on_demand(self, request: Request) -> tuple[bool, MetricSpecType]: return use_on_demand_metrics, on_demand_metric_type - def get_split_decision(self, has_errors, has_transactions_data): + def save_split_decision(self, widget, has_errors, has_transactions_data, organization, user): """This can be removed once the discover dataset has been fully split""" + source = DashboardDatasetSourcesTypes.INFERRED.value if has_errors and not has_transactions_data: decision = DashboardWidgetTypes.ERROR_EVENTS + sentry_sdk.set_tag("discover.split_reason", "query_result") elif not has_errors and has_transactions_data: decision = DashboardWidgetTypes.TRANSACTION_LIKE - elif has_errors and has_transactions_data: - decision = DashboardWidgetTypes.DISCOVER + sentry_sdk.set_tag("discover.split_reason", "query_result") else: - # In the case that neither side has data, we do not need to split this yet and can make multiple queries to check each time. - # This will help newly created widgets or infrequent count widgets that shouldn't be prematurely assigned a side. - decision = None - sentry_sdk.set_tag("split_decision", decision) - return decision + if features.has( + "organizations:performance-discover-dataset-selector", organization, actor=user + ): + # In the case that neither side has data, or both sides have data, default to errors. + decision = DashboardWidgetTypes.ERROR_EVENTS + source = DashboardDatasetSourcesTypes.FORCED.value + sentry_sdk.set_tag("discover.split_reason", "default") + else: + # This branch can be deleted once the feature flag for the discover split is removed + if has_errors and has_transactions_data: + decision = DashboardWidgetTypes.DISCOVER + else: + # In the case that neither side has data, we do not need to split this yet and can make multiple queries to check each time. + # This will help newly created widgets or infrequent count widgets that shouldn't be prematurely assigned a side. 
+ decision = None - def save_split_decision(self, widget, has_errors, has_transactions_data): - """This can be removed once the discover dataset has been fully split""" - new_discover_widget_split = self.get_split_decision(has_errors, has_transactions_data) - if ( - new_discover_widget_split is not None - and widget.discover_widget_split != new_discover_widget_split - ): - widget.discover_widget_split = new_discover_widget_split - widget.dataset_source = DashboardDatasetSourcesTypes.INFERRED.value + sentry_sdk.set_tag("discover.split_decision", decision) + if decision is not None and widget.discover_widget_split != decision: + widget.discover_widget_split = decision + widget.dataset_source = source widget.save() - return new_discover_widget_split + return decision def save_discover_saved_query_split_decision( self, query, dataset_inferred_from_query, has_errors, has_transactions_data diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 6b14bbe8c1fe6e..c997f1021bd354 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -438,7 +438,7 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w error_results = None original_results = _data_fn(scoped_dataset, offset, limit, scoped_query) - if original_results.get("data"): + if original_results.get("data") is not None: dataset_meta = original_results.get("meta", {}) else: dataset_meta = list(original_results.values())[0].get("data").get("meta", {}) @@ -460,7 +460,9 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w transaction_results = _data_fn(discover, offset, limit, transactions_only_query) has_transactions = len(transaction_results["data"]) > 0 - decision = self.save_split_decision(widget, has_errors, has_transactions) + decision = self.save_split_decision( + widget, has_errors, has_transactions, organization, request.user + ) if decision == DashboardWidgetTypes.DISCOVER: return _data_fn(discover, offset, limit, scoped_query) diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 5fb5fb247e3dc2..a9af5af442fb0f 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -421,7 +421,9 @@ def fn( ) has_transactions = self.check_if_results_have_data(transaction_results) - decision = self.save_split_decision(widget, has_errors, has_transactions) + decision = self.save_split_decision( + widget, has_errors, has_transactions, organization, request.user + ) if decision == DashboardWidgetTypes.DISCOVER: # The user needs to be warned to split in this case. 
diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index 398856c353d4aa..8e644620b2a776 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -3725,20 +3725,23 @@ def test_split_decision_for_ambiguous_widget_without_data(self): response = self.do_request( { - "field": ["count()", "transaction.name", "error.type"], + "field": ["count()", "transaction.op", "error.type"], "query": "", "dataset": "metricsEnhanced", "per_page": 50, "dashboardWidgetId": widget.id, - } + }, + features={"organizations:performance-discover-dataset-selector": True}, ) assert response.status_code == 200, response.content - assert response.data.get("meta").get("discoverSplitDecision") is None + assert response.data.get("meta").get( + "discoverSplitDecision" + ) == DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) widget.refresh_from_db() - assert widget.discover_widget_split is None - assert widget.dataset_source == DatasetSourcesTypes.UNKNOWN.value + assert widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS + assert widget.dataset_source == DatasetSourcesTypes.FORCED.value def test_split_decision_for_ambiguous_widget_with_data(self): # Store a transaction @@ -3775,14 +3778,18 @@ def test_split_decision_for_ambiguous_widget_with_data(self): "dataset": "metricsEnhanced", "per_page": 50, "dashboardWidgetId": widget.id, - } + }, + features={"organizations:performance-discover-dataset-selector": True}, ) assert response.status_code == 200, response.content - assert response.data.get("meta").get("discoverSplitDecision") is None + assert response.data.get("meta").get( + "discoverSplitDecision" + ) == DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) widget.refresh_from_db() - assert widget.discover_widget_split is DashboardWidgetTypes.DISCOVER + assert widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS + assert widget.dataset_source == DatasetSourcesTypes.FORCED.value class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithMetricLayer( diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py index 2aab9c89017ce6..048cf1142a9a9c 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py @@ -8,6 +8,7 @@ from django.urls import reverse from rest_framework.response import Response +from sentry.discover.models import DatasetSourcesTypes from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes from sentry.models.environment import Environment from sentry.sentry_metrics.use_case_id_registry import UseCaseID @@ -791,6 +792,7 @@ def test_split_decision_for_errors_widget(self): widget.refresh_from_db() assert widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS + assert widget.dataset_source == DatasetSourcesTypes.INFERRED.value def test_split_decision_for_transactions_widget(self): self.store_transaction_metric( @@ -820,6 +822,7 @@ def test_split_decision_for_transactions_widget(self): widget.refresh_from_db() assert widget.discover_widget_split == DashboardWidgetTypes.TRANSACTION_LIKE + assert widget.dataset_source == DatasetSourcesTypes.INFERRED.value def test_split_decision_for_top_events_errors_widget(self): error_data = load_data("python", timestamp=before_now(minutes=1)) @@ -860,6 
+863,7 @@ def test_split_decision_for_top_events_errors_widget(self): widget.refresh_from_db() assert widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS + assert widget.dataset_source == DatasetSourcesTypes.INFERRED.value def test_split_decision_for_top_events_transactions_widget(self): self.store_transaction_metric( @@ -896,6 +900,7 @@ def test_split_decision_for_top_events_transactions_widget(self): widget.refresh_from_db() assert widget.discover_widget_split == DashboardWidgetTypes.TRANSACTION_LIKE + assert widget.dataset_source == DatasetSourcesTypes.INFERRED.value def test_split_decision_for_ambiguous_widget_without_data(self): _, widget, __ = create_widget( @@ -913,14 +918,18 @@ def test_split_decision_for_ambiguous_widget_without_data(self): "dataset": "metricsEnhanced", "per_page": 50, "dashboardWidgetId": widget.id, - } + }, + features={"organizations:performance-discover-dataset-selector": True}, ) assert response.status_code == 200, response.content - assert response.data.get("meta").get("discoverSplitDecision") is None + assert response.data.get("meta").get( + "discoverSplitDecision" + ) == DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) widget.refresh_from_db() - assert widget.discover_widget_split is None + assert widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS + assert widget.dataset_source == DatasetSourcesTypes.FORCED.value class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTestWithMetricLayer( From 6e764301ff87f120706a699a151f56e5cf3d8446 Mon Sep 17 00:00:00 2001 From: Jonas Date: Wed, 24 Jul 2024 10:55:42 -0400 Subject: [PATCH 054/126] ref: add threadId to continuous profile link (#74730) We need tid (being added to be response) to be able to link to the correct segment in the code where the span/even occurred. 
--- .../traceApi/useTransaction.tsx | 4 ++-- .../traceDrawer/details/span/index.tsx | 14 +++++------ .../traceDrawer/details/styles.tsx | 23 ++++++++++++++++++- .../traceDrawer/traceProfilingLink.spec.tsx | 3 +++ .../traceDrawer/traceProfilingLink.ts | 11 ++++++--- .../newTraceDetails/traceModels/traceTree.tsx | 15 ++++++------ 6 files changed, 50 insertions(+), 20 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx b/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx index 39c2736c951daa..814b4d1e1f321b 100644 --- a/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx +++ b/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx @@ -6,14 +6,14 @@ import type { } from 'sentry/views/performance/newTraceDetails/traceModels/traceTree'; interface UseTransactionProps { - node: TraceTreeNode; + node: TraceTreeNode | null; organization: Organization; } export function useTransaction(props: UseTransactionProps) { return useApiQuery( [ - `/organizations/${props.organization.slug}/events/${props.node.value.project_slug}:${props.node.value.event_id}/`, + `/organizations/${props.organization.slug}/events/${props.node?.value?.project_slug}:${props?.node?.value.event_id}/`, { query: { referrer: 'trace-details-summary', diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx index 9a447a1e087171..749ecae9c4b4c7 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx @@ -76,12 +76,12 @@ export function SpanNodeDetails({ }: TraceTreeNodeDetailsProps>) { const location = useLocation(); const {projects} = useProjects(); - const {event} = node.value; const issues = useMemo(() => { return [...node.errors, ...node.performance_issues]; }, [node.errors, node.performance_issues]); - const project = projects.find(proj => proj.slug === event?.projectSlug); - const profileId = event?.contexts?.profile?.profile_id ?? null; + + const project = projects.find(proj => proj.slug === node.value.event?.projectSlug); + const profileId = node.value.event?.contexts?.profile?.profile_id ?? null; return ( @@ -91,10 +91,10 @@ export function SpanNodeDetails({ project={project} onTabScrollToNode={onTabScrollToNode} /> - {event.projectSlug ? ( + {node.value.event.projectSlug ? ( @@ -129,9 +129,9 @@ export function SpanNodeDetails({ startTimestamp={node.value.start_timestamp} /> - + {organization.features.includes('profiling') ? 
( - + ) : null} )} diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx index bfb6cb3289a47a..662436766af8a3 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx @@ -38,6 +38,7 @@ import { isTransactionNode, } from 'sentry/views/performance/newTraceDetails/guards'; import {traceAnalytics} from 'sentry/views/performance/newTraceDetails/traceAnalytics'; +import {useTransaction} from 'sentry/views/performance/newTraceDetails/traceApi/useTransaction'; import {makeTraceContinuousProfilingLink} from 'sentry/views/performance/newTraceDetails/traceDrawer/traceProfilingLink'; import type { MissingInstrumentationNode, @@ -329,6 +330,21 @@ const ValueTd = styled('td')` position: relative; `; +function getThreadIdFromNode( + node: TraceTreeNode, + transaction: EventTransaction | undefined +): string | undefined { + if (isSpanNode(node) && node.value.data['thread.id']) { + return node.value.data['thread.id']; + } + + if (transaction) { + return transaction.context?.trace?.data?.['thread.id']; + } + + return undefined; +} + function NodeActions(props: { node: TraceTreeNode; onTabScrollToNode: ( @@ -409,12 +425,17 @@ function NodeActions(props: { return ''; }, [props]); - const params = useParams<{traceSlug?: string}>(); + const {data: transaction} = useTransaction({ + node: isTransactionNode(props.node) ? props.node : null, + organization, + }); + const params = useParams<{traceSlug?: string}>(); const profileLink = makeTraceContinuousProfilingLink(props.node, profilerId, { orgSlug: props.organization.slug, projectSlug: props.node.metadata.project_slug ?? '', traceId: params.traceSlug ?? 
'', + threadId: getThreadIdFromNode(props.node, transaction), }); return ( diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx index 02789a716df6a8..e8aeaff7be5414 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx @@ -37,6 +37,7 @@ describe('traceProfilingLink', () => { projectSlug: 'project', orgSlug: '', traceId: '', + threadId: '0', }) ).toBeNull(); }); @@ -47,6 +48,7 @@ describe('traceProfilingLink', () => { projectSlug: '', orgSlug: 'sentry', traceId: '', + threadId: '0', }) ).toBeNull(); }); @@ -84,6 +86,7 @@ describe('traceProfilingLink', () => { projectSlug: 'project', orgSlug: 'sentry', traceId: 'trace', + threadId: '0', } ); diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts index 1674a6ef9121d4..03bc9456c67c73 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts +++ b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts @@ -51,6 +51,7 @@ export function makeTraceContinuousProfilingLink( options: { orgSlug: string; projectSlug: string; + threadId: string | undefined; traceId: string; }, query: Location['query'] = {} @@ -94,15 +95,19 @@ export function makeTraceContinuousProfilingLink( return null; } - const queryWithSpanIdAndTraceId: Record = { + const queryWithEventData: Record = { ...query, eventId, traceId: options.traceId, }; + if (typeof options.threadId === 'string') { + queryWithEventData.tid = options.threadId; + } + const spanId = getNodeId(node); if (spanId) { - queryWithSpanIdAndTraceId.spanId = spanId; + queryWithEventData.spanId = spanId; } return generateContinuousProfileFlamechartRouteWithQuery( @@ -111,6 +116,6 @@ export function makeTraceContinuousProfilingLink( profilerId, start.toISOString(), end.toISOString(), - queryWithSpanIdAndTraceId + queryWithEventData ); } diff --git a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx index ab34a6ab7e326b..9f7092a1aac17e 100644 --- a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx +++ b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx @@ -119,11 +119,12 @@ import {TraceType} from '../traceType'; type ArgumentTypes = F extends (...args: infer A) => any ? 
A : never; export declare namespace TraceTree { + interface RawSpan extends RawSpanType {} interface Transaction extends TraceFullDetailed { profiler_id: string; sdk_name: string; } - interface Span extends RawSpanType { + interface Span extends RawSpan { childTransactions: TraceTreeNode[]; event: EventTransaction; measurements?: Record; @@ -138,14 +139,14 @@ export declare namespace TraceTree { timestamp: number; type: 'missing_instrumentation'; } - interface SiblingAutogroup extends RawSpanType { + interface SiblingAutogroup extends RawSpan { autogrouped_by: { description: string; op: string; }; } - interface ChildrenAutogroup extends RawSpanType { + interface ChildrenAutogroup extends RawSpan { autogrouped_by: { op: string; }; @@ -866,7 +867,7 @@ export class TraceTree { static FromSpans( parent: TraceTreeNode, data: Event, - spans: RawSpanType[], + spans: TraceTree.RawSpan[], options: {sdk: string | undefined} | undefined ): [TraceTreeNode, [number, number] | null] { parent.invalidate(parent); @@ -877,7 +878,7 @@ export class TraceTree { const parentIsSpan = isSpanNode(parent); const lookuptable: Record< - RawSpanType['span_id'], + TraceTree.RawSpan['span_id'], TraceTreeNode > = {}; @@ -2490,7 +2491,7 @@ export function computeAutogroupedBarSegments( // Returns a list of errors related to the txn with ids matching the span id function getRelatedSpanErrorsFromTransaction( - span: RawSpanType, + span: TraceTree.RawSpan, node?: TraceTreeNode ): TraceErrorType[] { if (!node || !node.value || !isTransactionNode(node)) { @@ -2512,7 +2513,7 @@ function getRelatedSpanErrorsFromTransaction( // Returns a list of performance errors related to the txn with ids matching the span id function getRelatedPerformanceIssuesFromTransaction( - span: RawSpanType, + span: TraceTree.RawSpan, node?: TraceTreeNode ): TraceTree.TracePerformanceIssue[] { if (!node || !node.value || !isTransactionNode(node)) { From 57b2cc203bc5e05c41d17b9ec8aa4634a0a4d24d Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 24 Jul 2024 16:56:24 +0200 Subject: [PATCH 055/126] ref(metrics): Add deprecated tag and other updates (#74843) --- .../alerts/rules/metric/spanMetricsField.tsx | 2 +- static/app/views/alerts/wizard/index.tsx | 21 ++++++++++++++++--- .../app/views/alerts/wizard/panelContent.tsx | 7 +++++-- static/app/views/dashboards/addWidget.tsx | 12 +++++++++-- .../spot/alerts-wizard-span-metrics.svg | 1 + 5 files changed, 35 insertions(+), 8 deletions(-) create mode 100644 static/images/spot/alerts-wizard-span-metrics.svg diff --git a/static/app/views/alerts/rules/metric/spanMetricsField.tsx b/static/app/views/alerts/rules/metric/spanMetricsField.tsx index f102cd140b595f..c99c6b2877df31 100644 --- a/static/app/views/alerts/rules/metric/spanMetricsField.tsx +++ b/static/app/views/alerts/rules/metric/spanMetricsField.tsx @@ -197,7 +197,7 @@ function SpanMetricField({field, project, onChange}: Props) { (
{categoryHeading} - { - return [alertType, AlertWizardAlertNames[alertType]]; + return [ + alertType, + AlertWizardAlertNames[alertType], + alertType === 'custom_metrics' && + hasCustomMetricsExtractionRules(organization) ? ( + {t('deprecated')} + ) : null, + ]; })} - onChange={handleChangeAlertOption} + onChange={option => handleChangeAlertOption(option as AlertType)} value={alertOption} label="alert-option" /> @@ -288,4 +297,10 @@ const WizardButtonContainer = styled('div')` } `; +const WizardGroupedOptions = styled(RadioPanelGroup)` + label { + grid-template-columns: repeat(3, max-content); + } +`; + export default AlertWizard; diff --git a/static/app/views/alerts/wizard/panelContent.tsx b/static/app/views/alerts/wizard/panelContent.tsx index 321dc63800208d..fec158c3ed15ef 100644 --- a/static/app/views/alerts/wizard/panelContent.tsx +++ b/static/app/views/alerts/wizard/panelContent.tsx @@ -9,6 +9,7 @@ import diagramFailureRate from 'sentry-images/spot/alerts-wizard-failure-rate.sv import diagramFID from 'sentry-images/spot/alerts-wizard-fid.svg'; import diagramIssues from 'sentry-images/spot/alerts-wizard-issues.svg'; import diagramLCP from 'sentry-images/spot/alerts-wizard-lcp.svg'; +import diagramSpanMetrics from 'sentry-images/spot/alerts-wizard-span-metrics.svg'; import diagramThroughput from 'sentry-images/spot/alerts-wizard-throughput.svg'; import diagramTransactionDuration from 'sentry-images/spot/alerts-wizard-transaction-duration.svg'; import diagramUsers from 'sentry-images/spot/alerts-wizard-users-experiencing-errors.svg'; @@ -143,14 +144,16 @@ export const AlertWizardPanelContent: Record = { illustration: diagramCustomMetrics, }, span_metrics: { - description: t('Alert on span based metrics that you have configured.'), + description: t( + 'Alert on custom span metrics that you have configured, such as number of sign-ups or duration of your login.' + ), examples: [ t('When the number of sign-ups dropped by 10% compared to the previous week.'), t( 'When the 75th percentile of your login flow is taking longer than 500 milliseconds.' 
), ], - illustration: diagramCustomMetrics, + illustration: diagramSpanMetrics, }, llm_tokens: { description: t( diff --git a/static/app/views/dashboards/addWidget.tsx b/static/app/views/dashboards/addWidget.tsx index a46adc107128c7..305073cc6cc374 100644 --- a/static/app/views/dashboards/addWidget.tsx +++ b/static/app/views/dashboards/addWidget.tsx @@ -4,6 +4,7 @@ import styled from '@emotion/styled'; import Feature from 'sentry/components/acl/feature'; import FeatureBadge from 'sentry/components/badge/featureBadge'; +import Tag from 'sentry/components/badge/tag'; import type {ButtonProps} from 'sentry/components/button'; import {Button} from 'sentry/components/button'; import DropdownButton from 'sentry/components/dropdownButton'; @@ -14,7 +15,10 @@ import {IconAdd} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {trackAnalytics} from 'sentry/utils/analytics'; -import {hasCustomMetrics} from 'sentry/utils/metrics/features'; +import { + hasCustomMetrics, + hasCustomMetricsExtractionRules, +} from 'sentry/utils/metrics/features'; import useOrganization from 'sentry/utils/useOrganization'; import {DataSet} from 'sentry/views/dashboards/widgetBuilder/utils'; @@ -142,7 +146,11 @@ export function AddWidgetButton({onAddWidget, ...buttonProps}: Props & ButtonPro key: DataSet.METRICS, label: t('Custom Metrics'), onAction: () => handleAction(DataSet.METRICS), - trailingItems: , + trailingItems: hasCustomMetricsExtractionRules(organization) ? ( + {t('deprecated')} + ) : ( + + ), }); } diff --git a/static/images/spot/alerts-wizard-span-metrics.svg b/static/images/spot/alerts-wizard-span-metrics.svg new file mode 100644 index 00000000000000..b1d17080ac746f --- /dev/null +++ b/static/images/spot/alerts-wizard-span-metrics.svg @@ -0,0 +1 @@ + \ No newline at end of file From b123e463393994139b7298b4f07c877b76e89386 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 24 Jul 2024 16:58:43 +0200 Subject: [PATCH 056/126] fix(metrics): check for queries length and not series for uppercase (#74845) --- .../metricWidgetViewerModal/visualization.tsx | 1 + static/app/views/metrics/summaryTable.tsx | 15 ++++++--------- static/app/views/metrics/widget.tsx | 1 + 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/static/app/components/modals/metricWidgetViewerModal/visualization.tsx b/static/app/components/modals/metricWidgetViewerModal/visualization.tsx index 05b0f44e73a6c3..1da243e00666a2 100644 --- a/static/app/components/modals/metricWidgetViewerModal/visualization.tsx +++ b/static/app/components/modals/metricWidgetViewerModal/visualization.tsx @@ -332,6 +332,7 @@ function MetricChartVisualization({ height={200} /> - + {prefix} {sufix && ':'} @@ -60,10 +58,12 @@ export const SummaryTable = memo(function SummaryTable({ sort = DEFAULT_SORT_STATE as SortState, onRowHover, onRowFilter, + singleQuery, }: { onRowClick: (series: FocusedMetricsSeries) => void; onSortChange: (sortState: SortState) => void; series: Series[]; + singleQuery: boolean; onColorDotClick?: (series: FocusedMetricsSeries) => void; onRowFilter?: ( index: number, @@ -264,10 +264,7 @@ export const SummaryTable = memo(function SummaryTable({ delay={500} overlayStyle={{maxWidth: '80vw'}} > - + {totalColumns.map(aggregate => ( diff --git a/static/app/views/metrics/widget.tsx b/static/app/views/metrics/widget.tsx index e3c45e793ed34e..bbb34a6bc1d356 100644 --- a/static/app/views/metrics/widget.tsx +++ b/static/app/views/metrics/widget.tsx @@ -558,6 +558,7 @@ const 
MetricWidgetBody = memo( /> Date: Wed, 24 Jul 2024 11:10:49 -0400 Subject: [PATCH 057/126] feat(api): Document dashboard details endpoint (#74385) Documents the dashboard details endpoint and makes GET, PUT, DELETE methods public API. I had to rename the model serializer, which had the same name as the DRF serializer, to avoid build issues with the API docs. --- .../organization_dashboard_details.py | 79 ++++++++++------ .../api/serializers/models/dashboard.py | 75 ++++++++++++++-- .../serializers/rest_framework/dashboard.py | 51 ++++++++--- src/sentry/apidocs/build.py | 10 +++ .../apidocs/examples/dashboard_examples.py | 89 +++++++++++++++++++ src/sentry/apidocs/parameters.py | 10 +++ 6 files changed, 269 insertions(+), 45 deletions(-) create mode 100644 src/sentry/apidocs/examples/dashboard_examples.py diff --git a/src/sentry/api/endpoints/organization_dashboard_details.py b/src/sentry/api/endpoints/organization_dashboard_details.py index e8a1b30e3958a9..a001e44a28c41e 100644 --- a/src/sentry/api/endpoints/organization_dashboard_details.py +++ b/src/sentry/api/endpoints/organization_dashboard_details.py @@ -2,6 +2,7 @@ from django.db import IntegrityError, router, transaction from django.db.models import F from django.utils import timezone +from drf_spectacular.utils import extend_schema from rest_framework.request import Request from rest_framework.response import Response @@ -13,7 +14,16 @@ from sentry.api.endpoints.organization_dashboards import OrganizationDashboardsPermission from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers import serialize +from sentry.api.serializers.models.dashboard import DashboardDetailsModelSerializer from sentry.api.serializers.rest_framework import DashboardDetailsSerializer +from sentry.apidocs.constants import ( + RESPONSE_BAD_REQUEST, + RESPONSE_FORBIDDEN, + RESPONSE_NO_CONTENT, + RESPONSE_NOT_FOUND, +) +from sentry.apidocs.examples.dashboard_examples import DashboardExamples +from sentry.apidocs.parameters import DashboardParams, GlobalParams from sentry.models.dashboard import Dashboard, DashboardTombstone EDIT_FEATURE = "organizations:dashboards-edit" @@ -44,24 +54,28 @@ def _get_dashboard(self, request: Request, organization, dashboard_id): return Dashboard.objects.get(id=dashboard_id, organization_id=organization.id) +@extend_schema(tags=["Dashboards"]) @region_silo_endpoint class OrganizationDashboardDetailsEndpoint(OrganizationDashboardBase): publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "GET": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PUBLIC, + "GET": ApiPublishStatus.PUBLIC, + "PUT": ApiPublishStatus.PUBLIC, } + @extend_schema( + operation_id="Retrieve an Organization's Custom Dashboard", + parameters=[GlobalParams.ORG_ID_OR_SLUG, DashboardParams.DASHBOARD_ID], + responses={ + 200: DashboardDetailsModelSerializer, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=DashboardExamples.DASHBOARD_GET_RESPONSE, + ) def get(self, request: Request, organization, dashboard) -> Response: """ - Retrieve an Organization's Dashboard - ```````````````````````````````````` - - Return details on an individual organization's dashboard. - - :pparam Organization organization: the organization the dashboard belongs to. - :pparam Dashboard dashboard: the dashboard object - :auth: required + Return details about an organization's custom dashboard. 
""" if not features.has(READ_FEATURE, organization, actor=request.user): return Response(status=404) @@ -71,17 +85,19 @@ def get(self, request: Request, organization, dashboard) -> Response: return self.respond(serialize(dashboard, request.user)) + @extend_schema( + operation_id="Delete an Organization's Custom Dashboard", + parameters=[GlobalParams.ORG_ID_OR_SLUG, DashboardParams.DASHBOARD_ID], + responses={ + 204: RESPONSE_NO_CONTENT, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) def delete(self, request: Request, organization, dashboard) -> Response: """ - Delete an Organization's Dashboard - ``````````````````````````````````` - - Delete an individual organization's dashboard, or tombstone + Delete an organization's custom dashboard, or tombstone a pre-built dashboard which effectively deletes it. - - :pparam Organization organization: the organization the dashboard belongs to. - :pparam Dashboard dashboard: the dashboard object - :auth: required """ if not features.has(EDIT_FEATURE, organization, actor=request.user): return Response(status=404) @@ -105,17 +121,24 @@ def delete(self, request: Request, organization, dashboard) -> Response: return self.respond(status=204) + @extend_schema( + operation_id="Edit an Organization's Custom Dashboard", + parameters=[GlobalParams.ORG_ID_OR_SLUG, DashboardParams.DASHBOARD_ID], + request=DashboardDetailsSerializer, + responses={ + 200: DashboardDetailsModelSerializer, + 400: RESPONSE_BAD_REQUEST, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=DashboardExamples.DASHBOARD_PUT_RESPONSE, + ) def put(self, request: Request, organization, dashboard) -> Response: """ - Edit an Organization's Dashboard - ``````````````````````````````````` - - Edit an individual organization's dashboard as well as - bulk edits on widgets (i.e. rearranging widget order). - - :pparam Organization organization: the organization the dashboard belongs to. - :pparam Dashboard dashboard: the old dashboard object - :auth: required + Edit an organization's custom dashboard as well as any bulk + edits on widgets that may have been made. (For example, widgets + that have been rearranged, updated queries and fields, specific + display types, and so on.) 
""" if not features.has(EDIT_FEATURE, organization, actor=request.user): return Response(status=404) diff --git a/src/sentry/api/serializers/models/dashboard.py b/src/sentry/api/serializers/models/dashboard.py index efae73190d009b..f3f26123b32f7f 100644 --- a/src/sentry/api/serializers/models/dashboard.py +++ b/src/sentry/api/serializers/models/dashboard.py @@ -1,9 +1,11 @@ from collections import defaultdict +from typing import TypedDict import orjson from sentry import features from sentry.api.serializers import Serializer, register, serialize +from sentry.api.serializers.models.user import UserSerializerResponse from sentry.constants import ALL_ACCESS_PROJECTS from sentry.discover.models import DatasetSourcesTypes from sentry.models.dashboard import Dashboard @@ -21,6 +23,46 @@ DATASET_SOURCES = dict(DatasetSourcesTypes.as_choices()) +class OnDemandResponse(TypedDict): + enabled: bool + extractionState: str + dashboardWidgetQueryId: int + + +class DashboardWidgetQueryResponse(TypedDict): + id: str + name: str + fields: list[str] + aggregates: list[str] + columns: list[str] + fieldAliases: list[str] + conditions: str + orderby: str + widgetId: str + onDemand: list[OnDemandResponse] + isHidden: bool + + +class ThresholdType(TypedDict): + max_values: dict[str, int] + unit: str + + +class DashboardWidgetResponse(TypedDict): + id: str + title: str + description: str | None + displayType: str + thresholds: ThresholdType | None + interval: str + dateCreated: str + dashboardId: str + queries: list[DashboardWidgetQueryResponse] + limit: int | None + widgetType: str + layout: dict[str, int] + + @register(DashboardWidget) class DashboardWidgetSerializer(Serializer): def get_attrs(self, item_list, user, **kwargs): @@ -39,7 +81,7 @@ def get_attrs(self, item_list, user, **kwargs): return result - def serialize(self, obj, attrs, user, **kwargs): + def serialize(self, obj, attrs, user, **kwargs) -> DashboardWidgetResponse: widget_type = ( DashboardWidgetTypes.get_type_name(obj.widget_type) or DashboardWidgetTypes.TYPE_NAMES[0] @@ -76,7 +118,7 @@ def serialize(self, obj, attrs, user, **kwargs): @register(DashboardWidgetQueryOnDemand) class DashboardWidgetQueryOnDemandSerializer(Serializer): - def serialize(self, obj, attrs, user, **kwargs): + def serialize(self, obj, attrs, user, **kwargs) -> OnDemandResponse: return { "enabled": obj.extraction_enabled(), "extractionState": obj.extraction_state, @@ -109,7 +151,7 @@ def get_attrs(self, item_list, user, **kwargs): return result - def serialize(self, obj, attrs, user, **kwargs): + def serialize(self, obj, attrs, user, **kwargs) -> DashboardWidgetQueryResponse: return { "id": str(obj.id), "name": obj.name, @@ -185,8 +227,31 @@ def serialize(self, obj, attrs, user, **kwargs): return data +class DashboardFilters(TypedDict, total=False): + release: list[str] + + +class DashboardDetailsResponseOptional(TypedDict, total=False): + environment: list[str] + period: str + utc: str + expired: bool + start: str + end: str + + +class DashboardDetailsResponse(DashboardDetailsResponseOptional): + id: str + title: str + dateCreated: str + createdBy: UserSerializerResponse + widgets: list[DashboardWidgetResponse] + projects: list[int] + filters: DashboardFilters + + @register(Dashboard) -class DashboardDetailsSerializer(Serializer): +class DashboardDetailsModelSerializer(Serializer): def get_attrs(self, item_list, user, **kwargs): result = {} @@ -204,7 +269,7 @@ def get_attrs(self, item_list, user, **kwargs): return result - def serialize(self, obj, attrs, user, **kwargs): 
+ def serialize(self, obj, attrs, user, **kwargs) -> DashboardDetailsResponse: from sentry.api.serializers.rest_framework.base import camel_to_snake_case page_filter_keys = ["environment", "period", "utc"] diff --git a/src/sentry/api/serializers/rest_framework/dashboard.py b/src/sentry/api/serializers/rest_framework/dashboard.py index 32fb120c3d2c84..71f9d0140b7633 100644 --- a/src/sentry/api/serializers/rest_framework/dashboard.py +++ b/src/sentry/api/serializers/rest_framework/dashboard.py @@ -5,6 +5,8 @@ from typing import TypedDict from django.db.models import Max +from drf_spectacular.types import OpenApiTypes +from drf_spectacular.utils import extend_schema_field from rest_framework import serializers from sentry import features, options @@ -96,6 +98,7 @@ def is_table_display_type(display_type): ) +@extend_schema_field(field=OpenApiTypes.OBJECT) class LayoutField(serializers.Field): REQUIRED_KEYS = { "x", @@ -452,19 +455,41 @@ def validate(self, data): class DashboardDetailsSerializer(CamelSnakeSerializer[Dashboard]): # Is a string because output serializers also make it a string. - id = serializers.CharField(required=False) - title = serializers.CharField(required=False, max_length=255) - widgets = DashboardWidgetSerializer(many=True, required=False) - projects = serializers.ListField(child=serializers.IntegerField(), required=False, default=[]) + id = serializers.CharField(required=False, help_text="A dashboard's unique id.") + title = serializers.CharField( + required=False, max_length=255, help_text="The user-defined dashboard title." + ) + widgets = DashboardWidgetSerializer( + many=True, required=False, help_text="A json list of widgets saved in this dashboard." + ) + projects = serializers.ListField( + child=serializers.IntegerField(), + required=False, + default=[], + help_text="The saved projects filter for this dashboard.", + ) environment = serializers.ListField( - child=serializers.CharField(), required=False, allow_null=True + child=serializers.CharField(), + required=False, + allow_null=True, + help_text="The saved environment filter for this dashboard.", + ) + period = serializers.CharField( + required=False, allow_null=True, help_text="The saved time range period for this dashboard." + ) + start = serializers.DateTimeField( + required=False, allow_null=True, help_text="The saved start time for this dashboard." + ) + end = serializers.DateTimeField( + required=False, allow_null=True, help_text="The saved end time for this dashboard." + ) + filters = serializers.DictField( + required=False, help_text="The saved filters for this dashboard." + ) + utc = serializers.BooleanField( + required=False, + help_text="Setting that lets you display saved time range for this dashboard in UTC.", ) - period = serializers.CharField(required=False, allow_null=True) - start = serializers.DateTimeField(required=False, allow_null=True) - end = serializers.DateTimeField(required=False, allow_null=True) - filters = serializers.DictField(required=False) - utc = serializers.BooleanField(required=False) - validate_id = validate_id def validate_projects(self, projects): @@ -727,7 +752,9 @@ def remove_missing_queries(self, widget_id, keep_ids): class DashboardSerializer(DashboardDetailsSerializer): - title = serializers.CharField(required=True, max_length=255) + title = serializers.CharField( + required=True, max_length=255, help_text="The user defined title for this dashboard." 
+ ) def schedule_update_project_configs(dashboard: Dashboard): diff --git a/src/sentry/apidocs/build.py b/src/sentry/apidocs/build.py index 3484c4e965ab57..10075e282612ba 100644 --- a/src/sentry/apidocs/build.py +++ b/src/sentry/apidocs/build.py @@ -104,6 +104,16 @@ def get_old_json_components(filename: str) -> Any: "url": "https://github.com/getsentry/sentry-docs/issues/new/?title=API%20Documentation%20Error:%20/api/integration-platform/&template=api_error_template.md", }, }, + { + "name": "Dashboards", + "x-sidebar-name": "Dashboards", + "description": "Endpoints for Dashboards", + "x-display-description": False, + "externalDocs": { + "description": "Found an error? Let us know.", + "url": "https://github.com/getsentry/sentry-docs/issues/new/?title=API%20Documentation%20Error:%20/api/integration-platform/&template=api_error_template.md", + }, + }, { "name": "Crons", "x-sidebar-name": "Crons", diff --git a/src/sentry/apidocs/examples/dashboard_examples.py b/src/sentry/apidocs/examples/dashboard_examples.py new file mode 100644 index 00000000000000..169b3b8320e1e4 --- /dev/null +++ b/src/sentry/apidocs/examples/dashboard_examples.py @@ -0,0 +1,89 @@ +from drf_spectacular.utils import OpenApiExample + +DASHBOARD_OBJECT = { + "id": "1", + "title": "Dashboard", + "dateCreated": "2024-06-20T14:38:03.498574Z", + "createdBy": { + "id": "1", + "name": "Admin", + "username": "admin", + "email": "admin@sentry.io", + "avatarUrl": "www.example.com", + "isActive": True, + "hasPasswordAuth": True, + "isManaged": False, + "dateJoined": "2021-10-25T17:07:33.190596Z", + "lastLogin": "2024-07-16T15:28:39.261659Z", + "has2fa": True, + "lastActive": "2024-07-16T20:45:49.364197Z", + "isSuperuser": False, + "isStaff": False, + "experiments": {}, + "emails": [{"id": "1", "email": "admin@sentry.io", "is_verified": True}], + "avatar": { + "avatarType": "letter_avatar", + "avatarUuid": None, + "avatarUrl": "www.example.com", + }, + }, + "widgets": [ + { + "id": "658714", + "title": "Custom Widget", + "description": None, + "displayType": "table", + "thresholds": None, + "interval": "5m", + "dateCreated": "2024-07-16T15:36:46.048343Z", + "dashboardId": "1", + "queries": [ + { + "id": "1", + "name": "", + "fields": ["avg(transaction.duration)", "transaction"], + "aggregates": ["avg(transaction.duration)"], + "columns": ["transaction"], + "fieldAliases": ["", ""], + "conditions": "", + "orderby": "-avg(transaction.duration)", + "widgetId": "1", + "onDemand": [ + { + "enabled": False, + "extractionState": "disabled:not-applicable", + "dashboardWidgetQueryId": 1, + } + ], + "isHidden": False, + } + ], + "limit": None, + "widgetType": "discover", + "layout": {"w": 2, "y": 0, "h": 2, "minH": 2, "x": 0}, + } + ], + "projects": [1], + "filters": {}, + "period": "7d", +} + + +class DashboardExamples: + DASHBOARD_GET_RESPONSE = [ + OpenApiExample( + "Dashboard GET response", + value=DASHBOARD_OBJECT, + status_codes=["200"], + response_only=True, + ) + ] + + DASHBOARD_PUT_RESPONSE = [ + OpenApiExample( + "Dashboard PUT response", + value=DASHBOARD_OBJECT, + status_codes=["200"], + response_only=True, + ) + ] diff --git a/src/sentry/apidocs/parameters.py b/src/sentry/apidocs/parameters.py index 979b7f81681409..9d6907c6ba1302 100644 --- a/src/sentry/apidocs/parameters.py +++ b/src/sentry/apidocs/parameters.py @@ -599,3 +599,13 @@ class SessionsParams: type=int, description="""Specify `0` to exclude series from the response. 
The default is `1`""", ) + + +class DashboardParams: + DASHBOARD_ID = OpenApiParameter( + name="dashboard_id", + location="path", + required=True, + type=int, + description="""The ID of the dashboard you'd like to retrieve.""", + ) From 6065a6d3bd044bfe162ea191b0fea2106a7a83cd Mon Sep 17 00:00:00 2001 From: Matthew T <20070360+mdtro@users.noreply.github.com> Date: Wed, 24 Jul 2024 10:20:19 -0500 Subject: [PATCH 058/126] ref/fix: more robust signature verification (#74541) Fix for H1 [2547600](https://hackerone.com/reports/2547600). Increase the robustness of the GitHub Enterprise integration's webhook signature handling. - Add support for SHA256 signature headers - Verify the signature value looks as expected before continuing - Support for legacy GitHub Enterprise installations that omit the header by setting their host value in an option (not the most secure, but we will deprecate use of this legacy method very soon) --- .../integrations/github_enterprise/webhook.py | 171 +++++++++++++--- .../github_enterprise/test_webhooks.py | 188 +++++++++++++++++- 2 files changed, 323 insertions(+), 36 deletions(-) diff --git a/src/sentry/integrations/github_enterprise/webhook.py b/src/sentry/integrations/github_enterprise/webhook.py index 6e699aacd6ab7d..940aae34891f8b 100644 --- a/src/sentry/integrations/github_enterprise/webhook.py +++ b/src/sentry/integrations/github_enterprise/webhook.py @@ -3,14 +3,17 @@ import hashlib import hmac import logging +import re import orjson +import sentry_sdk from django.http import HttpRequest, HttpResponse from django.utils.crypto import constant_time_compare from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from rest_framework.request import Request +from sentry import options from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.integrations.github.webhook import ( @@ -30,11 +33,42 @@ from sentry.integrations.services.integration import integration_service from sentry.integrations.services.integration.model import RpcIntegration +SHA1_PATTERN = r"^sha1=[0-9a-fA-F]{40}$" +SHA256_PATTERN = r"^sha256=[0-9a-fA-F]{64}$" + +INVALID_SIGNATURE_ERROR = "Provided signature does not match the computed body signature" +MALFORMED_SIGNATURE_ERROR = "Signature value does not match the expected format" +UNSUPPORTED_SIGNATURE_ALGORITHM_ERROR = "Signature algorithm is unsupported" +MISSING_WEBHOOK_PAYLOAD_ERROR = "Webhook payload not found" +MISSING_GITHUB_ENTERPRISE_HOST_ERROR = "Missing X-GitHub-Enterprise-Host header" +MISSING_GITHUB_EVENT_HEADER_ERROR = "Missing X-GitHub-Event header" +MISSING_SIGNATURE_HEADERS_ERROR = "Missing headers X-Hub-Signature-256 or X-Hub-Signature" + + +class MissingRequiredHeaderError(Exception): + pass + + +class MissingWebhookPayloadError(Exception): + """Webhook payload not found""" + + +class InvalidSignatureError(Exception): + """Provided signature does not match the computed body signature""" + + +class MalformedSignatureError(Exception): + """Signature value does not match the expected format""" + + +class UnsupportedSignatureAlgorithmError(Exception): + """Signature algorithm is unsupported""" + def get_host(request: HttpRequest) -> str | None: # XXX: There's lots of customers that are giving us an IP rather than a host name # Use HTTP_X_REAL_IP in a follow up PR (#42405) - return request.META.get("HTTP_X_GITHUB_ENTERPRISE_HOST") + return request.headers.get("x-github-enterprise-host") def get_installation_metadata(event, 
host): @@ -95,11 +129,18 @@ def get_handler(self, event_type): return self._handlers.get(event_type) def is_valid_signature(self, method, body, secret, signature): - if method != "sha1": - raise NotImplementedError(f"signature method {method} is not supported") - expected = hmac.new( - key=secret.encode("utf-8"), msg=body, digestmod=hashlib.sha1 - ).hexdigest() + if method != "sha1" and method != "sha256": + raise UnsupportedSignatureAlgorithmError() + + if method == "sha256": + expected = hmac.new( + key=secret.encode("utf-8"), msg=body, digestmod=hashlib.sha256 + ).hexdigest() + else: + expected = hmac.new( + key=secret.encode("utf-8"), msg=body, digestmod=hashlib.sha1 + ).hexdigest() + return constant_time_compare(expected, signature) @method_decorator(csrf_exempt) @@ -120,28 +161,38 @@ def handle(self, request: Request) -> HttpResponse: clear_tags_and_context() scope = Scope.get_isolation_scope() - meta = request.META - host = get_host(request=request) - if not host: - logger.warning("github_enterprise.webhook.missing-enterprise-host") - logger.error("Missing enterprise host.") - return HttpResponse(status=400) + try: + host = get_host(request=request) + if not host: + raise MissingRequiredHeaderError() + except MissingRequiredHeaderError as e: + logger.exception("github_enterprise.webhook.missing-enterprise-host") + sentry_sdk.capture_exception(e) + return HttpResponse(MISSING_GITHUB_ENTERPRISE_HOST_ERROR, status=400) extra = {"host": host} # If we do tag the host early we can't even investigate scope.set_tag("host", host) - body = bytes(request.body) - if not body: + try: + body = bytes(request.body) + if len(body) == 0: + raise MissingWebhookPayloadError() + except MissingWebhookPayloadError as e: logger.warning("github_enterprise.webhook.missing-body", extra=extra) - return HttpResponse(status=400) + sentry_sdk.capture_exception(e) + return HttpResponse(MISSING_WEBHOOK_PAYLOAD_ERROR, status=400) try: - handler = self.get_handler(meta["HTTP_X_GITHUB_EVENT"]) - except KeyError: - logger.warning("github_enterprise.webhook.missing-event", extra=extra) - logger.exception("Missing Github event in webhook.") - return HttpResponse(status=400) + github_event = request.headers.get("x-github-event") + if not github_event: + raise MissingRequiredHeaderError() + + handler = self.get_handler(github_event) + except MissingRequiredHeaderError as e: + logger.exception("github_enterprise.webhook.missing-event", extra=extra) + sentry_sdk.capture_exception(e) + return HttpResponse(MISSING_GITHUB_EVENT_HEADER_ERROR, status=400) if not handler: return HttpResponse(status=204) @@ -165,16 +216,76 @@ def handle(self, request: Request) -> HttpResponse: return HttpResponse(status=400) try: - # Attempt to validate the signature. Older versions of - # GitHub Enterprise do not send the signature so this is an optional step. 
- method, signature = meta["HTTP_X_HUB_SIGNATURE"].split("=", 1) - if not self.is_valid_signature(method, body, secret, signature): - logger.warning("github_enterprise.webhook.invalid-signature", extra=extra) - return HttpResponse(status=401) - except (KeyError, IndexError) as e: - extra["error"] = str(e) - logger.info("github_enterprise.webhook.missing-signature", extra=extra) - logger.exception("Missing webhook secret.") + sha256_signature = request.headers.get("x-hub-signature-256") + sha1_signature = request.headers.get("x-hub-signature") + + if not sha256_signature and not sha1_signature: + raise MissingRequiredHeaderError() + + if sha256_signature: + if not re.match(SHA256_PATTERN, sha256_signature): + # before we try to parse the parts of the signature, make sure it + # looks as expected to avoid any IndexErrors when we split it + raise MalformedSignatureError() + + _, signature = sha256_signature.split("=", 1) + extra["signature_algorithm"] = "sha256" + is_valid = self.is_valid_signature("sha256", body, secret, signature) + if not is_valid: + raise InvalidSignatureError() + + if sha1_signature: + if not re.match(SHA1_PATTERN, sha1_signature): + # before we try to parse the parts of the signature, make sure it + # looks as expected to avoid any IndexErrors when we split it + raise MalformedSignatureError() + + _, signature = sha1_signature.split("=", 1) + is_valid = self.is_valid_signature("sha1", body, secret, signature) + extra["signature_algorithm"] = "sha1" + if not is_valid: + raise InvalidSignatureError() + + except InvalidSignatureError as e: + logger.warning("github_enterprise.webhook.invalid-signature", extra=extra) + sentry_sdk.capture_exception(e) + + return HttpResponse(INVALID_SIGNATURE_ERROR, status=401) + except UnsupportedSignatureAlgorithmError as e: + # we should never end up here with the regex checks above on the signature format, + # but just in case + logger.exception( + "github-enterprise-app.webhook.unsupported-signature-algorithm", + extra=extra, + ) + sentry_sdk.capture_exception(e) + return HttpResponse(UNSUPPORTED_SIGNATURE_ALGORITHM_ERROR, 400) + + except MissingRequiredHeaderError as e: + # older versions of GitHub 2.14.0 and older do not always send signature headers + # Setting a signature secret is optional in GitHub, but we require it on Sentry + # Only a small subset of legacy hosts are allowed to skip the signature verification + # at the moment. + + allowed_legacy_hosts = options.get( + "github-enterprise-app.allowed-hosts-legacy-webhooks" + ) + + if host not in allowed_legacy_hosts: + # the host is not allowed to skip signature verification by omitting the headers + logger.warning("github_enterprise.webhook.missing-signature", extra=extra) + sentry_sdk.capture_exception(e) + return HttpResponse(MISSING_SIGNATURE_HEADERS_ERROR, status=400) + else: + # the host is allowed to skip signature verification + # log it, and continue on. 
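In outline, the check this handler performs: GitHub Enterprise sends an HMAC of the raw request body, keyed by the shared webhook secret, in `X-Hub-Signature-256: sha256=<hex>` (or the legacy `X-Hub-Signature: sha1=<hex>`). A minimal standalone sketch of that verification, assuming `body` and `secret` are already in hand:

    import hashlib
    import hmac
    import re

    def is_valid_sha256_signature(body: bytes, secret: str, signature_header: str) -> bool:
        # Reject anything that does not look like "sha256=<64 hex chars>" before splitting it.
        if not re.match(r"^sha256=[0-9a-fA-F]{64}$", signature_header):
            return False
        expected = "sha256=" + hmac.new(secret.encode("utf-8"), body, hashlib.sha256).hexdigest()
        # Constant-time comparison avoids leaking timing information about the secret.
        return hmac.compare_digest(expected, signature_header)
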
+ logger.info("github_enterprise.webhook.allowed-missing-signature", extra=extra) + + except (MalformedSignatureError, IndexError) as e: + logger.warning("github_enterprise.webhook.malformed-signature", extra=extra) + sentry_sdk.capture_exception(e) + return HttpResponse(MALFORMED_SIGNATURE_ERROR, status=400) + handler()(event, host) return HttpResponse(status=204) diff --git a/tests/sentry/integrations/github_enterprise/test_webhooks.py b/tests/sentry/integrations/github_enterprise/test_webhooks.py index fd70ec36926c91..4a77456e42ba00 100644 --- a/tests/sentry/integrations/github_enterprise/test_webhooks.py +++ b/tests/sentry/integrations/github_enterprise/test_webhooks.py @@ -15,6 +15,7 @@ from sentry.models.pullrequest import PullRequest from sentry.models.repository import Repository from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers import override_options class WebhookTest(APITestCase): @@ -58,6 +59,50 @@ def test_unregistered_event(self): ) assert response.status_code == 204 + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_missing_payload(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE="sha1=33521abeaaf9a57c2abf486e0ccd54d23cf36fec", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + assert b"Webhook payload not found" in response.content + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_missing_github_event_header(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE="sha1=33521abeaaf9a57c2abf486e0ccd54d23cf36fec", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + assert b"Missing X-GitHub-Event header" in response.content + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_invalid_json(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=b'{"some_key": "value"', # missing closing bracket + content_type="application/json", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE="sha1=33521abeaaf9a57c2abf486e0ccd54d23cf36fec", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") def test_invalid_signature_event(self, mock_installation): mock_installation.return_value = self.metadata @@ -72,10 +117,107 @@ def test_invalid_signature_event(self, mock_installation): HTTP_X_GITHUB_DELIVERY=str(uuid4()), ) assert response.status_code == 401 + assert b"Provided signature does not match the computed body signature" in response.content + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_malformed_signature_too_short_sha1(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", 
+ HTTP_X_HUB_SIGNATURE="sha1=33521a2abfcf36fec", # hash is too short + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + assert b"Signature value does not match the expected format" in response.content + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_malformed_signature_no_value_sha1(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE="sha1=", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + assert b"Signature value does not match the expected format" in response.content + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_malformed_signature_too_short_sha256(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE_256="sha256=33521a2abfcf36fec", # hash is too short + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + assert b"Signature value does not match the expected format" in response.content + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_malformed_signature_no_value_sha256(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE_256="sha256=", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + assert b"Signature value does not match the expected format" in response.content + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_sha256_signature_ok(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE_256="sha256=7fb2fed663d2f386f29c1cff8980e11738a435b7e3c9332c1ab1fcc870f8964b", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 204 + + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_sha256_signature_invalid(self, mock_installation): + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_HUB_SIGNATURE_256="sha256=7fb2fed663d2f386f29c1cff8980e11738a435b7e3c9332c1ab1fcc870f8abcd", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 401 + assert b"Provided signature does not match the computed body signature" in response.content @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + @override_options({"github-enterprise-app.allowed-hosts-legacy-webhooks": ["35.232.149.196"]}) def 
test_missing_signature_ok(self, mock_installation): - # Old Github:e doesn't send a signature, so we have to accept that. + # Old Github:e doesn't send a signature, so we have to accept that, but only for specific hosts. mock_installation.return_value = self.metadata response = self.client.post( @@ -88,6 +230,22 @@ def test_missing_signature_ok(self, mock_installation): ) assert response.status_code == 204 + @patch("sentry.integrations.github_enterprise.webhook.get_installation_metadata") + def test_missing_signature_fail_without_option_set(self, mock_installation): + # Old Github:e doesn't send a signature, so we have to accept that, but only for specific hosts. + mock_installation.return_value = self.metadata + + response = self.client.post( + path=self.url, + data=PUSH_EVENT_EXAMPLE_INSTALLATION, + content_type="application/json", + HTTP_X_GITHUB_EVENT="push", + HTTP_X_GITHUB_ENTERPRISE_HOST="35.232.149.196", + HTTP_X_GITHUB_DELIVERY=str(uuid4()), + ) + assert response.status_code == 400 + assert b"Missing headers X-Hub-Signature-256 or X-Hub-Signature" in response.content + class PushEventWebhookTest(APITestCase): def setUp(self): @@ -127,7 +285,11 @@ def test_simple(self, mock_get_installation_metadata, mock_get_jwt): metadata={ "domain_name": "35.232.149.196/baxterthehacker", "installation_id": "12345", - "installation": {"id": "2", "private_key": "private_key", "verify_ssl": True}, + "installation": { + "id": "2", + "private_key": "private_key", + "verify_ssl": True, + }, }, ) @@ -182,7 +344,11 @@ def test_anonymous_lookup(self, mock_get_installation_metadata): name="octocat", metadata={ "domain_name": "35.232.149.196/baxterthehacker", - "installation": {"id": "2", "private_key": "private_key", "verify_ssl": True}, + "installation": { + "id": "2", + "private_key": "private_key", + "verify_ssl": True, + }, }, ) @@ -250,7 +416,11 @@ def test_multiple_orgs(self, mock_get_installation_metadata, mock_get_jwt): metadata={ "domain_name": "35.232.149.196/baxterthehacker", "installation_id": "12345", - "installation": {"id": "2", "private_key": "private_key", "verify_ssl": True}, + "installation": { + "id": "2", + "private_key": "private_key", + "verify_ssl": True, + }, }, ) @@ -325,7 +495,11 @@ def setUp(self): name="octocat", metadata={ "domain_name": "35.232.149.196/baxterthehacker", - "installation": {"id": "2", "private_key": "private_key", "verify_ssl": True}, + "installation": { + "id": "2", + "private_key": "private_key", + "verify_ssl": True, + }, }, ) self.repo = Repository.objects.create( @@ -369,7 +543,9 @@ def test_edited(self, mock_get_installation_metadata): mock_get_installation_metadata.return_value = self.metadata pr = PullRequest.objects.create( - key="1", repository_id=self.repo.id, organization_id=self.project.organization.id + key="1", + repository_id=self.repo.id, + organization_id=self.project.organization.id, ) response = self.client.post( From a9de22ba49f7bd524ce31530963355eb95656971 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 24 Jul 2024 15:26:12 +0000 Subject: [PATCH 059/126] Revert "ref: add threadId to continuous profile link (#74730)" This reverts commit 6e764301ff87f120706a699a151f56e5cf3d8446. 
Co-authored-by: JonasBa <9317857+JonasBa@users.noreply.github.com> --- .../traceApi/useTransaction.tsx | 4 ++-- .../traceDrawer/details/span/index.tsx | 14 +++++------ .../traceDrawer/details/styles.tsx | 23 +------------------ .../traceDrawer/traceProfilingLink.spec.tsx | 3 --- .../traceDrawer/traceProfilingLink.ts | 11 +++------ .../newTraceDetails/traceModels/traceTree.tsx | 15 ++++++------ 6 files changed, 20 insertions(+), 50 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx b/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx index 814b4d1e1f321b..39c2736c951daa 100644 --- a/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx +++ b/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx @@ -6,14 +6,14 @@ import type { } from 'sentry/views/performance/newTraceDetails/traceModels/traceTree'; interface UseTransactionProps { - node: TraceTreeNode | null; + node: TraceTreeNode; organization: Organization; } export function useTransaction(props: UseTransactionProps) { return useApiQuery( [ - `/organizations/${props.organization.slug}/events/${props.node?.value?.project_slug}:${props?.node?.value.event_id}/`, + `/organizations/${props.organization.slug}/events/${props.node.value.project_slug}:${props.node.value.event_id}/`, { query: { referrer: 'trace-details-summary', diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx index 749ecae9c4b4c7..9a447a1e087171 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx @@ -76,12 +76,12 @@ export function SpanNodeDetails({ }: TraceTreeNodeDetailsProps>) { const location = useLocation(); const {projects} = useProjects(); + const {event} = node.value; const issues = useMemo(() => { return [...node.errors, ...node.performance_issues]; }, [node.errors, node.performance_issues]); - - const project = projects.find(proj => proj.slug === node.value.event?.projectSlug); - const profileId = node.value.event?.contexts?.profile?.profile_id ?? null; + const project = projects.find(proj => proj.slug === event?.projectSlug); + const profileId = event?.contexts?.profile?.profile_id ?? null; return ( @@ -91,10 +91,10 @@ export function SpanNodeDetails({ project={project} onTabScrollToNode={onTabScrollToNode} /> - {node.value.event.projectSlug ? ( + {event.projectSlug ? ( @@ -129,9 +129,9 @@ export function SpanNodeDetails({ startTimestamp={node.value.start_timestamp} /> - + {organization.features.includes('profiling') ? 
( - + ) : null} )} diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx index 662436766af8a3..bfb6cb3289a47a 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx @@ -38,7 +38,6 @@ import { isTransactionNode, } from 'sentry/views/performance/newTraceDetails/guards'; import {traceAnalytics} from 'sentry/views/performance/newTraceDetails/traceAnalytics'; -import {useTransaction} from 'sentry/views/performance/newTraceDetails/traceApi/useTransaction'; import {makeTraceContinuousProfilingLink} from 'sentry/views/performance/newTraceDetails/traceDrawer/traceProfilingLink'; import type { MissingInstrumentationNode, @@ -330,21 +329,6 @@ const ValueTd = styled('td')` position: relative; `; -function getThreadIdFromNode( - node: TraceTreeNode, - transaction: EventTransaction | undefined -): string | undefined { - if (isSpanNode(node) && node.value.data['thread.id']) { - return node.value.data['thread.id']; - } - - if (transaction) { - return transaction.context?.trace?.data?.['thread.id']; - } - - return undefined; -} - function NodeActions(props: { node: TraceTreeNode; onTabScrollToNode: ( @@ -425,17 +409,12 @@ function NodeActions(props: { return ''; }, [props]); - const {data: transaction} = useTransaction({ - node: isTransactionNode(props.node) ? props.node : null, - organization, - }); - const params = useParams<{traceSlug?: string}>(); + const profileLink = makeTraceContinuousProfilingLink(props.node, profilerId, { orgSlug: props.organization.slug, projectSlug: props.node.metadata.project_slug ?? '', traceId: params.traceSlug ?? 
'', - threadId: getThreadIdFromNode(props.node, transaction), }); return ( diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx index e8aeaff7be5414..02789a716df6a8 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx @@ -37,7 +37,6 @@ describe('traceProfilingLink', () => { projectSlug: 'project', orgSlug: '', traceId: '', - threadId: '0', }) ).toBeNull(); }); @@ -48,7 +47,6 @@ describe('traceProfilingLink', () => { projectSlug: '', orgSlug: 'sentry', traceId: '', - threadId: '0', }) ).toBeNull(); }); @@ -86,7 +84,6 @@ describe('traceProfilingLink', () => { projectSlug: 'project', orgSlug: 'sentry', traceId: 'trace', - threadId: '0', } ); diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts index 03bc9456c67c73..1674a6ef9121d4 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts +++ b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts @@ -51,7 +51,6 @@ export function makeTraceContinuousProfilingLink( options: { orgSlug: string; projectSlug: string; - threadId: string | undefined; traceId: string; }, query: Location['query'] = {} @@ -95,19 +94,15 @@ export function makeTraceContinuousProfilingLink( return null; } - const queryWithEventData: Record = { + const queryWithSpanIdAndTraceId: Record = { ...query, eventId, traceId: options.traceId, }; - if (typeof options.threadId === 'string') { - queryWithEventData.tid = options.threadId; - } - const spanId = getNodeId(node); if (spanId) { - queryWithEventData.spanId = spanId; + queryWithSpanIdAndTraceId.spanId = spanId; } return generateContinuousProfileFlamechartRouteWithQuery( @@ -116,6 +111,6 @@ export function makeTraceContinuousProfilingLink( profilerId, start.toISOString(), end.toISOString(), - queryWithEventData + queryWithSpanIdAndTraceId ); } diff --git a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx index 9f7092a1aac17e..ab34a6ab7e326b 100644 --- a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx +++ b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx @@ -119,12 +119,11 @@ import {TraceType} from '../traceType'; type ArgumentTypes = F extends (...args: infer A) => any ? 
A : never; export declare namespace TraceTree { - interface RawSpan extends RawSpanType {} interface Transaction extends TraceFullDetailed { profiler_id: string; sdk_name: string; } - interface Span extends RawSpan { + interface Span extends RawSpanType { childTransactions: TraceTreeNode[]; event: EventTransaction; measurements?: Record; @@ -139,14 +138,14 @@ export declare namespace TraceTree { timestamp: number; type: 'missing_instrumentation'; } - interface SiblingAutogroup extends RawSpan { + interface SiblingAutogroup extends RawSpanType { autogrouped_by: { description: string; op: string; }; } - interface ChildrenAutogroup extends RawSpan { + interface ChildrenAutogroup extends RawSpanType { autogrouped_by: { op: string; }; @@ -867,7 +866,7 @@ export class TraceTree { static FromSpans( parent: TraceTreeNode, data: Event, - spans: TraceTree.RawSpan[], + spans: RawSpanType[], options: {sdk: string | undefined} | undefined ): [TraceTreeNode, [number, number] | null] { parent.invalidate(parent); @@ -878,7 +877,7 @@ export class TraceTree { const parentIsSpan = isSpanNode(parent); const lookuptable: Record< - TraceTree.RawSpan['span_id'], + RawSpanType['span_id'], TraceTreeNode > = {}; @@ -2491,7 +2490,7 @@ export function computeAutogroupedBarSegments( // Returns a list of errors related to the txn with ids matching the span id function getRelatedSpanErrorsFromTransaction( - span: TraceTree.RawSpan, + span: RawSpanType, node?: TraceTreeNode ): TraceErrorType[] { if (!node || !node.value || !isTransactionNode(node)) { @@ -2513,7 +2512,7 @@ function getRelatedSpanErrorsFromTransaction( // Returns a list of performance errors related to the txn with ids matching the span id function getRelatedPerformanceIssuesFromTransaction( - span: TraceTree.RawSpan, + span: RawSpanType, node?: TraceTreeNode ): TraceTree.TracePerformanceIssue[] { if (!node || !node.value || !isTransactionNode(node)) { From 39e7488967b4c7463899f5dadd96e24355e9f014 Mon Sep 17 00:00:00 2001 From: William Mak Date: Wed, 24 Jul 2024 11:33:36 -0400 Subject: [PATCH 060/126] ref(snubaparams): Move spans perf to SnubaParams (#74651) - This updates the events_spans_performance endpoint to use SnubaParams - Not updating event stats code just yet, going to do that in a separate pass --- .../organization_events_spans_performance.py | 38 +++++++++++-------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/src/sentry/api/endpoints/organization_events_spans_performance.py b/src/sentry/api/endpoints/organization_events_spans_performance.py index 97220cc1a75d2f..384147a06cc6e0 100644 --- a/src/sentry/api/endpoints/organization_events_spans_performance.py +++ b/src/sentry/api/endpoints/organization_events_spans_performance.py @@ -26,7 +26,7 @@ from sentry.search.events.builder.base import BaseQueryBuilder from sentry.search.events.builder.discover import DiscoverQueryBuilder, TimeseriesQueryBuilder from sentry.search.events.datasets.discover import DiscoverDatasetConfig -from sentry.search.events.types import ParamsType, QueryBuilderConfig, Span +from sentry.search.events.types import ParamsType, QueryBuilderConfig, SnubaParams, Span from sentry.snuba import discover from sentry.snuba.dataset import Dataset from sentry.utils.cursors import Cursor, CursorResult @@ -87,15 +87,19 @@ class SpanPerformanceColumn: class OrganizationEventsSpansEndpointBase(OrganizationEventsV2EndpointBase): - def get_snuba_params( + def get_snuba_dataclass( self, request: Request, organization: Organization, check_global_views: bool = True - ) -> 
ParamsType: - params = super().get_snuba_params(request, organization, check_global_views) + ) -> tuple[SnubaParams, ParamsType]: + snuba_params, params = super().get_snuba_dataclass( + request, organization, check_global_views + ) if len(params.get("project_id", [])) != 1: raise ParseError(detail="You must specify exactly 1 project.") + if len(snuba_params.project_ids) != 1: + raise ParseError(detail="You must specify exactly 1 project.") - return params + return snuba_params, params def get_orderby_column(self, request: Request) -> tuple[str, str]: orderbys = super().get_orderby(request) @@ -157,7 +161,7 @@ class OrganizationEventsSpansPerformanceEndpoint(OrganizationEventsSpansEndpoint def get(self, request: Request, organization: Organization) -> Response: try: - params = self.get_snuba_params(request, organization) + snuba_params, _ = self.get_snuba_dataclass(request, organization) except NoProjects: return Response(status=404) @@ -178,7 +182,7 @@ def get(self, request: Request, organization: Organization) -> Response: def data_fn(offset: int, limit: int) -> Any: suspects = query_suspect_span_groups( - params, + snuba_params, fields, query, span_ops, @@ -235,7 +239,7 @@ class OrganizationEventsSpansExamplesEndpoint(OrganizationEventsSpansEndpointBas def get(self, request: Request, organization: Organization) -> Response: try: - params = self.get_snuba_params(request, organization) + snuba_params, _ = self.get_snuba_dataclass(request, organization) except NoProjects: return Response(status=404) @@ -253,7 +257,7 @@ def get(self, request: Request, organization: Organization) -> Response: def data_fn(offset: int, limit: int) -> Any: example_transactions = query_example_transactions( - params, + snuba_params, query, direction, orderby_column, @@ -328,7 +332,7 @@ def get(self, request: Request, organization: Organization) -> Response: def get_event_stats( query_columns: Sequence[str], query: str, - params: dict[str, str], + params: ParamsType, rollup: int, zerofill_results: bool, comparison_delta: datetime | None = None, @@ -470,7 +474,7 @@ class EventID: def query_suspect_span_groups( - params: ParamsType, + snuba_params: SnubaParams, fields: list[str], query: str | None, span_ops: list[str] | None, @@ -504,7 +508,8 @@ def query_suspect_span_groups( builder = DiscoverQueryBuilder( dataset=Dataset.Discover, - params=params, + params={}, + snuba_params=snuba_params, selected_columns=selected_columns, equations=equations, query=query, @@ -576,7 +581,7 @@ def query_suspect_span_groups( op=suspect["array_join_spans_op"], group=suspect["array_join_spans_group"], description=get_span_description( - EventID(params["project_id"][0], suspect["any_id"]), + EventID(snuba_params.project_ids[0], suspect["any_id"]), span_op=suspect["array_join_spans_op"], span_group=suspect["array_join_spans_group"], ), @@ -648,7 +653,7 @@ def resolve_span_function( def query_example_transactions( - params: ParamsType, + snuba_params: SnubaParams, query: str | None, direction: str, orderby: str, @@ -669,7 +674,8 @@ def query_example_transactions( builder = SpanQueryBuilder( dataset=Dataset.Discover, - params=params, + params={}, + snuba_params=snuba_params, selected_columns=selected_columns, query=query, orderby=[], @@ -712,7 +718,7 @@ def query_example_transactions( examples: dict[Span, list[EventID]] = {Span(span.op, span.group): []} for example in results["data"]: - value = EventID(params["project_id"][0], example["id"]) + value = EventID(snuba_params.project_ids[0], example["id"]) examples[span].append(value) return 
examples From 8fb9a674c0baa2e6bbc12f7efcfd0dc0c4f1c714 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 Jul 2024 10:33:49 -0500 Subject: [PATCH 061/126] chore(deps): bump django from 5.0.6 to 5.0.7 (#74662) Bumps [django](https://github.com/django/django) from 5.0.6 to 5.0.7.
Commits
  • deec9b9 [5.0.x] Bumped version for 5.0.7 release.
  • 3a7bf7f [5.0.x] Made cosmetic edits to 5.0.7 release notes.
  • 8e7a44e [5.0.x] Fixed CVE-2024-39614 -- Mitigated potential DoS in get_supported_lang...
  • 9f4f63e [5.0.x] Fixed CVE-2024-39330 -- Added extra file name validation in Storage's...
  • 07cefde [5.0.x] Fixed CVE-2024-39329 -- Standarized timing of verify_password() when ...
  • 7285644 [5.0.x] Fixed CVE-2024-38875 -- Mitigated potential DoS in urlize and urlizet...
  • 8303400 [5.0.x] Fixed #35506 -- Clarified initial references to URLconf in tutorial 1.
  • c76089b [5.0.x] Refs #35560 -- Corrected CheckConstraint argument name in model_field...
  • 43aa0c1 [5.0.x] Removed outdated note about limitations in Clickjacking protection.
  • 0602fc2 [5.0.x] Fixed #35560 -- Made Model.full_clean() ignore GeneratedFields for co...
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=django&package-manager=pip&previous-version=5.0.6&new-version=5.0.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 6e5a0b4586c5ce..39930056e1df5c 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -38,7 +38,7 @@ cssutils==2.9.0 datadog==0.49.1 distlib==0.3.8 distro==1.8.0 -django==5.0.6 +django==5.0.7 django-crispy-forms==1.14.0 django-csp==3.8 django-pg-zero-downtime-migrations==0.13 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 3f84fc78948450..151751da11ffdf 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -32,7 +32,7 @@ cssselect==1.0.3 cssutils==2.9.0 datadog==0.49.1 distro==1.8.0 -django==5.0.6 +django==5.0.7 django-crispy-forms==1.14.0 django-csp==3.8 django-pg-zero-downtime-migrations==0.13 From 571bb58048519dfdadf2a7dc77e7cd4d711e6b0e Mon Sep 17 00:00:00 2001 From: Alexander Tarasov Date: Wed, 24 Jul 2024 17:49:08 +0200 Subject: [PATCH 062/126] feat(security): allow custom `salt` value in `utils.signing` (#74839) This will allow to use a custom `salt` value for sign/unsign operations, while maintaining backward compatibility during a transitional period (default max_age is 2 days). --- src/sentry/utils/signing.py | 38 +++++++++++++++++++++++++----- tests/sentry/utils/test_signing.py | 28 ++++++++++++++++++++++ 2 files changed, 60 insertions(+), 6 deletions(-) create mode 100644 tests/sentry/utils/test_signing.py diff --git a/src/sentry/utils/signing.py b/src/sentry/utils/signing.py index a210a92c20c5e6..7ad4a215514744 100644 --- a/src/sentry/utils/signing.py +++ b/src/sentry/utils/signing.py @@ -4,9 +4,10 @@ import base64 -from django.core.signing import TimestampSigner +from django.core.signing import BadSignature, TimestampSigner from django.utils.encoding import force_bytes, force_str +from sentry.utils import metrics from sentry.utils.json import dumps, loads SALT = "sentry-generic-signing" @@ -17,20 +18,45 @@ def sign(**kwargs): Signs all passed kwargs and produces a base64 string which may be passed to unsign which will verify the string has not been tampered with. """ + salt = SALT + if "salt" in kwargs: + salt = kwargs["salt"] + del kwargs["salt"] + return force_str( base64.urlsafe_b64encode( - TimestampSigner(salt=SALT).sign(dumps(kwargs)).encode("utf-8") + TimestampSigner(salt=salt).sign(dumps(kwargs)).encode("utf-8") ).rstrip(b"=") ) -def unsign(data, max_age=60 * 60 * 24 * 2): +def unsign(data, salt=SALT, max_age=60 * 60 * 24 * 2): """ Unsign a signed base64 string. 
Accepts the base64 value as a string or bytes """ - return loads( - TimestampSigner(salt=SALT).unsign(urlsafe_b64decode(data).decode("utf-8"), max_age=max_age) - ) + if salt == SALT: + return loads( + TimestampSigner(salt=SALT).unsign( + urlsafe_b64decode(data).decode("utf-8"), max_age=max_age + ) + ) + + result = None + try: + result = loads( + TimestampSigner(salt=salt).unsign( + urlsafe_b64decode(data).decode("utf-8"), max_age=max_age + ) + ) + except BadSignature: + result = loads( + TimestampSigner(salt=SALT).unsign( + urlsafe_b64decode(data).decode("utf-8"), max_age=max_age + ) + ) + + metrics.incr("utils.signing.salt_compatibility_mode", tags={"salt": salt}) + return result def urlsafe_b64decode(b64string): diff --git a/tests/sentry/utils/test_signing.py b/tests/sentry/utils/test_signing.py new file mode 100644 index 00000000000000..a039545895b59a --- /dev/null +++ b/tests/sentry/utils/test_signing.py @@ -0,0 +1,28 @@ +import pytest +from django.core.signing import BadSignature + +from sentry.testutils.cases import TestCase +from sentry.utils.signing import sign, unsign + + +class SigningTestCase(TestCase): + def test_sign(self): + with self.settings(SECRET_KEY="a"): + # standard case + assert unsign(sign(foo="bar")) == {"foo": "bar"} + + # sign with aaa, unsign with aaa + assert unsign(sign(foo="bar", salt="aaa"), salt="aaa") == {"foo": "bar"} + + # sign with aaa, unsign with bbb + with pytest.raises(BadSignature): + unsign(sign(foo="bar", salt="aaa"), salt="bbb") + + def test_backward_compatible_sign(self): + with self.settings(SECRET_KEY="a"): + # sign with old salt, unsign with new (transitional period) + assert unsign(sign(foo="bar"), salt="new") == {"foo": "bar"} + + # sign with new salt, unsign with old + with pytest.raises(BadSignature): + unsign(sign(foo="bar", salt="new")) From 1a4ad42e0a3332223df6942d96f14ca081a0ae90 Mon Sep 17 00:00:00 2001 From: Jonas Date: Wed, 24 Jul 2024 11:55:20 -0400 Subject: [PATCH 063/126] fix: link to threadId from continuous profile link (#74848) This was reverted as an optional chain operator was missing. 
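Below is a minimal TypeScript sketch of the failure mode behind that revert; the `IllustrativeTraceNode` type and `buildEventsUrl` helper are illustrative stand-ins, not the actual Sentry types or hooks:

```typescript
// A trace node may legitimately be null by the time the URL is built.
type IllustrativeTraceNode = {
  value?: {event_id?: string; project_slug?: string};
} | null;

function buildEventsUrl(orgSlug: string, node: IllustrativeTraceNode): string {
  // `node?.value?.project_slug` is safe; `node.value.project_slug` throws a
  // TypeError when `node` is null, which is what forced the earlier revert.
  const projectSlug = node?.value?.project_slug ?? '';
  const eventId = node?.value?.event_id ?? '';
  return `/organizations/${orgSlug}/events/${projectSlug}:${eventId}/`;
}

buildEventsUrl('sentry', null); // '/organizations/sentry/events/:/' instead of a crash
```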
--- .../events/interfaces/spans/types.tsx | 2 +- .../traceApi/useTransaction.tsx | 4 ++-- .../traceDrawer/details/span/index.tsx | 14 +++++------ .../details/span/sections/keys.tsx | 6 +++++ .../traceDrawer/details/styles.tsx | 23 ++++++++++++++++++- .../traceDrawer/traceProfilingLink.spec.tsx | 3 +++ .../traceDrawer/traceProfilingLink.ts | 11 ++++++--- .../newTraceDetails/traceModels/traceTree.tsx | 15 ++++++------ 8 files changed, 57 insertions(+), 21 deletions(-) diff --git a/static/app/components/events/interfaces/spans/types.tsx b/static/app/components/events/interfaces/spans/types.tsx index df38187d57840a..7a2b916262ee33 100644 --- a/static/app/components/events/interfaces/spans/types.tsx +++ b/static/app/components/events/interfaces/spans/types.tsx @@ -43,13 +43,13 @@ export interface MetricsSummary { } export type RawSpanType = { - data: SpanSourceCodeAttributes & SpanDatabaseAttributes & Record; span_id: string; start_timestamp: number; // this is essentially end_timestamp timestamp: number; trace_id: string; _metrics_summary?: MetricsSummary; + data?: SpanSourceCodeAttributes & SpanDatabaseAttributes & Record; description?: string; exclusive_time?: number; hash?: string; diff --git a/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx b/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx index 39c2736c951daa..814b4d1e1f321b 100644 --- a/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx +++ b/static/app/views/performance/newTraceDetails/traceApi/useTransaction.tsx @@ -6,14 +6,14 @@ import type { } from 'sentry/views/performance/newTraceDetails/traceModels/traceTree'; interface UseTransactionProps { - node: TraceTreeNode; + node: TraceTreeNode | null; organization: Organization; } export function useTransaction(props: UseTransactionProps) { return useApiQuery( [ - `/organizations/${props.organization.slug}/events/${props.node.value.project_slug}:${props.node.value.event_id}/`, + `/organizations/${props.organization.slug}/events/${props.node?.value?.project_slug}:${props?.node?.value.event_id}/`, { query: { referrer: 'trace-details-summary', diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx index 9a447a1e087171..749ecae9c4b4c7 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/index.tsx @@ -76,12 +76,12 @@ export function SpanNodeDetails({ }: TraceTreeNodeDetailsProps>) { const location = useLocation(); const {projects} = useProjects(); - const {event} = node.value; const issues = useMemo(() => { return [...node.errors, ...node.performance_issues]; }, [node.errors, node.performance_issues]); - const project = projects.find(proj => proj.slug === event?.projectSlug); - const profileId = event?.contexts?.profile?.profile_id ?? null; + + const project = projects.find(proj => proj.slug === node.value.event?.projectSlug); + const profileId = node.value.event?.contexts?.profile?.profile_id ?? null; return ( @@ -91,10 +91,10 @@ export function SpanNodeDetails({ project={project} onTabScrollToNode={onTabScrollToNode} /> - {event.projectSlug ? ( + {node.value.event.projectSlug ? ( @@ -129,9 +129,9 @@ export function SpanNodeDetails({ startTimestamp={node.value.start_timestamp} /> - + {organization.features.includes('profiling') ? 
( - + ) : null} )} diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/keys.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/keys.tsx index bfe2733cfd7d54..7b9cd319a1a977 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/keys.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/keys.tsx @@ -57,6 +57,12 @@ function partitionSizes(data: RawSpanType['data']): { nonSizeKeys: {[key: string]: unknown}; sizeKeys: {[key: string]: number}; } { + if (!data) { + return { + sizeKeys: {}, + nonSizeKeys: {}, + }; + } const sizeKeys = SIZE_DATA_KEYS.reduce((keys, key) => { if (data.hasOwnProperty(key) && defined(data[key])) { try { diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx index bfb6cb3289a47a..007d16bf096aaa 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx @@ -38,6 +38,7 @@ import { isTransactionNode, } from 'sentry/views/performance/newTraceDetails/guards'; import {traceAnalytics} from 'sentry/views/performance/newTraceDetails/traceAnalytics'; +import {useTransaction} from 'sentry/views/performance/newTraceDetails/traceApi/useTransaction'; import {makeTraceContinuousProfilingLink} from 'sentry/views/performance/newTraceDetails/traceDrawer/traceProfilingLink'; import type { MissingInstrumentationNode, @@ -329,6 +330,21 @@ const ValueTd = styled('td')` position: relative; `; +function getThreadIdFromNode( + node: TraceTreeNode, + transaction: EventTransaction | undefined +): string | undefined { + if (isSpanNode(node) && node.value.data?.['thread.id']) { + return node.value.data['thread.id']; + } + + if (transaction) { + return transaction.context?.trace?.data?.['thread.id']; + } + + return undefined; +} + function NodeActions(props: { node: TraceTreeNode; onTabScrollToNode: ( @@ -409,12 +425,17 @@ function NodeActions(props: { return ''; }, [props]); - const params = useParams<{traceSlug?: string}>(); + const {data: transaction} = useTransaction({ + node: isTransactionNode(props.node) ? props.node : null, + organization, + }); + const params = useParams<{traceSlug?: string}>(); const profileLink = makeTraceContinuousProfilingLink(props.node, profilerId, { orgSlug: props.organization.slug, projectSlug: props.node.metadata.project_slug ?? '', traceId: params.traceSlug ?? 
'', + threadId: getThreadIdFromNode(props.node, transaction), }); return ( diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx index 02789a716df6a8..e8aeaff7be5414 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.spec.tsx @@ -37,6 +37,7 @@ describe('traceProfilingLink', () => { projectSlug: 'project', orgSlug: '', traceId: '', + threadId: '0', }) ).toBeNull(); }); @@ -47,6 +48,7 @@ describe('traceProfilingLink', () => { projectSlug: '', orgSlug: 'sentry', traceId: '', + threadId: '0', }) ).toBeNull(); }); @@ -84,6 +86,7 @@ describe('traceProfilingLink', () => { projectSlug: 'project', orgSlug: 'sentry', traceId: 'trace', + threadId: '0', } ); diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts index 1674a6ef9121d4..03bc9456c67c73 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts +++ b/static/app/views/performance/newTraceDetails/traceDrawer/traceProfilingLink.ts @@ -51,6 +51,7 @@ export function makeTraceContinuousProfilingLink( options: { orgSlug: string; projectSlug: string; + threadId: string | undefined; traceId: string; }, query: Location['query'] = {} @@ -94,15 +95,19 @@ export function makeTraceContinuousProfilingLink( return null; } - const queryWithSpanIdAndTraceId: Record = { + const queryWithEventData: Record = { ...query, eventId, traceId: options.traceId, }; + if (typeof options.threadId === 'string') { + queryWithEventData.tid = options.threadId; + } + const spanId = getNodeId(node); if (spanId) { - queryWithSpanIdAndTraceId.spanId = spanId; + queryWithEventData.spanId = spanId; } return generateContinuousProfileFlamechartRouteWithQuery( @@ -111,6 +116,6 @@ export function makeTraceContinuousProfilingLink( profilerId, start.toISOString(), end.toISOString(), - queryWithSpanIdAndTraceId + queryWithEventData ); } diff --git a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx index ab34a6ab7e326b..9f7092a1aac17e 100644 --- a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx +++ b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx @@ -119,11 +119,12 @@ import {TraceType} from '../traceType'; type ArgumentTypes = F extends (...args: infer A) => any ? 
A : never; export declare namespace TraceTree { + interface RawSpan extends RawSpanType {} interface Transaction extends TraceFullDetailed { profiler_id: string; sdk_name: string; } - interface Span extends RawSpanType { + interface Span extends RawSpan { childTransactions: TraceTreeNode[]; event: EventTransaction; measurements?: Record; @@ -138,14 +139,14 @@ export declare namespace TraceTree { timestamp: number; type: 'missing_instrumentation'; } - interface SiblingAutogroup extends RawSpanType { + interface SiblingAutogroup extends RawSpan { autogrouped_by: { description: string; op: string; }; } - interface ChildrenAutogroup extends RawSpanType { + interface ChildrenAutogroup extends RawSpan { autogrouped_by: { op: string; }; @@ -866,7 +867,7 @@ export class TraceTree { static FromSpans( parent: TraceTreeNode, data: Event, - spans: RawSpanType[], + spans: TraceTree.RawSpan[], options: {sdk: string | undefined} | undefined ): [TraceTreeNode, [number, number] | null] { parent.invalidate(parent); @@ -877,7 +878,7 @@ export class TraceTree { const parentIsSpan = isSpanNode(parent); const lookuptable: Record< - RawSpanType['span_id'], + TraceTree.RawSpan['span_id'], TraceTreeNode > = {}; @@ -2490,7 +2491,7 @@ export function computeAutogroupedBarSegments( // Returns a list of errors related to the txn with ids matching the span id function getRelatedSpanErrorsFromTransaction( - span: RawSpanType, + span: TraceTree.RawSpan, node?: TraceTreeNode ): TraceErrorType[] { if (!node || !node.value || !isTransactionNode(node)) { @@ -2512,7 +2513,7 @@ function getRelatedSpanErrorsFromTransaction( // Returns a list of performance errors related to the txn with ids matching the span id function getRelatedPerformanceIssuesFromTransaction( - span: RawSpanType, + span: TraceTree.RawSpan, node?: TraceTreeNode ): TraceTree.TracePerformanceIssue[] { if (!node || !node.value || !isTransactionNode(node)) { From 22f169737289a2c55b6662047d7b0e3773b8f7d0 Mon Sep 17 00:00:00 2001 From: Jonas Date: Wed, 24 Jul 2024 12:02:07 -0400 Subject: [PATCH 064/126] fix(trace) limit view to a min value (#74844) Enforce a min width value on list view and correct any badly stored value --- .../traceRenderers/virtualizedViewManager.tsx | 25 ++++++++++++++----- .../traceState/tracePreferences.tsx | 15 ++++++++--- 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/traceRenderers/virtualizedViewManager.tsx b/static/app/views/performance/newTraceDetails/traceRenderers/virtualizedViewManager.tsx index ef4f4f9be8324d..4e5afdc732a4b1 100644 --- a/static/app/views/performance/newTraceDetails/traceRenderers/virtualizedViewManager.tsx +++ b/static/app/views/performance/newTraceDetails/traceRenderers/virtualizedViewManager.tsx @@ -194,8 +194,11 @@ export class VirtualizedViewManager { const distance = event.clientX - this.dividerStartVec[0]; const distancePercentage = distance / this.view.trace_container_physical_space.width; - this.columns.list.width = this.columns.list.width + distancePercentage; - this.columns.span_list.width = this.columns.span_list.width - distancePercentage; + const list = clamp(this.columns.list.width + distancePercentage, 0.1, 0.9); + const span_list = clamp(this.columns.span_list.width - distancePercentage, 0.1, 0.9); + + this.columns.list.width = list; + this.columns.span_list.width = span_list; document.body.style.cursor = ''; document.body.style.userSelect = ''; @@ -218,17 +221,27 @@ export class VirtualizedViewManager { const distance = event.clientX - 
this.dividerStartVec[0]; const distancePercentage = distance / this.view.trace_container_physical_space.width; + const list = clamp(this.columns.list.width + distancePercentage, 0, 1); + const span_list = clamp(this.columns.span_list.width - distancePercentage, 0, 1); + + if (span_list * this.view.trace_container_physical_space.width <= 100) { + return; + } + if (list * this.view.trace_container_physical_space.width <= 100) { + return; + } + this.view.trace_physical_space.width = - (this.columns.span_list.width - distancePercentage) * - this.view.trace_container_physical_space.width; + span_list * this.view.trace_container_physical_space.width; this.scheduler.dispatch('set trace view', { x: this.view.trace_view.x, width: this.view.trace_view.width, }); + this.scheduler.dispatch('divider resize', { - list: this.columns.list.width + distancePercentage, - span_list: this.columns.span_list.width - distancePercentage, + list, + span_list, }); this.previousDividerClientVec = [event.clientX, event.clientY]; } diff --git a/static/app/views/performance/newTraceDetails/traceState/tracePreferences.tsx b/static/app/views/performance/newTraceDetails/traceState/tracePreferences.tsx index e99a88947c35e0..2d1caab8e38faf 100644 --- a/static/app/views/performance/newTraceDetails/traceState/tracePreferences.tsx +++ b/static/app/views/performance/newTraceDetails/traceState/tracePreferences.tsx @@ -1,5 +1,6 @@ import * as Sentry from '@sentry/react'; +import clamp from 'sentry/utils/number/clamp'; import {traceReducerExhaustiveActionCheck} from 'sentry/views/performance/newTraceDetails/traceState'; type TraceLayoutPreferences = 'drawer left' | 'drawer bottom' | 'drawer right'; @@ -68,6 +69,14 @@ export function storeTraceViewPreferences( function isInt(value: any): value is number { return typeof value === 'number' && !isNaN(value); } + +function correctListWidth(state: TracePreferencesState): TracePreferencesState { + if (state.list.width < 0.1 || state.list.width > 0.9) { + state.list.width = 0.5; + } + return state; +} + export function loadTraceViewPreferences(key: string): TracePreferencesState | null { const stored = localStorage.getItem(key); @@ -89,6 +98,7 @@ export function loadTraceViewPreferences(key: string): TracePreferencesState | n parsed.list && isInt(parsed.list.width) ) { + correctListWidth(parsed); return parsed; } } catch (e) { @@ -119,8 +129,7 @@ export function tracePreferencesReducer( ...state.drawer, sizes: { ...state.drawer.sizes, - [state.layout]: - action.payload < 0 ? 0 : action.payload > 1 ? 1 : action.payload, + [state.layout]: clamp(action.payload, 0, 1), }, }, }; @@ -128,7 +137,7 @@ export function tracePreferencesReducer( return { ...state, list: { - width: action.payload < 0 ? 0 : action.payload > 1 ? 
1 : action.payload, + width: clamp(action.payload, 0.1, 0.9), }, }; default: From 62e090965d6ff2e6a967c45df23f73430c7cf410 Mon Sep 17 00:00:00 2001 From: William Mak Date: Wed, 24 Jul 2024 12:02:25 -0400 Subject: [PATCH 065/126] fix(discover): Handle project.name alias (#74849) - The special project handling for top events didn't include the project.name alias - This fixes SENTRY-3CB0 --- src/sentry/search/events/builder/discover.py | 6 ++-- .../test_organization_events_stats.py | 33 +++++++++++++++++++ 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/src/sentry/search/events/builder/discover.py b/src/sentry/search/events/builder/discover.py index 07a05f414cd97f..e5c911920422c5 100644 --- a/src/sentry/search/events/builder/discover.py +++ b/src/sentry/search/events/builder/discover.py @@ -335,7 +335,7 @@ def resolve_top_event_conditions( conditions = [] for field in self.fields: # If we have a project field, we need to limit results by project so we don't hit the result limit - if field in ["project", "project.id"] and top_events: + if field in ["project", "project.id", "project.name"] and top_events: # Iterate through the existing conditions to find the project one # the project condition is a requirement of queries so there should always be one project_condition = [ @@ -345,9 +345,9 @@ def resolve_top_event_conditions( and condition.lhs == self.column("project_id") ][0] self.where.remove(project_condition) - if field == "project": + if field in ["project", "project.name"]: projects = list( - {self.params.project_slug_map[event["project"]] for event in top_events} + {self.params.project_slug_map[event[field]] for event in top_events} ) else: projects = list({event["project.id"] for event in top_events}) diff --git a/tests/snuba/api/endpoints/test_organization_events_stats.py b/tests/snuba/api/endpoints/test_organization_events_stats.py index 3e22dbdc396bd4..5d8080cc99285a 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats.py @@ -1361,6 +1361,39 @@ def test_top_events_with_projects_other(self): assert data["Other"]["order"] == 1 assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [7, 6] + def test_top_events_with_projects_fields(self): + # We need to handle the project name fields differently + for project_field in ["project", "project.name"]: + with self.feature(self.enabled_features): + response = self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", project_field], + "topEvents": 5, + }, + format="json", + ) + + data = response.data + assert response.status_code == 200, response.content + + assert data[self.project.slug]["order"] == 0, project_field + assert [attrs[0]["count"] for _, attrs in data[self.project.slug]["data"]] == [ + 15, + 0, + ], project_field + + assert data[self.project2.slug]["order"] == 1, project_field + assert [attrs[0]["count"] for _, attrs in data[self.project2.slug]["data"]] == [ + 7, + 6, + ], project_field + def test_tag_with_conflicting_function_alias_simple(self): event_data: _EventDataDict = { "data": { From 369200831dc050ece6bdff86530de129a392ebb8 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:04:32 -0400 Subject: [PATCH 066/126] ref: prevent database calls at the module via mypy (#74840) for 
example: ```console $ PYTHONWARNINGS=error::RuntimeWarning mypy Error constructing plugin instance of NewSemanalDjangoPlugin Traceback (most recent call last): File "/Users/asottile/workspace/sentry/.venv/bin/mypy", line 8, in sys.exit(console_entry()) ^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/mypy/__main__.py", line 15, in console_entry main() File "mypy/main.py", line 103, in main File "mypy/main.py", line 187, in run_build File "mypy/build.py", line 193, in build File "mypy/build.py", line 238, in _build File "mypy/build.py", line 503, in load_plugins File "mypy/build.py", line 484, in load_plugins_from_config File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/mypy_django_plugin/main.py", line 65, in __init__ self.django_context = DjangoContext(self.plugin_config.django_settings_module) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/mypy_django_plugin/django/context.py", line 98, in __init__ apps, settings = initialize_django(self.django_settings_module) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/mypy_django_plugin/django/context.py", line 81, in initialize_django settings._setup() # type: ignore[misc] ^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/conf/__init__.py", line 76, in _setup self._wrapped = Settings(settings_module) ^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/conf/__init__.py", line 190, in __init__ mod = importlib.import_module(self.SETTINGS_MODULE) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/.local/share/sentry-devenv/pythons/3.11.8/python/lib/python3.11/importlib/__init__.py", line 126, in import_module return _bootstrap._gcd_import(name[level:], package, level) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "", line 1204, in _gcd_import File "", line 1176, in _find_and_load File "", line 1147, in _find_and_load_unlocked File "", line 690, in _load_unlocked File "", line 940, in exec_module File "", line 241, in _call_with_frames_removed File "/Users/asottile/workspace/sentry/src/sentry/conf/server_mypy.py", line 3, in configure(skip_service_validation=True) File "/Users/asottile/workspace/sentry/src/sentry/runner/__init__.py", line 33, in configure _configure(ctx, py, yaml, skip_service_validation) File "/Users/asottile/workspace/sentry/src/sentry/runner/settings.py", line 121, in configure initialize_app( File "/Users/asottile/workspace/sentry/src/sentry/runner/initializer.py", line 366, in initialize_app django.setup() File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/__init__.py", line 24, in setup apps.populate(settings.INSTALLED_APPS) File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/apps/registry.py", line 116, in populate app_config.import_models() File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/apps/config.py", line 269, in import_models self.models_module = import_module(models_module_name) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/.local/share/sentry-devenv/pythons/3.11.8/python/lib/python3.11/importlib/__init__.py", line 126, in import_module return _bootstrap._gcd_import(name[level:], package, level) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "", line 1204, in 
_gcd_import File "", line 1176, in _find_and_load File "", line 1147, in _find_and_load_unlocked File "", line 690, in _load_unlocked File "", line 940, in exec_module File "", line 241, in _call_with_frames_removed File "/Users/asottile/workspace/sentry/src/sentry/plugins/sentry_interface_types/models.py", line 2, in from sentry.plugins.bases.tag import TagPlugin File "/Users/asottile/workspace/sentry/src/sentry/plugins/bases/__init__.py", line 2, in from .issue2 import IssueTrackingPlugin2 # NOQA ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/src/sentry/plugins/bases/issue2.py", line 18, in from sentry.plugins.endpoints import PluginGroupEndpoint File "/Users/asottile/workspace/sentry/src/sentry/plugins/endpoints.py", line 9, in from sentry.api.bases.group import GroupEndpoint File "/Users/asottile/workspace/sentry/src/sentry/api/bases/__init__.py", line 7, in from .sentryapps import * # NOQA ^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/src/sentry/api/bases/sentryapps.py", line 20, in from sentry.coreapi import APIError File "/Users/asottile/workspace/sentry/src/sentry/coreapi.py", line 8, in from sentry.ingest.consumer.processors import CACHE_TIMEOUT File "/Users/asottile/workspace/sentry/src/sentry/ingest/consumer/processors.py", line 14, in from sentry.event_manager import save_attachment File "/Users/asottile/workspace/sentry/src/sentry/event_manager.py", line 74, in from sentry.grouping.ingest.seer import get_seer_similar_issues, should_call_seer_for_grouping File "/Users/asottile/workspace/sentry/src/sentry/grouping/ingest/seer.py", line 11, in from sentry.seer.similarity.similar_issues import ( File "/Users/asottile/workspace/sentry/src/sentry/seer/similarity/similar_issues.py", line 41, in options.get("seer.similarity.circuit-breaker-config"), ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/src/sentry/options/manager.py", line 299, in get result = self.store.get(opt, silent=silent) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/src/sentry/options/store.py", line 97, in get result = self.get_store(key, silent=silent) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/src/sentry/options/store.py", line 193, in get_store value = self.model.objects.get(key=key.name).value ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/models/manager.py", line 87, in manager_method return getattr(self.get_queryset(), name)(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/models/query.py", line 645, in get num = len(clone) ^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/models/query.py", line 382, in __len__ self._fetch_all() File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/models/query.py", line 1928, in _fetch_all self._result_cache = list(self._iterable_class(self)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/models/query.py", line 91, in __iter__ results = compiler.execute_sql( ^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/models/sql/compiler.py", line 1562, in execute_sql cursor.execute(sql, params) File 
"/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/backends/utils.py", line 79, in execute return self._execute_with_wrappers( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/backends/utils.py", line 92, in _execute_with_wrappers return executor(sql, params, many, context) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/asottile/workspace/sentry/.venv/lib/python3.11/site-packages/django/db/backends/utils.py", line 98, in _execute warnings.warn(self.APPS_NOT_READY_WARNING_MSG, category=RuntimeWarning) RuntimeWarning: Accessing the database during app initialization is discouraged. To fix this warning, avoid executing queries in AppConfig.ready() or when your app modules are imported. ``` --- .github/workflows/backend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 5c26711437a7fb..67292e6cbfa943 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -307,7 +307,7 @@ jobs: python3 -m tools.fast_editable --path . sentry init - - run: mypy + - run: PYTHONWARNINGS=error::RuntimeWarning mypy id: run - uses: getsentry/action-github-app-token@d4b5da6c5e37703f8c3b3e43abb5705b46e159cc # v3.0.0 From d8cc8c00503371818e51b28991d2da3b8a147fc3 Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:07:24 -0400 Subject: [PATCH 067/126] feat(insights): project selection tooltip (#74653) 1. closes #74562 by adding a tooltip under the project selector. The tooltip shows up if the project currently viewed has no module data AND there exists one with data. The tooltip closes after 5 seconds or if the user clicks anywhere else on the screen. image 2. add a `extraFilters` prop in the new `ModulePageFilterBar` component that takes in any extra filters to be rendered next to the default ones. This ensures the consistent behaviour of hiding them when the module doesn't have data. 
(Or any future behaviour we add) --------- Co-authored-by: Ash <0Calories@users.noreply.github.com> --- .../resources/views/resourcesLandingPage.tsx | 35 ++++------ .../webVitals/views/webVitalsLandingPage.tsx | 17 ++--- .../insights/cache/views/cacheLandingPage.tsx | 11 +-- .../common/components/modulePageFilterBar.tsx | 70 +++++++++++++++++++ .../common/queries/useHasFirstSpan.tsx | 13 +++- .../database/views/databaseLandingPage.tsx | 11 +-- .../insights/http/views/httpLandingPage.tsx | 11 +-- .../views/llmMonitoringLandingPage.tsx | 11 +-- .../common/components/screensTemplate.tsx | 28 +++----- .../views/screenloadLandingPage.tsx | 17 ++--- .../queues/views/queuesLandingPage.tsx | 11 +-- 11 files changed, 126 insertions(+), 109 deletions(-) create mode 100644 static/app/views/insights/common/components/modulePageFilterBar.tsx diff --git a/static/app/views/insights/browser/resources/views/resourcesLandingPage.tsx b/static/app/views/insights/browser/resources/views/resourcesLandingPage.tsx index 08a58e8f74acbc..6fbc01487a4074 100644 --- a/static/app/views/insights/browser/resources/views/resourcesLandingPage.tsx +++ b/static/app/views/insights/browser/resources/views/resourcesLandingPage.tsx @@ -5,10 +5,6 @@ import {Breadcrumbs} from 'sentry/components/breadcrumbs'; import ButtonBar from 'sentry/components/buttonBar'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import {space} from 'sentry/styles/space'; import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert'; @@ -26,9 +22,9 @@ import { BrowserStarfishFields, useResourceModuleFilters, } from 'sentry/views/insights/browser/resources/utils/useResourceFilters'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; -import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; import {DomainSelector} from 'sentry/views/insights/common/views/spans/selectors/domainSelector'; import {ModuleName} from 'sentry/views/insights/types'; @@ -37,7 +33,6 @@ const {SPAN_OP, SPAN_DOMAIN} = BrowserStarfishFields; function ResourcesLandingPage() { const filters = useResourceModuleFilters(); - const hasModuleData = useHasFirstSpan(ModuleName.RESOURCE); const crumbs = useModuleBreadcrumbs('resource'); return ( @@ -65,21 +60,19 @@ function ResourcesLandingPage() { - - - - - - {hasModuleData && ( - - )} + + } + /> diff --git a/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.tsx b/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.tsx index 3158bf4cd5495f..279e44c57c734a 100644 --- a/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.tsx +++ 
b/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.tsx @@ -9,10 +9,6 @@ import ButtonBar from 'sentry/components/buttonBar'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; import ExternalLink from 'sentry/components/links/externalLink'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import {Tooltip} from 'sentry/components/tooltip'; import {t, tct} from 'sentry/locale'; @@ -34,16 +30,15 @@ import { } from 'sentry/views/insights/browser/webVitals/settings'; import type {WebVitals} from 'sentry/views/insights/browser/webVitals/types'; import decodeBrowserTypes from 'sentry/views/insights/browser/webVitals/utils/queryParameterDecoders/browserType'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; -import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan'; import {useHasDataTrackAnalytics} from 'sentry/views/insights/common/utils/useHasDataTrackAnalytics'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; import {ModuleName, SpanIndexedField} from 'sentry/views/insights/types'; export function WebVitalsLandingPage() { const location = useLocation(); - const hasModuleData = useHasFirstSpan(ModuleName.VITAL); const router = useRouter(); @@ -90,12 +85,10 @@ export function WebVitalsLandingPage() { - - - - - - {hasModuleData && } + } + /> diff --git a/static/app/views/insights/cache/views/cacheLandingPage.tsx b/static/app/views/insights/cache/views/cacheLandingPage.tsx index 61a2e01b327978..1287adc03b3718 100644 --- a/static/app/views/insights/cache/views/cacheLandingPage.tsx +++ b/static/app/views/insights/cache/views/cacheLandingPage.tsx @@ -6,10 +6,6 @@ import ButtonBar from 'sentry/components/buttonBar'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; import ExternalLink from 'sentry/components/links/externalLink'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import {t} from 'sentry/locale'; import type {EventsMetaType} from 'sentry/utils/discover/eventView'; @@ -37,6 +33,7 @@ import { MODULE_TITLE, } from 'sentry/views/insights/cache/settings'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; 
import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; import { @@ -210,11 +207,7 @@ export function CacheLandingPage() { - - - - - + diff --git a/static/app/views/insights/common/components/modulePageFilterBar.tsx b/static/app/views/insights/common/components/modulePageFilterBar.tsx new file mode 100644 index 00000000000000..e8687e58535d86 --- /dev/null +++ b/static/app/views/insights/common/components/modulePageFilterBar.tsx @@ -0,0 +1,70 @@ +import {type ComponentProps, Fragment, useEffect, useState} from 'react'; + +import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; +import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; +import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; +import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; +import {Tooltip} from 'sentry/components/tooltip'; +import {t} from 'sentry/locale'; +import {SECOND} from 'sentry/utils/formatters'; +import useProjects from 'sentry/utils/useProjects'; +import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan'; +import type {ModuleName} from 'sentry/views/insights/types'; + +type Props = { + moduleName: ModuleName; + extraFilters?: React.ReactNode; + onProjectChange?: ComponentProps['onChange']; +}; + +const CHANGE_PROJECT_TEXT = t('Make sure you have the correct project selected.'); + +export function ModulePageFilterBar({moduleName, onProjectChange, extraFilters}: Props) { + const {projects: allProjects} = useProjects(); + + const hasDataWithSelectedProjects = useHasFirstSpan(moduleName); + const hasDataWithAllProjects = useHasFirstSpan(moduleName, allProjects); + const [showTooltip, setShowTooltip] = useState(false); + + const handleClickAnywhereOnPage = () => { + setShowTooltip(false); + }; + + useEffect(() => { + if (!hasDataWithSelectedProjects && hasDataWithAllProjects) { + const startTime = 0.5 * SECOND; + const endTime = startTime + 5 * SECOND; + // by adding a small delay to show the tooltip, we ensure the animation occurs and the tooltip popping up is more obvious + setTimeout(() => setShowTooltip(true), startTime); + setTimeout(() => setShowTooltip(false), endTime); + } + // We intentially do not include hasDataWithSelectedProjects in the dependencies, + // as we only want to show the tooltip once per component load and not every time the data changes. 
+ // eslint-disable-next-line react-hooks/exhaustive-deps + }, [hasDataWithAllProjects]); + + useEffect(() => { + document.body.addEventListener('click', handleClickAnywhereOnPage); + return () => { + document.body.removeEventListener('click', handleClickAnywhereOnPage); + }; + }, []); + + return ( + + + + + + + + + {hasDataWithSelectedProjects && extraFilters} + + ); +} diff --git a/static/app/views/insights/common/queries/useHasFirstSpan.tsx b/static/app/views/insights/common/queries/useHasFirstSpan.tsx index 16cf40625fa6a8..f249344adb7727 100644 --- a/static/app/views/insights/common/queries/useHasFirstSpan.tsx +++ b/static/app/views/insights/common/queries/useHasFirstSpan.tsx @@ -27,14 +27,23 @@ const modulePropertyMap: Record< [ModuleName.AI]: 'hasInsightsLlmMonitoring', }; -/* Returns whether the module and current project selection has received a first insight span */ -export function useHasFirstSpan(module: ModuleName): boolean { +/** + * Returns whether the module and current project selection has received a first insight span + * @param module The name of the module that will be checked for a first span + * @param projects The projects to check for the first span. If not provided, the selected projects will be used + * @returns true if the module has a first span in the selected projects, false otherwise + */ +export function useHasFirstSpan(module: ModuleName, projects?: Project[]): boolean { const {projects: allProjects} = useProjects(); const pageFilters = usePageFilters(); // Unsupported modules. Remove MOBILE_UI from this list once released. if ((excludedModuleNames as readonly ModuleName[]).includes(module)) return false; + if (projects) { + return projects.some(p => p[modulePropertyMap[module]] === true); + } + let selectedProjects: Project[] = []; // There are three cases for the selected pageFilter projects: // - [] empty list represents "My Projects" diff --git a/static/app/views/insights/database/views/databaseLandingPage.tsx b/static/app/views/insights/database/views/databaseLandingPage.tsx index 45ecd95015fb1f..d520911b085d89 100644 --- a/static/app/views/insights/database/views/databaseLandingPage.tsx +++ b/static/app/views/insights/database/views/databaseLandingPage.tsx @@ -6,10 +6,6 @@ import {Breadcrumbs} from 'sentry/components/breadcrumbs'; import ButtonBar from 'sentry/components/buttonBar'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import SearchBar from 'sentry/components/searchBar'; import {t} from 'sentry/locale'; @@ -22,6 +18,7 @@ import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {useSynchronizeCharts} from 'sentry/views/insights/common/components/chart'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 
'sentry/views/insights/common/components/modulesOnboarding'; import {useSpanMetrics} from 'sentry/views/insights/common/queries/useDiscover'; @@ -186,11 +183,7 @@ export function DatabaseLandingPage() { )} - - - - - + diff --git a/static/app/views/insights/http/views/httpLandingPage.tsx b/static/app/views/insights/http/views/httpLandingPage.tsx index 0bc31a5c34bd46..fcd307b24968ab 100644 --- a/static/app/views/insights/http/views/httpLandingPage.tsx +++ b/static/app/views/insights/http/views/httpLandingPage.tsx @@ -4,10 +4,6 @@ import {Breadcrumbs} from 'sentry/components/breadcrumbs'; import ButtonBar from 'sentry/components/buttonBar'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import SearchBar from 'sentry/components/searchBar'; import {t} from 'sentry/locale'; @@ -20,6 +16,7 @@ import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {useSynchronizeCharts} from 'sentry/views/insights/common/components/chart'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; import {useSpanMetrics} from 'sentry/views/insights/common/queries/useDiscover'; @@ -174,11 +171,7 @@ export function HTTPLandingPage() { - - - - - + diff --git a/static/app/views/insights/llmMonitoring/views/llmMonitoringLandingPage.tsx b/static/app/views/insights/llmMonitoring/views/llmMonitoringLandingPage.tsx index 7f53c16d2fcdca..0a18f6856d8447 100644 --- a/static/app/views/insights/llmMonitoring/views/llmMonitoringLandingPage.tsx +++ b/static/app/views/insights/llmMonitoring/views/llmMonitoringLandingPage.tsx @@ -4,14 +4,11 @@ import ButtonBar from 'sentry/components/buttonBar'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; import NoProjectMessage from 'sentry/components/noProjectMessage'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import {t} from 'sentry/locale'; import useOrganization from 'sentry/utils/useOrganization'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 
'sentry/views/insights/common/components/modulesOnboarding'; import {useHasDataTrackAnalytics} from 'sentry/views/insights/common/utils/useHasDataTrackAnalytics'; @@ -61,11 +58,7 @@ export function LLMMonitoringPage() { - - - - - + diff --git a/static/app/views/insights/mobile/common/components/screensTemplate.tsx b/static/app/views/insights/mobile/common/components/screensTemplate.tsx index 581316a1c2c5fb..bc471fdf5593d3 100644 --- a/static/app/views/insights/mobile/common/components/screensTemplate.tsx +++ b/static/app/views/insights/mobile/common/components/screensTemplate.tsx @@ -7,18 +7,14 @@ import ButtonBar from 'sentry/components/buttonBar'; import ErrorBoundary from 'sentry/components/errorBoundary'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import {space} from 'sentry/styles/space'; import {browserHistory} from 'sentry/utils/browserHistory'; import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert'; import {useLocation} from 'sentry/utils/useLocation'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; import {ReleaseComparisonSelector} from 'sentry/views/insights/common/components/releaseSelector'; -import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; import useCrossPlatformProject from 'sentry/views/insights/mobile/common/queries/useCrossPlatformProject'; import {PlatformSelector} from 'sentry/views/insights/mobile/screenload/components/platformSelector'; @@ -43,7 +39,6 @@ export default function ScreensTemplate({ }: ScreensTemplateProps) { const location = useLocation(); const {isProjectCrossPlatform} = useCrossPlatformProject(); - const hasModuleData = useHasFirstSpan(moduleName); const handleProjectChange = useCallback(() => { browserHistory.replace({ @@ -81,17 +76,16 @@ export default function ScreensTemplate({ - - - - - - {hasModuleData && ( - - - {additionalSelectors} - - )} + + + {additionalSelectors} + + } + /> diff --git a/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.tsx b/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.tsx index 1bd0edaf894a68..dec3ace42496db 100644 --- a/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.tsx +++ b/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.tsx @@ -5,18 +5,14 @@ import ButtonBar from 'sentry/components/buttonBar'; import ErrorBoundary from 'sentry/components/errorBoundary'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 
'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import {space} from 'sentry/styles/space'; import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert'; import useOrganization from 'sentry/utils/useOrganization'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; import {ReleaseComparisonSelector} from 'sentry/views/insights/common/components/releaseSelector'; -import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan'; import {useOnboardingProject} from 'sentry/views/insights/common/queries/useOnboardingProject'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; import useCrossPlatformProject from 'sentry/views/insights/mobile/common/queries/useCrossPlatformProject'; @@ -34,7 +30,6 @@ import Onboarding from 'sentry/views/performance/onboarding'; export function PageloadModule() { const organization = useOrganization(); const onboardingProject = useOnboardingProject(); - const hasModuleData = useHasFirstSpan(ModuleName.SCREEN_LOAD); const {isProjectCrossPlatform} = useCrossPlatformProject(); const crumbs = useModuleBreadcrumbs('screen_load'); @@ -66,12 +61,10 @@ export function PageloadModule() { - - - - - - {hasModuleData && } + } + /> diff --git a/static/app/views/insights/queues/views/queuesLandingPage.tsx b/static/app/views/insights/queues/views/queuesLandingPage.tsx index 8705dc1ac8b181..61622e0b246f09 100644 --- a/static/app/views/insights/queues/views/queuesLandingPage.tsx +++ b/static/app/views/insights/queues/views/queuesLandingPage.tsx @@ -5,10 +5,6 @@ import {Breadcrumbs} from 'sentry/components/breadcrumbs'; import ButtonBar from 'sentry/components/buttonBar'; import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton'; import * as Layout from 'sentry/components/layouts/thirds'; -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; -import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import SearchBar from 'sentry/components/searchBar'; import {t} from 'sentry/locale'; @@ -21,6 +17,7 @@ import useLocationQuery from 'sentry/utils/url/useLocationQuery'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; +import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; import {useHasDataTrackAnalytics} from 'sentry/views/insights/common/utils/useHasDataTrackAnalytics'; @@ -112,11 +109,7 @@ function QueuesLandingPage() { - - - - - + From 
8e9093c6fb32d506037c82af0e676c5167e778d5 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Wed, 24 Jul 2024 09:10:47 -0700 Subject: [PATCH 068/126] feat(query-builder): Add ability to customize invalid token messages (#74769) --- .../searchQueryBuilder/index.spec.tsx | 24 ++++++++++++++++++ .../searchQueryBuilder/index.stories.tsx | 25 ++++++++++++++++++- .../components/searchQueryBuilder/index.tsx | 14 ++++++++--- .../components/searchQueryBuilder/utils.tsx | 2 ++ 4 files changed, 61 insertions(+), 4 deletions(-) diff --git a/static/app/components/searchQueryBuilder/index.spec.tsx b/static/app/components/searchQueryBuilder/index.spec.tsx index 795b4b670cef39..d13e8b58835ab7 100644 --- a/static/app/components/searchQueryBuilder/index.spec.tsx +++ b/static/app/components/searchQueryBuilder/index.spec.tsx @@ -19,6 +19,7 @@ import { QueryInterfaceType, } from 'sentry/components/searchQueryBuilder/types'; import {INTERFACE_TYPE_LOCALSTORAGE_KEY} from 'sentry/components/searchQueryBuilder/utils'; +import {InvalidReason} from 'sentry/components/searchSyntax/parser'; import type {TagCollection} from 'sentry/types/group'; import {FieldKey, FieldKind, FieldValueType} from 'sentry/utils/fields'; import localStorageWrapper from 'sentry/utils/localStorage'; @@ -2051,4 +2052,27 @@ describe('SearchQueryBuilder', function () { ).toBeInTheDocument(); }); }); + + describe('invalidMessages', function () { + it('should customize invalid messages', async function () { + render( + + ); + + expect(screen.getByRole('row', {name: 'foo:'})).toHaveAttribute( + 'aria-invalid', + 'true' + ); + + await userEvent.click(getLastInput()); + await userEvent.keyboard('{ArrowLeft}'); + expect(await screen.findByText('foo bar baz')).toBeInTheDocument(); + }); + }); }); diff --git a/static/app/components/searchQueryBuilder/index.stories.tsx b/static/app/components/searchQueryBuilder/index.stories.tsx index f96f564a154d30..d6875b1b78e788 100644 --- a/static/app/components/searchQueryBuilder/index.stories.tsx +++ b/static/app/components/searchQueryBuilder/index.stories.tsx @@ -5,6 +5,8 @@ import Alert from 'sentry/components/alert'; import MultipleCheckbox from 'sentry/components/forms/controls/multipleCheckbox'; import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder'; import type {FilterKeySection} from 'sentry/components/searchQueryBuilder/types'; +import {InvalidReason} from 'sentry/components/searchSyntax/parser'; +import JSXProperty from 'sentry/components/stories/jsxProperty'; import SizingWindow from 'sentry/components/stories/sizingWindow'; import storyBook from 'sentry/stories/storyBook'; import type {TagCollection} from 'sentry/types/group'; @@ -132,7 +134,8 @@ export default storyBook(SearchQueryBuilder, story => {

There are some config options which allow you to customize which types of syntax are considered valid. This should be used when the search backend does not - support certain operators like boolean logic or wildcards. + support certain operators like boolean logic or wildcards. Use the checkboxes + below to enable/disable the following options:

{ searchSource="storybook" {...queryBuilderOptions} /> +

+ The query above has a few invalid tokens. The invalid tokens are highlighted in + red and display a tooltip with a message when focused. The invalid token + messages can be customized using the invalidMessages prop. In this + case, the unsupported tag message is modified with{' '} + + . +

+ ); }); diff --git a/static/app/components/searchQueryBuilder/index.tsx b/static/app/components/searchQueryBuilder/index.tsx index d45597dd586c6e..a31355f4eea491 100644 --- a/static/app/components/searchQueryBuilder/index.tsx +++ b/static/app/components/searchQueryBuilder/index.tsx @@ -17,6 +17,7 @@ import { QueryInterfaceType, } from 'sentry/components/searchQueryBuilder/types'; import {parseQueryBuilderValue} from 'sentry/components/searchQueryBuilder/utils'; +import type {SearchConfig} from 'sentry/components/searchSyntax/parser'; import {IconClose, IconSearch} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -67,6 +68,10 @@ export interface SearchQueryBuilderProps { * Sections and filter keys are displayed in the order they are provided. */ filterKeySections?: FilterKeySection[]; + /** + * Allows for customization of the invalid token messages. + */ + invalidMessages?: SearchConfig['invalidMessages']; label?: string; onBlur?: (query: string) => void; /** @@ -107,6 +112,7 @@ export function SearchQueryBuilder({ disallowFreeText, disallowUnsupportedFilters, disallowWildcard, + invalidMessages, label, initialQuery, fieldDefinitionGetter = getFieldDefinition, @@ -132,15 +138,17 @@ export function SearchQueryBuilder({ disallowUnsupportedFilters, disallowWildcard, filterKeys, + invalidMessages, }), [ + state.query, + fieldDefinitionGetter, disallowFreeText, disallowLogicalOperators, + disallowUnsupportedFilters, disallowWildcard, - fieldDefinitionGetter, filterKeys, - disallowUnsupportedFilters, - state.query, + invalidMessages, ] ); diff --git a/static/app/components/searchQueryBuilder/utils.tsx b/static/app/components/searchQueryBuilder/utils.tsx index 6506d5dd1d18f8..9869fcbec92b79 100644 --- a/static/app/components/searchQueryBuilder/utils.tsx +++ b/static/app/components/searchQueryBuilder/utils.tsx @@ -69,6 +69,7 @@ export function parseQueryBuilderValue( disallowLogicalOperators?: boolean; disallowUnsupportedFilters?: boolean; disallowWildcard?: boolean; + invalidMessages?: SearchConfig['invalidMessages']; } ): ParseResult | null { return collapseTextTokens( @@ -82,6 +83,7 @@ export function parseQueryBuilderValue( : undefined, disallowParens: options?.disallowLogicalOperators, ...getSearchConfigFromKeys(options?.filterKeys ?? {}, getFieldDefinition), + invalidMessages: options?.invalidMessages, supportedTags: options?.filterKeys, }) ); From 680965dc0e76522dca0797dc8e22ec87c0c342b3 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Wed, 24 Jul 2024 09:11:43 -0700 Subject: [PATCH 069/126] fix(query-builder): Correctly place selection background behind only text portion of input (#74808) Uses the invisible text div to correctly place the selection background. Prior to this, we were changing the background of the input, which was sometimes much larger that its text contents (the last input takes up the remaining space for usability reasons). 
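For readers skimming the diff below, here is a rough sketch of the pattern this fix relies on (component and prop names are illustrative, not Sentry's actual ones): the input's current text is duplicated into an invisible element, and the selection highlight is attached to that element instead of the input, so the highlight can never grow wider than the text itself.

```tsx
import type {ReactNode} from 'react';

// Minimal sketch: the wrapper is sized by an invisible copy of the text, so a
// highlight that fills the wrapper only ever spans the typed characters, not
// the full-width <input> rendered on top of it.
function TextSizedSelection({
  children,
  selected,
  text,
}: {
  children: ReactNode; // the real, possibly much wider, absolutely-positioned input
  selected: boolean;
  text: string;
}) {
  return (
    <div style={{position: 'relative', display: 'inline-block'}}>
      {/* Invisible text keeps the wrapper exactly as wide as the content */}
      <span aria-hidden style={{color: 'transparent', whiteSpace: 'pre'}}>
        {text}
      </span>
      {selected ? (
        <span
          style={{
            position: 'absolute',
            inset: 0,
            borderRadius: 2,
            background: 'rgba(108, 95, 199, 0.2)',
          }}
        />
      ) : null}
      {children}
    </div>
  );
}

export default TextSizedSelection;
```

In the actual change the same idea is expressed with a styled `InvisibleText` element plus a `::before` pseudo-element keyed off `data-hidden-text`, as the diff below shows.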
--- .../searchQueryBuilder/tokens/freeText.tsx | 31 ++++++++++--------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/static/app/components/searchQueryBuilder/tokens/freeText.tsx b/static/app/components/searchQueryBuilder/tokens/freeText.tsx index f14245c1866ddc..a6753c4ac305f5 100644 --- a/static/app/components/searchQueryBuilder/tokens/freeText.tsx +++ b/static/app/components/searchQueryBuilder/tokens/freeText.tsx @@ -285,7 +285,10 @@ function shouldHideInvalidTooltip({ } } -function InvalidText({ +// Because the text input may be larger than the actual text, we use a hidden div +// with the same text content to measure the width of the text. This is used for +// centering the invalid tooltip, as well as for placing the selection background. +function HiddenText({ token, state, item, @@ -298,9 +301,6 @@ function InvalidText({ state: ListState; token: TokenResult; }) { - // Because the text input may be larger than the actual text, we use a div - // with the same text contents to determine where the tooltip should be - // positioned. return ( - {inputValue} + + {inputValue} + ); } @@ -437,6 +439,13 @@ function SearchQueryBuilderInputInternal({ return ( + - ); } @@ -609,7 +611,7 @@ const Row = styled('div')` } &[aria-selected='true'] { - &::before { + [data-hidden-text='true']::before { content: ''; position: absolute; left: ${space(0.5)}; @@ -667,15 +669,14 @@ const Details = styled('dd')``; const PositionedTooltip = styled(InvalidTokenTooltip)` position: absolute; - z-index: -1; top: 0; left: 0; height: 100%; `; const InvisibleText = styled('div')` + position: relative; color: transparent; - visibility: hidden; padding: 0 ${space(0.5)}; min-width: 9px; height: 100%; From b3fb6cdde687b858b9a59508aa013d1088642f81 Mon Sep 17 00:00:00 2001 From: colin-sentry <161344340+colin-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:14:19 -0400 Subject: [PATCH 070/126] Make sure end_timestamp_precise and organization_id are sent to snuba (#74788) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … from tests This lets us remove some Optionals from snuba --- src/sentry/testutils/cases.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 801e7b67421cdc..8eb0a6cba6ef9f 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -1521,9 +1521,12 @@ def store_segment( payload = { "project_id": project_id, + "organization_id": 1, "span_id": span_id, "trace_id": trace_id, "duration_ms": int(duration), + "start_timestamp_precise": timestamp.timestamp(), + "end_timestamp_precise": timestamp.timestamp() + duration / 1000, "exclusive_time_ms": int(exclusive_time), "is_segment": True, "received": timezone.now().timestamp(), @@ -1585,6 +1588,7 @@ def store_indexed_span( payload = { "project_id": project_id, + "organization_id": 1, "span_id": span_id, "trace_id": trace_id, "duration_ms": int(duration), @@ -1592,6 +1596,8 @@ def store_indexed_span( "is_segment": False, "received": timezone.now().timestamp(), "start_timestamp_ms": int(timestamp.timestamp() * 1000), + "start_timestamp_precise": timestamp.timestamp(), + "end_timestamp_precise": timestamp.timestamp() + duration / 1000, "sentry_tags": { "transaction": transaction or "/hello", "op": op or "http", @@ -3334,6 +3340,8 @@ def create_span( "profile_id": uuid4().hex, # Multiply by 1000 cause it needs to be ms "start_timestamp_ms": int(start_ts.timestamp() * 1000), + "start_timestamp_precise": 
start_ts.timestamp(), + "end_timestamp_precise": start_ts.timestamp() + duration / 1000, "timestamp": int(start_ts.timestamp() * 1000), "received": start_ts.timestamp(), "duration_ms": duration, From bef124f0ed31b7fd143e7fde7d998ceb2a602c46 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 Jul 2024 16:14:43 +0000 Subject: [PATCH 071/126] chore(deps): bump setuptools from 68.2.2 to 70.0.0 (#74661) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 68.2.2 to 70.0.0.
Changelog

Sourced from setuptools's changelog.

v70.0.0

Features

  • Emit a warning when [tools.setuptools] is present in pyproject.toml and will be ignored. -- by :user:SnoopJ (#4150)
  • Improved AttributeError error message if pkg_resources.EntryPoint.require is called without extras or distribution Gracefully "do nothing" when trying to activate a pkg_resources.Distribution with a None location, rather than raising a TypeError -- by :user:Avasam (#4262)
  • Typed the dynamically defined variables from pkg_resources -- by :user:Avasam (#4267)
  • Modernized and refactored VCS handling in package_index. (#4332)

Bugfixes

  • In install command, use super to call the superclass methods. Avoids race conditions when monkeypatching from _distutils_system_mod occurs late. (#4136)
  • Fix finder template for lenient editable installs of implicit nested namespaces constructed by using package_dir to reorganise directory structure. (#4278)
  • Fix an error with UnicodeDecodeError handling in pkg_resources when trying to read files in UTF-8 with a fallback -- by :user:Avasam (#4348)

Improved Documentation

  • Uses RST substitution to put badges in 1 line. (#4312)

Deprecations and Removals

  • Further adoption of UTF-8 in setuptools. This change regards mostly files produced and consumed during the build process (e.g. metadata files, script wrappers, automatically updated config files, etc..) Although precautions were taken to minimize disruptions, some edge cases might be subject to backwards incompatibility.

    Support for "locale" encoding is now deprecated. (#4309)

  • Remove setuptools.convert_path after long deprecation period. This function was never defined by setuptools itself, but rather a side-effect of an import for internal usage. (#4322)

  • Remove fallback for customisations of distutils' build.sub_command after long deprecated period. Users are advised to import build directly from setuptools.command.build. (#4322)

  • Removed typing_extensions from vendored dependencies -- by :user:Avasam (#4324)

  • Remove deprecated setuptools.dep_util. The provided alternative is setuptools.modified. (#4360)

... (truncated)

Commits
  • 5cbf12a Workaround for release error in v70
  • 9c1bcc3 Bump version: 69.5.1 → 70.0.0
  • 4dc0c31 Remove deprecated setuptools.dep_util (#4360)
  • 6c1ef57 Remove xfail now that test passes. Ref #4371.
  • d14fa01 Add all site-packages dirs when creating simulated environment for test_edita...
  • 6b7f7a1 Prevent bin folders to be taken as extern packages when vendoring (#4370)
  • 69141f6 Add doctest for vendorised bin folder
  • 2a53cc1 Prevent 'bin' folders to be taken as extern packages
  • 7208628 Replace call to deprecated validate_pyproject command (#4363)
  • 96d681a Remove call to deprecated validate_pyproject command
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=68.2.2&new-version=70.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)

You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/getsentry/sentry/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 39930056e1df5c..2adb5cf3113ac8 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -245,4 +245,4 @@ zstandard==0.18.0 # The following packages are considered to be unsafe in a requirements file: pip==23.3.1 -setuptools==68.2.2 +setuptools==70.0.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 151751da11ffdf..bafd6ba4d0379c 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -156,4 +156,4 @@ xmlsec==1.3.13 zstandard==0.18.0 # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 +setuptools==70.0.0 From 5a5203391998884b6f27efeffc5188d0a2864288 Mon Sep 17 00:00:00 2001 From: Jonas Date: Wed, 24 Jul 2024 12:16:00 -0400 Subject: [PATCH 072/126] ref: add threadId to profile link and initialize the profile to thread id (#74792) Pass tid from spans and transactions so that it is initialized to the correct value --- .../hooks/useDifferentialFlamegraphModel.tsx | 9 ++++++++- .../utils/profiling/profile/importProfile.spec.tsx | 12 +++++++++--- static/app/utils/profiling/profile/importProfile.tsx | 10 +++++++++- .../newTraceDetails/traceDrawer/details/styles.tsx | 2 +- static/app/views/profiling/profileGroupProvider.tsx | 11 ++++++++++- 5 files changed, 37 insertions(+), 7 deletions(-) diff --git a/static/app/utils/profiling/hooks/useDifferentialFlamegraphModel.tsx b/static/app/utils/profiling/hooks/useDifferentialFlamegraphModel.tsx index af26d95b1eb2a9..7937c5726b48c2 100644 --- a/static/app/utils/profiling/hooks/useDifferentialFlamegraphModel.tsx +++ b/static/app/utils/profiling/hooks/useDifferentialFlamegraphModel.tsx @@ -36,7 +36,13 @@ export function useDifferentialFlamegraphModel( return null; } - const profile = importProfile(props.before.data, '', 'flamegraph', props.frameFilter); + const profile = importProfile( + props.before.data, + '', + null, + 'flamegraph', + props.frameFilter + ); return new Flamegraph(profile.profiles[0], { sort: flamegraphPreferences.sorting, inverted: flamegraphPreferences.view === 'bottom up', @@ -56,6 +62,7 @@ export function useDifferentialFlamegraphModel( return importProfile( props.after.data, '', + null, 'flamegraph', props.frameFilter ) as ProfileGroup; diff --git a/static/app/utils/profiling/profile/importProfile.spec.tsx b/static/app/utils/profiling/profile/importProfile.spec.tsx index 36fcafc51a41c0..8a3e4c57702e88 100644 --- a/static/app/utils/profiling/profile/importProfile.spec.tsx +++ b/static/app/utils/profiling/profile/importProfile.spec.tsx @@ -35,6 +35,7 @@ describe('importProfile', () => { metadata: {} as Profiling.Schema['metadata'], }, '', + '', 'flamechart' ); @@ -64,6 +65,7 @@ describe('importProfile', () => { metadata: {} as Profiling.Schema['metadata'], }, '', + '', 'flamechart' ); @@ -102,6 +104,7 @@ describe('importProfile', () => { }, }, '', + '', 'flamechart' ); @@ -129,7 +132,7 @@ describe('importProfile', () => { ], }; - const imported = importProfile(jsSelfProfile, 'profile', 'flamechart'); + const imported = importProfile(jsSelfProfile, 'profile', '', 'flamechart'); expect(imported.profiles[0]).toBeInstanceOf(JSSelfProfile); }); @@ -137,7 +140,7 @@ describe('importProfile', () => { it('imports sentry sampled profile', () 
=> { const sentrySampledProfile = makeSentrySampledProfile(); - const imported = importProfile(sentrySampledProfile, 'profile', 'flamegraph'); + const imported = importProfile(sentrySampledProfile, 'profile', '', 'flamegraph'); expect(imported.profiles[0]).toBeInstanceOf(SentrySampledProfile); }); @@ -145,7 +148,7 @@ describe('importProfile', () => { it('imports sentry continuous profile', () => { const continuousProfile = makeSentryContinuousProfile(); - const imported = importProfile(continuousProfile, 'profile', 'flamegraph'); + const imported = importProfile(continuousProfile, 'profile', '', 'flamegraph'); expect(imported.profiles[0]).toBeInstanceOf(ContinuousProfile); }); @@ -156,6 +159,7 @@ describe('importProfile', () => { // @ts-expect-error {name: 'profile', activeProfileIndex: 0, profiles: [{type: 'unrecognized'}]}, '', + '', 'flamechart' ) ).toThrow(); @@ -246,6 +250,7 @@ describe('parseDroppedProfile', () => { const imported = importProfile( await parseDroppedProfile(file), file.name, + '', 'flamechart' ); @@ -277,6 +282,7 @@ describe('parseDroppedProfile', () => { const imported = importProfile( await parseDroppedProfile(file), file.name, + '', 'flamechart' ); diff --git a/static/app/utils/profiling/profile/importProfile.tsx b/static/app/utils/profiling/profile/importProfile.tsx index d5bb494b3fb0e7..9e4d77ee2db09a 100644 --- a/static/app/utils/profiling/profile/importProfile.tsx +++ b/static/app/utils/profiling/profile/importProfile.tsx @@ -30,6 +30,7 @@ import { export interface ImportOptions { span: Span | undefined; type: 'flamegraph' | 'flamechart'; + activeThreadId?: string | null; continuous?: boolean; frameFilter?: (frame: Frame) => boolean; profileIds?: Readonly; @@ -62,6 +63,7 @@ export interface ContinuousProfileGroup { export function importProfile( input: Readonly, traceID: string, + activeThreadId: string | null, type: 'flamegraph' | 'flamechart', frameFilter?: (frame: Frame) => boolean ): ProfileGroup | ContinuousProfileGroup { @@ -78,6 +80,7 @@ export function importProfile( span, type, frameFilter, + activeThreadId, continuous: true, }); } @@ -276,6 +279,7 @@ export function importSentryContinuousProfileChunk( } const profiles: Profile[] = []; + let activeProfileIndex = 0; for (const key in samplesByThread) { const profile: Profiling.ContinuousProfile = { @@ -284,6 +288,10 @@ export function importSentryContinuousProfileChunk( samples: samplesByThread[key], }; + if (options.activeThreadId && key === options.activeThreadId) { + activeProfileIndex = profiles.length; + } + profiles.push( wrapWithSpan( options.span, @@ -301,7 +309,7 @@ export function importSentryContinuousProfileChunk( name: '', type: 'continuous', transactionID: null, - activeProfileIndex: 0, + activeProfileIndex, profiles, measurements: input.measurements ?? 
{}, metadata: { diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx index 007d16bf096aaa..aba80d7d0ab4c9 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx @@ -339,7 +339,7 @@ function getThreadIdFromNode( } if (transaction) { - return transaction.context?.trace?.data?.['thread.id']; + return transaction.contexts?.trace?.data?.['thread.id']; } return undefined; diff --git a/static/app/views/profiling/profileGroupProvider.tsx b/static/app/views/profiling/profileGroupProvider.tsx index fbb416baa535f8..4f6e152a98b09a 100644 --- a/static/app/views/profiling/profileGroupProvider.tsx +++ b/static/app/views/profiling/profileGroupProvider.tsx @@ -71,8 +71,17 @@ export function ProfileGroupProvider(props: ProfileGroupProviderProps) { if (!props.input) { return LOADING_PROFILE_GROUP; } + const qs = new URLSearchParams(window.location.search); + const threadId = qs.get('tid'); + try { - return importProfile(props.input, props.traceID, props.type, props.frameFilter); + return importProfile( + props.input, + props.traceID, + threadId, + props.type, + props.frameFilter + ); } catch (err) { Sentry.captureException(err); return LOADING_PROFILE_GROUP; From 2663e8f7d3d1440d56e57c0063c213ae2a22cbe7 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Wed, 24 Jul 2024 09:19:20 -0700 Subject: [PATCH 073/126] ref(tags): Update tag key-value request to use new dataset param (#74695) This PR updates the fetch made to the `organization//tags//values/` endpoint on the frontend within the issue stream search bar and the issue details page. Specifically, it makes the request twice now, and utilizes the extra `dataset` query parameter (which was added in [this PR](https://github.com/getsentry/sentry/pull/74525)) to query only the tag values from the Errors and IssuePlatform datasets. This is in an effort to make the requests faster and more accurate. Should be merged after #74696 so we can track the performance improvements of these changes. 
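Condensed, the request pattern this PR introduces looks roughly like the sketch below. `fetchTagValues`, `Dataset`, and `mergeAndSortTagValues` are the names used in the diff that follows, and the import paths follow Sentry's `sentry/…` aliasing of the touched files; the wrapper function itself is illustrative rather than the exact component code.

```tsx
import {fetchTagValues} from 'sentry/actionCreators/tags';
import type {Client} from 'sentry/api';
import type {TagValue} from 'sentry/types';
import {Dataset} from 'sentry/views/alerts/rules/metric/types';
import {mergeAndSortTagValues} from 'sentry/views/issueDetails/utils';

// Query the same tag key against the Errors and IssuePlatform datasets in
// parallel, then merge the results (summing counts and keeping the most
// recent lastSeen per value) before handing them to the search bar.
async function loadIssueTagValues(
  api: Client,
  orgSlug: string,
  tagKey: string,
  search: string,
  projectIds: string[]
): Promise<TagValue[]> {
  const [errorValues, issuePlatformValues] = await Promise.all([
    fetchTagValues({api, orgSlug, tagKey, search, projectIds, dataset: Dataset.ERRORS}),
    fetchTagValues({
      api,
      orgSlug,
      tagKey,
      search,
      projectIds,
      dataset: Dataset.ISSUE_PLATFORM,
    }),
  ]);

  return mergeAndSortTagValues(errorValues, issuePlatformValues, 'lastSeen');
}
```

The diff applies this in both the issue stream search bar and the issue details events search, with `mergeAndSortTagValues` living in `sentry/views/issueDetails/utils`.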
--- static/app/actionCreators/tags.tsx | 6 + static/app/views/issueDetails/groupEvents.tsx | 31 +++- static/app/views/issueDetails/utils.tsx | 35 ++++- static/app/views/issueList/searchBar.spec.tsx | 65 ++++++++- static/app/views/issueList/searchBar.tsx | 34 +++-- static/app/views/issueList/utils.spec.tsx | 138 ++++++++++++++++++ 6 files changed, 288 insertions(+), 21 deletions(-) diff --git a/static/app/actionCreators/tags.tsx b/static/app/actionCreators/tags.tsx index 1c40bea74e22d9..0e7fdcd49fdabc 100644 --- a/static/app/actionCreators/tags.tsx +++ b/static/app/actionCreators/tags.tsx @@ -102,10 +102,12 @@ export function fetchTagValues({ projectIds, search, sort, + dataset, }: { api: Client; orgSlug: string; tagKey: string; + dataset?: Dataset; endpointParams?: Query; includeReplays?: boolean; includeSessions?: boolean; @@ -151,6 +153,10 @@ export function fetchTagValues({ query.sort = sort; } + if (dataset) { + query.dataset = dataset; + } + return api.requestPromise(url, { method: 'GET', query, diff --git a/static/app/views/issueDetails/groupEvents.tsx b/static/app/views/issueDetails/groupEvents.tsx index c7748318b46cce..a4550274887581 100644 --- a/static/app/views/issueDetails/groupEvents.tsx +++ b/static/app/views/issueDetails/groupEvents.tsx @@ -18,6 +18,8 @@ import normalizeUrl from 'sentry/utils/url/normalizeUrl'; import useApi from 'sentry/utils/useApi'; import useCleanQueryParamsOnRouteLeave from 'sentry/utils/useCleanQueryParamsOnRouteLeave'; import useOrganization from 'sentry/utils/useOrganization'; +import {Dataset} from 'sentry/views/alerts/rules/metric/types'; +import {mergeAndSortTagValues} from 'sentry/views/issueDetails/utils'; import {makeGetIssueTagValues} from 'sentry/views/issueList/utils/getIssueTagValues'; import AllEventsTable from './allEventsTable'; @@ -101,17 +103,30 @@ function UpdatedSearchBar({ }, [data]); const tagValueLoader = useCallback( - (key: string, search: string) => { + async (key: string, search: string) => { const orgSlug = organization.slug; const projectIds = [group.project.id]; - return fetchTagValues({ - api, - orgSlug, - tagKey: key, - search, - projectIds, - }); + const [eventsDatasetValues, issuePlatformDatasetValues] = await Promise.all([ + fetchTagValues({ + api, + orgSlug, + tagKey: key, + search, + projectIds, + dataset: Dataset.ERRORS, + }), + fetchTagValues({ + api, + orgSlug, + tagKey: key, + search, + projectIds, + dataset: Dataset.ISSUE_PLATFORM, + }), + ]); + + return mergeAndSortTagValues(eventsDatasetValues, issuePlatformDatasetValues); }, [api, group.project.id, organization.slug] ); diff --git a/static/app/views/issueDetails/utils.tsx b/static/app/views/issueDetails/utils.tsx index bce7b28c74586a..dabbfc66352d8a 100644 --- a/static/app/views/issueDetails/utils.tsx +++ b/static/app/views/issueDetails/utils.tsx @@ -6,7 +6,7 @@ import {Client} from 'sentry/api'; import {t} from 'sentry/locale'; import ConfigStore from 'sentry/stores/configStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Group, GroupActivity} from 'sentry/types'; +import type {Group, GroupActivity, TagValue} from 'sentry/types'; import type {Event} from 'sentry/types/event'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; @@ -48,6 +48,39 @@ export function useDefaultIssueEvent() { const options = user ? user.options : null; return options?.defaultIssueEvent ?? 
'recommended'; } +/** + * Combines two TagValue arrays and combines TagValue.count upon conflict + */ +export function mergeAndSortTagValues( + tagValues1: TagValue[], + tagValues2: TagValue[], + sort: 'count' | 'lastSeen' = 'lastSeen' +): TagValue[] { + const tagValueCollection = tagValues1.reduce>( + (acc, tagValue) => { + acc[tagValue.value] = tagValue; + return acc; + }, + {} + ); + tagValues2.forEach(tagValue => { + if (tagValueCollection[tagValue.value]) { + tagValueCollection[tagValue.value].count += tagValue.count; + if (tagValue.lastSeen > tagValueCollection[tagValue.value].lastSeen) { + tagValueCollection[tagValue.value].lastSeen = tagValue.lastSeen; + } + } else { + tagValueCollection[tagValue.value] = tagValue; + } + }); + const allTagValues: TagValue[] = Object.values(tagValueCollection); + if (sort === 'count') { + allTagValues.sort((a, b) => b.count - a.count); + } else { + allTagValues.sort((a, b) => (b.lastSeen < a.lastSeen ? -1 : 1)); + } + return allTagValues; +} /** * Returns the environment name for an event or null diff --git a/static/app/views/issueList/searchBar.spec.tsx b/static/app/views/issueList/searchBar.spec.tsx index 5a7842da44bd2e..808c675ed1b04d 100644 --- a/static/app/views/issueList/searchBar.spec.tsx +++ b/static/app/views/issueList/searchBar.spec.tsx @@ -1,9 +1,10 @@ import {TagsFixture} from 'sentry-fixture/tags'; import {initializeOrg} from 'sentry-test/initializeOrg'; -import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; import TagStore from 'sentry/stores/tagStore'; +import type {Tag, TagValue} from 'sentry/types'; import {IsFieldValues} from 'sentry/utils/fields'; import IssueListSearchBar from 'sentry/views/issueList/searchBar'; @@ -205,8 +206,6 @@ describe('IssueListSearchBar', function () { body: [{key: 'someTag', name: 'Some Tag'}], }); - defaultProps.organization.features = ['issue-stream-search-query-builder']; - render(, { router: routerWithFlag, }); @@ -238,4 +237,64 @@ describe('IssueListSearchBar', function () { expect(await screen.findByRole('option', {name: 'someTag'})).toBeInTheDocument(); }); }); + + describe('Tag Values', function () { + const {router: routerWithFlag, organization: orgWithFlag} = initializeOrg(); + orgWithFlag.features = ['issue-stream-search-query-builder']; + + const newDefaultProps = { + organization: orgWithFlag, + query: '', + statsPeriod: '7d', + onSearch: jest.fn(), + }; + + it('displays the correct tag values for a key', async () => { + const tagKey = 'random'; + const tagValue = 'randomValue'; + const tagValueResponse: TagValue[] = [ + { + name: tagValue, + value: tagValue, + count: 1, + firstSeen: '2021-01-01T00:00:00Z', + lastSeen: '2021-01-01T00:00:00Z', + email: 'a@sentry.io', + username: 'a', + id: '1', + ip_address: '1', + }, + ]; + const tag: Tag = { + key: tagKey, + name: tagKey, + }; + + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/tags/', + method: 'GET', + body: [tag], + }); + const tagValueMock = MockApiClient.addMockResponse({ + url: `/organizations/org-slug/tags/${tagKey}/values/`, + method: 'GET', + body: tagValueResponse, + }); + + render(, { + router: routerWithFlag, + }); + + await userEvent.click(screen.getByRole('combobox', {name: 'Add a search term'})); + await userEvent.paste(tagKey, {delay: null}); + await userEvent.click(screen.getByRole('option', {name: tagKey})); + expect(await screen.findByRole('option', {name: tagValue})).toBeInTheDocument(); + + await 
waitFor(() => { + // Expected twice since we make one request for values in events dataset + // and another for values in IssuePlatform dataset. + expect(tagValueMock).toHaveBeenCalledTimes(2); + }); + }); + }); }); diff --git a/static/app/views/issueList/searchBar.tsx b/static/app/views/issueList/searchBar.tsx index 20d09091014d1b..edbe8ccfd6bdf0 100644 --- a/static/app/views/issueList/searchBar.tsx +++ b/static/app/views/issueList/searchBar.tsx @@ -24,6 +24,8 @@ import useApi from 'sentry/utils/useApi'; import usePageFilters from 'sentry/utils/usePageFilters'; import type {WithIssueTagsProps} from 'sentry/utils/withIssueTags'; import withIssueTags from 'sentry/utils/withIssueTags'; +import {Dataset} from 'sentry/views/alerts/rules/metric/types'; +import {mergeAndSortTagValues} from 'sentry/views/issueDetails/utils'; import {makeGetIssueTagValues} from 'sentry/views/issueList/utils/getIssueTagValues'; import {useFetchIssueTags} from 'sentry/views/issueList/utils/useFetchIssueTags'; @@ -109,7 +111,7 @@ function IssueListSearchBar({organization, tags, onClose, ...props}: Props) { }); const tagValueLoader = useCallback( - (key: string, search: string) => { + async (key: string, search: string) => { const orgSlug = organization.slug; const projectIds = pageFilters.projects.map(id => id.toString()); const endpointParams = { @@ -122,14 +124,28 @@ function IssueListSearchBar({organization, tags, onClose, ...props}: Props) { statsPeriod: pageFilters.datetime.period, }; - return fetchTagValues({ - api, - orgSlug, - tagKey: key, - search, - projectIds, - endpointParams, - }); + const [eventsDatasetValues, issuePlatformDatasetValues] = await Promise.all([ + fetchTagValues({ + api, + orgSlug, + tagKey: key, + search, + projectIds, + endpointParams, + dataset: Dataset.ERRORS, + }), + fetchTagValues({ + api, + orgSlug, + tagKey: key, + search, + projectIds, + endpointParams, + dataset: Dataset.ISSUE_PLATFORM, + }), + ]); + + return mergeAndSortTagValues(eventsDatasetValues, issuePlatformDatasetValues); }, [ api, diff --git a/static/app/views/issueList/utils.spec.tsx b/static/app/views/issueList/utils.spec.tsx index d35a1166acfcdf..902828bdb389de 100644 --- a/static/app/views/issueList/utils.spec.tsx +++ b/static/app/views/issueList/utils.spec.tsx @@ -1,3 +1,6 @@ +import type {TagValue} from 'sentry/types'; +import {mergeAndSortTagValues} from 'sentry/views/issueDetails/utils'; + import {getTabs} from './utils'; describe('getTabs', () => { @@ -17,4 +20,139 @@ describe('getTabs', () => { ['is:reprocessing', expect.objectContaining({name: 'Reprocessing'})], ]); }); + + it('merges and sorts tagValues by count correctly', () => { + const defaultTagValueFields = { + email: '', + id: '', + name: '', + username: '', + ip_address: '', + }; + const tagValues1: TagValue[] = [ + { + value: 'a', + count: 1, + lastSeen: '2021-01-01T00:00:00', + firstSeen: '2021-01-01T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'b', + count: 1, + lastSeen: '2021-01-02T00:00:00', + firstSeen: '2021-01-02T00:00:00', + ...defaultTagValueFields, + }, + ]; + + const tagValues2: TagValue[] = [ + { + value: 'a', + count: 1, + lastSeen: '2021-01-01T00:00:00', + firstSeen: '2021-01-01T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'c', + count: 3, + lastSeen: '2021-01-03T00:00:00', + firstSeen: '2021-01-03T00:00:00', + ...defaultTagValueFields, + }, + ]; + const sortByCount = mergeAndSortTagValues(tagValues1, tagValues2, 'count'); + expect(sortByCount).toEqual([ + { + value: 'c', + count: 3, + lastSeen: 
'2021-01-03T00:00:00', + firstSeen: '2021-01-03T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'a', + count: 2, + lastSeen: '2021-01-01T00:00:00', + firstSeen: '2021-01-01T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'b', + count: 1, + lastSeen: '2021-01-02T00:00:00', + firstSeen: '2021-01-02T00:00:00', + ...defaultTagValueFields, + }, + ]); + }); + + it('merges and sorts tagValues by lastSeen correctly', () => { + const defaultTagValueFields = { + email: '', + id: '', + name: '', + username: '', + ip_address: '', + }; + const tagValues1: TagValue[] = [ + { + value: 'a', + count: 1, + lastSeen: '2021-01-01T00:00:00', + firstSeen: '2021-01-01T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'b', + count: 1, + lastSeen: '2021-01-02T00:00:00', + firstSeen: '2021-01-02T00:00:00', + ...defaultTagValueFields, + }, + ]; + + const tagValues2: TagValue[] = [ + { + value: 'a', + count: 1, + lastSeen: '2021-01-01T00:00:00', + firstSeen: '2021-01-01T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'c', + count: 3, + lastSeen: '2021-01-03T00:00:00', + firstSeen: '2021-01-03T00:00:00', + ...defaultTagValueFields, + }, + ]; + + const sortByLastSeen = mergeAndSortTagValues(tagValues1, tagValues2, 'lastSeen'); + expect(sortByLastSeen).toEqual([ + { + value: 'c', + count: 3, + lastSeen: '2021-01-03T00:00:00', + firstSeen: '2021-01-03T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'b', + count: 1, + lastSeen: '2021-01-02T00:00:00', + firstSeen: '2021-01-02T00:00:00', + ...defaultTagValueFields, + }, + { + value: 'a', + count: 2, + lastSeen: '2021-01-01T00:00:00', + firstSeen: '2021-01-01T00:00:00', + ...defaultTagValueFields, + }, + ]); + }); }); From be18d81af8962d0cff3999606cbc9d86f390a62c Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Wed, 24 Jul 2024 09:34:41 -0700 Subject: [PATCH 074/126] fix(hybrid-cloud): Adds defaults to provisioning model fields, lost_password_hash model (#74766) --- .../services/control_organization_provisioning/model.py | 4 ++-- src/sentry/services/organization/model.py | 4 ++-- src/sentry/users/services/lost_password_hash/model.py | 7 +++++-- tests/sentry/api/endpoints/test_event_ai_suggested_fix.py | 2 +- tests/sentry/feedback/usecases/test_create_feedback.py | 2 +- 5 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/sentry/hybridcloud/services/control_organization_provisioning/model.py b/src/sentry/hybridcloud/services/control_organization_provisioning/model.py index e5a904059603f4..20e76051f7c38b 100644 --- a/src/sentry/hybridcloud/services/control_organization_provisioning/model.py +++ b/src/sentry/hybridcloud/services/control_organization_provisioning/model.py @@ -1,7 +1,7 @@ -import pydantic +from sentry.hybridcloud.rpc import RpcModel -class RpcOrganizationSlugReservation(pydantic.BaseModel): +class RpcOrganizationSlugReservation(RpcModel): id: int organization_id: int user_id: int | None diff --git a/src/sentry/services/organization/model.py b/src/sentry/services/organization/model.py index 8730c9864c41eb..cb9a171dca9be7 100644 --- a/src/sentry/services/organization/model.py +++ b/src/sentry/services/organization/model.py @@ -13,8 +13,8 @@ class OrganizationOptions(pydantic.BaseModel): class PostProvisionOptions(pydantic.BaseModel): - sentry_options: Any | None # Placeholder for any sentry post-provisioning data - getsentry_options: Any | None # Reserved for getsentry post-provisioning data + sentry_options: Any | None = None # Placeholder for any sentry post-provisioning data + getsentry_options: Any | 
None = None # Reserved for getsentry post-provisioning data class OrganizationProvisioningOptions(pydantic.BaseModel): diff --git a/src/sentry/users/services/lost_password_hash/model.py b/src/sentry/users/services/lost_password_hash/model.py index 7a6d9c07cf0366..bf2d1f6e5f95f8 100644 --- a/src/sentry/users/services/lost_password_hash/model.py +++ b/src/sentry/users/services/lost_password_hash/model.py @@ -3,7 +3,10 @@ # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. -import datetime +from datetime import datetime + +from django.utils import timezone +from pydantic import Field from sentry.hybridcloud.rpc import RpcModel from sentry.models.lostpasswordhash import LostPasswordHash @@ -13,7 +16,7 @@ class RpcLostPasswordHash(RpcModel): id: int = -1 user_id: int = -1 hash: str = "" - date_added = datetime.datetime + date_added: datetime = Field(default_factory=timezone.now) def get_absolute_url(self, mode: str = "recover") -> str: return LostPasswordHash.get_lostpassword_url(self.user_id, self.hash, mode) diff --git a/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py b/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py index b1029c188a3fab..f990f8d5010332 100644 --- a/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py +++ b/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py @@ -34,7 +34,7 @@ def dummy_response(*args, **kwargs): finish_reason="stop", ) ], - created=time.time(), + created=int(time.time()), model="gpt3.5-trubo", object="chat.completion", ) diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index 8ef46e7d19f71a..986a4141e025df 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -64,7 +64,7 @@ def create_dummy_response(*args, **kwargs): finish_reason="stop", ) ], - created=time.time(), + created=int(time.time()), model="gpt3.5-trubo", object="chat.completion", ) From 53c127c7e1f0a4fc8c8f033abb89dd6615b38b26 Mon Sep 17 00:00:00 2001 From: Jenn Mueng <30991498+jennmueng@users.noreply.github.com> Date: Wed, 24 Jul 2024 23:38:01 +0700 Subject: [PATCH 075/126] feat(autofix): Add status check for autofix runs and log an error if it fails (#74444) Adds a celery task that will log an error to sentry if an autofix state has been processing and hasn't been updated in 5 minutes. Scheduled to run 15 minutes after the run has started. This is in order to alert the ML team if Autofix is unresponsive again. 
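Put together, the flow the message describes is sketched below (a simplified sketch, not the exact wiring; the real code is split across `group_ai_autofix.py` and `tasks/autofix.py` in the diff that follows, and the names and timings here come from that diff).

```python
import logging
from datetime import datetime, timedelta

from sentry.autofix.utils import AutofixStatus, get_autofix_state
from sentry.tasks.autofix import check_autofix_status

logger = logging.getLogger(__name__)

# 1) After the POST endpoint starts a run, schedule the watchdog 15 minutes out.
run_id = 123  # illustrative; in the endpoint this is the run_id returned by Seer
check_autofix_status.apply_async(args=[run_id], countdown=timedelta(minutes=15).seconds)

# 2) Inside the task: fetch the run's state from Seer and log an error (which
#    surfaces in Sentry) if it is still PROCESSING and hasn't been updated for
#    more than 5 minutes.
state = get_autofix_state(run_id=run_id)
if (
    state
    and state.status == AutofixStatus.PROCESSING
    and state.updated_at < datetime.now() - timedelta(minutes=5)
):
    logger.error(
        "Autofix run has been processing for more than 5 minutes",
        extra={"run_id": run_id},
    )
```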
--- src/sentry/api/endpoints/group_ai_autofix.py | 48 +++----- src/sentry/autofix/utils.py | 61 +++++++++- src/sentry/autofix/webhooks.py | 4 +- src/sentry/tasks/autofix.py | 25 +++++ .../api/endpoints/test_group_ai_autofix.py | 52 ++++++--- tests/sentry/autofix/test_utils.py | 106 +++++++++++++++++- tests/sentry/autofix/test_webhooks.py | 23 +++- tests/sentry/tasks/test_autofix.py | 76 +++++++++++++ 8 files changed, 335 insertions(+), 60 deletions(-) create mode 100644 src/sentry/tasks/autofix.py create mode 100644 tests/sentry/tasks/test_autofix.py diff --git a/src/sentry/api/endpoints/group_ai_autofix.py b/src/sentry/api/endpoints/group_ai_autofix.py index b483915d519428..9dcb17df59006b 100644 --- a/src/sentry/api/endpoints/group_ai_autofix.py +++ b/src/sentry/api/endpoints/group_ai_autofix.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from datetime import datetime +from datetime import datetime, timedelta from typing import Any import orjson @@ -16,10 +16,11 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases.group import GroupEndpoint from sentry.api.serializers import EventSerializer, serialize -from sentry.autofix.utils import get_autofix_repos_from_project_code_mappings +from sentry.autofix.utils import get_autofix_repos_from_project_code_mappings, get_autofix_state from sentry.models.group import Group from sentry.models.user import User from sentry.seer.signed_seer_api import sign_with_seer_secret +from sentry.tasks.autofix import check_autofix_status from sentry.types.ratelimit import RateLimit, RateLimitCategory from sentry.users.services.user.service import user_service @@ -133,33 +134,7 @@ def _call_autofix( response.raise_for_status() - def _call_get_autofix_state(self, group_id: int) -> dict[str, Any] | None: - path = "/v1/automation/autofix/state" - body = orjson.dumps( - { - "group_id": group_id, - } - ) - response = requests.post( - f"{settings.SEER_AUTOFIX_URL}{path}", - data=body, - headers={ - "content-type": "application/json;charset=utf-8", - **sign_with_seer_secret( - url=f"{settings.SEER_AUTOFIX_URL}{path}", - body=body, - ), - }, - ) - - response.raise_for_status() - - result = response.json() - - if result and result["group_id"] == group_id: - return result["state"] - - return None + return response.json().get("run_id") def post(self, request: Request, group: Group) -> Response: data = orjson.loads(request.body) @@ -203,7 +178,7 @@ def post(self, request: Request, group: Group) -> Response: ) try: - self._call_autofix( + run_id = self._call_autofix( request.user, group, repos, @@ -226,15 +201,20 @@ def post(self, request: Request, group: Group) -> Response: 500, ) + check_autofix_status.apply_async(args=[run_id], countdown=timedelta(minutes=15).seconds) + return Response( status=202, ) def get(self, request: Request, group: Group) -> Response: - autofix_state = self._call_get_autofix_state(group.id) + autofix_state = get_autofix_state(group_id=group.id) + + response_state: dict[str, Any] | None = None if autofix_state: - user_ids = autofix_state.get("actor_ids", []) + response_state = autofix_state.dict() + user_ids = autofix_state.actor_ids if user_ids: users = user_service.serialize_many( filter={"user_ids": user_ids, "organization_id": request.organization.id}, @@ -243,6 +223,6 @@ def get(self, request: Request, group: Group) -> Response: users_map = {user["id"]: user for user in users} - autofix_state["users"] = users_map + response_state["users"] = users_map - return Response({"autofix": autofix_state}) + return 
Response({"autofix": response_state}) diff --git a/src/sentry/autofix/utils.py b/src/sentry/autofix/utils.py index 27dc946819654c..ede2a34cb2cb85 100644 --- a/src/sentry/autofix/utils.py +++ b/src/sentry/autofix/utils.py @@ -1,7 +1,11 @@ +import datetime +import enum from typing import TypedDict +import orjson import requests from django.conf import settings +from pydantic import BaseModel from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs from sentry.models.project import Project @@ -19,9 +23,24 @@ class AutofixRequest(TypedDict): issue: AutofixIssue -class AutofixState(TypedDict): +class AutofixStatus(str, enum.Enum): + COMPLETED = "COMPLETED" + ERROR = "ERROR" + PENDING = "PENDING" + PROCESSING = "PROCESSING" + NEED_MORE_INFORMATION = "NEED_MORE_INFORMATION" + CANCELLED = "CANCELLED" + + +class AutofixState(BaseModel): run_id: int request: AutofixRequest + updated_at: datetime.datetime + status: AutofixStatus + actor_ids: list[str] | None = None + + class Config: + extra = "allow" def get_autofix_repos_from_project_code_mappings(project: Project) -> list[dict]: @@ -51,6 +70,44 @@ def get_autofix_repos_from_project_code_mappings(project: Project) -> list[dict] return list(repos.values()) +def get_autofix_state( + *, group_id: int | None = None, run_id: int | None = None +) -> AutofixState | None: + path = "/v1/automation/autofix/state" + body = orjson.dumps( + { + "group_id": group_id, + "run_id": run_id, + } + ) + response = requests.post( + f"{settings.SEER_AUTOFIX_URL}{path}", + data=body, + headers={ + "content-type": "application/json;charset=utf-8", + **sign_with_seer_secret( + url=f"{settings.SEER_AUTOFIX_URL}{path}", + body=body, + ), + }, + ) + + response.raise_for_status() + + result = response.json() + + if result: + if ( + group_id is not None + and result["group_id"] == group_id + or run_id is not None + and result["run_id"] == run_id + ): + return AutofixState.validate(result["state"]) + + return None + + def get_autofix_state_from_pr_id(provider: str, pr_id: int) -> AutofixState | None: path = "/v1/automation/autofix/state/pr" body = json.dumps( @@ -77,4 +134,4 @@ def get_autofix_state_from_pr_id(provider: str, pr_id: int) -> AutofixState | No if not result: return None - return result.get("state", None) + return AutofixState.validate(result.get("state", None)) diff --git a/src/sentry/autofix/webhooks.py b/src/sentry/autofix/webhooks.py index 2b12496413ea59..1cd08cc75ff394 100644 --- a/src/sentry/autofix/webhooks.py +++ b/src/sentry/autofix/webhooks.py @@ -11,14 +11,14 @@ def get_webhook_analytics_fields(autofix_state: AutofixState) -> dict[str, Any]: webhook_analytics_fields = {} - autofix_request = autofix_state.get("request", {}) + autofix_request = autofix_state.request webhook_analytics_fields["project_id"] = autofix_request.get("project_id", None) issue = autofix_request.get("issue", None) webhook_analytics_fields["group_id"] = issue.get("id", None) if issue else None - webhook_analytics_fields["run_id"] = autofix_state.get("run_id", None) + webhook_analytics_fields["run_id"] = autofix_state.run_id return webhook_analytics_fields diff --git a/src/sentry/tasks/autofix.py b/src/sentry/tasks/autofix.py new file mode 100644 index 00000000000000..5eaffd179d2d2a --- /dev/null +++ b/src/sentry/tasks/autofix.py @@ -0,0 +1,25 @@ +import logging +from datetime import datetime, timedelta + +from sentry.autofix.utils import AutofixStatus, get_autofix_state +from sentry.tasks.base import instrumented_task + +logger = logging.getLogger(__name__) + + 
+@instrumented_task( + name="sentry.tasks.autofix.check_autofix_status", + max_retries=1, +) +def check_autofix_status(run_id: int): + state = get_autofix_state(run_id=run_id) + + if ( + state + and state.status == AutofixStatus.PROCESSING + and state.updated_at < datetime.now() - timedelta(minutes=5) + ): + # This should log to sentry + logger.error( + "Autofix run has been processing for more than 5 minutes", extra={"run_id": run_id} + ) diff --git a/tests/sentry/api/endpoints/test_group_ai_autofix.py b/tests/sentry/api/endpoints/test_group_ai_autofix.py index 150a8d3464aa67..97d6ecf4152177 100644 --- a/tests/sentry/api/endpoints/test_group_ai_autofix.py +++ b/tests/sentry/api/endpoints/test_group_ai_autofix.py @@ -1,6 +1,8 @@ +from datetime import datetime from unittest.mock import ANY, patch from sentry.api.endpoints.group_ai_autofix import TIMEOUT_SECONDS +from sentry.autofix.utils import AutofixState, AutofixStatus from sentry.models.group import Group from sentry.testutils.cases import APITestCase, SnubaTestCase from sentry.testutils.helpers.datetime import before_now @@ -16,12 +18,15 @@ class GroupAutofixEndpointTest(APITestCase, SnubaTestCase): def _get_url(self, group_id: int): return f"/api/0/issues/{group_id}/autofix/" - @patch( - "sentry.api.endpoints.group_ai_autofix.GroupAutofixEndpoint._call_get_autofix_state", - return_value={"status": "PROCESSING"}, - ) - def test_ai_autofix_get_endpoint_with_autofix(self, mock_autofix_state_call): + @patch("sentry.api.endpoints.group_ai_autofix.get_autofix_state") + def test_ai_autofix_get_endpoint_with_autofix(self, mock_get_autofix_state): group = self.create_group() + mock_get_autofix_state.return_value = AutofixState( + run_id=123, + request={"project_id": 456, "issue": {"id": 789}}, + updated_at=datetime.strptime("2023-07-18T12:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + status=AutofixStatus.PROCESSING, + ) self.login_as(user=self.user) response = self.client.get(self._get_url(group.id), format="json") @@ -30,15 +35,12 @@ def test_ai_autofix_get_endpoint_with_autofix(self, mock_autofix_state_call): assert response.data["autofix"] is not None assert response.data["autofix"]["status"] == "PROCESSING" - mock_autofix_state_call.assert_called_once() - mock_autofix_state_call.assert_called_with(group.id) + mock_get_autofix_state.assert_called_once_with(group_id=group.id) - @patch( - "sentry.api.endpoints.group_ai_autofix.GroupAutofixEndpoint._call_get_autofix_state", - return_value=None, - ) - def test_ai_autofix_get_endpoint_without_autofix(self, mock_autofix_state_call): + @patch("sentry.api.endpoints.group_ai_autofix.get_autofix_state") + def test_ai_autofix_get_endpoint_without_autofix(self, mock_get_autofix_state): group = self.create_group() + mock_get_autofix_state.return_value = None self.login_as(user=self.user) response = self.client.get(self._get_url(group.id), format="json") @@ -46,11 +48,11 @@ def test_ai_autofix_get_endpoint_without_autofix(self, mock_autofix_state_call): assert response.status_code == 200 assert response.data["autofix"] is None - mock_autofix_state_call.assert_called_once() - mock_autofix_state_call.assert_called_with(group.id) + mock_get_autofix_state.assert_called_once_with(group_id=group.id) @patch("sentry.api.endpoints.group_ai_autofix.GroupAutofixEndpoint._call_autofix") - def test_ai_autofix_post_endpoint(self, mock_call): + @patch("sentry.tasks.autofix.check_autofix_status.apply_async") + def test_ai_autofix_post_endpoint(self, mock_check_autofix_status, mock_call): release = 
self.create_release(project=self.project, version="1.0.0") repo = self.create_repo( @@ -76,6 +78,8 @@ def test_ai_autofix_post_endpoint(self, mock_call): assert group is not None group.save() + mock_call.return_value = 123 # Mocking the run_id returned by _call_autofix + self.login_as(user=self.user) response = self.client.post( self._get_url(group.id), @@ -107,8 +111,11 @@ def test_ai_autofix_post_endpoint(self, mock_call): ) assert response.status_code == 202 + mock_check_autofix_status.assert_called_once_with(args=[123], countdown=900) + @patch("sentry.api.endpoints.group_ai_autofix.GroupAutofixEndpoint._call_autofix") - def test_ai_autofix_post_without_event_id(self, mock_call): + @patch("sentry.tasks.autofix.check_autofix_status.apply_async") + def test_ai_autofix_post_without_event_id(self, mock_check_autofix_status, mock_call): release = self.create_release(project=self.project, version="1.0.0") repo = self.create_repo( @@ -134,6 +141,8 @@ def test_ai_autofix_post_without_event_id(self, mock_call): assert group is not None group.save() + mock_call.return_value = 123 # Mocking the run_id returned by _call_autofix + self.login_as(user=self.user) response = self.client.post( self._get_url(group.id), data={"instruction": "Yes"}, format="json" @@ -163,9 +172,14 @@ def test_ai_autofix_post_without_event_id(self, mock_call): ) assert response.status_code == 202 + mock_check_autofix_status.assert_called_once_with(args=[123], countdown=900) + @patch("sentry.models.Group.get_recommended_event_for_environments", return_value=None) @patch("sentry.api.endpoints.group_ai_autofix.GroupAutofixEndpoint._call_autofix") - def test_ai_autofix_post_without_event_id_no_recommended_event(self, mock_call, mock_event): + @patch("sentry.tasks.autofix.check_autofix_status.apply_async") + def test_ai_autofix_post_without_event_id_no_recommended_event( + self, mock_check_autofix_status, mock_call, mock_event + ): release = self.create_release(project=self.project, version="1.0.0") repo = self.create_repo( @@ -191,6 +205,8 @@ def test_ai_autofix_post_without_event_id_no_recommended_event(self, mock_call, assert group is not None group.save() + mock_call.return_value = 123 # Mocking the run_id returned by _call_autofix + self.login_as(user=self.user) response = self.client.post( self._get_url(group.id), data={"instruction": "Yes"}, format="json" @@ -221,6 +237,8 @@ def test_ai_autofix_post_without_event_id_no_recommended_event(self, mock_call, assert response.status_code == 202 + mock_check_autofix_status.assert_called_once_with(args=[123], countdown=900) + @patch("sentry.models.Group.get_recommended_event_for_environments", return_value=None) @patch("sentry.models.Group.get_latest_event", return_value=None) def test_ai_autofix_post_without_event_id_error( diff --git a/tests/sentry/autofix/test_utils.py b/tests/sentry/autofix/test_utils.py index 04d8f8f990499d..0a71585745f790 100644 --- a/tests/sentry/autofix/test_utils.py +++ b/tests/sentry/autofix/test_utils.py @@ -1,10 +1,14 @@ +from datetime import datetime, timezone from unittest.mock import patch import pytest from django.conf import settings from sentry.autofix.utils import ( + AutofixState, + AutofixStatus, get_autofix_repos_from_project_code_mappings, + get_autofix_state, get_autofix_state_from_pr_id, ) from sentry.testutils.cases import TestCase @@ -40,7 +44,12 @@ def test_get_autofix_state_from_pr_id_success(self, mock_post): mock_response = mock_post.return_value mock_response.raise_for_status = lambda: None mock_response.json.return_value = { - 
"state": {"run_id": 123, "request": {"project_id": 456, "issue": {"id": 789}}} + "state": { + "run_id": 123, + "request": {"project_id": 456, "issue": {"id": 789}}, + "updated_at": "2023-07-18T12:00:00Z", + "status": "PROCESSING", + } } # Call the function @@ -48,7 +57,10 @@ def test_get_autofix_state_from_pr_id_success(self, mock_post): # Assertions assert result is not None - assert result == {"run_id": 123, "request": {"project_id": 456, "issue": {"id": 789}}} + assert result.run_id == 123 + assert result.request == {"project_id": 456, "issue": {"id": 789}} + assert result.updated_at == datetime(2023, 7, 18, 12, 0, tzinfo=timezone.utc) + assert result.status == AutofixStatus.PROCESSING mock_post.assert_called_once_with( f"{settings.SEER_AUTOFIX_URL}/v1/automation/autofix/state/pr", @@ -81,3 +93,93 @@ def test_get_autofix_state_from_pr_id_http_error(self, mock_post): # Assertions assert "HTTP Error" in str(context.value) + + +class TestGetAutofixState(TestCase): + @patch("requests.post") + def test_get_autofix_state_success_with_group_id(self, mock_post): + # Setup mock response + mock_response = mock_post.return_value + mock_response.raise_for_status = lambda: None + mock_response.json.return_value = { + "group_id": 123, + "state": { + "run_id": 456, + "request": {"project_id": 789, "issue": {"id": 123}}, + "updated_at": "2023-07-18T12:00:00Z", + "status": "PROCESSING", + }, + } + + # Call the function + result = get_autofix_state(group_id=123) + + # Assertions + assert isinstance(result, AutofixState) + assert result.run_id == 456 + assert result.request == {"project_id": 789, "issue": {"id": 123}} + assert result.updated_at == datetime(2023, 7, 18, 12, 0, tzinfo=timezone.utc) + assert result.status == AutofixStatus.PROCESSING + + mock_post.assert_called_once_with( + f"{settings.SEER_AUTOFIX_URL}/v1/automation/autofix/state", + data=b'{"group_id":123,"run_id":null}', + headers={"content-type": "application/json;charset=utf-8"}, + ) + + @patch("requests.post") + def test_get_autofix_state_success_with_run_id(self, mock_post): + # Setup mock response + mock_response = mock_post.return_value + mock_response.raise_for_status = lambda: None + mock_response.json.return_value = { + "run_id": 456, + "state": { + "run_id": 456, + "request": {"project_id": 789, "issue": {"id": 123}}, + "updated_at": "2023-07-18T12:00:00Z", + "status": "COMPLETED", + }, + } + + # Call the function + result = get_autofix_state(run_id=456) + + # Assertions + assert isinstance(result, AutofixState) + assert result.run_id == 456 + assert result.request == {"project_id": 789, "issue": {"id": 123}} + assert result.updated_at == datetime(2023, 7, 18, 12, 0, tzinfo=timezone.utc) + assert result.status == AutofixStatus.COMPLETED + + mock_post.assert_called_once_with( + f"{settings.SEER_AUTOFIX_URL}/v1/automation/autofix/state", + data=b'{"group_id":null,"run_id":456}', + headers={"content-type": "application/json;charset=utf-8"}, + ) + + @patch("requests.post") + def test_get_autofix_state_no_result(self, mock_post): + # Setup mock response + mock_response = mock_post.return_value + mock_response.raise_for_status = lambda: None + mock_response.json.return_value = {} + + # Call the function + result = get_autofix_state(group_id=123) + + # Assertions + assert result is None + + @patch("requests.post") + def test_get_autofix_state_http_error(self, mock_post): + # Setup mock response to raise HTTP error + mock_response = mock_post.return_value + mock_response.raise_for_status.side_effect = Exception("HTTP Error") + + # Call 
the function and expect an exception + with pytest.raises(Exception) as context: + get_autofix_state(group_id=123) + + # Assertions + assert "HTTP Error" in str(context.value) diff --git a/tests/sentry/autofix/test_webhooks.py b/tests/sentry/autofix/test_webhooks.py index 2a1bdcbf58aecf..3276c0be7d9e4d 100644 --- a/tests/sentry/autofix/test_webhooks.py +++ b/tests/sentry/autofix/test_webhooks.py @@ -1,8 +1,10 @@ +from datetime import datetime, timezone from unittest.mock import call, patch from django.conf import settings from django.test import override_settings +from sentry.autofix.utils import AutofixState, AutofixStatus from sentry.autofix.webhooks import handle_github_pr_webhook_for_autofix from sentry.testutils.cases import APITestCase @@ -11,7 +13,12 @@ class AutofixPrWebhookTest(APITestCase): @override_settings(SEER_AUTOFIX_GITHUB_APP_USER_ID="12345") @patch( "sentry.autofix.webhooks.get_autofix_state_from_pr_id", - return_value={"run_id": 1, "request": {"project_id": 2, "issue": {"id": 3}}}, + return_value=AutofixState( + run_id=1, + request={"project_id": 2, "issue": {"id": 3}}, + updated_at=datetime.now(timezone.utc), + status=AutofixStatus.PROCESSING, + ), ) @patch("sentry.autofix.webhooks.analytics.record") @patch("sentry.autofix.webhooks.metrics.incr") @@ -38,7 +45,12 @@ def test_opened( @override_settings(SEER_AUTOFIX_GITHUB_APP_USER_ID="12345") @patch( "sentry.autofix.webhooks.get_autofix_state_from_pr_id", - return_value={"run_id": 1, "request": {"project_id": 2, "issue": {"id": 3}}}, + return_value=AutofixState( + run_id=1, + request={"project_id": 2, "issue": {"id": 3}}, + updated_at=datetime.now(timezone.utc), + status=AutofixStatus.PROCESSING, + ), ) @patch("sentry.autofix.webhooks.analytics.record") @patch("sentry.autofix.webhooks.metrics.incr") @@ -65,7 +77,12 @@ def test_closed( @override_settings(SEER_AUTOFIX_GITHUB_APP_USER_ID="12345") @patch( "sentry.autofix.webhooks.get_autofix_state_from_pr_id", - return_value={"run_id": 1, "request": {"project_id": 2, "issue": {"id": 3}}}, + return_value=AutofixState( + run_id=1, + request={"project_id": 2, "issue": {"id": 3}}, + updated_at=datetime.now(timezone.utc), + status=AutofixStatus.PROCESSING, + ), ) @patch("sentry.autofix.webhooks.analytics.record") @patch("sentry.autofix.webhooks.metrics.incr") diff --git a/tests/sentry/tasks/test_autofix.py b/tests/sentry/tasks/test_autofix.py new file mode 100644 index 00000000000000..6ce505dc9f4fd4 --- /dev/null +++ b/tests/sentry/tasks/test_autofix.py @@ -0,0 +1,76 @@ +from datetime import datetime, timedelta +from unittest.mock import patch + +from django.test import TestCase + +from sentry.autofix.utils import AutofixState, AutofixStatus +from sentry.tasks.autofix import check_autofix_status + + +class TestCheckAutofixStatus(TestCase): + @patch("sentry.tasks.autofix.get_autofix_state") + @patch("sentry.tasks.autofix.logger.error") + def test_check_autofix_status_processing_too_long(self, mock_logger, mock_get_autofix_state): + # Mock the get_autofix_state function to return a state that's been processing for too long + mock_get_autofix_state.return_value = AutofixState( + run_id=123, + request={"project_id": 456, "issue": {"id": 789}}, + updated_at=datetime.now() - timedelta(minutes=10), # Naive datetime + status=AutofixStatus.PROCESSING, + ) + + # Call the task + check_autofix_status(123) + + # Check that the logger.error was called + mock_logger.assert_called_once_with( + "Autofix run has been processing for more than 5 minutes", extra={"run_id": 123} + ) + + 
@patch("sentry.tasks.autofix.get_autofix_state") + @patch("sentry.tasks.autofix.logger.error") + def test_check_autofix_status_processing_within_time_limit( + self, mock_logger, mock_get_autofix_state + ): + # Mock the get_autofix_state function to return a state that's still within the time limit + mock_get_autofix_state.return_value = AutofixState( + run_id=123, + request={"project_id": 456, "issue": {"id": 789}}, + updated_at=datetime.now() - timedelta(minutes=3), # Naive datetime + status=AutofixStatus.PROCESSING, + ) + + # Call the task + check_autofix_status(123) + + # Check that the logger.error was not called + mock_logger.assert_not_called() + + @patch("sentry.tasks.autofix.get_autofix_state") + @patch("sentry.tasks.autofix.logger.error") + def test_check_autofix_status_completed(self, mock_logger, mock_get_autofix_state): + # Mock the get_autofix_state function to return a completed state + mock_get_autofix_state.return_value = AutofixState( + run_id=123, + request={"project_id": 456, "issue": {"id": 789}}, + updated_at=datetime.now() - timedelta(minutes=10), # Naive datetime + status=AutofixStatus.COMPLETED, + ) + + # Call the task + check_autofix_status(123) + + # Check that the logger.error was not called + mock_logger.assert_not_called() + + @patch("sentry.tasks.autofix.get_autofix_state") + @patch("sentry.tasks.autofix.logger.error") + def test_check_autofix_status_no_state(self, mock_logger, mock_get_autofix_state): + # Mock the get_autofix_state function to return None (no state found) + mock_get_autofix_state.return_value = None + + # Call the task + check_autofix_status(123) + + # Check that the logger.error was not called + mock_logger.assert_not_called() From 13e70f9a8261b2e195b247aafe2d307bc28a9b09 Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 24 Jul 2024 12:48:05 -0400 Subject: [PATCH 076/126] feat(discover): Set query source as user if flag is enabled (#74859) Once the flag is enabled and queries are created/updated through the API with anything other than Discover dataset, set as user created. 
--- .../endpoints/discover_homepage_query.py | 22 ++++++++++++++++--- .../endpoints/discover_saved_queries.py | 16 ++++++++++++-- .../endpoints/discover_saved_query_detail.py | 17 ++++++++++++-- 3 files changed, 48 insertions(+), 7 deletions(-) diff --git a/src/sentry/discover/endpoints/discover_homepage_query.py b/src/sentry/discover/endpoints/discover_homepage_query.py index 39b2b6df5237e6..7e6d379f9a13bd 100644 --- a/src/sentry/discover/endpoints/discover_homepage_query.py +++ b/src/sentry/discover/endpoints/discover_homepage_query.py @@ -13,7 +13,7 @@ from sentry.api.serializers import serialize from sentry.discover.endpoints.bases import DiscoverSavedQueryPermission from sentry.discover.endpoints.serializers import DiscoverSavedQuerySerializer -from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery +from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes def get_homepage_query(organization, user): @@ -77,6 +77,14 @@ def put(self, request: Request, organization) -> Response: raise ParseError(serializer.errors) data = serializer.validated_data + user_selected_dataset = ( + features.has( + "organizations:performance-discover-dataset-selector", + organization, + actor=request.user, + ) + and data["query_dataset"] != DiscoverSavedQueryTypes.DISCOVER + ) if previous_homepage: previous_homepage.update( organization=organization, @@ -84,7 +92,11 @@ def put(self, request: Request, organization) -> Response: query=data["query"], version=data["version"], dataset=data["query_dataset"], - dataset_source=DatasetSourcesTypes.UNKNOWN.value, + dataset_source=( + DatasetSourcesTypes.USER.value + if user_selected_dataset + else DatasetSourcesTypes.UNKNOWN.value + ), ) previous_homepage.set_projects(data["project_ids"]) return Response(serialize(previous_homepage), status=status.HTTP_200_OK) @@ -95,7 +107,11 @@ def put(self, request: Request, organization) -> Response: query=data["query"], version=data["version"], dataset=data["query_dataset"], - dataset_source=DatasetSourcesTypes.UNKNOWN.value, + dataset_source=( + DatasetSourcesTypes.USER.value + if user_selected_dataset + else DatasetSourcesTypes.UNKNOWN.value + ), created_by_id=request.user.id, is_homepage=True, ) diff --git a/src/sentry/discover/endpoints/discover_saved_queries.py b/src/sentry/discover/endpoints/discover_saved_queries.py index 927eb91b6aac19..96aebd4c5ea98d 100644 --- a/src/sentry/discover/endpoints/discover_saved_queries.py +++ b/src/sentry/discover/endpoints/discover_saved_queries.py @@ -14,7 +14,7 @@ from sentry.api.serializers import serialize from sentry.discover.endpoints.bases import DiscoverSavedQueryPermission from sentry.discover.endpoints.serializers import DiscoverSavedQuerySerializer -from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery +from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes from sentry.search.utils import tokenize_query @@ -135,6 +135,14 @@ def post(self, request: Request, organization) -> Response: return Response(serializer.errors, status=400) data = serializer.validated_data + user_selected_dataset = ( + features.has( + "organizations:performance-discover-dataset-selector", + organization, + actor=request.user, + ) + and data["query_dataset"] != DiscoverSavedQueryTypes.DISCOVER + ) model = DiscoverSavedQuery.objects.create( organization=organization, @@ -142,7 +150,11 @@ def post(self, request: Request, organization) -> Response: query=data["query"], 
version=data["version"], dataset=data["query_dataset"], - dataset_source=DatasetSourcesTypes.UNKNOWN.value, + dataset_source=( + DatasetSourcesTypes.USER.value + if user_selected_dataset + else DatasetSourcesTypes.UNKNOWN.value + ), created_by_id=request.user.id if request.user.is_authenticated else None, ) diff --git a/src/sentry/discover/endpoints/discover_saved_query_detail.py b/src/sentry/discover/endpoints/discover_saved_query_detail.py index dc30fe6cf53903..e28d7afc4e0eb6 100644 --- a/src/sentry/discover/endpoints/discover_saved_query_detail.py +++ b/src/sentry/discover/endpoints/discover_saved_query_detail.py @@ -13,7 +13,7 @@ from sentry.api.serializers import serialize from sentry.discover.endpoints.bases import DiscoverSavedQueryPermission from sentry.discover.endpoints.serializers import DiscoverSavedQuerySerializer -from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery +from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes class DiscoverSavedQueryBase(OrganizationEndpoint): @@ -83,13 +83,26 @@ def put(self, request: Request, organization, query) -> Response: return Response(serializer.errors, status=400) data = serializer.validated_data + user_selected_dataset = ( + features.has( + "organizations:performance-discover-dataset-selector", + organization, + actor=request.user, + ) + and data["query_dataset"] != DiscoverSavedQueryTypes.DISCOVER + ) + query.update( organization=organization, name=data["name"], query=data["query"], version=data["version"], dataset=data["query_dataset"], - dataset_source=DatasetSourcesTypes.UNKNOWN.value, + dataset_source=( + DatasetSourcesTypes.USER.value + if user_selected_dataset + else DatasetSourcesTypes.UNKNOWN.value + ), ) query.set_projects(data["project_ids"]) From 552a8edad5695286633f750451d1859e3d136e8c Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Wed, 24 Jul 2024 09:55:10 -0700 Subject: [PATCH 077/126] chore(slack): Add Use SDK Client for Spike Protection (#74810) Need to use SDK Client in Spike Protection, so a FF for that. Also cleaning up an old FF from an older SDK Client usage that we already GA'ed. 
--- src/sentry/features/temporary.py | 4 ++-- tests/sentry/integrations/slack/test_tasks.py | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 13af8c1b476a83..66a915d0d50c95 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -400,8 +400,8 @@ def register_temporary_features(manager: FeatureManager): # Enable improvements to Slack notifications manager.add("organizations:slack-improvements", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) # Feature flags for migrating to the Slack SDK WebClient - # Use new Slack SDK Client in get_channel_id_with_timeout - manager.add("organizations:slack-sdk-get-channel-id", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) + # Use new Slack SDK Client for spike protection message + manager.add("organizations:slack-sdk-spike-protection", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Add regression chart as image to slack message manager.add("organizations:slack-endpoint-regression-image", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) manager.add("organizations:slack-function-regression-image", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) diff --git a/tests/sentry/integrations/slack/test_tasks.py b/tests/sentry/integrations/slack/test_tasks.py index 4d21a0e157019c..8b402cda649581 100644 --- a/tests/sentry/integrations/slack/test_tasks.py +++ b/tests/sentry/integrations/slack/test_tasks.py @@ -19,7 +19,6 @@ ) from sentry.testutils.cases import TestCase from sentry.testutils.helpers import install_slack -from sentry.testutils.helpers.features import with_feature from sentry.testutils.skips import requires_snuba from tests.sentry.integrations.slack.utils.test_mock_slack_response import mock_slack_response @@ -283,7 +282,6 @@ def test_task_existing_metric_alert(self, mock_get_channel_id, mock_set_value): "sentry.integrations.slack.utils.channel.get_channel_id_with_timeout", return_value=SlackChannelIdData("#", "chan-id", False), ) - @with_feature("organizations:slack-sdk-get-channel-id") def test_task_existing_metric_alert_with_sdk(self, mock_get_channel_id, mock_set_value): alert_rule_data = self.metric_alert_data() alert_rule = self.create_alert_rule( From 27ff5a97f86ed45a23aa98ceeb40e9c79ae1232c Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Wed, 24 Jul 2024 09:58:49 -0700 Subject: [PATCH 078/126] fix(query-builder): Better support for adding filter keys by typing (#74817) When selecting a filter key through the menu, we add a default value in the `replaceFocusedWordWithFilter()` function. When typing (`:`) we don't do that, which results in a filter without a value. This wasn't as apparent before because we weren't highlighting invalid filters. These changes will ensure that this function gets called when we detect a filter after `:` is typed. Also added an analytics event so that we can track how common this is. 
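Condensed, the new detection path looks roughly like the sketch below (identifiers are the ones used in the diff; the dispatch and analytics calls are reduced to comments):

```typescript
// Parse with a trailing quote appended so text the user intends to wrap in
// quotes (e.g. `"Error: foo`) is not prematurely turned into a filter.
const parsedText = parseSearch(e.target.value + '"');

// Did this keystroke complete a `key:` filter for the key under the cursor?
const typedFilterKey = parsedText?.some(
  textToken => textToken.type === Token.FILTER && textToken.key.text === filterValue
);

if (typedFilterKey) {
  // replaceFocusedWordWithFilter() swaps the focused word for a full filter
  // token, including its default value (e.g. `is:` becomes `is:unresolved`),
  // and the new `search.key_manually_typed` analytics event is recorded.
}
```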
--- .../searchQueryBuilder/index.spec.tsx | 34 +++++++++++++++-- .../searchQueryBuilder/tokens/freeText.tsx | 38 +++++++++++++++++-- .../utils/analytics/searchAnalyticsEvents.tsx | 6 +++ 3 files changed, 72 insertions(+), 6 deletions(-) diff --git a/static/app/components/searchQueryBuilder/index.spec.tsx b/static/app/components/searchQueryBuilder/index.spec.tsx index d13e8b58835ab7..85879dae867716 100644 --- a/static/app/components/searchQueryBuilder/index.spec.tsx +++ b/static/app/components/searchQueryBuilder/index.spec.tsx @@ -415,15 +415,43 @@ describe('SearchQueryBuilder', function () { describe('new search tokens', function () { it('can add an unsupported filter key and value', async function () { render(); - await userEvent.click(screen.getByRole('combobox', {name: 'Add a search term'})); + await userEvent.click(getLastInput()); + + // Typing "foo", then " a:b" should add the "foo" text followed by a new token "a:b" await userEvent.type( screen.getByRole('combobox', {name: 'Add a search term'}), - 'a:b{enter}' + 'foo a:b{enter}' ); - + expect(screen.getByRole('row', {name: 'foo'})).toBeInTheDocument(); expect(screen.getByRole('row', {name: 'a:b'})).toBeInTheDocument(); }); + it('adds default value for filter when typing :', async function () { + render(); + await userEvent.click(getLastInput()); + + // Typing `is:` and escaping should result in `is:unresolved` + await userEvent.type( + screen.getByRole('combobox', {name: 'Add a search term'}), + 'is:{escape}' + ); + expect(await screen.findByRole('row', {name: 'is:unresolved'})).toBeInTheDocument(); + }); + + it('does not automatically create a filter if the user intends to wrap in quotes', async function () { + render(); + await userEvent.click(getLastInput()); + + // Starting with an opening quote and typing out Error: should stay as raw text + await userEvent.type( + screen.getByRole('combobox', {name: 'Add a search term'}), + '"Error: foo"' + ); + await waitFor(() => { + expect(getLastInput()).toHaveValue('"Error: foo"'); + }); + }); + it('breaks keys into sections', async function () { render(); await userEvent.click(screen.getByRole('combobox', {name: 'Add a search term'})); diff --git a/static/app/components/searchQueryBuilder/tokens/freeText.tsx b/static/app/components/searchQueryBuilder/tokens/freeText.tsx index a6753c4ac305f5..cae0bd121271b9 100644 --- a/static/app/components/searchQueryBuilder/tokens/freeText.tsx +++ b/static/app/components/searchQueryBuilder/tokens/freeText.tsx @@ -24,6 +24,7 @@ import type { import { InvalidReason, type ParseResultToken, + parseSearch, Token, type TokenResult, } from 'sentry/components/searchSyntax/parser'; @@ -498,7 +499,16 @@ function SearchQueryBuilderInputInternal({ token={token} inputLabel={t('Add a search term')} onInputChange={e => { - if (e.target.value.includes('(') || e.target.value.includes(')')) { + // Parse text to see if this keystroke would have created any tokens. + // Add a trailing quote in case the user wants to wrap with quotes. 
+ const parsedText = parseSearch(e.target.value + '"'); + + if ( + parsedText?.some( + textToken => + textToken.type === Token.L_PAREN || textToken.type === Token.R_PAREN + ) + ) { dispatch({ type: 'UPDATE_FREE_TEXT', tokens: [token], @@ -509,14 +519,36 @@ function SearchQueryBuilderInputInternal({ return; } - if (e.target.value.includes(':')) { + if ( + parsedText?.some( + textToken => + textToken.type === Token.FILTER && textToken.key.text === filterValue + ) + ) { + const filterKey = filterValue; + const key = filterKeys[filterKey]; dispatch({ type: 'UPDATE_FREE_TEXT', tokens: [token], - text: e.target.value, + text: replaceFocusedWordWithFilter( + inputValue, + selectionIndex, + filterKey, + getFieldDefinition + ), focusOverride: calculateNextFocusForFilter(state), }); resetInputValue(); + trackAnalytics('search.key_manually_typed', { + organization, + search_type: savedSearchType === 0 ? 'issues' : 'events', + search_source: searchSource, + item_name: filterKey, + item_kind: key?.kind ?? FieldKind.FIELD, + item_value_type: + getFieldDefinition(filterKey)?.valueType ?? FieldValueType.STRING, + new_experience: true, + }); return; } diff --git a/static/app/utils/analytics/searchAnalyticsEvents.tsx b/static/app/utils/analytics/searchAnalyticsEvents.tsx index 52537558060d8e..7dec41302ff6ef 100644 --- a/static/app/utils/analytics/searchAnalyticsEvents.tsx +++ b/static/app/utils/analytics/searchAnalyticsEvents.tsx @@ -47,6 +47,11 @@ export type SearchEventParameters = { item_value_type?: string; search_operator?: string; }; + 'search.key_manually_typed': Omit & { + item_kind: string; + item_name: string; + item_value_type: string; + }; 'search.operator_autocompleted': SearchEventBase & { search_operator: string; filter_key?: string; @@ -103,6 +108,7 @@ export const searchEventMap: Record = { 'search.searched': 'Search: Performed search', 'search.searched_filter': 'Search: Performed search filter', 'search.key_autocompleted': 'Search: Key Autocompleted', + 'search.key_manually_typed': 'Search: Key Manually Typed', 'search.shortcut_used': 'Search: Shortcut Used', 'search.docs_opened': 'Search: Docs Opened', 'search.search_with_invalid': 'Search: Attempted Invalid Search', From aa92d5efdbd4fb6d51e4124e30e91b0d86865b53 Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Wed, 24 Jul 2024 10:03:56 -0700 Subject: [PATCH 079/126] fix(issues): Parameterize multi-second durations (#74818) Previously these were incorrectly only including the last digit (ex: `12s` -> `1`). This also splits out the `ms` and `s` duration tests since the behavior for each is slightly different. 
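A minimal reproduction of the difference, with the two patterns copied from the diff below and a generic placeholder standing in for the real substitution logic:

```python
import re

# Old pattern: a bare `\d` before the optional decimal part, so a multi-digit
# value like "12s" only matched from its last digit onward.
OLD_DURATION = re.compile(r"\b(\d+ms) | (\d(\.\d+)?s)\b", re.VERBOSE)
# New pattern: `\d+` captures the whole number of seconds.
NEW_DURATION = re.compile(r"\b(\d+ms) | (\d+(\.\d+)?s)\b", re.VERBOSE)

text = "connection failed after 12s"
print(OLD_DURATION.sub("<duration>", text))  # connection failed after 1<duration>
print(NEW_DURATION.sub("<duration>", text))  # connection failed after <duration>
```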
--- src/sentry/grouping/parameterization.py | 2 +- tests/sentry/grouping/test_parameterization.py | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/sentry/grouping/parameterization.py b/src/sentry/grouping/parameterization.py index a91c10200bac05..49309f7d4ca91c 100644 --- a/src/sentry/grouping/parameterization.py +++ b/src/sentry/grouping/parameterization.py @@ -153,7 +153,7 @@ def compiled_pattern(self) -> re.Pattern[str]: (datetime.datetime\(.*?\)) """, ), - ParameterizationRegex(name="duration", raw_pattern=r"""\b(\d+ms) | (\d(\.\d+)?s)\b"""), + ParameterizationRegex(name="duration", raw_pattern=r"""\b(\d+ms) | (\d+(\.\d+)?s)\b"""), ParameterizationRegex(name="hex", raw_pattern=r"""\b0[xX][0-9a-fA-F]+\b"""), ParameterizationRegex(name="float", raw_pattern=r"""-\d+\.\d+\b | \b\d+\.\d+\b"""), ParameterizationRegex(name="int", raw_pattern=r"""-\d+\b | \b\d+\b"""), diff --git a/tests/sentry/grouping/test_parameterization.py b/tests/sentry/grouping/test_parameterization.py index 1164afc50deaef..8c8d094368a8fa 100644 --- a/tests/sentry/grouping/test_parameterization.py +++ b/tests/sentry/grouping/test_parameterization.py @@ -134,8 +134,13 @@ def parameterizer(): ("bool", """blah a=true had a problem""", """blah a= had a problem"""), ( "Duration - ms", - """blah connection failed after 12345ms 1.899s 3s""", - """blah connection failed after """, + """connection failed after 1ms 23ms 4567890ms""", + """connection failed after """, + ), + ( + "Duration - s", + """connection failed after 1.234s 56s 78.90s""", + """connection failed after """, ), ( "Hostname - 2 levels", From 553e532588fe0588d41f00fa9849c9c980922872 Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Wed, 24 Jul 2024 10:11:51 -0700 Subject: [PATCH 080/126] feat(uptime): Add stats to uptime detector code (#74814) This adds more instrumentation around uptime detectors --- src/sentry/uptime/detectors/detector.py | 3 ++- src/sentry/uptime/detectors/ranking.py | 3 ++- src/sentry/uptime/detectors/tasks.py | 10 ++++++++++ 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/sentry/uptime/detectors/detector.py b/src/sentry/uptime/detectors/detector.py index d0d9d999423647..0e87a3591ebc24 100644 --- a/src/sentry/uptime/detectors/detector.py +++ b/src/sentry/uptime/detectors/detector.py @@ -23,6 +23,7 @@ def detect_base_url_for_project(project: Project, url: str) -> None: or not should_detect_for_project(project) or not should_detect_for_organization(project.organization) ): + metrics.incr("uptime.detectors.url_add_skipped_due_to_feature_flag") return base_url = extract_base_url(url) @@ -30,4 +31,4 @@ def detect_base_url_for_project(project: Project, url: str) -> None: return add_base_url_to_rank(project, base_url) - metrics.incr("uptime.url_added_to_rank") + metrics.incr("uptime.detectors.url_added_to_rank") diff --git a/src/sentry/uptime/detectors/ranking.py b/src/sentry/uptime/detectors/ranking.py index eaee986a3228f0..5240fdde4c6d55 100644 --- a/src/sentry/uptime/detectors/ranking.py +++ b/src/sentry/uptime/detectors/ranking.py @@ -9,7 +9,7 @@ from rediscluster import RedisCluster from sentry.constants import UPTIME_AUTODETECTION -from sentry.utils import redis +from sentry.utils import metrics, redis if TYPE_CHECKING: from sentry.models.organization import Organization @@ -63,6 +63,7 @@ def add_base_url_to_rank(project: Project, base_url: str): pipeline.zremrangebyrank(rank_key, 0, -(RANKED_MAX_SIZE + 1)) project_incr_result = pipeline.execute()[0] if project_incr_result == 1: + 
metrics.incr("uptime.detectors.added_project") pipeline = cluster.pipeline() # Avoid adding the org to this set constantly, and instead just do it once per project bucket_key = get_organization_bucket_key(project.organization) diff --git a/src/sentry/uptime/detectors/tasks.py b/src/sentry/uptime/detectors/tasks.py index 4157a6eadf17de..2568bd21439e13 100644 --- a/src/sentry/uptime/detectors/tasks.py +++ b/src/sentry/uptime/detectors/tasks.py @@ -79,6 +79,7 @@ def schedule_detections(): (timezone.now() - last_processed) / timedelta(minutes=1) ) for _ in range(minutes_since_last_processed): + metrics.incr("uptime.detectors.scheduler.scheduled_bucket") last_processed = last_processed + timedelta(minutes=1) process_detection_bucket.delay(last_processed) @@ -98,6 +99,7 @@ def process_detection_bucket(bucket: datetime.datetime): Schedules url detection for all projects in this time bucket that saw promising urls. """ for organization_id in get_organization_bucket(bucket): + metrics.incr("uptime.detectors.scheduler.scheduled_organization") process_organization_url_ranking.delay(organization_id) delete_organization_bucket(bucket) @@ -122,6 +124,7 @@ def process_organization_url_ranking(organization_id: int): delete_candidate_urls_for_project(project) else: if process_project_url_ranking(project, project_count): + metrics.incr("uptime.detectors.scheduler.detected_url_for_organization") should_detect = False delete_candidate_projects_for_org(org) @@ -139,6 +142,7 @@ def process_project_url_ranking(project: Project, project_url_count: int) -> boo }, ) if not should_detect_for_project(project): + metrics.incr("uptime.detectors.project_detection_skipped") return False found_url = False @@ -182,20 +186,24 @@ def process_candidate_url( # The url has to be seen a minimum number of times, and make up at least # a certain percentage of all urls seen in this project if url_count < URL_MIN_TIMES_SEEN or url_count / project_url_count < URL_MIN_PERCENT: + metrics.incr("uptime.detectors.candidate_url.failed", tags={"reason": "below_thresholds"}) return False # See if we're already auto monitoring this url on this project if is_url_auto_monitored_for_project(project, url): # Just mark this successful so `process_project_url_ranking` will choose to not process urls for this project # for a week + metrics.incr("uptime.detectors.candidate_url.failed", tags={"reason": "already_monitored"}) return True # Check whether we've recently attempted to monitor this url recently and failed. 
if is_failed_url(url): + metrics.incr("uptime.detectors.candidate_url.failed", tags={"reason": "previously_failed"}) return False # Check robots.txt to see if it's ok for us to attempt to monitor this url if not check_url_robots_txt(url): + metrics.incr("uptime.detectors.candidate_url.failed", tags={"reason": "robots_txt"}) logger.info( "uptime.url_failed_robots_txt_check", extra={ @@ -219,6 +227,7 @@ def process_candidate_url( # Disable auto-detection on this project now that we've successfully found a hostname project.update_option("sentry:uptime_autodetection", False) + metrics.incr("uptime.detectors.candidate_url.succeeded") return True @@ -242,6 +251,7 @@ def monitor_url_for_project(project: Project, url: str): create_project_uptime_subscription( project, subscription, ProjectUptimeSubscriptionMode.AUTO_DETECTED_ONBOARDING ) + metrics.incr("uptime.detectors.candidate_url.monitor_created") def is_failed_url(url: str) -> bool: From 81633196e63210a5a10d346d8f7b24f47df573ab Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 13:27:27 -0400 Subject: [PATCH 081/126] ref: fix incorrect signature of setUp for SCIMTestCase (#74858) --- src/sentry/testutils/cases.py | 10 +++++++--- tests/sentry/api/endpoints/test_scim_user_details.py | 4 ++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 8eb0a6cba6ef9f..b23a24a478b328 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -2763,11 +2763,13 @@ def setup_before_migration(self, apps): class SCIMTestCase(APITestCase): - def setUp(self, provider="dummy"): + provider = "dummy" + + def setUp(self): super().setUp() with assume_test_silo_mode(SiloMode.CONTROL): self.auth_provider_inst = AuthProviderModel( - organization_id=self.organization.id, provider=provider + organization_id=self.organization.id, provider=self.provider ) self.auth_provider_inst.enable_scim(self.user) self.auth_provider_inst.save() @@ -2778,9 +2780,11 @@ def setUp(self, provider="dummy"): class SCIMAzureTestCase(SCIMTestCase): + provider = ACTIVE_DIRECTORY_PROVIDER_NAME + def setUp(self): auth.register(ACTIVE_DIRECTORY_PROVIDER_NAME, DummyProvider) - super().setUp(provider=ACTIVE_DIRECTORY_PROVIDER_NAME) + super().setUp() self.addCleanup(auth.unregister, ACTIVE_DIRECTORY_PROVIDER_NAME, DummyProvider) diff --git a/tests/sentry/api/endpoints/test_scim_user_details.py b/tests/sentry/api/endpoints/test_scim_user_details.py index c9fe2b15ea5bd6..a189e8082cf5ed 100644 --- a/tests/sentry/api/endpoints/test_scim_user_details.py +++ b/tests/sentry/api/endpoints/test_scim_user_details.py @@ -54,8 +54,8 @@ class SCIMMemberRoleUpdateTests(SCIMTestCase): endpoint = "sentry-api-0-organization-scim-member-details" method = "put" - def setUp(self, provider="dummy"): - super().setUp(provider=provider) + def setUp(self): + super().setUp() self.unrestricted_default_role_member = self.create_member( user=self.create_user(), organization=self.organization ) From f2c0b4cf0727b10a61c5f3f67be74a076731e0d1 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Wed, 24 Jul 2024 10:27:49 -0700 Subject: [PATCH 082/126] chore(similarity): Add metrics for record delete call (#74759) Add metrics for seer record deletion --- src/sentry/seer/similarity/grouping_records.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/sentry/seer/similarity/grouping_records.py 
b/src/sentry/seer/similarity/grouping_records.py index 461b0b1e3c07f3..2a38eb054fae2b 100644 --- a/src/sentry/seer/similarity/grouping_records.py +++ b/src/sentry/seer/similarity/grouping_records.py @@ -12,7 +12,7 @@ from sentry.net.http import connection_from_url from sentry.seer.signed_seer_api import make_signed_seer_api_request from sentry.seer.similarity.types import RawSeerSimilarIssueData -from sentry.utils import json +from sentry.utils import json, metrics logger = logging.getLogger(__name__) @@ -104,11 +104,13 @@ def delete_project_grouping_records( "seer.delete_grouping_records.project.success", extra={"project_id": project_id}, ) + metrics.incr("grouping.similarity.delete_records_by_project", tags={"success": True}) return True else: logger.error( "seer.delete_grouping_records.project.failure", ) + metrics.incr("grouping.similarity.delete_records_by_project", tags={"success": False}) return False @@ -136,7 +138,9 @@ def delete_grouping_records_by_hash(project_id: int, hashes: list[str]) -> bool: "seer.delete_grouping_records.hashes.success", extra=extra, ) + metrics.incr("grouping.similarity.delete_records_by_hash", tags={"success": True}) return True else: logger.error("seer.delete_grouping_records.hashes.failure", extra=extra) + metrics.incr("grouping.similarity.delete_records_by_hash", tags={"success": False}) return False From e0fcb5451cc4c59a25d4207c8543b8faa7e9a126 Mon Sep 17 00:00:00 2001 From: Michelle Fu <83109586+mifu67@users.noreply.github.com> Date: Wed, 24 Jul 2024 10:28:08 -0700 Subject: [PATCH 083/126] feat(anomaly detection): Send subscription update data to Seer (#74522) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update the subscription processor to send the updated data from Snuba to Seer’s `detect_anomalies` endpoint. Seer will respond with data about whether or not there has been an anomaly, and we fire an alert if an anomaly was detected. 
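For reference, the request body sent to the detect endpoint and the response shape the processor expects look roughly like this (literal values are illustrative; the field names mirror `get_anomaly_data_from_seer` in the diff and the new tests below):

```python
request_body = {
    "organization_id": 1,          # subscription.project.organization.id
    "project_id": 2,               # subscription.project_id
    "config": {
        "time_period": 1,          # alert_rule.threshold_period
        "sensitivity": "high",     # alert_rule.sensitivity (illustrative value)
        "seasonality": "auto",     # alert_rule.seasonality (illustrative value)
        "direction": 0,            # alert_rule.threshold_type (above/below)
    },
    "context": {
        "id": 123,                 # alert_rule.id
        "cur_window": {"timestamp": "2024-07-24T12:00:00Z", "value": 10.0},
    },
}

# Expected response: a list of potential anomalies; an alert fires when the
# anomaly type indicates a spike (or, for warning triggers, a dip).
response_body = {
    "anomalies": [
        {
            "anomaly": {"anomaly_score": 0.9, "anomaly_type": "anomaly_high"},
            "timestamp": 1,
            "value": 10,
        }
    ]
}
```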
Closes https://getsentry.atlassian.net/browse/ALRT-141 --- src/sentry/conf/server.py | 5 + .../incidents/subscription_processor.py | 197 ++++++++++++-- .../incidents/test_subscription_processor.py | 247 +++++++++++++++++- 3 files changed, 425 insertions(+), 24 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 539431a1525b57..fe139502dee4af 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3136,9 +3136,14 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: SEER_GROUPING_URL = SEER_DEFAULT_URL # for local development, these share a URL SEER_GROUPING_TIMEOUT = 1 +SEER_ANOMALY_DETECTION_MODEL_VERSION = "v1" SEER_ANOMALY_DETECTION_URL = SEER_DEFAULT_URL # for local development, these share a URL SEER_ANOMALY_DETECTION_TIMEOUT = 5 +SEER_ANOMALY_DETECTION_ENDPOINT_URL = ( + f"/{SEER_ANOMALY_DETECTION_MODEL_VERSION}/anomaly-detection/detect" +) + SEER_AUTOFIX_GITHUB_APP_USER_ID = 157164994 SEER_AUTOFIX_FORCE_USE_REPOS: list[dict] = [] diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py index 72a96f1b58d38f..48a3f167f37f13 100644 --- a/src/sentry/incidents/subscription_processor.py +++ b/src/sentry/incidents/subscription_processor.py @@ -12,8 +12,10 @@ from django.utils import timezone from sentry_redis_tools.retrying_cluster import RetryingRedisCluster from snuba_sdk import Column, Condition, Limit, Op +from urllib3.exceptions import MaxRetryError, TimeoutError from sentry import features +from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, CRASH_RATE_ALERT_SESSION_COUNT_ALIAS from sentry.incidents.logic import ( CRITICAL_TRIGGER_LABEL, @@ -24,6 +26,7 @@ ) from sentry.incidents.models.alert_rule import ( AlertRule, + AlertRuleDetectionType, AlertRuleMonitorTypeInt, AlertRuleThresholdType, AlertRuleTrigger, @@ -43,6 +46,8 @@ from sentry.incidents.tasks import handle_trigger_action from sentry.incidents.utils.types import QuerySubscriptionUpdate from sentry.models.project import Project +from sentry.net.http import connection_from_url +from sentry.seer.signed_seer_api import make_signed_seer_api_request from sentry.snuba.dataset import Dataset from sentry.snuba.entity_subscription import ( ENTITY_TIME_COLUMNS, @@ -51,8 +56,9 @@ get_entity_subscription_from_snuba_query, ) from sentry.snuba.models import QuerySubscription -from sentry.utils import metrics, redis +from sentry.utils import json, metrics, redis from sentry.utils.dates import to_datetime +from sentry.utils.json import JSONDecodeError logger = logging.getLogger(__name__) REDIS_TTL = int(timedelta(days=7).total_seconds()) @@ -89,6 +95,11 @@ class SubscriptionProcessor: AlertRuleThresholdType.BELOW: (operator.lt, operator.gt), } + seer_anomaly_detection_connection_pool = connection_from_url( + settings.SEER_ANOMALY_DETECTION_URL, + timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT, + ) + def __init__(self, subscription: QuerySubscription) -> None: self.subscription = subscription try: @@ -500,6 +511,15 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None: aggregation_value = self.get_aggregation_value(subscription_update) + self.has_anomaly_detection = features.has( + "organizations:anomaly-detection-alerts", self.subscription.project.organization + ) + + if self.has_anomaly_detection: + potential_anomalies = self.get_anomaly_data_from_seer(aggregation_value) + if potential_anomalies is None: + return [] + # 
Trigger callbacks for any AlertRules that may need to know about the subscription update # Current callback will update the activation metric values & delete querysubscription on finish # TODO: register over/under triggers as alert rule callbacks as well @@ -522,31 +542,70 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None: with transaction.atomic(router.db_for_write(AlertRule)): # Triggers is the threshold - NOT an instance of a trigger for trigger in self.triggers: - if alert_operator( - aggregation_value, trigger.alert_threshold - ) and not self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE): - # If the value has breached our threshold (above/below) - # And the trigger is not yet active - metrics.incr("incidents.alert_rules.threshold", tags={"type": "alert"}) - # triggering a threshold will create an incident and set the status to active - incident_trigger = self.trigger_alert_threshold(trigger, aggregation_value) - if incident_trigger is not None: - fired_incident_triggers.append(incident_trigger) - else: - self.trigger_alert_counts[trigger.id] = 0 - if ( - resolve_operator(aggregation_value, self.calculate_resolve_threshold(trigger)) - and self.active_incident - and self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE) + self.has_anomaly_detection + and trigger.alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC ): - metrics.incr("incidents.alert_rules.threshold", tags={"type": "resolve"}) - incident_trigger = self.trigger_resolve_threshold(trigger, aggregation_value) - - if incident_trigger is not None: - fired_incident_triggers.append(incident_trigger) + # NOTE: There should only be one anomaly in the list + for potential_anomaly in potential_anomalies: + if self.has_anomaly( + potential_anomaly, trigger.label + ) and not self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE): + metrics.incr("incidents.alert_rules.threshold", tags={"type": "alert"}) + incident_trigger = self.trigger_alert_threshold( + trigger, aggregation_value + ) + if incident_trigger is not None: + fired_incident_triggers.append(incident_trigger) + else: + self.trigger_alert_counts[trigger.id] = 0 + + if ( + not self.has_anomaly(potential_anomaly, trigger.label) + and self.active_incident + and self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE) + ): + metrics.incr( + "incidents.alert_rules.threshold", tags={"type": "resolve"} + ) + incident_trigger = self.trigger_resolve_threshold( + trigger, aggregation_value + ) + + if incident_trigger is not None: + fired_incident_triggers.append(incident_trigger) + else: + self.trigger_resolve_counts[trigger.id] = 0 else: - self.trigger_resolve_counts[trigger.id] = 0 + if alert_operator( + aggregation_value, trigger.alert_threshold + ) and not self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE): + # If the value has breached our threshold (above/below) + # And the trigger is not yet active + metrics.incr("incidents.alert_rules.threshold", tags={"type": "alert"}) + # triggering a threshold will create an incident and set the status to active + incident_trigger = self.trigger_alert_threshold(trigger, aggregation_value) + if incident_trigger is not None: + fired_incident_triggers.append(incident_trigger) + else: + self.trigger_alert_counts[trigger.id] = 0 + + if ( + resolve_operator( + aggregation_value, self.calculate_resolve_threshold(trigger) + ) + and self.active_incident + and self.check_trigger_matches_status(trigger, TriggerStatus.ACTIVE) + ): + 
metrics.incr("incidents.alert_rules.threshold", tags={"type": "resolve"}) + incident_trigger = self.trigger_resolve_threshold( + trigger, aggregation_value + ) + + if incident_trigger is not None: + fired_incident_triggers.append(incident_trigger) + else: + self.trigger_resolve_counts[trigger.id] = 0 if fired_incident_triggers: # For all the newly created incidents @@ -562,6 +621,98 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None: # before the next one then we might alert twice. self.update_alert_rule_stats() + def has_anomaly(self, anomaly, label: str) -> bool: + """ + Helper function to determine whether we care about an anomaly based on the + anomaly type and trigger type. + TODO: replace the anomaly types with constants (once they're added to Sentry) + """ + anomaly_type = anomaly.get("anomaly", {}).get("anomaly_type") + + if anomaly_type == "anomaly_high" or ( + label == WARNING_TRIGGER_LABEL and anomaly_type == "anomaly_low" + ): + return True + return False + + def get_anomaly_data_from_seer(self, aggregation_value: float | None): + try: + anomaly_detection_config = { + "time_period": self.alert_rule.threshold_period, + "sensitivity": self.alert_rule.sensitivity, + "seasonality": self.alert_rule.seasonality, + "direction": self.alert_rule.threshold_type, + } + + context = { + "id": self.alert_rule.id, + "cur_window": { + "timestamp": self.last_update, + "value": aggregation_value, + }, + } + response = make_signed_seer_api_request( + self.seer_anomaly_detection_connection_pool, + SEER_ANOMALY_DETECTION_ENDPOINT_URL, + json.dumps( + { + "organization_id": self.subscription.project.organization.id, + "project_id": self.subscription.project_id, + "config": anomaly_detection_config, + "context": context, + } + ).encode("utf-8"), + ) + except (TimeoutError, MaxRetryError): + logger.warning( + "Timeout error when hitting anomaly detection endpoint", + extra={ + "subscription_id": self.subscription.id, + "dataset": self.subscription.snuba_query.dataset, + "organization_id": self.subscription.project.organization.id, + "project_id": self.subscription.project_id, + "alert_rule_id": self.alert_rule.id, + }, + ) + return None + + if response.status != 200: + logger.error( + f"Received {response.status} when calling Seer endpoint {SEER_ANOMALY_DETECTION_ENDPOINT_URL}.", # noqa + extra={"response_data": response.data}, + ) + return None + + try: + results = json.loads(response.data.decode("utf-8")).get("anomalies") + if not results: + logger.warning( + "Seer anomaly detection response returned no potential anomalies", + extra={ + "ad_config": anomaly_detection_config, + "context": context, + "response_data": response.data, + "reponse_code": response.status, + }, + ) + return None + return results + except ( + AttributeError, + UnicodeError, + JSONDecodeError, + ): + logger.exception( + "Failed to parse Seer anomaly detection response", + extra={ + "ad_config": anomaly_detection_config, + "context": context, + "response_data": response.data, + "reponse_code": response.status, + }, + ) + return None + def calculate_event_date_from_update_date(self, update_date: datetime) -> datetime: """ Calculates the date that an event actually happened based on the date that we diff --git a/tests/sentry/incidents/test_subscription_processor.py b/tests/sentry/incidents/test_subscription_processor.py index 34c886baaf34d5..d06db81808e5f2 100644 --- a/tests/sentry/incidents/test_subscription_processor.py +++ b/tests/sentry/incidents/test_subscription_processor.py @@ -4,12 +4,15 @@ 
from functools import cached_property from random import randint from unittest import mock -from unittest.mock import Mock, call, patch +from unittest.mock import MagicMock, Mock, call, patch from uuid import uuid4 +import orjson import pytest from django.utils import timezone +from urllib3.response import HTTPResponse +from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL from sentry.incidents.logic import ( CRITICAL_TRIGGER_LABEL, WARNING_TRIGGER_LABEL, @@ -19,7 +22,10 @@ ) from sentry.incidents.models.alert_rule import ( AlertRule, + AlertRuleDetectionType, AlertRuleMonitorTypeInt, + AlertRuleSeasonality, + AlertRuleSensitivity, AlertRuleThresholdType, AlertRuleTrigger, AlertRuleTriggerAction, @@ -52,6 +58,7 @@ from sentry.testutils.cases import BaseMetricsTestCase, SnubaTestCase, TestCase from sentry.testutils.factories import DEFAULT_EVENT_DATA from sentry.testutils.helpers.datetime import freeze_time, iso_format +from sentry.testutils.helpers.features import with_feature from sentry.utils import json EMPTY = object() @@ -285,6 +292,18 @@ def comparison_rule_below(self): self.trigger.update(alert_threshold=50) return rule + @cached_property + def dynamic_rule(self): + rule = self.rule + rule.update( + detection_type=AlertRuleDetectionType.DYNAMIC, + sensitivity=AlertRuleSensitivity.HIGH, + seasonality=AlertRuleSeasonality.AUTO, + ) + # dynamic alert rules have a threshold of 0.0 + self.trigger.update(alert_threshold=0) + return rule + @cached_property def trigger(self): return self.rule.alertruletrigger_set.get() @@ -408,6 +427,232 @@ def test_no_alert(self): self.assert_trigger_does_not_exist(self.trigger) self.assert_action_handler_called_with_actions(None, []) + @mock.patch( + "sentry.incidents.subscription_processor.SubscriptionProcessor.seer_anomaly_detection_connection_pool.urlopen" + ) + @with_feature("organizations:incidents") + @with_feature("organizations:anomaly-detection-alerts") + def test_seer_call(self, mock_seer_request: MagicMock): + # trigger a warning + rule = self.dynamic_rule + trigger = self.trigger + warning_trigger = create_alert_rule_trigger(rule, WARNING_TRIGGER_LABEL, 0) + warning_action = create_alert_rule_trigger_action( + warning_trigger, + AlertRuleTriggerAction.Type.EMAIL, + AlertRuleTriggerAction.TargetType.USER, + str(self.user.id), + ) + seer_return_value_1 = { + "anomalies": [ + { + "anomaly": {"anomaly_score": 0.7, "anomaly_type": "anomaly_low"}, + "timestamp": 1, + "value": 5, + } + ] + } + + mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value_1), status=200) + processor = self.send_update(rule, 5, timedelta(minutes=-3)) + + assert mock_seer_request.call_args.args[0] == "POST" + assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL + deserialized_body = json.loads(mock_seer_request.call_args.kwargs["body"]) + assert deserialized_body["organization_id"] == self.sub.project.organization.id + assert deserialized_body["project_id"] == self.sub.project_id + assert deserialized_body["config"]["time_period"] == rule.threshold_period + assert deserialized_body["config"]["sensitivity"] == rule.sensitivity.value + assert deserialized_body["config"]["seasonality"] == rule.seasonality.value + assert deserialized_body["config"]["direction"] == rule.threshold_type + assert deserialized_body["context"]["id"] == rule.id + assert deserialized_body["context"]["cur_window"]["value"] == 5 + + self.assert_trigger_counts(processor, trigger, 0, 0) + self.assert_trigger_counts(processor, warning_trigger, 
0, 0) + incident = self.assert_active_incident(rule) + self.assert_trigger_exists_with_status(incident, warning_trigger, TriggerStatus.ACTIVE) + self.assert_trigger_does_not_exist(trigger) + self.assert_actions_fired_for_incident( + incident, + [warning_action], + [(5, IncidentStatus.WARNING, mock.ANY)], + ) + + # trigger critical + seer_return_value_2 = { + "anomalies": [ + { + "anomaly": {"anomaly_score": 0.9, "anomaly_type": "anomaly_high"}, + "timestamp": 1, + "value": 10, + } + ] + } + + mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value_2), status=200) + processor = self.send_update(rule, 10, timedelta(minutes=-2)) + + assert mock_seer_request.call_args.args[0] == "POST" + assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL + deserialized_body = json.loads(mock_seer_request.call_args.kwargs["body"]) + assert deserialized_body["organization_id"] == self.sub.project.organization.id + assert deserialized_body["project_id"] == self.sub.project_id + assert deserialized_body["config"]["time_period"] == rule.threshold_period + assert deserialized_body["config"]["sensitivity"] == rule.sensitivity.value + assert deserialized_body["config"]["seasonality"] == rule.seasonality.value + assert deserialized_body["config"]["direction"] == rule.threshold_type + assert deserialized_body["context"]["id"] == rule.id + assert deserialized_body["context"]["cur_window"]["value"] == 10 + + self.assert_trigger_counts(processor, trigger, 0, 0) + self.assert_trigger_counts(processor, warning_trigger, 0, 0) + incident = self.assert_active_incident(rule) + self.assert_trigger_exists_with_status(incident, warning_trigger, TriggerStatus.ACTIVE) + self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE) + self.assert_actions_fired_for_incident( + incident, + [warning_action], + [(10, IncidentStatus.CRITICAL, mock.ANY)], + ) + + # trigger a resolution + seer_return_value_3 = { + "anomalies": [ + { + "anomaly": {"anomaly_score": 0.5, "anomaly_type": "none"}, + "timestamp": 1, + "value": 1, + } + ] + } + + mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value_3), status=200) + processor = self.send_update(rule, 1, timedelta(minutes=-1)) + + assert mock_seer_request.call_args.args[0] == "POST" + assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL + deserialized_body = json.loads(mock_seer_request.call_args.kwargs["body"]) + assert deserialized_body["organization_id"] == self.sub.project.organization.id + assert deserialized_body["project_id"] == self.sub.project_id + assert deserialized_body["config"]["time_period"] == rule.threshold_period + assert deserialized_body["config"]["sensitivity"] == rule.sensitivity.value + assert deserialized_body["config"]["seasonality"] == rule.seasonality.value + assert deserialized_body["config"]["direction"] == rule.threshold_type + assert deserialized_body["context"]["id"] == rule.id + assert deserialized_body["context"]["cur_window"]["value"] == 1 + + self.assert_trigger_counts(processor, self.trigger, 0, 0) + self.assert_trigger_counts(processor, warning_trigger, 0, 0) + self.assert_no_active_incident(rule) + self.assert_trigger_exists_with_status(incident, warning_trigger, TriggerStatus.RESOLVED) + self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.RESOLVED) + self.assert_actions_resolved_for_incident( + incident, [warning_action], [(1, IncidentStatus.CLOSED, mock.ANY)] + ) + + def test_has_anomaly(self): + rule = 
self.dynamic_rule + # test alert ABOVE + anomaly1 = { + "anomaly": {"anomaly_score": 0.9, "anomaly_type": "anomaly_high"}, + "timestamp": 1, + "value": 10, + } + + anomaly2 = { + "anomaly": {"anomaly_score": 0.6, "anomaly_type": "anomaly_low"}, + "timestamp": 1, + "value": 10, + } + + not_anomaly = { + "anomaly": {"anomaly_score": 0.2, "anomaly_type": "none"}, + "timestamp": 1, + "value": 10, + } + + warning_trigger = create_alert_rule_trigger(rule, WARNING_TRIGGER_LABEL, 0) + warning_label = warning_trigger.label + + label = self.trigger.label + + processor = SubscriptionProcessor(self.sub) + assert processor.has_anomaly(anomaly1, label) + assert processor.has_anomaly(anomaly1, warning_label) + assert not processor.has_anomaly(anomaly2, label) + assert processor.has_anomaly(anomaly2, warning_label) + assert not processor.has_anomaly(not_anomaly, label) + assert not processor.has_anomaly(not_anomaly, warning_label) + + @with_feature("organizations:anomaly-detection-alerts") + @mock.patch( + "sentry.incidents.subscription_processor.SubscriptionProcessor.seer_anomaly_detection_connection_pool.urlopen" + ) + @mock.patch("sentry.incidents.subscription_processor.logger") + def test_seer_call_timeout_error(self, mock_logger, mock_seer_request): + rule = self.dynamic_rule + processor = SubscriptionProcessor(self.sub) + from urllib3.exceptions import TimeoutError + + mock_seer_request.side_effect = TimeoutError + result = processor.get_anomaly_data_from_seer(10) + timeout_extra = { + "subscription_id": self.sub.id, + "dataset": self.sub.snuba_query.dataset, + "organization_id": self.sub.project.organization.id, + "project_id": self.sub.project_id, + "alert_rule_id": rule.id, + } + mock_logger.warning.assert_called_with( + "Timeout error when hitting anomaly detection endpoint", + extra=timeout_extra, + ) + + assert result is None + + @with_feature("organizations:anomaly-detection-alerts") + @mock.patch( + "sentry.incidents.subscription_processor.SubscriptionProcessor.seer_anomaly_detection_connection_pool.urlopen" + ) + @mock.patch("sentry.incidents.subscription_processor.logger") + def test_seer_call_empty_list(self, mock_logger, mock_seer_request): + processor = SubscriptionProcessor(self.sub) + seer_return_value: dict[str, list] = {"anomalies": []} + mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200) + result = processor.get_anomaly_data_from_seer(10) + assert mock_logger.warning.call_args[0] == ( + "Seer anomaly detection response returned no potential anomalies", + ) + assert result is None + + @with_feature("organizations:anomaly-detection-alerts") + @mock.patch( + "sentry.incidents.subscription_processor.SubscriptionProcessor.seer_anomaly_detection_connection_pool.urlopen" + ) + @mock.patch("sentry.incidents.subscription_processor.logger") + def test_seer_call_bad_status(self, mock_logger, mock_seer_request): + processor = SubscriptionProcessor(self.sub) + mock_seer_request.return_value = HTTPResponse("You flew too close to the sun", status=403) + result = processor.get_anomaly_data_from_seer(10) + assert mock_logger.error.called_with( + f"Received 403 when calling Seer endpoint {SEER_ANOMALY_DETECTION_ENDPOINT_URL}.", # noqa + extra={"response_data": "You flew too close to the sun"}, + ) + assert result is None + + @with_feature("organizations:anomaly-detection-alerts") + @mock.patch( + "sentry.incidents.subscription_processor.SubscriptionProcessor.seer_anomaly_detection_connection_pool.urlopen" + ) + 
@mock.patch("sentry.incidents.subscription_processor.logger") + def test_seer_call_failed_parse(self, mock_logger, mock_seer_request): + processor = SubscriptionProcessor(self.sub) + mock_seer_request.return_value = HTTPResponse(None, status=200) # type: ignore[arg-type] + result = processor.get_anomaly_data_from_seer(10) + assert mock_logger.exception.called_with("Failed to parse Seer anomaly detection response") + assert result is None + def test_alert(self): # Verify that an alert rule that only expects a single update to be over the # alert threshold triggers correctly From a2994629f530a0b917936a8eda5cd5d7a2b4ab98 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Wed, 24 Jul 2024 10:29:32 -0700 Subject: [PATCH 084/126] feat(issue-priority): Add flag for seer-based-priority (#74820) Adding a new org-level flag to determine if priority should factor in calculations from Seer and the severity microservice. This flag will need to be moved to features/permanent.py and out of flagpole before we release. --- src/sentry/features/temporary.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 66a915d0d50c95..e955f77d1bca67 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -355,6 +355,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:scim-team-roles", OrganizationFeature, FeatureHandlerStrategy.REMOTE, api_expose=False) # Enable detecting SDK crashes during event processing manager.add("organizations:sdk-crash-detection", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) + # Enable priority alerts using the Seer calculations. This flag will move to a permanent flag before we release. 
+ manager.add("organizations:seer-based-priority", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable the Replay Details > Accessibility tab manager.add("organizations:session-replay-a11y-tab", OrganizationFeature, FeatureHandlerStrategy.REMOTE, api_expose=False) # Enable the accessibility issues endpoint From ecc0096954a5799d52f18e9d07327d5c54ca0251 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 13:32:15 -0400 Subject: [PATCH 085/126] ref: fix serialize method in serializers.test_base (#74857) fixes an error in mypy 1.11 --- tests/sentry/api/serializers/test_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/sentry/api/serializers/test_base.py b/tests/sentry/api/serializers/test_base.py index 379c78c8e0a400..f49a87a4eee10f 100644 --- a/tests/sentry/api/serializers/test_base.py +++ b/tests/sentry/api/serializers/test_base.py @@ -13,7 +13,7 @@ def serialize(self, *args, **kwargs): class VariadicSerializer(Serializer): - def serialize(self, obj, attrs, user, kw): + def serialize(self, obj, attrs, user, **kw): return {"kw": kw} @@ -70,7 +70,7 @@ def test_serialize_additional_kwargs(self): foo = Foo() user = self.create_user() result = serialize(foo, user, VariadicSerializer(), kw="keyword") - assert result["kw"] == "keyword" + assert result["kw"] == {"kw": "keyword"} def test_child_serializer_failure(self): foo = Foo() From b4b80b152676a8f5ca9f409acd1828d8e26aa76f Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 24 Jul 2024 10:35:14 -0700 Subject: [PATCH 086/126] ref(seer grouping): Consider half-snipped lines snipped for stacktrace string (#74826) This relaxes our criterion for considering a context line minified for purposes of creating a stacktrace string to send to Seer. Until now, the line had to both start _and_ end with `{snip}`, but we're now allowing it to either start _or_ end with `{snip}`. This may occasionally yield false positives (non-minified lines being considered minified), but since the stacktrace string is only truncated if _all_ frames are minified, it would only make a difference in the highly unlikely scenario where we get a false positive for every frame in the entire stacktrace, a risk with which we're willing to live. --- src/sentry/seer/similarity/utils.py | 2 +- tests/sentry/seer/similarity/test_utils.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py index 6a2e54f593f024..afbef9c81d1cc0 100644 --- a/src/sentry/seer/similarity/utils.py +++ b/src/sentry/seer/similarity/utils.py @@ -183,4 +183,4 @@ def _is_snipped_context_line(context_line: str) -> bool: # This check is implicitly restricted to JS (and friends) events by the fact that the `{snip]` # is only added in the JS processor. See # https://github.com/getsentry/sentry/blob/d077a5bb7e13a5927794b35d9ae667a4f181feb7/src/sentry/lang/javascript/utils.py#L72-L77. 
- return context_line.startswith("{snip}") and context_line.endswith("{snip}") + return context_line.startswith("{snip}") or context_line.endswith("{snip}") diff --git a/tests/sentry/seer/similarity/test_utils.py b/tests/sentry/seer/similarity/test_utils.py index eec7259cc97cae..c206abd4894255 100644 --- a/tests/sentry/seer/similarity/test_utils.py +++ b/tests/sentry/seer/similarity/test_utils.py @@ -6,6 +6,7 @@ from sentry.eventstore.models import Event from sentry.seer.similarity.utils import ( SEER_ELIGIBLE_PLATFORMS, + _is_snipped_context_line, event_content_is_seer_eligible, filter_null_from_event_title, get_stacktrace_string, @@ -712,6 +713,12 @@ def test_no_exception(self): stacktrace_str = get_stacktrace_string(data_no_exception) assert stacktrace_str == "" + def test_recognizes_snip_at_start_or_end(self): + assert _is_snipped_context_line("{snip} dogs are great") is True + assert _is_snipped_context_line("dogs are great {snip}") is True + assert _is_snipped_context_line("{snip} dogs are great {snip}") is True + assert _is_snipped_context_line("dogs are great") is False + class EventContentIsSeerEligibleTest(TestCase): def get_eligible_event_data(self) -> dict[str, Any]: From ffb2e650cc44dc0dca688245bdb684405e885eb6 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 13:35:32 -0400 Subject: [PATCH 087/126] ref: fix some various test mypy errors in mypy 1.11 (#74853) --- .../api/endpoints/test_organization_stats.py | 16 +++++--------- .../test_organization_stats_summary.py | 22 +++++++++---------- 2 files changed, 17 insertions(+), 21 deletions(-) diff --git a/tests/sentry/api/endpoints/test_organization_stats.py b/tests/sentry/api/endpoints/test_organization_stats.py index 10ca77bc8f6047..60292fd37d62ec 100644 --- a/tests/sentry/api/endpoints/test_organization_stats.py +++ b/tests/sentry/api/endpoints/test_organization_stats.py @@ -1,4 +1,3 @@ -import functools import sys from django.urls import reverse @@ -82,16 +81,14 @@ def test_id_filtering(self): teams=[self.create_team(organization=org, members=[self.user])] ) - make_request = functools.partial( - self.client.get, reverse("sentry-api-0-organization-stats", args=[org.slug]) - ) + url = reverse("sentry-api-0-organization-stats", args=[org.slug]) - response = make_request({"id": [project.id], "group": "project"}) + response = self.client.get(url, {"id": str(project.id), "group": "project"}) assert response.status_code == 200, response.content assert project.id in response.data - response = make_request({"id": [sys.maxsize], "group": "project"}) + response = self.client.get(url, {"id": str(sys.maxsize), "group": "project"}) assert project.id not in response.data @@ -106,12 +103,11 @@ def test_project_id_only(self): teams=[self.create_team(organization=org, members=[self.user])] ) - make_request = functools.partial( - self.client.get, reverse("sentry-api-0-organization-stats", args=[org.slug]) + response = self.client.get( + reverse("sentry-api-0-organization-stats", args=[org.slug]), + {"projectID": str(project.id), "group": "project"}, ) - response = make_request({"projectID": [project.id], "group": "project"}) - assert response.status_code == 200, response.content assert project.id in response.data assert project2.id not in response.data diff --git a/tests/snuba/api/endpoints/test_organization_stats_summary.py b/tests/snuba/api/endpoints/test_organization_stats_summary.py index 8b1fe4bfdf0665..e703c5c7edabec 100644 --- 
a/tests/snuba/api/endpoints/test_organization_stats_summary.py +++ b/tests/snuba/api/endpoints/test_organization_stats_summary.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import functools from datetime import datetime, timedelta, timezone +from typing import Any from django.urls import reverse @@ -736,17 +739,14 @@ def test_category_filter(self): } def test_download(self): - make_request = functools.partial( - self.client.get, - reverse("sentry-api-0-organization-stats-summary", args=[self.org.slug]), - ) - response = make_request( - { - "statsPeriod": "2d", - "interval": "1d", - "field": ["sum(quantity)", "sum(times_seen)"], - "download": True, - } + req: dict[str, Any] = { + "statsPeriod": "2d", + "interval": "1d", + "field": ["sum(quantity)", "sum(times_seen)"], + "download": True, + } + response = self.client.get( + reverse("sentry-api-0-organization-stats-summary", args=[self.org.slug]), req ) assert response.headers["Content-Type"] == "text/csv" From bbc2911a650495278f6e2683ce62fe3c82678bcb Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Wed, 24 Jul 2024 10:38:25 -0700 Subject: [PATCH 088/126] feat(query-builder): Add more information to the storybook page (#74866) Updated the storybook with all the information someone would need to use this component for themselves. --- .../searchQueryBuilder/index.stories.tsx | 354 ++++++++++++++++-- 1 file changed, 325 insertions(+), 29 deletions(-) diff --git a/static/app/components/searchQueryBuilder/index.stories.tsx b/static/app/components/searchQueryBuilder/index.stories.tsx index d6875b1b78e788..74982abf327594 100644 --- a/static/app/components/searchQueryBuilder/index.stories.tsx +++ b/static/app/components/searchQueryBuilder/index.stories.tsx @@ -1,19 +1,23 @@ import {Fragment, useState} from 'react'; -import styled from '@emotion/styled'; -import Alert from 'sentry/components/alert'; import MultipleCheckbox from 'sentry/components/forms/controls/multipleCheckbox'; import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder'; import type {FilterKeySection} from 'sentry/components/searchQueryBuilder/types'; import {InvalidReason} from 'sentry/components/searchSyntax/parser'; +import {ItemType} from 'sentry/components/smartSearchBar/types'; +import JSXNode from 'sentry/components/stories/jsxNode'; import JSXProperty from 'sentry/components/stories/jsxProperty'; -import SizingWindow from 'sentry/components/stories/sizingWindow'; import storyBook from 'sentry/stories/storyBook'; import type {TagCollection} from 'sentry/types/group'; -import {FieldKey, FieldKind, MobileVital, WebVital} from 'sentry/utils/fields'; +import { + FieldKey, + FieldKind, + FieldValueType, + MobileVital, + WebVital, +} from 'sentry/utils/fields'; const FILTER_KEYS: TagCollection = { - [FieldKey.AGE]: {key: FieldKey.AGE, name: 'Age', kind: FieldKind.FIELD}, [FieldKey.ASSIGNED]: { key: FieldKey.ASSIGNED, name: 'Assigned To', @@ -47,6 +51,11 @@ const FILTER_KEYS: TagCollection = { predefined: true, values: ['resolved', 'unresolved', 'ignored'], }, + [FieldKey.LAST_SEEN]: { + key: FieldKey.LAST_SEEN, + name: 'lastSeen', + kind: FieldKind.FIELD, + }, [FieldKey.TIMES_SEEN]: { key: FieldKey.TIMES_SEEN, name: 'timesSeen', @@ -70,21 +79,24 @@ const FILTER_KEYS: TagCollection = { const FITLER_KEY_SECTIONS: FilterKeySection[] = [ { - value: FieldKind.FIELD, + value: 'cat_1', label: 'Category 1', children: [ - FieldKey.AGE, FieldKey.ASSIGNED, FieldKey.BROWSER_NAME, FieldKey.IS, + FieldKey.LAST_SEEN, FieldKey.TIMES_SEEN, - WebVital.LCP, - 
MobileVital.FRAMES_SLOW_RATE, ], }, { - value: FieldKind.TAG, + value: 'cat_2', label: 'Category 2', + children: [WebVital.LCP, MobileVital.FRAMES_SLOW_RATE], + }, + { + value: 'cat_3', + label: 'Category 3', children: ['custom_tag_name'], }, ]; @@ -92,30 +104,274 @@ const FITLER_KEY_SECTIONS: FilterKeySection[] = [ const getTagValues = (): Promise => { return new Promise(resolve => { setTimeout(() => { - resolve(['tag value one', 'tag value two', 'tag value three']); + resolve(['foo', 'bar', 'baz']); }, 500); }); }; export default storyBook(SearchQueryBuilder, story => { - story('Default', () => { + story('Getting started', () => { return ( - This component and story is a WIP. - - - +

+ is a component which allows you to build a + search query using a set of predefined filter keys and values. +

+

+ The search query, unless configured otherwise, may contain filters, logical + operators, and free text. These filters can have defined data types, but default + to a multi-selectable string filter. +

+

+ Required props: +

    +
  • + + initialQuery + + : The initial query to display in the search input. +
  • +
  • + + filterKeys + + : A collection of filter keys which are used to populate the dropdowns. All + valid filter keys should be defined here. +
  • +
  • + + getTagValues + + : A function which returns an array of filter value suggestions. Any filter + key which does not have predefined: true will use this function + to get value suggestions. +
  • +
  • + + searchSource + + : Used to differentiate between different search bars for analytics. + Typically snake_case (e.g. issue_details,{' '} + performance_landing). +
  • +
+

+
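A minimal usage sketch, separate from the diff above, assuming only the required props this story describes; the import paths and entry shape mirror ones used elsewhere in this patch, while the component name, filter key, and example values are illustrative:

import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
import type {TagCollection} from 'sentry/types/group';
import {FieldKind} from 'sentry/utils/fields';

// One plain string filter key; its value suggestions come from getTagValues below.
const filterKeys: TagCollection = {
  browser_name: {key: 'browser_name', name: 'browser_name', kind: FieldKind.FIELD},
};

function MinimalSearch() {
  return (
    <SearchQueryBuilder
      initialQuery="browser_name:Chrome"
      filterKeys={filterKeys}
      // Async value suggestions for any key without `predefined: true`.
      getTagValues={() => Promise.resolve(['Chrome', 'Firefox', 'Safari'])}
      searchSource="storybook_example"
    />
  );
}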
); }); - story('Config Options', () => { + story('Defining filter value suggestions', () => { + const filterValueSuggestionKeys: TagCollection = { + predefined_values: { + key: 'predefined_values', + name: 'predefined_values', + kind: FieldKind.FIELD, + predefined: true, + values: ['value1', 'value2', 'value3'], + }, + predefined_categorized_values: { + key: 'predefined_categorized_values', + name: 'predefined_categorized_values', + kind: FieldKind.FIELD, + predefined: true, + values: [ + { + title: 'Category 1', + type: 'header', + icon: null, + children: [{value: 'special value 1'}], + }, + { + title: 'Category 2', + type: 'header', + icon: null, + children: [{value: 'special value 2'}, {value: 'special value 3'}], + }, + ], + }, + predefined_described_values: { + key: 'predefined_described_values', + name: 'predefined_described_values', + kind: FieldKind.FIELD, + predefined: true, + values: [ + { + title: '', + type: ItemType.TAG_VALUE, + value: 'special value 1', + icon: null, + documentation: 'Description for value 1', + children: [], + }, + { + title: '', + type: ItemType.TAG_VALUE, + value: 'special value 2', + icon: null, + documentation: 'Description for value 2', + children: [], + }, + ], + }, + async_values: { + key: 'async_values', + name: 'async_values', + kind: FieldKind.FIELD, + predefined: false, + }, + }; + + return ( + +

+ To guide the user in building a search query, filter value suggestions can be + provided in a few different ways: +

+

+

    +
  • + Predefined: If the full set of filter values is already + known, they can be provided directly in filterKeys. These + suggestions can also be formatted: +
      +
    • + Simple: For most cases, an array of strings can be + provided in values. +
    • +
    • + Categorized: If the values should be grouped, an array + of objects can be provided in values. Each object should + have a title and children array. +
    • +
    • + Described: If descriptions are necessary, provide an + array of objects of type ItemType.TAG_VALUE with a{' '} + documentation property. +
    • +
    +
  • +
  • + Async: If the filter key does not have{' '} + predefined: true, it will use the getTagValues{' '} + function to fetch suggestions. The filter key and query are provided, and it + is up to the consumer to return the suggestions. +
  • +
+

+ +
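A sketch of the async path described above, separate from the diff: for keys without predefined: true, the component asks getTagValues for suggestions given the filter key and the typed query. The static value map stands in for a real lookup, and the exact parameter type is an assumption here:

// Values a real implementation might fetch from an API, keyed by filter key name.
const KNOWN_VALUES: Record<string, string[]> = {
  async_values: ['foo', 'bar', 'baz'],
};

// Return only the suggestions that match what the user has typed so far.
async function getTagValues(key: {key: string}, query: string): Promise<string[]> {
  const values = KNOWN_VALUES[key.key] ?? [];
  return values.filter(value => value.toLowerCase().includes(query.toLowerCase()));
}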
+ ); + }); + + story('Customizing the filter key menu', () => { + return ( + +

+ A special menu can be displayed when no text is entered in the search input, + allowing for better organization and discovery of filter keys. +

+

+ This menu is defined by filterKeySections, which accepts a list of + sections. Each section contains a name and a list of filter keys. Note that the + order of both the sections and the items within each section is respected. +

+ +
+ ); + }); + + story('Field definitions', () => { + return ( + +

+ Field definitions are very important for the search query builder to work correctly. + They provide information such as what data types are allowed for a given filter, + as well as the description and keywords. +

+

+ By default, field definitions are sourced from{' '} + EVENT_FIELD_DEFINITIONS in sentry/utils/fields.ts. If + these definitions are not correct for the use case, they can be overridden by + passing fieldDefinitionGetter. +

+ { + return { + desc: 'Customized field definition', + kind: FieldKind.FIELD, + valueType: FieldValueType.BOOLEAN, + }; + }} + searchSource="storybook" + /> +
+ ); + }); + + story('Callbacks', () => { + const [onChangeValue, setOnChangeValue] = useState(''); + const [onSearchValue, setOnSearchValue] = useState(''); + + return ( + +

+ onChange is called whenever the search query changes. This can be + used to update the UI as the user updates the query. +

+

+ onSearch is called when the user presses enter. This can be used to + submit the search query. +

+

+

    +
  • + + Last onChange value + + : {onChangeValue} +
  • +
  • + + Last onSearch value + + : {onSearchValue} +
  • +
+

+ +
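A sketch, separate from the diff, of wiring the two callbacks described in this story into local state; the wrapper component, prop types, and state names are illustrative:

import {Fragment, useState} from 'react';

import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
import type {TagCollection} from 'sentry/types/group';

type Props = {
  filterKeys: TagCollection;
  getTagValues: (key: {key: string}, query: string) => Promise<string[]>;
};

function SearchWithCallbacks({filterKeys, getTagValues}: Props) {
  // onChange fires on every edit of the query; onSearch fires when the user presses enter.
  const [draftQuery, setDraftQuery] = useState('');
  const [submittedQuery, setSubmittedQuery] = useState('');

  return (
    <Fragment>
      <SearchQueryBuilder
        initialQuery=""
        filterKeys={filterKeys}
        getTagValues={getTagValues}
        searchSource="storybook_example"
        onChange={newQuery => setDraftQuery(newQuery)}
        onSearch={finalQuery => setSubmittedQuery(finalQuery)}
      />
      <div>Draft: {draftQuery}</div>
      <div>Submitted: {submittedQuery}</div>
    </Fragment>
  );
}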
+ ); + }); + + story('Configuring valid syntax', () => { const configs = [ 'disallowFreeText', 'disallowLogicalOperators', @@ -144,7 +400,7 @@ export default storyBook(SearchQueryBuilder, story => { > {configs.map(config => ( - {config} + {config} ))}
@@ -179,9 +435,49 @@ export default storyBook(SearchQueryBuilder, story => { ); }); -}); -const MinHeightSizingWindow = styled(SizingWindow)` - min-height: 500px; - align-items: flex-start; -`; + story('Migrating from SmartSearchBar', () => { + return ( + +

+ is a replacement for{' '} + . It provides a more flexible and powerful + search query builder. +

+

+ Some props have been renamed: +

    +
  • + supportedTags {'->'} filterKeys +
  • +
  • + onGetTagValues {'->'} getTagValues +
  • +
  • + highlightUnsupportedTags {'->'}{' '} + disallowUnsupportedFilters +
  • +
+

+

+ Some props have been removed: +

    +
  • + excludedTags is no longer supported. If a filter key should not + be shown, do not include it in filterKeys. +
  • +
  • + (boolean|date|duration)Keys no longer need to be specified. The + filter value types are inferred from the field definitions. +
  • +
  • + projectIds was used to add is_multi_project to + some of the analytics events. If your use case requires this, you can record + these events manually with the onSearch callback. +
  • +
+

+
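A sketch, separate from the diff, of applying the prop renames listed above when migrating an existing search; the old SmartSearchBar usage is shown only as a comment, and the wrapper component and prop types are illustrative:

import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
import type {TagCollection} from 'sentry/types/group';

type Props = {
  filterKeys: TagCollection;
  getTagValues: (key: {key: string}, query: string) => Promise<string[]>;
};

// Previously:
//   <SmartSearchBar
//     supportedTags={filterKeys}
//     onGetTagValues={getTagValues}
//     highlightUnsupportedTags
//   />
function MigratedSearch({filterKeys, getTagValues}: Props) {
  return (
    <SearchQueryBuilder
      initialQuery=""
      searchSource="migrated_search"
      filterKeys={filterKeys}
      getTagValues={getTagValues}
      disallowUnsupportedFilters
    />
  );
}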
+ ); + }); +}); From e3dcaddda121f73c847873960cb4c8e034e92dad Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 13:41:26 -0400 Subject: [PATCH 089/126] ref: match signature of feature handler has (#74856) --- tests/sentry/api/serializers/test_project.py | 2 +- tests/sentry/features/test_manager.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/sentry/api/serializers/test_project.py b/tests/sentry/api/serializers/test_project.py index 82d788e50140f7..63a59ad448e549 100644 --- a/tests/sentry/api/serializers/test_project.py +++ b/tests/sentry/api/serializers/test_project.py @@ -231,7 +231,7 @@ def create_color_handler(color_flag, included_projects): class ProjectColorFeatureHandler(features.FeatureHandler): features = {color_flag} - def has(self, feature, actor): + def has(self, feature, actor, skip_entity: bool | None = False): return feature.project in included_projects def batch_has(self, *a, **k): diff --git a/tests/sentry/features/test_manager.py b/tests/sentry/features/test_manager.py index 4948f23ce8fdf4..b29ae367443acf 100644 --- a/tests/sentry/features/test_manager.py +++ b/tests/sentry/features/test_manager.py @@ -122,7 +122,7 @@ def __init__(self, true_set, false_set): self.true_set = frozenset(true_set) self.false_set = frozenset(false_set) - def has(self, feature, actor): + def has(self, feature, actor, skip_entity: bool | None = False): assert actor == test_user if feature.project in self.true_set: From 423b8063af496c2684c2e9040fabb752cb5ec438 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 13:41:43 -0400 Subject: [PATCH 090/126] ref: match signature of run_test in test_slack (#74855) --- tests/sentry/incidents/action_handlers/test_slack.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/sentry/incidents/action_handlers/test_slack.py b/tests/sentry/incidents/action_handlers/test_slack.py index 3fd8a830757611..8499098972de9a 100644 --- a/tests/sentry/incidents/action_handlers/test_slack.py +++ b/tests/sentry/incidents/action_handlers/test_slack.py @@ -75,7 +75,8 @@ def setUp(self): ) self.alert_rule = self.create_alert_rule() - def run_test(self, incident, method, chart_url=None): + def run_test(self, incident, method, **kwargs): + chart_url = kwargs.get("chart_url") handler = SlackActionHandler(self.action, incident, self.project) metric_value = 1000 status = IncidentStatus(incident.status) From a3bef0ce608df83acfaf0699b405380e3adb9915 Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 24 Jul 2024 14:09:35 -0400 Subject: [PATCH 091/126] feat(discover): Add top-n support for errors dataset (#74761) Adds top-n support for errors dataset Allows errors dataset to pass aggregates as filter conditions - this is needed to maintain backwards compatibility with existing `discover` dataset queries that will query the `errors` dataset going forward Also adds missing test coverage for errors dataset --- .../endpoints/organization_events_stats.py | 2 + src/sentry/api/issue_search.py | 3 +- src/sentry/search/events/builder/errors.py | 33 +- src/sentry/snuba/errors.py | 188 +- tests/sentry/snuba/test_errors.py | 1782 +++++++++++++++++ .../test_organization_events_stats.py | 363 ++++ 6 files changed, 2364 insertions(+), 7 deletions(-) create mode 100644 tests/sentry/snuba/test_errors.py diff --git a/src/sentry/api/endpoints/organization_events_stats.py 
b/src/sentry/api/endpoints/organization_events_stats.py index a9af5af442fb0f..cb38407cce3ad6 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -16,6 +16,7 @@ from sentry.models.organization import Organization from sentry.snuba import ( discover, + errors, functions, metrics_enhanced_performance, metrics_performance, @@ -233,6 +234,7 @@ def get(self, request: Request, organization: Organization) -> Response: metrics_enhanced_performance, spans_indexed, spans_metrics, + errors, ] else discover ) diff --git a/src/sentry/api/issue_search.py b/src/sentry/api/issue_search.py index 0a46b1bc944599..5b714b5760fff8 100644 --- a/src/sentry/api/issue_search.py +++ b/src/sentry/api/issue_search.py @@ -263,6 +263,7 @@ def convert_query_values( user: User | RpcUser | None, environments: Sequence[Environment] | None, value_converters=value_converters, + allow_aggregate_filters=False, ) -> list[SearchFilter]: """ Accepts a collection of SearchFilter objects and converts their values into @@ -302,7 +303,7 @@ def convert_search_filter( value=SearchValue(new_value), operator=operator, ) - elif isinstance(search_filter, AggregateFilter): + elif isinstance(search_filter, AggregateFilter) and not allow_aggregate_filters: raise InvalidSearchQuery( f"Aggregate filters ({search_filter.key.name}) are not supported in issue searches." ) diff --git a/src/sentry/search/events/builder/errors.py b/src/sentry/search/events/builder/errors.py index c89f0148dbccb2..4869fe65ac191f 100644 --- a/src/sentry/search/events/builder/errors.py +++ b/src/sentry/search/events/builder/errors.py @@ -18,7 +18,11 @@ ) from sentry.api.issue_search import convert_query_values, convert_status_value -from sentry.search.events.builder.discover import DiscoverQueryBuilder, TimeseriesQueryBuilder +from sentry.search.events.builder.discover import ( + DiscoverQueryBuilder, + TimeseriesQueryBuilder, + TopEventsQueryBuilder, +) from sentry.search.events.filter import ParsedTerms from sentry.search.events.types import SelectType from sentry.snuba.entity_subscription import ENTITY_TIME_COLUMNS, get_entity_key_from_query_builder @@ -40,6 +44,7 @@ def parse_query(self, query: str | None) -> ParsedTerms: self.params.user, list(filter(None, self.params.environments)), value_converters=value_converters, + allow_aggregate_filters=True, ) return parsed_terms @@ -165,3 +170,29 @@ def get_snql_query(self) -> Request: ), tenant_ids=self.tenant_ids, ) + + +class ErrorsTopEventsQueryBuilder(ErrorsQueryBuilderMixin, TopEventsQueryBuilder): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + @property + def time_column(self) -> SelectType: + return Column("time", entity=Entity(self.dataset.value, alias=self.dataset.value)) + + def get_snql_query(self) -> Request: + return Request( + dataset=self.dataset.value, + app_id="errors", + query=Query( + match=self.match, + select=self.select, + where=self.where, + having=self.having, + groupby=self.groupby, + orderby=[OrderBy(self.time_column, Direction.ASC)], + granularity=self.granularity, + limit=self.limit, + ), + tenant_ids=self.tenant_ids, + ) diff --git a/src/sentry/snuba/errors.py b/src/sentry/snuba/errors.py index 801b3168b00b79..c0421762868453 100644 --- a/src/sentry/snuba/errors.py +++ b/src/sentry/snuba/errors.py @@ -1,17 +1,24 @@ -from collections.abc import Sequence +import logging +from collections.abc import Mapping, Sequence from copy import deepcopy from datetime import timedelta +from typing import 
cast import sentry_sdk from sentry.discover.arithmetic import categorize_columns from sentry.exceptions import InvalidSearchQuery -from sentry.models.group import STATUS_QUERY_CHOICES -from sentry.search.events.builder.errors import ErrorsQueryBuilder, ErrorsTimeseriesQueryBuilder +from sentry.models.group import STATUS_QUERY_CHOICES, Group +from sentry.models.organization import Organization +from sentry.search.events.builder.errors import ( + ErrorsQueryBuilder, + ErrorsTimeseriesQueryBuilder, + ErrorsTopEventsQueryBuilder, +) from sentry.search.events.fields import get_json_meta_type -from sentry.search.events.types import EventsResponse, QueryBuilderConfig +from sentry.search.events.types import EventsResponse, ParamsType, QueryBuilderConfig, SnubaParams from sentry.snuba.dataset import Dataset -from sentry.snuba.discover import transform_tips, zerofill +from sentry.snuba.discover import OTHER_KEY, create_result_key, transform_tips, zerofill from sentry.snuba.metrics.extraction import MetricSpecType from sentry.utils.snuba import SnubaTSResult, bulk_snuba_queries @@ -20,6 +27,8 @@ is_filter_translation[status_key] = ("status", status_value) PARSER_CONFIG_OVERRIDES = {"is_filter_translation": is_filter_translation} +logger = logging.getLogger(__name__) + def query( selected_columns, @@ -179,3 +188,172 @@ def timeseries_query( params["end"], rollup, ) + + +def top_events_timeseries( + timeseries_columns: list[str], + selected_columns: list[str], + user_query: str, + params: ParamsType, + orderby: list[str], + rollup: int, + limit: int, + organization: Organization, + snuba_params: SnubaParams | None = None, + equations: list[str] | None = None, + referrer: str | None = None, + top_events: EventsResponse | None = None, + allow_empty: bool = True, + zerofill_results: bool = True, + include_other: bool = False, + functions_acl: list[str] | None = None, + on_demand_metrics_enabled: bool = False, + on_demand_metrics_type: MetricSpecType | None = None, + dataset: Dataset = Dataset.Discover, +) -> dict[str, SnubaTSResult] | SnubaTSResult: + """ + High-level API for doing arbitrary user timeseries queries for a limited number of top events + + Returns a dictionary of SnubaTSResult objects that have been zerofilled in + case of gaps. Each value of the dictionary should match the result of a timeseries query + + timeseries_columns - List of public aliases to fetch for the timeseries query, + usually matches the y-axis of the graph + selected_columns - List of public aliases to fetch for the events query, + this is to determine what the top events are + user_query - Filter query string to create conditions from. needs to be user_query + to not conflict with the function query + params - Filtering parameters with start, end, project_id, environment, + orderby - The fields to order results by. + rollup - The bucket width in seconds + limit - The number of events to get timeseries for + organization - Used to map group ids to short ids + referrer - A referrer string to help locate the origin of this query. + top_events - A dictionary with a 'data' key containing a list of dictionaries that + represent the top events matching the query. Useful when you have found + the top events earlier and want to save a query. 
+ """ + if len(params) == 0 and snuba_params is not None: + params = snuba_params.filter_params + + if top_events is None: + with sentry_sdk.start_span(op="discover.errors", description="top_events.fetch_events"): + top_events = query( + selected_columns, + query=user_query, + params=params, + equations=equations, + orderby=orderby, + limit=limit, + referrer=referrer, + snuba_params=snuba_params, + auto_aggregations=True, + use_aggregate_conditions=True, + include_equation_fields=True, + skip_tag_resolution=True, + ) + + top_events_builder = ErrorsTopEventsQueryBuilder( + Dataset.Events, + params, + rollup, + top_events["data"], + other=False, + query=user_query, + selected_columns=selected_columns, + timeseries_columns=timeseries_columns, + equations=equations, + snuba_params=snuba_params, + config=QueryBuilderConfig( + functions_acl=functions_acl, + skip_tag_resolution=True, + ), + ) + if len(top_events["data"]) == limit and include_other: + other_events_builder = ErrorsTopEventsQueryBuilder( + Dataset.Events, + params, + rollup, + top_events["data"], + other=True, + query=user_query, + selected_columns=selected_columns, + timeseries_columns=timeseries_columns, + equations=equations, + snuba_params=snuba_params, + ) + result, other_result = bulk_snuba_queries( + [top_events_builder.get_snql_query(), other_events_builder.get_snql_query()], + referrer=referrer, + ) + else: + result = top_events_builder.run_query(referrer) + other_result = {"data": []} + if ( + not allow_empty + and not len(result.get("data", [])) + and not len(other_result.get("data", [])) + ): + return SnubaTSResult( + { + "data": ( + zerofill([], params["start"], params["end"], rollup, ["time"]) + if zerofill_results + else [] + ), + }, + params["start"], + params["end"], + rollup, + ) + with sentry_sdk.start_span( + op="discover.errors", description="top_events.transform_results" + ) as span: + span.set_data("result_count", len(result.get("data", []))) + result = top_events_builder.process_results(result) + + issues: Mapping[int, str | None] = {} + if "issue" in selected_columns: + issues = Group.objects.get_issues_mapping( + {cast(int, event["issue.id"]) for event in top_events["data"]}, + params["project_id"], + organization, + ) + translated_groupby = top_events_builder.translated_groupby + + results = ( + {OTHER_KEY: {"order": limit, "data": other_result["data"]}} + if len(other_result.get("data", [])) + else {} + ) + # Using the top events add the order to the results + for index, item in enumerate(top_events["data"]): + result_key = create_result_key(item, translated_groupby, issues) + results[result_key] = {"order": index, "data": []} + for row in result["data"]: + result_key = create_result_key(row, translated_groupby, issues) + if result_key in results: + results[result_key]["data"].append(row) + else: + logger.warning( + "discover.top-events.timeseries.key-mismatch", + extra={"result_key": result_key, "top_event_keys": list(results.keys())}, + ) + + top_events_results: dict[str, SnubaTSResult] = {} + for key, item in results.items(): + top_events_results[key] = SnubaTSResult( + { + "data": ( + zerofill(item["data"], params["start"], params["end"], rollup, ["time"]) + if zerofill_results + else item["data"] + ), + "order": item["order"], + }, + params["start"], + params["end"], + rollup, + ) + + return top_events_results diff --git a/tests/sentry/snuba/test_errors.py b/tests/sentry/snuba/test_errors.py new file mode 100644 index 00000000000000..3fa9945c8794e9 --- /dev/null +++ 
b/tests/sentry/snuba/test_errors.py @@ -0,0 +1,1782 @@ +from __future__ import annotations + +from datetime import timedelta + +import pytest +from django.utils import timezone + +from sentry.exceptions import InvalidSearchQuery +from sentry.models.releaseprojectenvironment import ReleaseStages +from sentry.search.events.constants import ( + RELEASE_STAGE_ALIAS, + SEMVER_ALIAS, + SEMVER_BUILD_ALIAS, + SEMVER_PACKAGE_ALIAS, +) +from sentry.search.events.types import ParamsType +from sentry.snuba import errors +from sentry.testutils.cases import SnubaTestCase, TestCase +from sentry.testutils.helpers.datetime import before_now, iso_format +from sentry.utils.samples import load_data + +ARRAY_COLUMNS = ["measurements", "span_op_breakdowns"] + + +class ErrorsQueryIntegrationTest(SnubaTestCase, TestCase): + def setUp(self): + super().setUp() + self.environment = self.create_environment(self.project, name="prod") + self.release = self.create_release(self.project, version="first-release") + self.now = before_now() + self.one_min_ago = before_now(minutes=1) + self.two_min_ago = before_now(minutes=2) + + self.event_time = self.one_min_ago + # error event + self.event = self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(self.event_time), + "tags": [["key1", "value1"]], + }, + project_id=self.project.id, + ) + + # transaction event + data = load_data("transaction", timestamp=self.event_time) + data["transaction"] = "a" * 32 + data["user"] = {"id": "99", "email": "bruce@example.com", "username": "brucew"} + data["release"] = "first-release" + data["environment"] = self.environment.name + data["tags"] = [["key1", "value1"]] + self.store_event(data=data, project_id=self.project.id) + + self.params = { + "organization_id": self.organization.id, + "project_id": [self.project.id], + "start": before_now(days=1), + "end": self.now, + } + + def test_errors_query(self): + result = errors.query( + selected_columns=["transaction"], + query="", + params=self.params, + referrer="test_errors_query", + ) + data = result["data"] + assert len(data) == 1 + assert data[0] == {"transaction": ""} + + def test_project_mapping(self): + other_project = self.create_project(organization=self.organization) + self.params["project_id"] = [other_project.id] + self.store_event( + data={"message": "hello", "timestamp": iso_format(self.one_min_ago)}, + project_id=other_project.id, + ) + + result = errors.query( + selected_columns=["project", "message"], + query="", + params=self.params, + orderby=["project"], + referrer="errors", + ) + + data = result["data"] + assert len(data) == 1 + assert data[0]["project"] == other_project.slug + + def test_issue_short_id_mapping(self): + tests = [ + ("issue", f"issue:{self.event.group.qualified_short_id}"), + ("issue", f"issue.id:{self.event.group_id}"), + ("issue.id", f"issue:{self.event.group.qualified_short_id}"), + ("issue.id", f"issue.id:{self.event.group_id}"), + ] + + for column, query in tests: + result = errors.query( + selected_columns=[column], + query=query, + referrer="errors", + params=self.params, + ) + data = result["data"] + assert len(data) == 1 + # The query will translate `issue` into `issue.id`. Additional post processing + # is required to insert the `issue` column. 
+ assert [item["issue.id"] for item in data] == [self.event.group_id] + + def test_issue_filters(self): + tests = [ + "has:issue", + "has:issue.id", + f"issue:[{self.event.group.qualified_short_id}]", + f"issue.id:[{self.event.group_id}]", + ] + + for query in tests: + result = errors.query( + selected_columns=["issue", "issue.id"], + query=query, + params=self.params, + referrer="errors", + ) + data = result["data"] + assert len(data) == 1 + # The query will translate `issue` into `issue.id`. Additional post processing + # is required to insert the `issue` column. + assert [item["issue.id"] for item in data] == [self.event.group_id] + + def test_tags_orderby(self): + self.event = self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(self.event_time), + "tags": [["key1", "value2"]], + }, + project_id=self.project.id, + ) + + tests = [ + ("key1", "key1", ["value1", "value2"]), + ("key1", "-key1", ["value2", "value1"]), + ("tags[key1]", "tags[key1]", ["value1", "value2"]), + ("tags[key1]", "-tags[key1]", ["value2", "value1"]), + ] + + for column, orderby, expected in tests: + result = errors.query( + selected_columns=[column], + query="", + params=self.params, + orderby=[orderby], + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == len(expected) + assert [item[column] for item in data] == expected + + def test_tags_filter(self): + self.event = self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(self.event_time), + "tags": [["key1", "value2"]], + }, + project_id=self.project.id, + ) + + tests: list[tuple[str, str, list[str]]] = [ + ("key1", "", ["value1", "value2"]), + ("key1", "has:key1", ["value1", "value2"]), + ("key1", "!has:key1", []), + ("key1", "key1:value1", ["value1"]), + ("key1", "key1:value2", ["value2"]), + ("key1", 'key1:""', []), + ("key1", "key1:value*", ["value1", "value2"]), + ("key1", 'key1:["value1"]', ["value1"]), + ("key1", 'key1:["value1", "value2"]', ["value1", "value2"]), + ("tags[key1]", "", ["value1", "value2"]), + # has does not work with tags[...] 
syntax + # ("tags[key1]", 'has:"tags[key1]"', ["value1", "value2"]), + # ("tags[key1]", '!has:"tags[key1]"', []), + ("tags[key1]", "tags[key1]:value1", ["value1"]), + ("tags[key1]", "tags[key1]:value2", ["value2"]), + ("tags[key1]", 'tags[key1]:""', []), + ("tags[key1]", "tags[key1]:value*", ["value1", "value2"]), + ("tags[key1]", 'tags[key1]:["value1"]', ["value1"]), + ("tags[key1]", 'tags[key1]:["value1", "value2"]', ["value1", "value2"]), + ] + + for column, query, expected in tests: + result = errors.query( + selected_columns=[column], + query=query, + params=self.params, + orderby=[column], + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == len(expected), (column, query, expected) + assert [item[column] for item in data] == expected + + def test_tags_colliding_with_fields(self): + event = self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(self.event_time), + "tags": [["id", "new"]], + }, + project_id=self.project.id, + ) + + tests = [ + ("id", "", sorted([self.event.event_id, event.event_id])), + ("id", f"id:{event.event_id}", [event.event_id]), + ("tags[id]", "", ["", "new"]), + ("tags[id]", "tags[id]:new", ["new"]), + ] + + for column, query, expected in tests: + result = errors.query( + selected_columns=[column], + query=query, + params=self.params, + orderby=[column], + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == len(expected), (query, expected) + assert [item[column] for item in data] == expected + + def test_reverse_sorting_issue(self): + other_event = self.store_event( + data={ + "message": "whoopsies", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(self.event_time), + }, + project_id=self.project.id, + ) + + tests = [ + # issue is not sortable + # "issue", + "issue.id", + ] + + for column in tests: + for direction in ["", "-"]: + result = errors.query( + selected_columns=[column], + query="", + params=self.params, + orderby=[f"{direction}{column}"], + referrer="errors", + ) + data = result["data"] + assert len(data) == 2 + expected = [self.event.group_id, other_event.group_id] + if direction == "-": + expected.reverse() + assert [item["issue.id"] for item in data] == expected + + def test_timestamp_rounding_fields(self): + result = errors.query( + selected_columns=["timestamp.to_hour", "timestamp.to_day"], + query="", + params=self.params, + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == 1 + + hour = self.event_time.replace(minute=0, second=0, microsecond=0) + day = hour.replace(hour=0) + assert [item["timestamp.to_hour"] for item in data] == [f"{iso_format(hour)}+00:00"] + assert [item["timestamp.to_day"] for item in data] == [f"{iso_format(day)}+00:00"] + + def test_timestamp_rounding_filters(self): + one_day_ago = before_now(days=1) + two_day_ago = before_now(days=2) + three_day_ago = before_now(days=3) + self.params["start"] = three_day_ago + + self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(two_day_ago), + }, + project_id=self.project.id, + ) + + result = errors.query( + 
selected_columns=["timestamp.to_hour", "timestamp.to_day"], + query=f"timestamp.to_hour:<{iso_format(one_day_ago)} timestamp.to_day:<{iso_format(one_day_ago)}", + params=self.params, + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == 1 + + hour = two_day_ago.replace(minute=0, second=0, microsecond=0) + day = hour.replace(hour=0) + assert [item["timestamp.to_hour"] for item in data] == [f"{iso_format(hour)}+00:00"] + assert [item["timestamp.to_day"] for item in data] == [f"{iso_format(day)}+00:00"] + + def test_user_display(self): + # `user.display` should give `username` + self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"username": "brucew", "id": "1234", "ip": "127.0.0.1"}, + "timestamp": iso_format(self.event_time), + }, + project_id=self.project.id, + ) + + # `user.display` should give `id` + self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "1234", "ip": "127.0.0.1"}, + "timestamp": iso_format(self.event_time), + }, + project_id=self.project.id, + ) + + # `user.display` should give `ip` + self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"ip_address": "127.0.0.1"}, + "timestamp": iso_format(self.event_time), + }, + project_id=self.project.id, + ) + + result = errors.query( + selected_columns=["user.display"], + query="", + params=self.params, + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == 4 + assert {item["user.display"] for item in data} == { + "bruce@example.com", + "brucew", + "1234", + "127.0.0.1", + } + + def test_user_display_filter(self): + # `user.display` should give `username` + self.store_event( + data={ + "message": "oh no", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"username": "brucew", "ip": "127.0.0.1"}, + "timestamp": iso_format(self.event_time), + }, + project_id=self.project.id, + ) + + result = errors.query( + selected_columns=["user.display"], + query="has:user.display user.display:bruce@example.com", + params=self.params, + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == 1 + assert [item["user.display"] for item in data] == ["bruce@example.com"] + + def test_message_orderby(self): + self.event = self.store_event( + data={ + "message": "oh yeah", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(self.event_time), + }, + project_id=self.project.id, + ) + + tests = [ + ("message", ["oh no", "oh yeah"]), + ("-message", ["oh yeah", "oh no"]), + ] + + for orderby, expected in tests: + result = errors.query( + selected_columns=["message"], + query="", + params=self.params, + orderby=[orderby], + referrer="test_discover_query", + ) + + data = result["data"] + assert len(data) == 2 + assert [item["message"] for item in data] == expected + + def test_message_filter(self): + self.event = self.store_event( + data={ + "message": "oh yeah", + "release": "first-release", + "environment": "prod", + "platform": "python", + "user": {"id": "99", "email": "bruce@example.com", "username": "brucew"}, + "timestamp": iso_format(self.event_time), + }, + project_id=self.project.id, + ) + + tests: list[tuple[str, list[str]]] = [ + ('message:"oh 
no"', ["oh no"]), + ('message:"oh yeah"', ["oh yeah"]), + ('message:""', []), + ("has:message", ["oh no", "oh yeah"]), + ("!has:message", []), + ("message:oh*", ["oh no", "oh yeah"]), + ('message:"oh *"', ["oh no", "oh yeah"]), + ('message:["oh meh"]', []), + ('message:["oh yeah"]', ["oh yeah"]), + ('message:["oh yeah", "oh no"]', ["oh no", "oh yeah"]), + ] + + for query, expected in tests: + result = errors.query( + selected_columns=["message"], + query=query, + params=self.params, + orderby=["message"], + referrer="test_discover_query", + ) + data = result["data"] + assert len(data) == len(expected) + assert [item["message"] for item in data] == expected + + def test_to_other_function(self): + project = self.create_project() + + for i in range(3): + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["transaction"] = f"/to_other/{i}" + data["release"] = "aaaa" + self.store_event(data, project_id=project.id) + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["transaction"] = "/to_other/y" + data["release"] = "yyyy" + self.store_event(data, project_id=project.id) + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["transaction"] = "/to_other/z" + data["release"] = "zzzz" + self.store_event(data, project_id=project.id) + + columns1 = ["transaction", 'to_other(release,"aaaa")'] + columns2 = ["transaction", 'to_other(release,"aaaa",old,new)'] + + test_cases = [ + (columns1, "", ["this", "this", "this", "that", "that"], "to_other_release__aaaa"), + (columns2, "", ["new", "new", "new", "old", "old"], "to_other_release__aaaa__old_new"), + ] + + for cols, query, expected, alias in test_cases: + result = errors.query( + selected_columns=cols, + query=query, + orderby=["transaction"], + params={ + "start": before_now(minutes=10), + "end": before_now(minutes=2), + "project_id": [project.id], + }, + referrer="test_discover_query", + ) + + data = result["data"] + assert len(data) == len(expected) + assert [x[alias] for x in data] == expected + + def test_count_if_function(self): + for i in range(3): + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["release"] = "aaaa" + self.store_event(data, project_id=self.project.id) + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["release"] = "bbbb" + self.store_event(data, project_id=self.project.id) + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["release"] = "cccc" + self.store_event(data, project_id=self.project.id) + + columns1 = ["count()", "count_if(release,equals,aaaa)", "count_if(release,notEquals,aaaa)"] + columns2 = ["count()", "count_if(release,less,bbbb)", "count_if(release,lessOrEquals,bbbb)"] + + test_cases = [ + ( + columns1, + "", + { + "count": 5, + "count_if_release_equals_aaaa": 3, + "count_if_release_notEquals_aaaa": 2, + }, + ), + ( + columns2, + "", + { + "count": 5, + "count_if_release_less_bbbb": 3, + "count_if_release_lessOrEquals_bbbb": 4, + }, + ), + ] + + for cols, query, expected in test_cases: + result = errors.query( + selected_columns=cols, + query=query, + params={ + "start": before_now(minutes=10), + "end": before_now(minutes=2), + "project_id": [self.project.id], + }, + referrer="test_discover_query", + ) + + data = result["data"] + assert len(data) == 1 + assert data[0] == expected + + def test_count_if_function_with_unicode(self): + unicode_phrase1 = "\u716e\u6211\u66f4\u591a\u7684\u98df\u7269\uff0c\u6211\u9913\u4e86" + unicode_phrase2 = 
"\u53cd\u6b63\u611b\u60c5\u4e0d\u5c31\u90a3\u6837" + for i in range(3): + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["release"] = unicode_phrase1 + self.store_event(data, project_id=self.project.id) + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["release"] = unicode_phrase2 + self.store_event(data, project_id=self.project.id) + + columns1 = [ + "count()", + f"count_if(release,equals,{unicode_phrase1})", + f"count_if(release,notEquals,{unicode_phrase1})", + ] + + test_cases = [ + ( + columns1, + "", + { + "count": 4, + "count_if_release_equals__u716e_u6211_u66f4_u591a_u7684_u98df_u7269_uff0c_u6211_u9913_u4e86": 3, + "count_if_release_notEquals__u716e_u6211_u66f4_u591a_u7684_u98df_u7269_uff0c_u6211_u9913_u4e86": 1, + }, + ), + ] + + for cols, query, expected in test_cases: + result = errors.query( + selected_columns=cols, + query=query, + params={ + "start": before_now(minutes=10), + "end": before_now(minutes=2), + "project_id": [self.project.id], + }, + referrer="test_discover_query", + ) + + data = result["data"] + assert len(data) == 1 + assert data[0] == expected + + def test_last_seen(self): + project = self.create_project() + + expected_timestamp = before_now(minutes=3) + string_condition_timestamp = before_now(minutes=4).strftime("%Y-%m-%dT%H:%M:%S+00:00") + + data = load_data("javascript", timestamp=expected_timestamp) + data["transaction"] = "/last_seen" + self.store_event(data, project_id=project.id) + + for i in range(6): + data = load_data("javascript", timestamp=before_now(minutes=i + 4)) + data["transaction"] = "/last_seen" + self.store_event(data, project_id=project.id) + + queries = [ + ("", 1, True), + (f"last_seen():>{string_condition_timestamp}", 1, True), + ("last_seen():>0", 1, False), + ] + + for query, expected_length, use_aggregate_conditions in queries: + result = errors.query( + selected_columns=["transaction", "last_seen()"], + query=query, + referrer="errors", + orderby=["transaction"], + params={ + "start": before_now(minutes=10), + "end": before_now(minutes=2), + "project_id": [project.id], + }, + use_aggregate_conditions=use_aggregate_conditions, + ) + data = result["data"] + + assert len(data) == expected_length + assert data[0]["last_seen"] == expected_timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00") + + def test_latest_event(self): + project = self.create_project() + + expected_timestamp = before_now(minutes=3) + data = load_data("javascript", timestamp=expected_timestamp) + data["transaction"] = "/latest_event" + stored_event = self.store_event(data, project_id=project.id) + + for i in range(6): + data = load_data("javascript", timestamp=before_now(minutes=i + 4)) + data["transaction"] = "/latest_event" + self.store_event(data, project_id=project.id) + + result = errors.query( + selected_columns=["transaction", "latest_event()"], + query="", + orderby=["transaction"], + referrer="errors", + params={ + "start": before_now(minutes=10), + "end": before_now(minutes=2), + "project_id": [project.id], + }, + use_aggregate_conditions=False, + ) + data = result["data"] + + assert len(data) == 1 + assert data[0]["latest_event"] == stored_event.event_id + + def test_eps(self): + project = self.create_project() + + for _ in range(6): + data = load_data( + "javascript", + timestamp=before_now(minutes=3), + ) + data["transaction"] = "/eps" + self.store_event(data, project_id=project.id) + + queries = [ + ("", 1, True), + ("eps():>1", 0, True), + ("eps():>1", 1, False), + ("eps(10):>0.5", 1, True), + ("tps():>1", 0, 
True), + ("tps():>1", 1, False), + ("tps(10):>0.5", 1, True), + ] + + for query, expected_length, use_aggregate_conditions in queries: + result = errors.query( + selected_columns=[ + "transaction", + "eps()", + "eps(10)", + "eps(60)", + "tps()", + "tps(10)", + "tps(60)", + ], + query=query, + orderby=["transaction"], + params={ + "start": before_now(minutes=4), + "end": before_now(minutes=2), + "project_id": [project.id], + }, + use_aggregate_conditions=use_aggregate_conditions, + referrer="errors", + ) + data = result["data"] + + assert len(data) == expected_length + if expected_length > 0: + assert data[0]["eps"] == 0.05 + assert data[0]["eps_10"] == 0.6 + assert data[0]["eps_60"] == 0.1 + assert data[0]["tps"] == 0.05 + assert data[0]["tps_10"] == 0.6 + assert data[0]["tps_60"] == 0.1 + + def test_epm(self): + project = self.create_project() + + for _ in range(6): + data = load_data( + "javascript", + timestamp=before_now(minutes=3), + ) + data["transaction"] = "/epm" + self.store_event(data, project_id=project.id) + + queries = [ + ("", 1, True), + ("epm():>3", 0, True), + ("epm():>3", 1, False), + ("epm(10):>3", 1, True), + ("tpm():>3", 0, True), + ("tpm():>3", 1, False), + ("tpm(10):>3", 1, True), + ] + + for query, expected_length, use_aggregate_conditions in queries: + result = errors.query( + selected_columns=[ + "transaction", + "epm()", + "epm(10)", + "epm(60)", + "tpm()", + "tpm(10)", + "tpm(60)", + ], + query=query, + orderby=["transaction"], + params={ + "start": before_now(minutes=4), + "end": before_now(minutes=2), + "project_id": [project.id], + }, + use_aggregate_conditions=use_aggregate_conditions, + referrer="errors", + ) + data = result["data"] + + assert len(data) == expected_length + if expected_length > 0: + assert data[0]["epm"] == 3 + assert data[0]["epm_10"] == 36.0 + assert data[0]["epm_60"] == 6 + assert data[0]["tpm"] == 3 + assert data[0]["tpm_10"] == 36.0 + assert data[0]["tpm_60"] == 6 + + def test_error_handled_alias(self): + data = load_data("android-ndk", timestamp=before_now(minutes=10)) + events = ( + ("a" * 32, "not handled", False), + ("b" * 32, "is handled", True), + ("c" * 32, "undefined", None), + ) + for event in events: + data["event_id"] = event[0] + data["logentry"] = {"formatted": event[1]} + data["exception"]["values"][0]["value"] = event[1] + data["exception"]["values"][0]["mechanism"]["handled"] = event[2] + self.store_event(data=data, project_id=self.project.id) + + queries: list[tuple[str, list[int]]] = [ + ("", [0, 1, 1]), + ("error.handled:true", [1, 1]), + ("!error.handled:true", [0]), + ("has:error.handled", [1, 1]), + ("has:error.handled error.handled:true", [1, 1]), + ("error.handled:false", [0]), + ("has:error.handled error.handled:false", []), + ] + + for query, expected_data in queries: + result = errors.query( + selected_columns=["error.handled"], + query=query, + params={ + "organization_id": self.organization.id, + "project_id": [self.project.id], + "start": before_now(minutes=12), + "end": before_now(minutes=8), + }, + referrer="errors", + ) + + data = result["data"] + data = sorted(data, key=lambda k: (k["error.handled"] is None, k["error.handled"])) + + assert len(data) == len(expected_data) + assert [item["error.handled"] for item in data] == expected_data + + def test_error_unhandled_alias(self): + data = load_data("android-ndk", timestamp=before_now(minutes=10)) + events = ( + ("a" * 32, "not handled", False), + ("b" * 32, "is handled", True), + ("c" * 32, "undefined", None), + ) + for event in events: + 
data["event_id"] = event[0] + data["logentry"] = {"formatted": event[1]} + data["exception"]["values"][0]["value"] = event[1] + data["exception"]["values"][0]["mechanism"]["handled"] = event[2] + self.store_event(data=data, project_id=self.project.id) + + queries: list[tuple[str, list[str], list[int]]] = [ + ("error.unhandled:true", ["a" * 32], [1]), + ("!error.unhandled:true", ["b" * 32, "c" * 32], [0, 0]), + ("has:error.unhandled", ["a" * 32], [1]), + ("!has:error.unhandled", ["b" * 32, "c" * 32], [0, 0]), + ("has:error.unhandled error.unhandled:true", ["a" * 32], [1]), + ("error.unhandled:false", ["b" * 32, "c" * 32], [0, 0]), + ("has:error.unhandled error.unhandled:false", [], []), + ] + + for query, expected_events, error_handled in queries: + result = errors.query( + selected_columns=["error.unhandled"], + query=query, + params={ + "organization_id": self.organization.id, + "project_id": [self.project.id], + "start": before_now(minutes=12), + "end": before_now(minutes=8), + }, + referrer="errors", + ) + data = result["data"] + + assert len(data) == len(expected_events) + assert [item["error.unhandled"] for item in data] == error_handled + + def test_array_fields(self): + data = load_data("javascript") + data["timestamp"] = iso_format(before_now(minutes=10)) + self.store_event(data=data, project_id=self.project.id) + + expected_filenames = [ + "../../sentry/scripts/views.js", + "../../sentry/scripts/views.js", + "../../sentry/scripts/views.js", + "raven.js", + ] + + queries = [ + ("", 1), + ("stack.filename:*.js", 1), + ("stack.filename:*.py", 0), + ("has:stack.filename", 1), + ("!has:stack.filename", 0), + ] + + for query, expected_len in queries: + result = errors.query( + selected_columns=["stack.filename"], + query=query, + params={ + "organization_id": self.organization.id, + "project_id": [self.project.id], + "start": before_now(minutes=12), + "end": before_now(minutes=8), + }, + referrer="errors", + ) + + data = result["data"] + assert len(data) == expected_len + if len(data) == 0: + continue + assert len(data[0]["stack.filename"]) == len(expected_filenames) + assert sorted(data[0]["stack.filename"]) == expected_filenames + + result = errors.query( + selected_columns=["stack.filename"], + query="stack.filename:[raven.js]", + referrer="errors", + params={ + "organization_id": self.organization.id, + "project_id": [self.project.id], + "start": before_now(minutes=12), + "end": before_now(minutes=8), + }, + ) + + data = result["data"] + assert len(data) == 1 + assert len(data[0]["stack.filename"]) == len(expected_filenames) + assert sorted(data[0]["stack.filename"]) == expected_filenames + + def test_orderby_field_alias(self): + data = load_data("android-ndk", timestamp=before_now(minutes=10)) + events = ( + ("a" * 32, "not handled", False), + ("b" * 32, "is handled", True), + ("c" * 32, "undefined", None), + ) + for event in events: + data["event_id"] = event[0] + data["transaction"] = event[0] + data["logentry"] = {"formatted": event[1]} + data["exception"]["values"][0]["value"] = event[1] + data["exception"]["values"][0]["mechanism"]["handled"] = event[2] + self.store_event(data=data, project_id=self.project.id) + + queries = [ + (["error.unhandled"], [0, 0, 1]), + (["error.unhandled"], [0, 0, 1]), + (["-error.unhandled"], [1, 0, 0]), + (["-error.unhandled"], [1, 0, 0]), + ] + + for orderby, expected in queries: + result = errors.query( + selected_columns=["transaction", "error.unhandled"], + query="", + orderby=orderby, + params={ + "organization_id": self.organization.id, + 
"project_id": [self.project.id], + "start": before_now(minutes=12), + "end": before_now(minutes=8), + }, + referrer="errors", + ) + + data = result["data"] + assert [x["error.unhandled"] for x in data] == expected + + def test_orderby_aggregate_function(self): + project = self.create_project() + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["transaction"] = "/count/1" + self.store_event(data, project_id=project.id) + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["transaction"] = "/count/2" + self.store_event(data, project_id=project.id) + + for i in range(6): + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["transaction"] = f"/count/{i}" + self.store_event(data, project_id=project.id) + + data = load_data("javascript", timestamp=before_now(minutes=5)) + data["transaction"] = "/count/1" + self.store_event(data, project_id=project.id) + + orderbys = [ + (["count"], [1, 1, 1, 1, 2, 3]), + (["-count"], [3, 2, 1, 1, 1, 1]), + (["count()"], [1, 1, 1, 1, 2, 3]), + (["-count()"], [3, 2, 1, 1, 1, 1]), + ] + + for orderby, expected in orderbys: + result = errors.query( + selected_columns=["transaction", "count()"], + query="", + orderby=orderby, + params={ + "start": before_now(minutes=10), + "end": before_now(minutes=2), + "project_id": [project.id], + }, + referrer="errors", + ) + data = result["data"] + assert [x["count"] for x in data] == expected + + def test_field_aliasing_in_selected_columns(self): + result = errors.query( + selected_columns=["project.id", "user", "release", "timestamp.to_hour"], + query="", + params=self.params, + referrer="errors", + ) + data = result["data"] + assert len(data) == 1 + assert data[0]["project.id"] == self.project.id + assert data[0]["user"] == "id:99" + assert data[0]["release"] == "first-release" + + event_hour = self.event_time.replace(minute=0, second=0) + assert data[0]["timestamp.to_hour"] == iso_format(event_hour) + "+00:00" + + assert len(result["meta"]["fields"]) == 4 + assert result["meta"]["fields"] == { + "project.id": "integer", + "user": "string", + "release": "string", + "timestamp.to_hour": "date", + } + + def test_field_alias_with_component(self): + result = errors.query( + selected_columns=["project.id", "user", "user.email"], + query="", + params=self.params, + referrer="errors", + ) + data = result["data"] + assert len(data) == 1 + assert data[0]["project.id"] == self.project.id + assert data[0]["user"] == "id:99" + assert data[0]["user.email"] == "bruce@example.com" + + assert len(result["meta"]["fields"]) == 3 + assert result["meta"]["fields"] == { + "project.id": "integer", + "user": "string", + "user.email": "string", + } + + def test_field_aliasing_in_aggregate_functions_and_groupby(self): + result = errors.query( + selected_columns=["project.id", "count_unique(user.email)"], + query="", + params=self.params, + auto_fields=True, + referrer="errors", + ) + data = result["data"] + assert len(data) == 1 + assert data[0]["project.id"] == self.project.id + assert data[0]["count_unique_user_email"] == 1 + + def test_field_aliasing_in_conditions(self): + result = errors.query( + selected_columns=["project.id", "user.email"], + query="user.email:bruce@example.com", + params=self.params, + referrer="errors", + auto_fields=True, + ) + data = result["data"] + assert len(data) == 1 + assert data[0]["project.id"] == self.project.id + assert data[0]["user.email"] == "bruce@example.com" + + def test_auto_fields_simple_fields(self): + result = errors.query( + 
selected_columns=["user.email", "release"], + referrer="errors", + query="", + params=self.params, + auto_fields=True, + ) + data = result["data"] + assert len(data) == 1 + assert data[0]["id"] == self.event.event_id + assert data[0]["user.email"] == "bruce@example.com" + assert data[0]["release"] == "first-release" + assert data[0]["project.name"] == self.project.slug + + assert len(result["meta"]["fields"]) == 4 + assert result["meta"]["fields"] == { + "user.email": "string", + "release": "string", + "id": "string", + "project.name": "string", + } + + def test_auto_fields_aggregates(self): + result = errors.query( + selected_columns=["count_unique(user.email)"], + referrer="errors", + query="", + params=self.params, + auto_fields=True, + ) + data = result["data"] + assert len(data) == 1 + assert data[0]["count_unique_user_email"] == 1 + + def test_release_condition(self): + result = errors.query( + selected_columns=["id", "message"], + query=f"release:{self.create_release(self.project).version}", + params=self.params, + referrer="errors", + ) + assert len(result["data"]) == 0 + + result = errors.query( + selected_columns=["id", "message"], + query=f"release:{self.release.version}", + params=self.params, + referrer="errors", + ) + assert len(result["data"]) == 1 + data = result["data"] + assert data[0]["id"] == self.event.event_id + assert data[0]["message"] == self.event.message + assert "event_id" not in data[0] + + def test_semver_condition(self): + release_1 = self.create_release(version="test@1.2.3") + release_2 = self.create_release(version="test@1.2.4") + release_3 = self.create_release(version="test@1.2.5") + + release_1_e_1 = self.store_event( + data={"release": release_1.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_1_e_2 = self.store_event( + data={"release": release_1.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_2_e_1 = self.store_event( + data={"release": release_2.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_2_e_2 = self.store_event( + data={"release": release_2.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_3_e_1 = self.store_event( + data={"release": release_3.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_3_e_2 = self.store_event( + data={"release": release_3.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + + result = errors.query( + selected_columns=["id"], + query=f"{SEMVER_ALIAS}:>1.2.3", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + release_2_e_1, + release_2_e_2, + release_3_e_1, + release_3_e_2, + } + result = errors.query( + selected_columns=["id"], + query=f"{SEMVER_ALIAS}:>=1.2.3", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + release_1_e_1, + release_1_e_2, + release_2_e_1, + release_2_e_2, + release_3_e_1, + release_3_e_2, + } + result = errors.query( + selected_columns=["id"], + query=f"{SEMVER_ALIAS}:<1.2.4", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == {release_1_e_1, release_1_e_2} + result = errors.query( + selected_columns=["id"], + query=f"!{SEMVER_ALIAS}:1.2.3", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + 
self.event.event_id, + release_2_e_1, + release_2_e_2, + release_3_e_1, + release_3_e_2, + } + + def test_release_stage_condition(self): + replaced_release = self.create_release( + version="replaced_release", + environments=[self.environment], + adopted=timezone.now(), + unadopted=timezone.now(), + ) + adopted_release = self.create_release( + version="adopted_release", + environments=[self.environment], + adopted=timezone.now(), + ) + self.create_release(version="not_adopted_release", environments=[self.environment]) + + adopted_release_e_1 = self.store_event( + data={ + "release": adopted_release.version, + "environment": self.environment.name, + "timestamp": iso_format(self.one_min_ago), + }, + project_id=self.project.id, + ).event_id + adopted_release_e_2 = self.store_event( + data={ + "release": adopted_release.version, + "environment": self.environment.name, + "timestamp": iso_format(self.one_min_ago), + }, + project_id=self.project.id, + ).event_id + replaced_release_e_1 = self.store_event( + data={ + "release": replaced_release.version, + "environment": self.environment.name, + "timestamp": iso_format(self.one_min_ago), + }, + project_id=self.project.id, + ).event_id + replaced_release_e_2 = self.store_event( + data={ + "release": replaced_release.version, + "environment": self.environment.name, + "timestamp": iso_format(self.one_min_ago), + }, + project_id=self.project.id, + ).event_id + + self.params["environment"] = [self.environment.name] + self.params["environment_objects"] = [self.environment] + + result = errors.query( + selected_columns=["id"], + query=f"{RELEASE_STAGE_ALIAS}:{ReleaseStages.ADOPTED.value}", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + adopted_release_e_1, + adopted_release_e_2, + } + + result = errors.query( + selected_columns=["id"], + query=f"!{RELEASE_STAGE_ALIAS}:{ReleaseStages.LOW_ADOPTION.value}", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + adopted_release_e_1, + adopted_release_e_2, + replaced_release_e_1, + replaced_release_e_2, + } + result = errors.query( + selected_columns=["id"], + query=f"{RELEASE_STAGE_ALIAS}:[{ReleaseStages.ADOPTED.value}, {ReleaseStages.REPLACED.value}]", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + adopted_release_e_1, + adopted_release_e_2, + replaced_release_e_1, + replaced_release_e_2, + } + + def test_semver_package_condition(self): + release_1 = self.create_release(version="test@1.2.3") + release_2 = self.create_release(version="test2@1.2.4") + + release_1_e_1 = self.store_event( + data={"release": release_1.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_1_e_2 = self.store_event( + data={"release": release_1.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_2_e_1 = self.store_event( + data={"release": release_2.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + + result = errors.query( + selected_columns=["id"], + referrer="errors", + query=f"{SEMVER_PACKAGE_ALIAS}:test", + params=self.params, + ) + assert {r["id"] for r in result["data"]} == { + release_1_e_1, + release_1_e_2, + } + result = errors.query( + selected_columns=["id"], + query=f"{SEMVER_PACKAGE_ALIAS}:test2", + referrer="errors", + params=self.params, + ) + assert {r["id"] for r in result["data"]} == { + release_2_e_1, + } + + def 
test_semver_build_condition(self): + release_1 = self.create_release(version="test@1.2.3+123") + release_2 = self.create_release(version="test2@1.2.4+124") + + release_1_e_1 = self.store_event( + data={"release": release_1.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_1_e_2 = self.store_event( + data={"release": release_1.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + release_2_e_1 = self.store_event( + data={"release": release_2.version, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ).event_id + + result = errors.query( + selected_columns=["id"], + query=f"{SEMVER_BUILD_ALIAS}:123", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + release_1_e_1, + release_1_e_2, + } + result = errors.query( + selected_columns=["id"], + query=f"{SEMVER_BUILD_ALIAS}:124", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == { + release_2_e_1, + } + result = errors.query( + selected_columns=["id"], + query=f"{SEMVER_BUILD_ALIAS}:>=123", + params=self.params, + referrer="errors", + ) + assert {r["id"] for r in result["data"]} == {release_1_e_1, release_1_e_2, release_2_e_1} + + def test_latest_release_condition(self): + result = errors.query( + selected_columns=["id", "message"], + query="release:latest", + params=self.params, + referrer="errors", + ) + assert len(result["data"]) == 1 + data = result["data"] + assert data[0]["id"] == self.event.event_id + assert data[0]["message"] == self.event.message + assert "event_id" not in data[0] + + def test_environment_condition(self): + result = errors.query( + selected_columns=["id", "message"], + query=f"environment:{self.create_environment(self.project).name}", + params=self.params, + referrer="errors", + ) + assert len(result["data"]) == 0 + + result = errors.query( + selected_columns=["id", "message"], + query=f"environment:{self.environment.name}", + params=self.params, + referrer="errors", + ) + assert len(result["data"]) == 1 + data = result["data"] + assert data[0]["id"] == self.event.event_id + assert data[0]["message"] == self.event.message + + def test_conditional_filter(self): + project2 = self.create_project(organization=self.organization) + project3 = self.create_project(organization=self.organization) + + self.store_event( + data={"message": "aaaaa", "timestamp": iso_format(self.one_min_ago)}, + project_id=project2.id, + ) + self.store_event( + data={"message": "bbbbb", "timestamp": iso_format(self.one_min_ago)}, + project_id=project3.id, + ) + + result = errors.query( + selected_columns=["project", "message"], + query=f"project:{self.project.slug} OR project:{project2.slug}", + params={ + "project_id": [self.project.id, project2.id], + "start": self.two_min_ago, + "end": self.now, + }, + orderby=["message"], + referrer="errors", + ) + + data = result["data"] + assert len(data) == 2 + assert data[0]["project"] == project2.slug + assert data[1]["project"] == self.project.slug + + def test_nested_conditional_filter(self): + project2 = self.create_project(organization=self.organization) + self.store_event( + data={"release": "a" * 32, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ) + self.event = self.store_event( + data={"release": "b" * 32, "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ) + self.event = self.store_event( + data={"release": "c" * 32, "timestamp": 
iso_format(self.one_min_ago)}, + project_id=self.project.id, + ) + self.event = self.store_event( + data={"release": "a" * 32, "timestamp": iso_format(self.one_min_ago)}, + project_id=project2.id, + ) + + result = errors.query( + selected_columns=["release"], + query="(release:{} OR release:{}) AND project:{}".format( + "a" * 32, "b" * 32, self.project.slug + ), + params={ + "project_id": [self.project.id, project2.id], + "start": self.two_min_ago, + "end": self.now, + }, + orderby=["release"], + referrer="discover", + ) + + data = result["data"] + assert len(data) == 2 + assert data[0]["release"] == "a" * 32 + assert data[1]["release"] == "b" * 32 + + def test_conditions_with_special_columns(self): + for val in ["a", "b", "c"]: + data = load_data("javascript") + data["timestamp"] = iso_format(self.one_min_ago) + data["transaction"] = val * 32 + data["logentry"] = {"formatted": val * 32} + data["tags"] = {"sub_customer.is-Enterprise-42": val * 32} + self.store_event(data=data, project_id=self.project.id) + + result = errors.query( + selected_columns=["transaction", "message"], + query="event.type:error (transaction:{}* OR message:{}*)".format("a" * 32, "b" * 32), + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + orderby=["transaction"], + referrer="discover", + ) + + data = result["data"] + assert len(data) == 2 + assert data[0]["transaction"] == "a" * 32 + assert data[1]["transaction"] == "b" * 32 + + result = errors.query( + selected_columns=["transaction", "sub_customer.is-Enterprise-42"], + query="event.type:error (transaction:{} AND sub_customer.is-Enterprise-42:{})".format( + "a" * 32, "a" * 32 + ), + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + orderby=["transaction"], + referrer="discover", + ) + + data = result["data"] + assert len(data) == 1 + assert data[0]["transaction"] == "a" * 32 + assert data[0]["sub_customer.is-Enterprise-42"] == "a" * 32 + + def test_conditions_with_nested_aggregates(self): + events = [("a", 2), ("b", 3), ("c", 4)] + for ev in events: + val = ev[0] * 32 + for i in range(ev[1]): + data = load_data("javascript") + data["timestamp"] = iso_format(self.one_min_ago) + data["transaction"] = f"{val}-{i}" + data["logentry"] = {"formatted": val} + data["tags"] = {"trek": val} + self.store_event(data=data, project_id=self.project.id) + + result = errors.query( + selected_columns=["trek", "count()"], + query="(event.type:error AND (trek:{} AND (transaction:*{}* AND count():>2)))".format( + "b" * 32, "b" * 32 + ), + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + orderby=["trek"], + use_aggregate_conditions=True, + referrer="discover", + ) + + data = result["data"] + assert len(data) == 1 + assert data[0]["trek"] == "b" * 32 + assert data[0]["count"] == 3 + + with pytest.raises(InvalidSearchQuery) as err: + errors.query( + selected_columns=["trek", "transaction"], + query="(event.type:error AND (trek:{} AND (transaction:*{}* AND count():>2)))".format( + "b" * 32, "b" * 32 + ), + referrer="discover", + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + orderby=["trek"], + use_aggregate_conditions=True, + ) + assert "used in a condition but is not a selected column" in str(err) + + def test_conditions_with_timestamps(self): + events = [("a", 1), ("b", 2), ("c", 3)] + for t, ev in enumerate(events): + val = ev[0] * 32 + for i in range(ev[1]): + data = 
load_data("javascript", timestamp=self.now - timedelta(seconds=3 * t + 1)) + data["transaction"] = f"{val}" + self.store_event(data=data, project_id=self.project.id) + + results = errors.query( + selected_columns=["transaction", "count()"], + query="event.type:error AND (timestamp:<{} OR timestamp:>{})".format( + iso_format(self.now - timedelta(seconds=5)), + iso_format(self.now - timedelta(seconds=3)), + ), + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + orderby=["transaction"], + use_aggregate_conditions=True, + referrer="discover", + ) + + data = results["data"] + assert len(data) == 2 + assert data[0]["transaction"] == "a" * 32 + assert data[0]["count"] == 1 + assert data[1]["transaction"] == "c" * 32 + assert data[1]["count"] == 3 + + def test_timestamp_rollup_filter(self): + event_hour = self.event_time.replace(minute=0, second=0) + result = errors.query( + selected_columns=["project.id", "user", "release"], + query="timestamp.to_hour:" + iso_format(event_hour), + params=self.params, + referrer="discover", + ) + data = result["data"] + assert len(data) == 1 + assert data[0]["project.id"] == self.project.id + assert data[0]["user"] == "id:99" + assert data[0]["release"] == "first-release" + + assert len(result["meta"]["fields"]) == 3 + assert result["meta"]["fields"] == { + "project.id": "integer", + "user": "string", + "release": "string", + } + + def test_count_with_or(self): + data = load_data("javascript", timestamp=before_now(seconds=3)) + data["transaction"] = "a" * 32 + self.store_event(data=data, project_id=self.project.id) + + results = errors.query( + selected_columns=["transaction", "count()"], + query="event.type:error AND (count():<1 OR count():>0)", + params=self.params, + orderby=["transaction"], + use_aggregate_conditions=True, + referrer="discover", + ) + + data = results["data"] + assert len(data) == 1 + assert data[0]["transaction"] == "a" * 32 + assert data[0]["count"] == 1 + + def test_access_to_private_functions(self): + # using private functions directly without access should error + with pytest.raises(InvalidSearchQuery, match="array_join: no access to private function"): + errors.query( + selected_columns=["array_join(tags.key)"], + query="", + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + referrer="discover", + ) + + # using private functions in an aggregation without access should error + with pytest.raises(InvalidSearchQuery, match="histogram: no access to private function"): + for array_column in ARRAY_COLUMNS: + errors.query( + selected_columns=[f"histogram({array_column}_value, 1,0,1)"], + query=f"histogram({array_column}_value, 1,0,1):>0", + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + use_aggregate_conditions=True, + referrer="discover", + ) + + # using private functions in an aggregation without access should error + # with auto aggregation on + with pytest.raises(InvalidSearchQuery, match="histogram: no access to private function"): + for array_column in ARRAY_COLUMNS: + errors.query( + selected_columns=["count()"], + query=f"histogram({array_column}_value, 1,0,1):>0", + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + referrer="discover", + auto_aggregations=True, + use_aggregate_conditions=True, + ) + + def test_any_function(self): + data = load_data("javascript", timestamp=before_now(seconds=3)) + data["transaction"] = "a" * 32 + 
self.store_event(data=data, project_id=self.project.id) + + results = errors.query( + selected_columns=["count()", "any(transaction)", "any(user.id)"], + query="transaction:{}".format("a" * 32), + params={ + "start": before_now(minutes=5), + "end": before_now(seconds=1), + "project_id": [self.project.id], + }, + referrer="discover", + use_aggregate_conditions=True, + ) + data = results["data"] + assert len(data) == 1 + assert data[0]["any_transaction"] == "a" * 32 + assert data[0]["any_user_id"] == "1" + assert data[0]["count"] == 1 + + def test_offsets(self): + self.store_event( + data={"message": "hello1", "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ) + self.store_event( + data={"message": "hello2", "timestamp": iso_format(self.one_min_ago)}, + project_id=self.project.id, + ) + + result = errors.query( + selected_columns=["message"], + query="", + params=self.params, + orderby=["message"], + limit=1, + offset=1, + referrer="discover", + ) + + data = result["data"] + assert len(data) == 1 + # because we're ording by `message`, and offset by 1, the message should be `hello2` + assert data[0]["message"] == "hello2" + + +class ErrorsArithmeticTest(SnubaTestCase, TestCase): + def setUp(self): + super().setUp() + + self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0) + self.now = before_now() + event_data = load_data("javascript") + event_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=30, seconds=3)) + self.store_event(data=event_data, project_id=self.project.id) + self.params: ParamsType = { + "project_id": [self.project.id], + "start": self.day_ago, + "end": self.now, + } + self.query = "" + + def test_simple(self): + results = errors.query( + selected_columns=[ + "count()", + ], + equations=["count() + 100"], + query=self.query, + params=self.params, + referrer="discover", + ) + assert len(results["data"]) == 1 + result = results["data"][0] + assert result["equation[0]"] == 101 diff --git a/tests/snuba/api/endpoints/test_organization_events_stats.py b/tests/snuba/api/endpoints/test_organization_events_stats.py index 5d8080cc99285a..b3e013263b1cb7 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats.py @@ -9,6 +9,7 @@ import pytest from dateutil.parser import parse as parse_date from django.urls import reverse +from snuba_sdk import Entity from snuba_sdk.column import Column from snuba_sdk.conditions import Condition, Op from snuba_sdk.function import Function @@ -2806,3 +2807,365 @@ def test_functions_dataset_simple(self): assert any( row[1][0]["count"] > 0 for row in response.data["bar"]["p95(function.duration)"]["data"] ) + + +class OrganizationEventsStatsTopNEventsErrors(APITestCase, SnubaTestCase): + def setUp(self): + super().setUp() + self.login_as(user=self.user) + + self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0) + + self.project = self.create_project() + self.project2 = self.create_project() + self.user2 = self.create_user() + self.event_data: list[_EventDataDict] = [ + { + "data": { + "message": "poof", + "timestamp": iso_format(self.day_ago + timedelta(minutes=2)), + "user": {"email": self.user.email}, + "tags": {"shared-tag": "yup"}, + "fingerprint": ["group1"], + }, + "project": self.project2, + "count": 7, + }, + { + "data": { + "message": "voof", + "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)), + "fingerprint": ["group2"], + "user": {"email": 
self.user2.email}, + "tags": {"shared-tag": "yup"}, + }, + "project": self.project2, + "count": 6, + }, + { + "data": { + "message": "very bad", + "timestamp": iso_format(self.day_ago + timedelta(minutes=2)), + "fingerprint": ["group3"], + "user": {"email": "foo@example.com"}, + "tags": {"shared-tag": "yup"}, + }, + "project": self.project, + "count": 5, + }, + { + "data": { + "message": "oh no", + "timestamp": iso_format(self.day_ago + timedelta(minutes=2)), + "fingerprint": ["group4"], + "user": {"email": "bar@example.com"}, + "tags": {"shared-tag": "yup"}, + }, + "project": self.project, + "count": 4, + }, + { + "data": { + "message": "kinda bad", + "timestamp": iso_format(self.day_ago + timedelta(minutes=2)), + "user": {"email": self.user.email}, + "tags": {"shared-tag": "yup"}, + "fingerprint": ["group7"], + }, + "project": self.project, + "count": 3, + }, + # Not in the top 5 + { + "data": { + "message": "sorta bad", + "timestamp": iso_format(self.day_ago + timedelta(minutes=2)), + "fingerprint": ["group5"], + "user": {"email": "bar@example.com"}, + "tags": {"shared-tag": "yup"}, + }, + "project": self.project, + "count": 2, + }, + { + "data": { + "message": "not so bad", + "timestamp": iso_format(self.day_ago + timedelta(minutes=2)), + "fingerprint": ["group6"], + "user": {"email": "bar@example.com"}, + "tags": {"shared-tag": "yup"}, + }, + "project": self.project, + "count": 1, + }, + ] + + self.events = [] + for index, event_data in enumerate(self.event_data): + data = event_data["data"].copy() + for i in range(event_data["count"]): + data["event_id"] = f"{index}{i}" * 16 + event = self.store_event(data, project_id=event_data["project"].id) + self.events.append(event) + + self.enabled_features = { + "organizations:discover-basic": True, + } + self.url = reverse( + "sentry-api-0-organization-events-stats", + kwargs={"organization_id_or_slug": self.project.organization.slug}, + ) + + def test_simple_top_events(self): + with self.feature(self.enabled_features): + response = self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "message", "user.email"], + "dataset": "errors", + "topEvents": 5, + }, + format="json", + ) + + data = response.data + assert response.status_code == 200, response.content + assert len(data) == 6 + + for index, event in enumerate(self.events[:5]): + message = event.message or event.transaction + results = data[ + ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")]) + ] + assert results["order"] == index + assert [{"count": self.event_data[index]["count"]}] in [ + attrs for _, attrs in results["data"] + ] + + other = data["Other"] + assert other["order"] == 5 + assert [{"count": 3}] in [attrs for _, attrs in other["data"]] + + def test_top_events_with_projects_other(self): + with self.feature(self.enabled_features): + response = self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "project"], + "dataset": "errors", + "topEvents": 1, + }, + format="json", + ) + + data = response.data + assert response.status_code == 200, response.content + assert set(data.keys()) == {"Other", self.project.slug} + + assert data[self.project.slug]["order"] == 0 + assert [attrs[0]["count"] for _, attrs in 
data[self.project.slug]["data"]] == [15, 0] + + assert data["Other"]["order"] == 1 + assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [7, 6] + + def test_top_events_with_issue(self): + # delete a group to make sure if this happens the value becomes unknown + event_group = self.events[0].group + event_group.delete() + + with self.feature(self.enabled_features): + response = self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "message", "issue"], + "topEvents": 5, + "query": "", + "dataset": "errors", + }, + format="json", + ) + + data = response.data + + assert response.status_code == 200, response.content + assert len(data) == 6 + + for index, event in enumerate(self.events[:4]): + message = event.message + # Because we deleted the group for event 0 + if index == 0 or event.group is None: + issue = "unknown" + else: + issue = event.group.qualified_short_id + + results = data[",".join([issue, message])] + assert results["order"] == index + assert [{"count": self.event_data[index]["count"]}] in [ + attrs for time, attrs in results["data"] + ] + + other = data["Other"] + assert other["order"] == 5 + assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [3, 0] + + @mock.patch("sentry.models.GroupManager.get_issues_mapping") + def test_top_events_with_unknown_issue(self, mock_issues_mapping): + event = self.events[0] + event_data = self.event_data[0] + + # ensure that the issue mapping returns None for the issue + mock_issues_mapping.return_value = {event.group.id: None} + + with self.feature(self.enabled_features): + response = self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "issue"], + "topEvents": 5, + # narrow the search to just one issue + "query": f"issue.id:{event.group.id}", + "dataset": "errors", + }, + format="json", + ) + assert response.status_code == 200, response.content + + data = response.data + assert len(data) == 1 + results = data["unknown"] + assert results["order"] == 0 + assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]] + + @mock.patch( + "sentry.search.events.builder.base.raw_snql_query", + side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}], + ) + def test_top_events_with_issue_check_query_conditions(self, mock_query): + """ "Intentionally separate from test_top_events_with_issue + + This is to test against a bug where the condition for issues wasn't included and we'd be missing data for + the interval since we'd cap out the max rows. 
This was not caught by the previous test since the results + would still be correct given the smaller interval & lack of data + """ + with self.feature(self.enabled_features): + self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "message", "issue"], + "topEvents": 5, + "query": "!event.type:transaction", + "dataset": "errors", + }, + format="json", + ) + + assert ( + Condition( + Function( + "coalesce", + [Column("group_id", entity=Entity("events", alias="events")), 0], + "issue.id", + ), + Op.IN, + [1], + ) + in mock_query.mock_calls[1].args[0].query.where + ) + + def test_group_id_tag_simple(self): + event_data: _EventDataDict = { + "data": { + "message": "poof", + "timestamp": iso_format(self.day_ago + timedelta(minutes=2)), + "user": {"email": self.user.email}, + "tags": {"group_id": "the tag"}, + "fingerprint": ["group1"], + }, + "project": self.project2, + "count": 7, + } + for i in range(event_data["count"]): + event_data["data"]["event_id"] = f"a{i}" * 16 + self.store_event(event_data["data"], project_id=event_data["project"].id) + + data = { + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "group_id"], + "topEvents": 5, + "partial": 1, + } + with self.feature(self.enabled_features): + response = self.client.get(self.url, data, format="json") + assert response.status_code == 200, response.content + assert response.data["the tag"]["data"][0][1] == [{"count": 7}] + + data["query"] = 'group_id:"the tag"' + with self.feature(self.enabled_features): + response = self.client.get(self.url, data, format="json") + assert response.status_code == 200 + assert response.data["the tag"]["data"][0][1] == [{"count": 7}] + + data["query"] = "group_id:abc" + with self.feature(self.enabled_features): + response = self.client.get(self.url, data, format="json") + assert response.status_code == 200 + assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]]) + + def test_top_events_with_error_unhandled(self): + self.login_as(user=self.user) + project = self.create_project() + prototype = load_data("android-ndk") + prototype["event_id"] = "f" * 32 + prototype["logentry"] = {"formatted": "not handled"} + prototype["exception"]["values"][0]["value"] = "not handled" + prototype["exception"]["values"][0]["mechanism"]["handled"] = False + prototype["timestamp"] = iso_format(self.day_ago + timedelta(minutes=2)) + self.store_event(data=prototype, project_id=project.id) + + with self.feature(self.enabled_features): + response = self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "error.unhandled"], + "topEvents": 5, + }, + format="json", + ) + + data = response.data + assert response.status_code == 200, response.content + assert len(data) == 2 From 3ebf740937c1b8fc188553e3840f910e8e3f11e0 Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 24 Jul 2024 14:23:33 -0400 Subject: [PATCH 092/126] feat(discover): Show the banner if forced split decision (#74863) In a follow up, I'd like to set dataset source to USER if the user dismisses the alert so it doesn't keep popping up. 
--- static/app/types/organization.tsx | 7 ++++++- static/app/utils/discover/types.tsx | 7 +++++++ static/app/views/discover/results.tsx | 13 +++++++++---- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/static/app/types/organization.tsx b/static/app/types/organization.tsx index 99ebf0e5c9cd00..0a10db4b6d3682 100644 --- a/static/app/types/organization.tsx +++ b/static/app/types/organization.tsx @@ -1,6 +1,10 @@ import type {Project} from 'sentry/types/project'; import type {AggregationOutputType} from 'sentry/utils/discover/fields'; -import type {DiscoverDatasets, SavedQueryDatasets} from 'sentry/utils/discover/types'; +import type { + DatasetSource, + DiscoverDatasets, + SavedQueryDatasets, +} from 'sentry/utils/discover/types'; import type {Actor, Avatar, ObjectStatus, Scope} from './core'; import type {OrgExperiments} from './experiments'; @@ -243,6 +247,7 @@ export interface NewQuery { version: SavedQueryVersions; createdBy?: User; dataset?: DiscoverDatasets; + datasetSource?: DatasetSource; display?: string; end?: string | Date; environment?: Readonly; diff --git a/static/app/utils/discover/types.tsx b/static/app/utils/discover/types.tsx index 8e3b97c46d23bb..1c0e69c329ee23 100644 --- a/static/app/utils/discover/types.tsx +++ b/static/app/utils/discover/types.tsx @@ -29,6 +29,13 @@ export enum SavedQueryDatasets { TRANSACTIONS = 'transaction-like', } +export enum DatasetSource { + USER = 'user', + UNKNOWN = 'unknown', + INFERRED = 'inferred', + FORCED = 'forced', +} + export const TOP_EVENT_MODES: string[] = [DisplayModes.TOP5, DisplayModes.DAILYTOP5]; // The modes that support the interval selector diff --git a/static/app/views/discover/results.tsx b/static/app/views/discover/results.tsx index f16f0e78011bda..49f0fb67d1b5ef 100644 --- a/static/app/views/discover/results.tsx +++ b/static/app/views/discover/results.tsx @@ -44,6 +44,7 @@ import {CustomMeasurementsProvider} from 'sentry/utils/customMeasurements/custom import EventView, {isAPIPayloadSimilar} from 'sentry/utils/discover/eventView'; import {formatTagKey, generateAggregateFields} from 'sentry/utils/discover/fields'; import { + DatasetSource, DisplayModes, MULTI_Y_AXIS_SUPPORTED_DISPLAY_MODES, SavedQueryDatasets, @@ -152,6 +153,7 @@ export class Results extends Component { needConfirmation: false, confirmedQuery: false, tips: [], + showForcedDatasetAlert: true, }; componentDidMount() { @@ -575,12 +577,16 @@ export class Results extends Component { } renderForcedDatasetBanner() { - const {organization} = this.props; + const {organization, savedQuery} = this.props; if ( organization.features.includes('performance-discover-dataset-selector') && this.state.showForcedDatasetAlert && - this.state.splitDecision + (this.state.splitDecision || savedQuery?.datasetSource === DatasetSource.FORCED) ) { + const splitDecision = this.state.splitDecision ?? savedQuery?.queryDataset; + if (!splitDecision) { + return null; + } return ( { > {tct( "We're splitting our datasets up to make it a bit easier to digest. We defaulted this query to [splitDecision]. 
Edit as you see fit.", - {splitDecision: DATASET_LABEL_MAP[this.state.splitDecision]} + {splitDecision: DATASET_LABEL_MAP[splitDecision]} )} ); @@ -766,7 +772,6 @@ export class Results extends Component { value !== savedQuery?.dataset ) { this.setSplitDecision(value); - this.setState({showForcedDatasetAlert: true}); } }} dataset={ From d5a3517ee56191883bce51c02416421427323322 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Wed, 24 Jul 2024 11:49:28 -0700 Subject: [PATCH 093/126] fix(issue-stream): Fix styling on priority button (#74875) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit this pr fixes a minor style problem with the priority button before: Screenshot 2024-07-24 at 11 35 23 AM after: Screenshot 2024-07-24 at 11 39 47 AM --- static/app/components/badge/groupPriority.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/components/badge/groupPriority.tsx b/static/app/components/badge/groupPriority.tsx index 118c3ec5b6209b..99a0cf7505c58f 100644 --- a/static/app/components/badge/groupPriority.tsx +++ b/static/app/components/badge/groupPriority.tsx @@ -262,7 +262,7 @@ const DropdownButton = styled(Button)` border: none; padding: 0; height: unset; - border-radius: 10px; + border-radius: 20px; box-shadow: none; `; From 9f7fb0e3ce7cdb54e49928be14579c9b2de51668 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Wed, 24 Jul 2024 11:52:08 -0700 Subject: [PATCH 094/126] fix(conditions): Include generic groups in result mapping (#74816) There's a small issue where we run into a KeyError when parsing `EventFrequencyPercentConditions`. This is b/c the relevant group that's raising the KeyError is missing from the results. The condition is skipping adding generic (non-issue) groups to the results when it should be added with a percent of 0. 
https://github.com/getsentry/sentry/blob/b78aefeee5a11b2abd669e74aabf45a7882bc6ae/src/sentry/rules/conditions/event_frequency.py#L673 Also switch `batch_query_hook` to use early return Fixes [SENTRY-3CAW](https://sentry.sentry.io/issues/5637069028/) --- .../rules/conditions/event_frequency.py | 65 +++++++++++-------- .../rules/conditions/test_event_frequency.py | 1 + 2 files changed, 38 insertions(+), 28 deletions(-) diff --git a/src/sentry/rules/conditions/event_frequency.py b/src/sentry/rules/conditions/event_frequency.py index d7ee979f3db93a..0ec068db0fa249 100644 --- a/src/sentry/rules/conditions/event_frequency.py +++ b/src/sentry/rules/conditions/event_frequency.py @@ -658,38 +658,47 @@ def query_hook( def batch_query_hook( self, group_ids: set[int], start: datetime, end: datetime, environment_id: int ) -> dict[int, int]: - batch_percents: dict[int, int] = defaultdict(int) groups = Group.objects.filter(id__in=group_ids).values( "id", "type", "project_id", "project__organization_id" ) project_id = self.get_value_from_groups(groups, "project_id") - avg_sessions_in_interval = None - if project_id: - session_count_last_hour = self.get_session_count(project_id, environment_id, start, end) - avg_sessions_in_interval = self.get_session_interval( - session_count_last_hour, self.get_option("interval") - ) - if avg_sessions_in_interval: - error_issue_ids, _ = self.get_error_and_generic_group_ids(groups) - organization_id = self.get_value_from_groups(groups, "project__organization_id") - if error_issue_ids and organization_id: - error_issue_count = self.get_chunked_result( - tsdb_function=self.tsdb.get_sums, - model=get_issue_tsdb_group_model(GroupCategory.ERROR), - group_ids=error_issue_ids, - organization_id=organization_id, - start=start, - end=end, - environment_id=environment_id, - referrer_suffix="batch_alert_event_frequency_percent", - ) - for group_id, count in error_issue_count.items(): - percent: int = int(100 * round(count / avg_sessions_in_interval, 4)) - batch_percents[group_id] = percent - else: - percent = 0 - for group in groups: - batch_percents[group.get("id")] = percent + + if not project_id: + return {group.get("id"): 0 for group in groups} + + session_count_last_hour = self.get_session_count(project_id, environment_id, start, end) + avg_sessions_in_interval = self.get_session_interval( + session_count_last_hour, self.get_option("interval") + ) + + if not avg_sessions_in_interval: + return {group.get("id"): 0 for group in groups} + + error_issue_ids, generic_issue_ids = self.get_error_and_generic_group_ids(groups) + organization_id = self.get_value_from_groups(groups, "project__organization_id") + + if not (error_issue_ids and organization_id): + return {group.get("id"): 0 for group in groups} + + error_issue_count = self.get_chunked_result( + tsdb_function=self.tsdb.get_sums, + model=get_issue_tsdb_group_model(GroupCategory.ERROR), + group_ids=error_issue_ids, + organization_id=organization_id, + start=start, + end=end, + environment_id=environment_id, + referrer_suffix="batch_alert_event_frequency_percent", + ) + + batch_percents: dict[int, int] = {} + for group_id, count in error_issue_count.items(): + percent: int = int(100 * round(count / avg_sessions_in_interval, 4)) + batch_percents[group_id] = percent + + # We do not have sessions for non-error issue types + for group in generic_issue_ids: + batch_percents[group] = 0 return batch_percents diff --git a/tests/snuba/rules/conditions/test_event_frequency.py b/tests/snuba/rules/conditions/test_event_frequency.py index 
29bc23124c2fda..84cd06e3caf4e3 100644 --- a/tests/snuba/rules/conditions/test_event_frequency.py +++ b/tests/snuba/rules/conditions/test_event_frequency.py @@ -206,6 +206,7 @@ def test_batch_query_percent(self): assert batch_query == { self.event.group_id: percent_of_sessions, self.event2.group_id: percent_of_sessions, + self.perf_event.group_id: 0, } batch_query = self.condition_inst2.batch_query_hook( group_ids=[self.event3.group_id], From bb8249209a69858cc03c6c1367f38c91d5f49ba7 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 24 Jul 2024 18:57:39 +0000 Subject: [PATCH 095/126] Revert "fix(hybrid-cloud): Adds defaults to provisioning model fields, lost_password_hash model (#74766)" This reverts commit be18d81af8962d0cff3999606cbc9d86f390a62c. Co-authored-by: GabeVillalobos <5643012+GabeVillalobos@users.noreply.github.com> --- .../services/control_organization_provisioning/model.py | 4 ++-- src/sentry/services/organization/model.py | 4 ++-- src/sentry/users/services/lost_password_hash/model.py | 7 ++----- tests/sentry/api/endpoints/test_event_ai_suggested_fix.py | 2 +- tests/sentry/feedback/usecases/test_create_feedback.py | 2 +- 5 files changed, 8 insertions(+), 11 deletions(-) diff --git a/src/sentry/hybridcloud/services/control_organization_provisioning/model.py b/src/sentry/hybridcloud/services/control_organization_provisioning/model.py index 20e76051f7c38b..e5a904059603f4 100644 --- a/src/sentry/hybridcloud/services/control_organization_provisioning/model.py +++ b/src/sentry/hybridcloud/services/control_organization_provisioning/model.py @@ -1,7 +1,7 @@ -from sentry.hybridcloud.rpc import RpcModel +import pydantic -class RpcOrganizationSlugReservation(RpcModel): +class RpcOrganizationSlugReservation(pydantic.BaseModel): id: int organization_id: int user_id: int | None diff --git a/src/sentry/services/organization/model.py b/src/sentry/services/organization/model.py index cb9a171dca9be7..8730c9864c41eb 100644 --- a/src/sentry/services/organization/model.py +++ b/src/sentry/services/organization/model.py @@ -13,8 +13,8 @@ class OrganizationOptions(pydantic.BaseModel): class PostProvisionOptions(pydantic.BaseModel): - sentry_options: Any | None = None # Placeholder for any sentry post-provisioning data - getsentry_options: Any | None = None # Reserved for getsentry post-provisioning data + sentry_options: Any | None # Placeholder for any sentry post-provisioning data + getsentry_options: Any | None # Reserved for getsentry post-provisioning data class OrganizationProvisioningOptions(pydantic.BaseModel): diff --git a/src/sentry/users/services/lost_password_hash/model.py b/src/sentry/users/services/lost_password_hash/model.py index bf2d1f6e5f95f8..7a6d9c07cf0366 100644 --- a/src/sentry/users/services/lost_password_hash/model.py +++ b/src/sentry/users/services/lost_password_hash/model.py @@ -3,10 +3,7 @@ # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. 
-from datetime import datetime - -from django.utils import timezone -from pydantic import Field +import datetime from sentry.hybridcloud.rpc import RpcModel from sentry.models.lostpasswordhash import LostPasswordHash @@ -16,7 +13,7 @@ class RpcLostPasswordHash(RpcModel): id: int = -1 user_id: int = -1 hash: str = "" - date_added: datetime = Field(default_factory=timezone.now) + date_added = datetime.datetime def get_absolute_url(self, mode: str = "recover") -> str: return LostPasswordHash.get_lostpassword_url(self.user_id, self.hash, mode) diff --git a/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py b/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py index f990f8d5010332..b1029c188a3fab 100644 --- a/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py +++ b/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py @@ -34,7 +34,7 @@ def dummy_response(*args, **kwargs): finish_reason="stop", ) ], - created=int(time.time()), + created=time.time(), model="gpt3.5-trubo", object="chat.completion", ) diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index 986a4141e025df..8ef46e7d19f71a 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -64,7 +64,7 @@ def create_dummy_response(*args, **kwargs): finish_reason="stop", ) ], - created=int(time.time()), + created=time.time(), model="gpt3.5-trubo", object="chat.completion", ) From 7d03c36356c8d2b830343be52470ad786da2f320 Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Wed, 24 Jul 2024 12:02:30 -0700 Subject: [PATCH 096/126] feat(uptime): Add function to count number of uptime monitors active in an organization (#74777) This adds a funtion to count active uptime monitors for an org. Since this will be called from high volum areas it also adds caching around the function. We want the cache to be actively invalidated when the count changes in the org, so we also want to listen to `post_save` and `post_delete` signals. This is a pattern we've used a few times in sentry, so I also generalized this cache so that we don't have to manually write it every time. 
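As a rough, framework-free sketch of the pattern described above (the in-memory dict, the `cache_on_args` name, and the stand-in data are illustrative assumptions, not part of this patch), the idea reduces to caching a function's result keyed on its positional args and re-running or dropping the cached entry whenever an invalidation hook fires; in the patch that hook is wired to Django's `post_save`/`post_delete` signals through the per-model arg-getter functions:

```python
from functools import wraps

_store: dict[tuple, object] = {}  # stand-in for Django's cache backend


def cache_on_args(func):
    """Cache `func` keyed on its positional args, with manual invalidation."""

    @wraps(func)
    def wrapper(*args):
        key = (func.__qualname__, args)
        if key not in _store:
            _store[key] = func(*args)
        return _store[key]

    def invalidate(*args, recalculate=True):
        # In the real decorator this runs from post_save/post_delete handlers,
        # with `args` derived from the saved or deleted model instance.
        key = (func.__qualname__, args)
        if recalculate:
            _store[key] = func(*args)
        else:
            _store.pop(key, None)

    wrapper.invalidate = invalidate
    return wrapper


MONITORS = [{"org_id": 1}, {"org_id": 1}, {"org_id": 2}]  # stand-in data


@cache_on_args
def monitor_count(org_id: int) -> int:
    # Placeholder for the ORM count() query in the patch.
    return sum(1 for m in MONITORS if m["org_id"] == org_id)


assert monitor_count(1) == 2   # computed once, then served from the cache
MONITORS.append({"org_id": 1})
monitor_count.invalidate(1)    # what a post_save handler would trigger
assert monitor_count(1) == 3
```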
--- src/sentry/uptime/models.py | 11 +++ src/sentry/utils/function_cache.py | 91 +++++++++++++++++++++++ tests/sentry/uptime/test_models.py | 17 +++++ tests/sentry/utils/test_function_cache.py | 80 ++++++++++++++++++++ 4 files changed, 199 insertions(+) create mode 100644 src/sentry/utils/function_cache.py create mode 100644 tests/sentry/uptime/test_models.py create mode 100644 tests/sentry/utils/test_function_cache.py diff --git a/src/sentry/uptime/models.py b/src/sentry/uptime/models.py index 6ebfdd1356c4d3..b136a939c084ca 100644 --- a/src/sentry/uptime/models.py +++ b/src/sentry/uptime/models.py @@ -10,8 +10,10 @@ from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.db.models.manager.base import BaseManager +from sentry.models.organization import Organization from sentry.remote_subscriptions.models import BaseRemoteSubscription from sentry.types.actor import Actor +from sentry.utils.function_cache import cache_func_for_models @region_silo_model @@ -109,3 +111,12 @@ class Meta: @property def owner(self) -> Actor | None: return Actor.from_id(user_id=self.owner_user_id, team_id=self.owner_team_id) + + +def get_org_from_uptime_monitor(uptime_monitor: ProjectUptimeSubscription) -> tuple[Organization]: + return (uptime_monitor.project.organization,) + + +@cache_func_for_models([(ProjectUptimeSubscription, get_org_from_uptime_monitor)]) +def get_active_monitor_count_for_org(organization: Organization) -> int: + return ProjectUptimeSubscription.objects.filter(project__organization=organization).count() diff --git a/src/sentry/utils/function_cache.py b/src/sentry/utils/function_cache.py new file mode 100644 index 00000000000000..457c3a85d8754e --- /dev/null +++ b/src/sentry/utils/function_cache.py @@ -0,0 +1,91 @@ +import uuid +from collections.abc import Callable +from datetime import timedelta +from decimal import Decimal +from functools import partial +from typing import Any, ParamSpec, TypeVar + +from django.core.cache import cache +from django.db import models +from django.db.models.signals import post_delete, post_save + +from sentry.utils.hashlib import md5_text + +P = ParamSpec("P") +R = TypeVar("R") +S = TypeVar("S", bound=models.Model) + + +def arg_to_hashable(arg: Any): + if isinstance(arg, (int, float, str, Decimal, uuid.UUID)): + return arg + elif isinstance(arg, models.Model): + return f"{arg._meta.label}:{arg.pk}" + else: + raise ValueError( + "Can only cache functions whose parameters can be hashed in a consistent way" + ) + + +def cache_key_for_cached_func(cached_func: Callable[P, R], *args): + base_cache_key = f"query_cache:{md5_text(cached_func.__qualname__).hexdigest()}" + vals_to_hash = [arg_to_hashable(arg) for arg in args] + return f"{base_cache_key}:{md5_text(*vals_to_hash).hexdigest()}" + + +def clear_cache_for_cached_func( + cached_func: Callable[P, R], arg_getter, recalculate: bool, instance: S, *args, **kwargs +): + args = arg_getter(instance) + cache_key = cache_key_for_cached_func(cached_func, *args) + if recalculate: + cache.set(cache_key, cached_func(*args)) + else: + cache.delete(cache_key) + + +def cache_func_for_models( + cache_invalidators: list[tuple[type[S], Callable[[S], P.args]]], + cache_ttl: None | timedelta = None, + recalculate: bool = True, +): + """ + Decorator that caches the result of a function, and actively invalidates the result when related models are + created/updated/deleted. 
To use this, decorate a function with this decorator and pass a list of `cache_invalidators` + that tell us how to invalidate the cache. + Each entry in `cache_invalidators` is a tuple of (, ). In more detail: + - Model is the model we'll listen to for updates. When this model fires a `post_save` or `post_delete` signal + we'll invalidate the cache. + - Func is a function that accepts an instance of `Model` and returns a tuple of values that can be used to call + the cached function. These values are used to invalidate the cache. + + This only works with functions that are called using args. + + If `recalculate` is `True`, we'll re-run the decorated function and overwrite the cached value. If `False`, we'll + just remove the value from the cache. + """ + if cache_ttl is None: + cache_ttl = timedelta(days=7) + + def cached_query_func(func_to_cache: Callable[P, R]): + def inner(*args: P.args, **kwargs: P.kwargs) -> R: + if kwargs: + raise ValueError("Can't cache values using kwargs") + + cache_key = cache_key_for_cached_func(func_to_cache, *args) + cached_val = cache.get(cache_key, None) + if cached_val is None: + cached_val = func_to_cache(*args) + cache.set(cache_key, cached_val, timeout=cache_ttl.total_seconds()) + return cached_val + + for model, arg_getter in cache_invalidators: + clear_cache_callable = partial( + clear_cache_for_cached_func, func_to_cache, arg_getter, recalculate + ) + post_save.connect(clear_cache_callable, sender=model, weak=False) + post_delete.connect(clear_cache_callable, sender=model, weak=False) + + return inner + + return cached_query_func diff --git a/tests/sentry/uptime/test_models.py b/tests/sentry/uptime/test_models.py new file mode 100644 index 00000000000000..8851e8946344e9 --- /dev/null +++ b/tests/sentry/uptime/test_models.py @@ -0,0 +1,17 @@ +from sentry.testutils.cases import UptimeTestCase +from sentry.uptime.models import get_active_monitor_count_for_org + + +class GetActiveMonitorCountForOrgTest(UptimeTestCase): + def test(self): + assert get_active_monitor_count_for_org(self.organization) == 0 + self.create_project_uptime_subscription() + assert get_active_monitor_count_for_org(self.organization) == 1 + other_sub = self.create_uptime_subscription(url="https://santry.io") + self.create_project_uptime_subscription(uptime_subscription=other_sub) + assert get_active_monitor_count_for_org(self.organization) == 2 + other_org = self.create_organization() + other_proj = self.create_project(organization=other_org) + self.create_project_uptime_subscription(uptime_subscription=other_sub, project=other_proj) + assert get_active_monitor_count_for_org(self.organization) == 2 + assert get_active_monitor_count_for_org(other_org) == 1 diff --git a/tests/sentry/utils/test_function_cache.py b/tests/sentry/utils/test_function_cache.py new file mode 100644 index 00000000000000..f8b25b82097fe3 --- /dev/null +++ b/tests/sentry/utils/test_function_cache.py @@ -0,0 +1,80 @@ +from unittest.mock import create_autospec + +from django.db import models + +from sentry.backup.scopes import RelocationScope +from sentry.db.models import region_silo_model +from sentry.testutils.cases import TestCase +from sentry.utils.function_cache import cache_func_for_models + +cache_func_for_models + + +@region_silo_model +class CacheModel(models.Model): + __relocation_scope__ = RelocationScope.Excluded + some_field = models.TextField() + + class Meta: + app_label = "fixtures" + + +def count_func(text_search: str): + return CacheModel.objects.filter(some_field=text_search).count() + + +def 
arg_extractor(instance: CacheModel): + return (instance.some_field,) + + +class CacheFuncForModelsTest(TestCase): + def assert_called_with_count(self, mock_test_func, text_search: str, count: int): + assert ( + len([ca for ca in mock_test_func.call_args_list if ca.args[0] == text_search]) == count + ) + + def test(self): + mock_test_func = create_autospec(count_func) + mock_test_func.side_effect = count_func + decorated_test_func = cache_func_for_models([(CacheModel, arg_extractor)])(mock_test_func) + self.assert_called_with_count(mock_test_func, "test", 0) + assert decorated_test_func("test") == 0 + self.assert_called_with_count(mock_test_func, "test", 1) + assert decorated_test_func("test") == 0 + self.assert_called_with_count(mock_test_func, "test", 1) + + CacheModel.objects.create(some_field="test") + # Since we're actively refetching the count should go to 2 here + self.assert_called_with_count(mock_test_func, "test", 2) + assert decorated_test_func("test") == 1 + self.assert_called_with_count(mock_test_func, "test", 2) + CacheModel.objects.create(some_field="test") + self.assert_called_with_count(mock_test_func, "test", 3) + assert decorated_test_func("test") == 2 + self.assert_called_with_count(mock_test_func, "test", 3) + CacheModel.objects.create(some_field="another_val") + self.assert_called_with_count(mock_test_func, "test", 3) + assert decorated_test_func("test") == 2 + + def test_no_recalculate(self): + mock_test_func = create_autospec(count_func) + mock_test_func.side_effect = count_func + decorated_test_func = cache_func_for_models( + [(CacheModel, arg_extractor)], recalculate=False + )(mock_test_func) + self.assert_called_with_count(mock_test_func, "test", 0) + assert decorated_test_func("test") == 0 + self.assert_called_with_count(mock_test_func, "test", 1) + + CacheModel.objects.create(some_field="test") + # Since we're not actively refetching the count should remain the same here + self.assert_called_with_count(mock_test_func, "test", 1) + assert decorated_test_func("test") == 1 + self.assert_called_with_count(mock_test_func, "test", 2) + CacheModel.objects.create(some_field="test") + self.assert_called_with_count(mock_test_func, "test", 2) + assert decorated_test_func("test") == 2 + self.assert_called_with_count(mock_test_func, "test", 3) + CacheModel.objects.create(some_field="another_val") + self.assert_called_with_count(mock_test_func, "test", 3) + assert decorated_test_func("test") == 2 From 81988ef8b7f195dfb12de46ca88a5b0590c61a7d Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 15:13:54 -0400 Subject: [PATCH 097/126] ref: unify signature of get_annotations (#74870) mypy 1.11 points out this is inconsistent --- src/sentry/plugins/base/v2.py | 4 ++-- src/sentry/templatetags/sentry_plugins.py | 2 +- tests/sentry/templatetags/test_sentry_plugins.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/sentry/plugins/base/v2.py b/src/sentry/plugins/base/v2.py index 062e4b13b2e6fe..166d76f3c37aac 100644 --- a/src/sentry/plugins/base/v2.py +++ b/src/sentry/plugins/base/v2.py @@ -331,7 +331,7 @@ def get_actions(self, request, group) -> list[tuple[str, str]]: """ return [] - def get_annotations(self, group, **kwargs): + def get_annotations(self, group) -> list[dict[str, str]]: """ Return a list of annotations to append to this aggregate. 
@@ -340,7 +340,7 @@ def get_annotations(self, group, **kwargs): The properties of each tag must match the constructor for :class:`sentry.plugins.Annotation` - >>> def get_annotations(self, group, **kwargs): + >>> def get_annotations(self, group): >>> task_id = GroupMeta.objects.get_value(group, 'myplugin:tid') >>> if not task_id: >>> return [] diff --git a/src/sentry/templatetags/sentry_plugins.py b/src/sentry/templatetags/sentry_plugins.py index da9586249347d4..1936199fcc9ba9 100644 --- a/src/sentry/templatetags/sentry_plugins.py +++ b/src/sentry/templatetags/sentry_plugins.py @@ -28,7 +28,7 @@ def get_actions(group, request): @register.filter -def get_annotations(group, request=None): +def get_annotations(group, request=None) -> list[dict[str, str]]: project = group.project annotation_list = [] diff --git a/tests/sentry/templatetags/test_sentry_plugins.py b/tests/sentry/templatetags/test_sentry_plugins.py index b643074b0ba084..88949e66d69e40 100644 --- a/tests/sentry/templatetags/test_sentry_plugins.py +++ b/tests/sentry/templatetags/test_sentry_plugins.py @@ -10,7 +10,7 @@ class SamplePlugin(Plugin2): def get_actions(self, request, group) -> list[tuple[str, str]]: return [("Example Action", f"http://example.com?id={group.id}")] - def get_annotations(self, group): + def get_annotations(self, group) -> list[dict[str, str]]: return [ {"label": "Example Tag", "url": f"http://example.com?id={group.id}"}, {"label": "Example Two"}, From ba12ff00b9cb7353cc1c062cdb01b34d49ce5f06 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:18:17 -0700 Subject: [PATCH 098/126] chore(processing): Add error for large num of groups (#74877) --- src/sentry/rules/processing/delayed_processing.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index e5c08ff01cb2e9..47e250c55e0bd7 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -447,10 +447,16 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: num_groups = len(rulegroup_to_event_data.keys()) num_groups_bucketed = bucket_num_groups(num_groups) metrics.incr("delayed_processing.num_groups", tags={"num_groups": num_groups_bucketed}) - logger.info( - "delayed_processing.rulegroupeventdata", - extra={"rulegroupdata": rulegroup_to_event_data, "project_id": project_id}, - ) + if num_groups >= 10000: + logger.error( + "delayed_processing.too_many_groups", + extra={"project_id": project_id, "num_groups": num_groups}, + ) + else: + logger.info( + "delayed_processing.rulegroupeventdata", + extra={"rulegroupdata": rulegroup_to_event_data, "project_id": project_id}, + ) # STEP 2: Map each rule to the groups that must be checked for that rule. 
rules_to_groups = get_rules_to_groups(rulegroup_to_event_data) From f65b4f35434d617df6ccd1330122960d4b6f52b3 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 15:19:52 -0400 Subject: [PATCH 099/126] ref: unify signature of get_configure_plugin_fields (#74871) mypy 1.11 pointed out these are inconsistent -- also cleaned up unused `request` param --- src/sentry/plugins/base/v1.py | 8 +------- src/sentry/plugins/bases/issue2.py | 3 --- src/sentry_plugins/bitbucket/plugin.py | 2 +- src/sentry_plugins/github/plugin.py | 2 +- src/sentry_plugins/gitlab/plugin.py | 2 +- src/sentry_plugins/jira/plugin.py | 2 +- src/sentry_plugins/phabricator/plugin.py | 2 +- src/sentry_plugins/pivotal/plugin.py | 2 +- 8 files changed, 7 insertions(+), 16 deletions(-) diff --git a/src/sentry/plugins/base/v1.py b/src/sentry/plugins/base/v1.py index a4e1191f41aecc..df4a4cac43bbad 100644 --- a/src/sentry/plugins/base/v1.py +++ b/src/sentry/plugins/base/v1.py @@ -507,13 +507,7 @@ def get_url_module(self): def view_configure(self, request, project, **kwargs): if request.method == "GET": - return Response( - self.get_configure_plugin_fields( - request=request, # DEPRECATED: this param should not be used - project=project, - **kwargs, - ) - ) + return Response(self.get_configure_plugin_fields(project=project, **kwargs)) self.configure(project, request.data) return Response({"message": "Successfully updated configuration."}) diff --git a/src/sentry/plugins/bases/issue2.py b/src/sentry/plugins/bases/issue2.py index 3b32ac37c90587..c2d41901aebaf7 100644 --- a/src/sentry/plugins/bases/issue2.py +++ b/src/sentry/plugins/bases/issue2.py @@ -392,9 +392,6 @@ def plugin_issues(self, request: Request, group, plugin_issues, **kwargs) -> Non def get_config(self, *args, **kwargs): # TODO(dcramer): update existing plugins to just use get_config - # TODO(dcramer): remove request kwarg after sentry-plugins has been - # updated - kwargs.setdefault("request", None) return self.get_configure_plugin_fields(*args, **kwargs) def check_config_and_auth(self, request: Request, group): diff --git a/src/sentry_plugins/bitbucket/plugin.py b/src/sentry_plugins/bitbucket/plugin.py index 85c7c062f3cacc..d1db1be56b92ba 100644 --- a/src/sentry_plugins/bitbucket/plugin.py +++ b/src/sentry_plugins/bitbucket/plugin.py @@ -190,7 +190,7 @@ def view_autocomplete(self, request: Request, group, **kwargs): return Response({field: issues}) - def get_configure_plugin_fields(self, request: Request, project, **kwargs): + def get_configure_plugin_fields(self, project, **kwargs): return [ { "name": "repo", diff --git a/src/sentry_plugins/github/plugin.py b/src/sentry_plugins/github/plugin.py index 9967434ca1e28b..9c948fe1922751 100644 --- a/src/sentry_plugins/github/plugin.py +++ b/src/sentry_plugins/github/plugin.py @@ -225,7 +225,7 @@ def view_autocomplete(self, request: Request, group, **kwargs): return Response({field: issues}) - def get_configure_plugin_fields(self, request: Request, project, **kwargs): + def get_configure_plugin_fields(self, project, **kwargs): return [ { "name": "repo", diff --git a/src/sentry_plugins/gitlab/plugin.py b/src/sentry_plugins/gitlab/plugin.py index b134d211b9f971..6c07bfd924e48c 100644 --- a/src/sentry_plugins/gitlab/plugin.py +++ b/src/sentry_plugins/gitlab/plugin.py @@ -155,7 +155,7 @@ def get_issue_url(self, group, issue_iid, **kwargs): return f"{url}/{repo}/issues/{issue_iid}" - def get_configure_plugin_fields(self, request: Request, project, **kwargs): + 
def get_configure_plugin_fields(self, project, **kwargs): gitlab_token = self.get_option("gitlab_token", project) secret_field = get_secret_field_config( gitlab_token, "Enter your GitLab API token.", include_prefix=True diff --git a/src/sentry_plugins/jira/plugin.py b/src/sentry_plugins/jira/plugin.py index e530375b040229..d13c05f847a515 100644 --- a/src/sentry_plugins/jira/plugin.py +++ b/src/sentry_plugins/jira/plugin.py @@ -524,7 +524,7 @@ def validate_config(self, project, config, actor=None): return config - def get_configure_plugin_fields(self, request: Request, project, **kwargs): + def get_configure_plugin_fields(self, project, **kwargs): instance = self.get_option("instance_url", project) username = self.get_option("username", project) pw = self.get_option("password", project) diff --git a/src/sentry_plugins/phabricator/plugin.py b/src/sentry_plugins/phabricator/plugin.py index 90404af8536d4f..199fdadadc1483 100644 --- a/src/sentry_plugins/phabricator/plugin.py +++ b/src/sentry_plugins/phabricator/plugin.py @@ -72,7 +72,7 @@ def get_api(self, project): token=self.get_option("token", project), ) - def get_configure_plugin_fields(self, request: Request, project, **kwargs): + def get_configure_plugin_fields(self, project, **kwargs): token = self.get_option("token", project) helptext = "You may generate a Conduit API Token from your account settings in Phabricator." secret_field = get_secret_field_config(token, helptext, include_prefix=True) diff --git a/src/sentry_plugins/pivotal/plugin.py b/src/sentry_plugins/pivotal/plugin.py index 1a690136c8bfa0..cdeff68749d4ae 100644 --- a/src/sentry_plugins/pivotal/plugin.py +++ b/src/sentry_plugins/pivotal/plugin.py @@ -187,7 +187,7 @@ def get_issue_title_by_id(self, request: Request, group, issue_id): json_resp = json.loads(body) return json_resp["name"] - def get_configure_plugin_fields(self, request: Request, project, **kwargs): + def get_configure_plugin_fields(self, project, **kwargs): token = self.get_option("token", project) helptext = ( "Enter your API Token (found on " From be5e5151211812f4769105b6cdcd584355e16214 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 24 Jul 2024 12:36:19 -0700 Subject: [PATCH 100/126] ref(grouping): Strip querystrings from stacktrace filenames (#74825) In cases where a stacktrace frame's `filename` is the basename of a URL, we sometimes end up with querystring nonsense on the end of it. This both makes the stacktrace hard to read and forces Seer to tokenize a whole bunch of meaningless stuff. Here's a recent example of such a stacktrace: ``` "frames": [ { "filename": "index.html?__geo_region=jp&loc=eyjrawqioiiydks4rjniyvrlwekwovb5yxdrrno4iiwiywxnijoirvmyntyifq.eyjzdwiioijhmvpdmevvnuuilcjhdwqioijndxj1z3vydsisimnvdw50cnkioijkucisimnyzwf0zwqioje3mtk4otu0mzisimlzcyi6imcxmjmtyxv0acisimn1cnjlbmn5ijoislbziiwizxhwijoxnzixmjy5nzuylcjyzwdpb24ioijkucisimxhbmcioijqysisimlhdci6mtcymta5njk1miwianrpijoicgnfnelbovpovel1cfrhsllncemyce9lwij9.wefd0fvomovr_gjrcquzatrsmstgrvzqew7uhuyiibajhas7m_hyceqkigikwyybvlsqxhdqrwywsrxqthmjeq&lang=jp&platform=jorp1&mode=0", "function": "t", "context_line": ' None: """ Applies grouping enhancement rules and ensure in_app is set on all frames. - This also trims functions if necessary. + This also trims functions and pulls query strings off of filenames if necessary. """ stacktrace_frames = [] @@ -323,10 +325,26 @@ def normalize_stacktraces_for_grouping( # otherwise stored in `function` to not make the payload larger # unnecessarily. 
with sentry_sdk.start_span(op=op, description="iterate_frames"): + stripped_querystring = False for frames in stacktrace_frames: for frame in frames: _update_frame(frame, platform) + if platform == "javascript": + try: + parsed_filename = urlparse(frame.get("filename", "")) + if parsed_filename.query: + stripped_querystring = True + frame["filename"] = frame["filename"].replace( + f"?{parsed_filename.query}", "" + ) + # ignore unparsable filenames + except Exception: + pass + if stripped_querystring: + # Fires once per event, regardless of how many frames' filenames were stripped + metrics.incr("sentry.grouping.stripped_filename_querystrings") + # If a grouping config is available, run grouping enhancers if grouping_config is not None: with sentry_sdk.start_span(op=op, description="apply_modifications_to_frame"): diff --git a/tests/sentry/stacktraces/test_filename.py b/tests/sentry/stacktraces/test_filename.py new file mode 100644 index 00000000000000..51a7e82f8c2a51 --- /dev/null +++ b/tests/sentry/stacktraces/test_filename.py @@ -0,0 +1,50 @@ +from typing import Any +from unittest import TestCase + +from sentry.stacktraces.processing import normalize_stacktraces_for_grouping + + +def _make_event_data(filenames: list[str], platform: str = "") -> dict[str, Any]: + return { + "exception": { + "values": [ + { + "stacktrace": { + "frames": [{"filename": filename} for filename in filenames], + }, + } + ] + }, + "platform": platform, + } + + +def _get_filenames(event_data: dict[str, Any]) -> list[str]: + frames = event_data["exception"]["values"][0]["stacktrace"]["frames"] + return [frame["filename"] for frame in frames] + + +class FilenameNormalizationTest(TestCase): + def test_leaves_non_js_events_alone(self): + filenames = ["whos_a_good_girl?.py", "maisey.py"] + event_data = _make_event_data(filenames, "python") + + normalize_stacktraces_for_grouping(event_data) + + assert _get_filenames(event_data) == filenames + + def test_leaves_non_querystringed_js_filenames_alone(self): + filenames = ["maisey.js", "charlie.js"] + event_data = _make_event_data(filenames, "javascript") + + normalize_stacktraces_for_grouping(event_data) + + assert _get_filenames(event_data) == filenames + + def test_strips_querystrings_from_files_in_js_events(self): + filenames = ["maisey.js?good=duh", "charlie.html"] + event_data = _make_event_data(filenames, "javascript") + + normalize_stacktraces_for_grouping(event_data) + + assert _get_filenames(event_data) == ["maisey.js", "charlie.html"] From b90acdaefe7d2614fc32499e7a221ab24e597c4e Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Wed, 24 Jul 2024 12:55:24 -0700 Subject: [PATCH 101/126] feat(uptime): Enforce a basic quota of 1 uptime monitor per organization (#74786) This limits each organization to 1 uptime monitor. We're not using the official quota system yet, but will just keep track of how many uptime monitors exist in the org. 
--- src/sentry/tasks/post_process.py | 3 ++- src/sentry/uptime/detectors/ranking.py | 5 +++++ src/sentry/uptime/detectors/tasks.py | 1 - src/sentry/uptime/subscriptions/subscriptions.py | 1 + tests/sentry/uptime/detectors/test_ranking.py | 7 +++++++ 5 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index 2effb23da0a599..9bb3a6e8e95f09 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -26,7 +26,6 @@ from sentry.silo.base import SiloMode from sentry.tasks.base import instrumented_task from sentry.types.group import GroupSubStatus -from sentry.uptime.detectors.detector import detect_base_url_for_project from sentry.utils import json, metrics from sentry.utils.cache import cache from sentry.utils.event_frames import get_sdk_name @@ -1515,6 +1514,8 @@ def detect_new_escalation(job: PostProcessJob): def detect_base_urls_for_uptime(job: PostProcessJob): + from sentry.uptime.detectors.detector import detect_base_url_for_project + url = get_path(job["event"].data, "request", "url") detect_base_url_for_project(job["event"].project, url) diff --git a/src/sentry/uptime/detectors/ranking.py b/src/sentry/uptime/detectors/ranking.py index 5240fdde4c6d55..90c6da79e5455e 100644 --- a/src/sentry/uptime/detectors/ranking.py +++ b/src/sentry/uptime/detectors/ranking.py @@ -9,6 +9,8 @@ from rediscluster import RedisCluster from sentry.constants import UPTIME_AUTODETECTION +from sentry.uptime.models import get_active_monitor_count_for_org +from sentry.uptime.subscriptions.subscriptions import MAX_SUBSCRIPTIONS_PER_ORG from sentry.utils import metrics, redis if TYPE_CHECKING: @@ -166,6 +168,9 @@ def delete_organization_bucket(bucket: datetime) -> None: def should_detect_for_organization(organization: Organization) -> bool: if not organization.get_option("sentry:uptime_autodetection", UPTIME_AUTODETECTION): return False + + if get_active_monitor_count_for_org(organization) >= MAX_SUBSCRIPTIONS_PER_ORG: + return False return True diff --git a/src/sentry/uptime/detectors/tasks.py b/src/sentry/uptime/detectors/tasks.py index 2568bd21439e13..2f02bf236dae1b 100644 --- a/src/sentry/uptime/detectors/tasks.py +++ b/src/sentry/uptime/detectors/tasks.py @@ -114,7 +114,6 @@ def process_organization_url_ranking(organization_id: int): "uptime.process_organization", extra={"organization_id": org.id}, ) - # TODO: Check quota available for org should_detect = should_detect_for_organization(org) for project_id, project_count in get_candidate_projects_for_org(org): diff --git a/src/sentry/uptime/subscriptions/subscriptions.py b/src/sentry/uptime/subscriptions/subscriptions.py index bdffa5d06a95ff..6749bfdb3347bb 100644 --- a/src/sentry/uptime/subscriptions/subscriptions.py +++ b/src/sentry/uptime/subscriptions/subscriptions.py @@ -14,6 +14,7 @@ logger = logging.getLogger(__name__) UPTIME_SUBSCRIPTION_TYPE = "uptime_monitor" +MAX_SUBSCRIPTIONS_PER_ORG = 1 def create_uptime_subscription( diff --git a/tests/sentry/uptime/detectors/test_ranking.py b/tests/sentry/uptime/detectors/test_ranking.py index 120f6251588d61..16c08215459d0b 100644 --- a/tests/sentry/uptime/detectors/test_ranking.py +++ b/tests/sentry/uptime/detectors/test_ranking.py @@ -184,3 +184,10 @@ def test(self): assert not should_detect_for_organization(self.organization) self.organization.update_option("sentry:uptime_autodetection", True) assert should_detect_for_organization(self.organization) + + def test_quota(self): + assert 
should_detect_for_organization(self.organization) + uptime_monitor = self.create_project_uptime_subscription() + assert not should_detect_for_organization(self.organization) + uptime_monitor.delete() + assert should_detect_for_organization(self.organization) From 7c43ffbbd74336069660e716238d039a443cc769 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 24 Jul 2024 15:57:30 -0400 Subject: [PATCH 102/126] fix(insights): Improve readout ribbon wrapping (#74726) This is the last part of https://github.com/getsentry/sentry/issues/74424. Improves how the metric readout ribbons in Insights module wrap when the screen size changes. - prevent the readouts from growing strangely or overlapping each other - never partially wrap the ribbon - improve spacing ## Changes 1. Introduce some new components. `Ribbon` split into `ReadoutRibbon` and `ToolRibbon`. The former holds `MetricReadout` components. The latter holds dropdown sets (e.g., project selector, asset type selector, etc.). New `HeaderContainer` component. Extracted from all the places that copy-pasted it, and used in _all_ pages. 2. Simplify readout ribbons. Always align the readings left, always wrap them sensibly, fix the spacing. In all layouts that use `ModuleLayout`, no additional changes were needed. In pages that _do not_ use `ModuleLayout`, I had to add a bottom margin to the ribbons to preserve the current spacing. Using `ModuleLayout` everywhere is a separate can of worms. --- .../resources/components/resourceInfo.tsx | 12 +-- .../resources/components/resourceView.tsx | 8 -- .../resources/views/resourceSummaryPage.tsx | 15 ++-- .../resources/views/resourcesLandingPage.tsx | 42 +++++----- .../insights/cache/components/samplePanel.tsx | 11 +-- .../common/components/headerContainer.tsx | 10 +++ .../common/components/metricReadout.tsx | 64 +++++++++------ .../insights/common/components/ribbon.tsx | 11 ++- .../sampleList/sampleInfo/index.tsx | 16 ++-- .../sampleList/sampleTable/sampleTable.tsx | 5 +- .../database/views/databaseLandingPage.tsx | 34 ++------ .../views/databaseSpanSummaryPage.tsx | 81 +++++++++---------- .../http/components/httpSamplesPanel.tsx | 12 +-- .../http/views/httpDomainSummaryPage.tsx | 26 +++--- .../views/llmMonitoringDetailsPage.tsx | 34 ++++---- .../appStarts/views/screenSummaryPage.tsx | 19 ++--- .../components/spanSamplesPanelContainer.tsx | 12 +-- .../screenload/components/metricsRibbon.tsx | 13 ++- .../screenload/views/screenLoadSpansPage.tsx | 36 ++++----- .../mobile/ui/views/screenSummaryPage.tsx | 19 ++--- .../components/messageSpanSamplesPanel.tsx | 14 ++-- .../queues/views/destinationSummaryPage.tsx | 24 +++--- 22 files changed, 235 insertions(+), 283 deletions(-) create mode 100644 static/app/views/insights/common/components/headerContainer.tsx diff --git a/static/app/views/insights/browser/resources/components/resourceInfo.tsx b/static/app/views/insights/browser/resources/components/resourceInfo.tsx index f51a08e33f4e19..60a872a7f0f987 100644 --- a/static/app/views/insights/browser/resources/components/resourceInfo.tsx +++ b/static/app/views/insights/browser/resources/components/resourceInfo.tsx @@ -7,7 +7,7 @@ import {DurationUnit, SizeUnit} from 'sentry/utils/discover/fields'; import getDynamicText from 'sentry/utils/getDynamicText'; import {RESOURCE_THROUGHPUT_UNIT} from 'sentry/views/insights/browser/resources/settings'; import {MetricReadout} from 'sentry/views/insights/common/components/metricReadout'; -import {Ribbon} from 
'sentry/views/insights/common/components/ribbon'; +import {ReadoutRibbon} from 'sentry/views/insights/common/components/ribbon'; import {getTimeSpentExplanation} from 'sentry/views/insights/common/components/tableCells/timeSpentCell'; import { DataTitles, @@ -69,9 +69,8 @@ function ResourceInfo(props: Props) { return ( - + - + {hasNoData && ( diff --git a/static/app/views/insights/browser/resources/components/resourceView.tsx b/static/app/views/insights/browser/resources/components/resourceView.tsx index b3cc720cecbc7f..449d52d598d082 100644 --- a/static/app/views/insights/browser/resources/components/resourceView.tsx +++ b/static/app/views/insights/browser/resources/components/resourceView.tsx @@ -139,12 +139,4 @@ const DropdownContainer = styled('div')` flex-wrap: wrap; `; -export const FilterOptionsContainer = styled('div')<{columnCount: number}>` - display: grid; - grid-template-columns: repeat(${props => props.columnCount}, 1fr); - gap: ${space(2)}; - margin-bottom: ${space(2)}; - max-width: 800px; -`; - export default ResourceView; diff --git a/static/app/views/insights/browser/resources/views/resourceSummaryPage.tsx b/static/app/views/insights/browser/resources/views/resourceSummaryPage.tsx index d260fb6233037b..13ec3955160ed4 100644 --- a/static/app/views/insights/browser/resources/views/resourceSummaryPage.tsx +++ b/static/app/views/insights/browser/resources/views/resourceSummaryPage.tsx @@ -1,5 +1,4 @@ import React from 'react'; -import styled from '@emotion/styled'; import {Breadcrumbs} from 'sentry/components/breadcrumbs'; import ButtonBar from 'sentry/components/buttonBar'; @@ -16,7 +15,6 @@ import {useParams} from 'sentry/utils/useParams'; import ResourceSummaryCharts from 'sentry/views/insights/browser/resources/components/charts/resourceSummaryCharts'; import RenderBlockingSelector from 'sentry/views/insights/browser/resources/components/renderBlockingSelector'; import ResourceInfo from 'sentry/views/insights/browser/resources/components/resourceInfo'; -import {FilterOptionsContainer} from 'sentry/views/insights/browser/resources/components/resourceView'; import SampleImages from 'sentry/views/insights/browser/resources/components/sampleImages'; import ResourceSummaryTable from 'sentry/views/insights/browser/resources/components/tables/resourceSummaryTable'; import {IMAGE_FILE_EXTENSIONS} from 'sentry/views/insights/browser/resources/constants'; @@ -24,8 +22,10 @@ import {Referrer} from 'sentry/views/insights/browser/resources/referrer'; import {DATA_TYPE} from 'sentry/views/insights/browser/resources/settings'; import {ResourceSpanOps} from 'sentry/views/insights/browser/resources/types'; import {useResourceModuleFilters} from 'sentry/views/insights/browser/resources/utils/useResourceFilters'; +import {HeaderContainer} from 'sentry/views/insights/common/components/headerContainer'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; +import {ToolRibbon} from 'sentry/views/insights/common/components/ribbon'; import {useSpanMetrics} from 'sentry/views/insights/common/queries/useDiscover'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; import {useModuleURL} from 'sentry/views/insights/common/utils/useModuleURL'; @@ -113,16 +113,17 @@ function ResourceSummary() { - + + - + - - - } - /> - + + + + } + /> + + @@ -92,6 +94,10 @@ function PageWithProviders() { ); } +const StyledHeaderContainer = 
styled(HeaderContainer)` + margin-bottom: ${space(2)}; +`; + export default PageWithProviders; export const PaddedContainer = styled('div')` diff --git a/static/app/views/insights/cache/components/samplePanel.tsx b/static/app/views/insights/cache/components/samplePanel.tsx index c3ff300e9cca29..11e0eba15d615d 100644 --- a/static/app/views/insights/cache/components/samplePanel.tsx +++ b/static/app/views/insights/cache/components/samplePanel.tsx @@ -32,7 +32,7 @@ import {BASE_FILTERS} from 'sentry/views/insights/cache/settings'; import DetailPanel from 'sentry/views/insights/common/components/detailPanel'; import {MetricReadout} from 'sentry/views/insights/common/components/metricReadout'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; -import {Ribbon} from 'sentry/views/insights/common/components/ribbon'; +import {ReadoutRibbon} from 'sentry/views/insights/common/components/ribbon'; import {getTimeSpentExplanation} from 'sentry/views/insights/common/components/tableCells/timeSpentCell'; import { useMetrics, @@ -300,9 +300,8 @@ export function CacheSamplePanel() { - + - + - - + + {props.title} + + + + ); } -function ReadoutContent({ - unit, - value, - tooltip, - align = 'right', - isLoading, - preferredPolarity, -}: Props) { +function ReadoutContent({unit, value, tooltip, isLoading, preferredPolarity}: Props) { if (isLoading) { return ( - + ); @@ -72,7 +67,7 @@ function ReadoutContent({ if (isARateUnit(unit)) { renderedValue = ( - + {formatRate(typeof value === 'string' ? parseFloat(value) : value, unit, { minimumValue: MINIMUM_RATE_VALUE, })} @@ -83,7 +78,7 @@ function ReadoutContent({ if (unit === DurationUnit.MILLISECOND) { // TODO: Implement other durations renderedValue = ( - + + ); @@ -104,7 +99,7 @@ function ReadoutContent({ if (unit === 'count') { renderedValue = ( - + {formatAbbreviatedNumber(typeof value === 'string' ? parseInt(value, 10) : value)} ); @@ -114,11 +109,11 @@ function ReadoutContent({ const numericValue = typeof value === 'string' ? parseFloat(value) : value; if (numericValue <= 1) { renderedValue = ( - US ${numericValue.toFixed(3)} + US ${numericValue.toFixed(3)} ); } else { renderedValue = ( - + US ${formatAbbreviatedNumber(numericValue)} ); @@ -127,7 +122,7 @@ function ReadoutContent({ if (unit === 'percentage') { renderedValue = ( - + {formatPercentage( typeof value === 'string' ? 
parseFloat(value) : value, undefined, @@ -139,7 +134,7 @@ function ReadoutContent({ if (unit === 'percent_change') { renderedValue = ( - + + {renderedValue} @@ -159,7 +154,7 @@ function ReadoutContent({ ); } - return {renderedValue}; + return {renderedValue}; } const MINIMUM_RATE_VALUE = 0.01; @@ -179,3 +174,24 @@ const LoadingContainer = styled('div')<{align: 'left' | 'right'}>` function isARateUnit(unit: string): unit is RateUnit { return (Object.values(RateUnit) as string[]).includes(unit); } + +const ReadoutWrapper = styled('div')` + flex-grow: 0; + min-width: 0; + word-break: break-word; +`; + +const ReadoutTitle = styled('h3')<{alignment: 'left' | 'right'}>` + color: ${p => p.theme.gray300}; + font-size: ${p => p.theme.fontSizeMedium}; + margin: 0; + white-space: nowrap; + height: ${space(3)}; + text-align: ${p => p.alignment}; +`; + +const ReadoutContentWrapper = styled('h4')<{alignment: 'left' | 'right'}>` + margin: 0; + font-weight: ${p => p.theme.fontWeightNormal}; + text-align: ${p => p.alignment}; +`; diff --git a/static/app/views/insights/common/components/ribbon.tsx b/static/app/views/insights/common/components/ribbon.tsx index 8cded4cb7d9b11..553057a8b7877c 100644 --- a/static/app/views/insights/common/components/ribbon.tsx +++ b/static/app/views/insights/common/components/ribbon.tsx @@ -2,8 +2,15 @@ import styled from '@emotion/styled'; import {space} from 'sentry/styles/space'; -export const Ribbon = styled('div')` +export const ReadoutRibbon = styled('div')` display: flex; flex-wrap: wrap; - gap: ${space(4)}; + column-gap: ${space(4)}; + row-gap: ${space(2)}; +`; + +export const ToolRibbon = styled('div')` + display: flex; + flex-wrap: wrap; + gap: ${space(2)}; `; diff --git a/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleInfo/index.tsx b/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleInfo/index.tsx index 5b20bafbaed85c..82c7eea277fd1c 100644 --- a/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleInfo/index.tsx +++ b/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleInfo/index.tsx @@ -1,8 +1,11 @@ +import styled from '@emotion/styled'; + +import {space} from 'sentry/styles/space'; import {DurationUnit, RateUnit} from 'sentry/utils/discover/fields'; import {usePageAlert} from 'sentry/utils/performance/contexts/pageAlert'; import {MutableSearch} from 'sentry/utils/tokenizeSearch'; import {MetricReadout} from 'sentry/views/insights/common/components/metricReadout'; -import {Ribbon} from 'sentry/views/insights/common/components/ribbon'; +import {ReadoutRibbon} from 'sentry/views/insights/common/components/ribbon'; import {getTimeSpentExplanation} from 'sentry/views/insights/common/components/tableCells/timeSpentCell'; import {useSpanMetrics} from 'sentry/views/insights/common/queries/useDiscover'; import { @@ -54,10 +57,9 @@ function SampleInfo(props: Props) { } return ( - + - + ); } +const StyledReadoutRibbon = styled(ReadoutRibbon)` + margin-bottom: ${space(2)}; +`; + export default SampleInfo; diff --git a/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx b/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx index 9d8a065ec13724..c354e482d77ab9 100644 --- a/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx +++ b/static/app/views/insights/common/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx @@ -4,7 +4,6 @@ import keyBy from 
'lodash/keyBy'; import {Button} from 'sentry/components/button'; import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; import {trackAnalytics} from 'sentry/utils/analytics'; import {usePageAlert} from 'sentry/utils/performance/contexts/pageAlert'; import {VisuallyCompleteWithData} from 'sentry/utils/performanceForSentry'; @@ -21,9 +20,7 @@ import {SpanMetricsField} from 'sentry/views/insights/types'; const {SPAN_SELF_TIME, SPAN_OP} = SpanMetricsField; -const SpanSamplesTableContainer = styled('div')` - padding-top: ${space(2)}; -`; +const SpanSamplesTableContainer = styled('div')``; type Props = { groupId: string; diff --git a/static/app/views/insights/database/views/databaseLandingPage.tsx b/static/app/views/insights/database/views/databaseLandingPage.tsx index d520911b085d89..3157da79d70cf6 100644 --- a/static/app/views/insights/database/views/databaseLandingPage.tsx +++ b/static/app/views/insights/database/views/databaseLandingPage.tsx @@ -1,5 +1,4 @@ import React from 'react'; -import styled from '@emotion/styled'; import Alert from 'sentry/components/alert'; import {Breadcrumbs} from 'sentry/components/breadcrumbs'; @@ -9,7 +8,6 @@ import * as Layout from 'sentry/components/layouts/thirds'; import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip'; import SearchBar from 'sentry/components/searchBar'; import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; import {trackAnalytics} from 'sentry/utils/analytics'; import {browserHistory} from 'sentry/utils/browserHistory'; import {decodeScalar, decodeSorts} from 'sentry/utils/queryString'; @@ -21,6 +19,7 @@ import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLay import {ModulePageFilterBar} from 'sentry/views/insights/common/components/modulePageFilterBar'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ModulesOnboarding} from 'sentry/views/insights/common/components/modulesOnboarding'; +import {ToolRibbon} from 'sentry/views/insights/common/components/ribbon'; import {useSpanMetrics} from 'sentry/views/insights/common/queries/useDiscover'; import {useSpanMetricsSeries} from 'sentry/views/insights/common/queries/useDiscoverSeries'; import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan'; @@ -203,15 +202,10 @@ export function DatabaseLandingPage() {
- - - - - - - - - + + + + @@ -246,24 +240,6 @@ function AlertBanner(props) { ); } -const FilterOptionsContainer = styled('div')` - display: flex; - flex-wrap: wrap; - gap: ${space(2)}; - - @media (min-width: ${p => p.theme.breakpoints.small}) { - flex-wrap: nowrap; - } -`; - -const SelectorContainer = styled('div')` - flex-basis: 100%; - - @media (min-width: ${p => p.theme.breakpoints.small}) { - flex-basis: auto; - } -`; - const LIMIT: number = 25; function PageWithProviders() { diff --git a/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx b/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx index b32140ad3b6239..2fd9ff0b6270f3 100644 --- a/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx +++ b/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx @@ -16,10 +16,11 @@ import {decodeScalar, decodeSorts} from 'sentry/utils/queryString'; import {MutableSearch} from 'sentry/utils/tokenizeSearch'; import {useLocation} from 'sentry/utils/useLocation'; import {useSynchronizeCharts} from 'sentry/views/insights/common/components/chart'; +import {HeaderContainer} from 'sentry/views/insights/common/components/headerContainer'; import {MetricReadout} from 'sentry/views/insights/common/components/metricReadout'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; -import {Ribbon} from 'sentry/views/insights/common/components/ribbon'; +import {ReadoutRibbon, ToolRibbon} from 'sentry/views/insights/common/components/ribbon'; import {DatabaseSpanDescription} from 'sentry/views/insights/common/components/spanDescription'; import {getTimeSpentExplanation} from 'sentry/views/insights/common/components/tableCells/timeSpentCell'; import {useSpanMetrics} from 'sentry/views/insights/common/queries/useDiscover'; @@ -179,41 +180,45 @@ export function DatabaseSpanSummaryPage({params}: Props) { - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + {groupId && ( - + - + diff --git a/static/app/views/insights/http/views/httpDomainSummaryPage.tsx b/static/app/views/insights/http/views/httpDomainSummaryPage.tsx index afa26eca59b530..dc0242b7e33d54 100644 --- a/static/app/views/insights/http/views/httpDomainSummaryPage.tsx +++ b/static/app/views/insights/http/views/httpDomainSummaryPage.tsx @@ -1,5 +1,4 @@ import React from 'react'; -import styled from '@emotion/styled'; import Alert from 'sentry/components/alert'; import ProjectAvatar from 'sentry/components/avatar/projectAvatar'; @@ -23,10 +22,11 @@ import useLocationQuery from 'sentry/utils/url/useLocationQuery'; import {useLocation} from 'sentry/utils/useLocation'; import useProjects from 'sentry/utils/useProjects'; import {useSynchronizeCharts} from 'sentry/views/insights/common/components/chart'; +import {HeaderContainer} from 'sentry/views/insights/common/components/headerContainer'; import {MetricReadout} from 'sentry/views/insights/common/components/metricReadout'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; -import {Ribbon} from 'sentry/views/insights/common/components/ribbon'; +import {ReadoutRibbon, ToolRibbon} from 'sentry/views/insights/common/components/ribbon'; import {getTimeSpentExplanation} from 'sentry/views/insights/common/components/tableCells/timeSpentCell'; import {useSpanMetrics} from 
'sentry/views/insights/common/queries/useDiscover'; import {useSpanMetricsSeries} from 'sentry/views/insights/common/queries/useDiscoverSeries'; @@ -208,12 +208,14 @@ export function HTTPDomainSummaryPage() { - - - - - - + + + + + + + + - + @@ -329,12 +331,6 @@ const DEFAULT_SORT = { const TRANSACTIONS_TABLE_ROW_COUNT = 20; -const HeaderContainer = styled('div')` - display: flex; - justify-content: space-between; - flex-wrap: wrap; -`; - function PageWithProviders() { return ( - - - - - - - + + + + + + + + + + - - + + @@ -180,10 +181,3 @@ function PageWithProviders({params}: Props) { } export default PageWithProviders; - -const SpaceBetweenWrap = styled('div')` - display: flex; - justify-content: space-between; - flex-wrap: wrap; - gap: ${space(2)}; -`; diff --git a/static/app/views/insights/mobile/appStarts/views/screenSummaryPage.tsx b/static/app/views/insights/mobile/appStarts/views/screenSummaryPage.tsx index f1077767831520..09499f0b29d352 100644 --- a/static/app/views/insights/mobile/appStarts/views/screenSummaryPage.tsx +++ b/static/app/views/insights/mobile/appStarts/views/screenSummaryPage.tsx @@ -16,12 +16,14 @@ import {DiscoverDatasets} from 'sentry/utils/discover/types'; import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert'; import {useLocation} from 'sentry/utils/useLocation'; import useRouter from 'sentry/utils/useRouter'; +import {HeaderContainer} from 'sentry/views/insights/common/components/headerContainer'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import { PRIMARY_RELEASE_ALIAS, ReleaseComparisonSelector, SECONDARY_RELEASE_ALIAS, } from 'sentry/views/insights/common/components/releaseSelector'; +import {ToolRibbon} from 'sentry/views/insights/common/components/ribbon'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; import {SamplesTables} from 'sentry/views/insights/mobile/appStarts/components/samples'; import { @@ -97,14 +99,15 @@ export function ScreenSummary() { - + - + + - + - + p.theme.text.cardTitle} `; diff --git a/static/app/views/insights/mobile/screenload/components/metricsRibbon.tsx b/static/app/views/insights/mobile/screenload/components/metricsRibbon.tsx index 88fa8633981a82..9cf41cd9dfc0eb 100644 --- a/static/app/views/insights/mobile/screenload/components/metricsRibbon.tsx +++ b/static/app/views/insights/mobile/screenload/components/metricsRibbon.tsx @@ -1,7 +1,9 @@ import type {ComponentProps} from 'react'; import {useMemo} from 'react'; +import styled from '@emotion/styled'; import type {Polarity} from 'sentry/components/percentChange'; +import {space} from 'sentry/styles/space'; import type {NewQuery} from 'sentry/types/organization'; import type {TableData, TableDataRow} from 'sentry/utils/discover/discoverQuery'; import EventView from 'sentry/utils/discover/eventView'; @@ -10,7 +12,7 @@ import {MutableSearch} from 'sentry/utils/tokenizeSearch'; import {useLocation} from 'sentry/utils/useLocation'; import usePageFilters from 'sentry/utils/usePageFilters'; import {MetricReadout} from 'sentry/views/insights/common/components/metricReadout'; -import {Ribbon} from 'sentry/views/insights/common/components/ribbon'; +import {ReadoutRibbon} from 'sentry/views/insights/common/components/ribbon'; import {useReleaseSelection} from 'sentry/views/insights/common/queries/useReleases'; import {appendReleaseFilters} from 'sentry/views/insights/common/utils/releaseComparison'; import useCrossPlatformProject from 
'sentry/views/insights/mobile/common/queries/useCrossPlatformProject'; @@ -80,7 +82,7 @@ export function MobileMetricsRibbon({ }); return ( - + {blocks.map(({title, dataKey, unit, preferredPolarity}) => ( ))} - + ); } +const StyledReadoutRibbon = styled(ReadoutRibbon)` + margin-bottom: ${space(2)}; +`; + function MetricsBlock({ title, unit, @@ -120,7 +126,6 @@ function MetricsBlock({ return ( - - - - - - - - + + + + + + + + + + + - + + p.theme.breakpoints.large}) { - grid-template-rows: auto; - grid-template-columns: auto minmax(100px, max-content); - } -`; - const FilterContainer = styled('div')` display: grid; column-gap: ${space(1)}; diff --git a/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx b/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx index ffd3ebe6d2eade..29426fb631ffa1 100644 --- a/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx +++ b/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx @@ -11,8 +11,10 @@ import {space} from 'sentry/styles/space'; import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert'; import {useLocation} from 'sentry/utils/useLocation'; import useRouter from 'sentry/utils/useRouter'; +import {HeaderContainer} from 'sentry/views/insights/common/components/headerContainer'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; import {ReleaseComparisonSelector} from 'sentry/views/insights/common/components/releaseSelector'; +import {ToolRibbon} from 'sentry/views/insights/common/components/ribbon'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; import {SpanSamplesPanel} from 'sentry/views/insights/mobile/common/components/spanSamplesPanel'; import {SamplesTables} from 'sentry/views/insights/mobile/common/components/tables/samplesTables'; @@ -65,14 +67,15 @@ function ScreenSummary() { - + - + + + - + ); } @@ -456,16 +454,14 @@ function ConsumerMetricsRibbon({ }) { const errorRate = 1 - (metrics[0]?.['trace_status_rate(ok)'] ?? 
0); return ( - + - + ); } diff --git a/static/app/views/insights/queues/views/destinationSummaryPage.tsx b/static/app/views/insights/queues/views/destinationSummaryPage.tsx index 4f931311dac139..3f77350b552b12 100644 --- a/static/app/views/insights/queues/views/destinationSummaryPage.tsx +++ b/static/app/views/insights/queues/views/destinationSummaryPage.tsx @@ -15,10 +15,11 @@ import {DurationUnit} from 'sentry/utils/discover/fields'; import {decodeScalar} from 'sentry/utils/queryString'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; +import {HeaderContainer} from 'sentry/views/insights/common/components/headerContainer'; import {MetricReadout} from 'sentry/views/insights/common/components/metricReadout'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; import {ModulePageProviders} from 'sentry/views/insights/common/components/modulePageProviders'; -import {Ribbon} from 'sentry/views/insights/common/components/ribbon'; +import {ReadoutRibbon, ToolRibbon} from 'sentry/views/insights/common/components/ribbon'; import {getTimeSpentExplanation} from 'sentry/views/insights/common/components/tableCells/timeSpentCell'; import {useOnboardingProject} from 'sentry/views/insights/common/queries/useOnboardingProject'; import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs'; @@ -73,14 +74,16 @@ function DestinationSummaryPage() { - - - - - + + + + + + + {!onboardingProject && ( - + - + )} @@ -178,8 +181,3 @@ const Flex = styled('div')` flex-direction: column; gap: ${space(2)}; `; - -const HeaderContainer = styled('div')` - display: flex; - justify-content: space-between; -`; From 02066d49080306c0cd9005638d83e821c20b0baf Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:57:39 -0700 Subject: [PATCH 103/126] analytics(replay): track render of missing replay alert (#74865) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit track render of alert banner, which we currently only show on issue details. 
context: > I’m concerned this can happen more often than customers are comfortable with and we might need to dig into reasons and improve things somehow --- .../events/eventReplay/replayClipPreviewPlayer.tsx | 7 +++++++ static/app/components/events/eventReplay/replayPreview.tsx | 7 +++++++ static/app/utils/analytics/replayAnalyticsEvents.tsx | 4 ++++ 3 files changed, 18 insertions(+) diff --git a/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx b/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx index 4ba8a05ea1f64b..643c9a1dfc4a35 100644 --- a/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx +++ b/static/app/components/events/eventReplay/replayClipPreviewPlayer.tsx @@ -17,10 +17,12 @@ import ReplayProcessingError from 'sentry/components/replays/replayProcessingErr import {IconDelete} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import {trackAnalytics} from 'sentry/utils/analytics'; import type {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; import type useReplayReader from 'sentry/utils/replays/hooks/useReplayReader'; import type RequestError from 'sentry/utils/requestError/requestError'; import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; +import useOrganization from 'sentry/utils/useOrganization'; import FluidHeight from 'sentry/views/replays/detail/layout/fluidHeight'; import type {ReplayRecord} from 'sentry/views/replays/types'; @@ -76,6 +78,7 @@ function ReplayClipPreviewPlayer({ useRouteAnalyticsParams({ event_replay_status: getReplayAnalyticsStatus({fetchError, replayRecord}), }); + const organization = useOrganization(); if (replayRecord?.is_archived) { return ( @@ -89,6 +92,10 @@ function ReplayClipPreviewPlayer({ } if (fetchError) { + trackAnalytics('replay.render-missing-replay-alert', { + organization, + surface: 'issue details - clip preview', + }); return ; } diff --git a/static/app/components/events/eventReplay/replayPreview.tsx b/static/app/components/events/eventReplay/replayPreview.tsx index f0cb806731a11b..e531a79e32e631 100644 --- a/static/app/components/events/eventReplay/replayPreview.tsx +++ b/static/app/components/events/eventReplay/replayPreview.tsx @@ -13,10 +13,12 @@ import MissingReplayAlert from 'sentry/components/replays/alerts/missingReplayAl import {IconDelete} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import {trackAnalytics} from 'sentry/utils/analytics'; import type {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader'; import type RequestError from 'sentry/utils/requestError/requestError'; import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; +import useOrganization from 'sentry/utils/useOrganization'; import type {ReplayRecord} from 'sentry/views/replays/types'; type Props = { @@ -62,6 +64,7 @@ function ReplayPreview({ orgSlug, replaySlug, }); + const organization = useOrganization(); const startTimestampMs = replayRecord?.started_at?.getTime() ?? 
0; const initialTimeOffsetMs = useMemo(() => { @@ -88,6 +91,10 @@ function ReplayPreview({ } if (fetchError) { + trackAnalytics('replay.render-missing-replay-alert', { + organization, + surface: 'issue details - old preview', + }); return ; } diff --git a/static/app/utils/analytics/replayAnalyticsEvents.tsx b/static/app/utils/analytics/replayAnalyticsEvents.tsx index 45c67bcdb3ff65..0f521f64012d4f 100644 --- a/static/app/utils/analytics/replayAnalyticsEvents.tsx +++ b/static/app/utils/analytics/replayAnalyticsEvents.tsx @@ -92,6 +92,9 @@ export type ReplayEventParameters = { platform: string | undefined; project_id: string | undefined; }; + 'replay.render-missing-replay-alert': { + surface: string; + }; 'replay.render-player': { aspect_ratio: 'portrait' | 'landscape'; context: string; @@ -136,6 +139,7 @@ export const replayEventMap: Record = { 'replay.rage-click-sdk-banner.dismissed': 'Replay Rage Click SDK Banner Dismissed', 'replay.rage-click-sdk-banner.rendered': 'Replay Rage Click SDK Banner Rendered', 'replay.render-issues-group-list': 'Render Issues Detail Replay List', + 'replay.render-missing-replay-alert': 'Render Missing Replay Alert', 'replay.render-player': 'Rendered ReplayPlayer', 'replay.search': 'Searched Replay', 'replay.toggle-fullscreen': 'Toggled Replay Fullscreen', From ae57e2a213da7713df3710c237bf53188f9523a5 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:58:03 -0700 Subject: [PATCH 104/126] fix(feedback): fix copy url (#74874) closes https://github.com/getsentry/sentry/issues/74869 --- .../components/feedback/feedbackItem/feedbackShortId.tsx | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/static/app/components/feedback/feedbackItem/feedbackShortId.tsx b/static/app/components/feedback/feedbackItem/feedbackShortId.tsx index 4ee134fdcd6c2f..c13274c767765a 100644 --- a/static/app/components/feedback/feedbackItem/feedbackShortId.tsx +++ b/static/app/components/feedback/feedbackItem/feedbackShortId.tsx @@ -1,6 +1,7 @@ import type {CSSProperties} from 'react'; import {css} from '@emotion/react'; import styled from '@emotion/styled'; +import queryString from 'query-string'; import {Flex} from 'sentry/components/container/flex'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; @@ -39,10 +40,14 @@ export default function FeedbackShortId({className, feedbackItem, style}: Props) const organization = useOrganization(); const projectSlug = useCurrentFeedbackProject(); + // we need the stringifyUrl part so that the whole item is a string + // for the copy url button below. normalizeUrl can return an object if `query` + // or other options are passed, which breaks the copy-paste. const feedbackUrl = window.location.origin + - normalizeUrl({ - pathname: `/organizations/${organization.slug}/feedback/`, + normalizeUrl(`/organizations/${organization.slug}/feedback/`) + + queryString.stringifyUrl({ + url: '?', query: { feedbackSlug: `${projectSlug}:${feedbackItem.id}`, project: feedbackItem.project?.id, From e0aab4bb00a8ecdf4a5affeec37b6e53ab2a4833 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:58:14 -0700 Subject: [PATCH 105/126] ref(feedback): add visualization for non-image attachments and allow download (#74802) - closes https://github.com/getsentry/sentry/issues/74731 - adds support for downloading non-image files - attempt to load all attachments as images first. 
if `` gives an error, then send that to our screenshot renderer, and tell it to use the default "no preview found" renderer instead. before: SCR-20240723-nrth after: "no preview found" text along with option to download the item: SCR-20240723-nrjc SCR-20240723-nuzb all the attachments show up in the modal as well and work as expected with the pagination. SCR-20240723-obyr SCR-20240723-obxj works with the example @c298lee brought up where the `mimetype` was incorrectly set: SCR-20240724-jsua --- .../feedbackItem/feedbackScreenshot.tsx | 61 ++++++++++++++++++- .../feedbackItem/screenshotsModal.tsx | 2 +- 2 files changed, 59 insertions(+), 4 deletions(-) diff --git a/static/app/components/feedback/feedbackItem/feedbackScreenshot.tsx b/static/app/components/feedback/feedbackItem/feedbackScreenshot.tsx index 738c553f797877..94d029a09c181f 100644 --- a/static/app/components/feedback/feedbackItem/feedbackScreenshot.tsx +++ b/static/app/components/feedback/feedbackItem/feedbackScreenshot.tsx @@ -4,6 +4,11 @@ import styled from '@emotion/styled'; import ImageVisualization from 'sentry/components/events/eventTagsAndScreenshot/screenshot/imageVisualization'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import Panel from 'sentry/components/panels/panel'; +import TextOverflow from 'sentry/components/textOverflow'; +import {Tooltip} from 'sentry/components/tooltip'; +import {IconImage} from 'sentry/icons'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; import type {EventAttachment, Organization, Project} from 'sentry/types'; type Props = { @@ -22,18 +27,27 @@ export default function FeedbackScreenshot({ onClick, }: Props) { const [isLoading, setIsLoading] = useState(true); + // since we can't trust mimetype, we'll try to load all attachments as images + // if it fails, then set that here, and render a default preview instead. + const [imgLoadError, setImgLoadError] = useState(false); + const img = ( setIsLoading(false)} - onError={() => setIsLoading(false)} + onLoad={() => { + setIsLoading(false); + }} + onError={() => { + setIsLoading(false); + setImgLoadError(true); + }} /> ); - return ( + return !imgLoadError ? ( {isLoading && ( @@ -42,6 +56,21 @@ export default function FeedbackScreenshot({ )} {onClick ? 
{img} : img} + ) : ( + + + + {t('No preview found')} + + + + {screenshot.name} + + + ); } @@ -81,3 +110,29 @@ const StyledImageVisualization = styled(ImageVisualization)` height: auto; } `; +const FileDownload = styled('a')` + cursor: pointer; + padding: ${space(1)}; + text-decoration: underline; + color: inherit; + :hover { + color: inherit; + text-decoration: underline; + } +`; + +const File = styled(StyledPanel)` + background: ${p => p.theme.purple100}; + padding: ${space(2)}; + max-width: 300px; +`; + +const NoPreviewFound = styled('p')` + color: ${p => p.theme.gray300}; + display: flex; + flex-direction: column; + align-items: center; + gap: ${space(0.5)}; + justify-content: center; + margin: 0; +`; diff --git a/static/app/components/feedback/feedbackItem/screenshotsModal.tsx b/static/app/components/feedback/feedbackItem/screenshotsModal.tsx index 5126f21eb4a9b4..1e348d56f8960a 100644 --- a/static/app/components/feedback/feedbackItem/screenshotsModal.tsx +++ b/static/app/components/feedback/feedbackItem/screenshotsModal.tsx @@ -53,7 +53,7 @@ export default function ScreenshotsModal({ )} - + Date: Wed, 24 Jul 2024 13:03:18 -0700 Subject: [PATCH 106/126] ref(similarity): Add retries with backoff to seer call in backfill (#74860) Add retries with backoff to seer call in similarity backfill if the service is unavailable Does not do exponential retries for other exceptions --- ...kfill_seer_grouping_records_for_project.py | 2 + src/sentry/tasks/embeddings_grouping/utils.py | 41 +++++++++++++++---- 2 files changed, 36 insertions(+), 7 deletions(-) diff --git a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py index 9a62146e02a309..b00c662d3342a4 100644 --- a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py +++ b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py @@ -186,11 +186,13 @@ def backfill_seer_grouping_records_for_project( seer_response = send_group_and_stacktrace_to_seer_multithreaded( groups_to_backfill_with_no_embedding_has_snuba_row_and_nodestore_row, nodestore_results, + project.id, ) else: seer_response = send_group_and_stacktrace_to_seer( groups_to_backfill_with_no_embedding_has_snuba_row_and_nodestore_row, nodestore_results, + project.id, ) if not seer_response.get("success"): diff --git a/src/sentry/tasks/embeddings_grouping/utils.py b/src/sentry/tasks/embeddings_grouping/utils.py index edb3023969ea7d..cae4d2a67cadfa 100644 --- a/src/sentry/tasks/embeddings_grouping/utils.py +++ b/src/sentry/tasks/embeddings_grouping/utils.py @@ -21,6 +21,7 @@ from sentry.models.group import Group, GroupStatus from sentry.models.project import Project from sentry.seer.similarity.grouping_records import ( + BulkCreateGroupingRecordsResponse, CreateGroupingRecordData, CreateGroupingRecordsRequest, delete_project_grouping_records, @@ -358,33 +359,59 @@ def get_events_from_nodestore( ) +def _make_seer_call( + create_grouping_records_request: CreateGroupingRecordsRequest, project_id: int +) -> BulkCreateGroupingRecordsResponse | None: + try: + seer_response = _retry_operation( + post_bulk_grouping_records, + create_grouping_records_request, + retries=3, + delay=2, + exceptions=ServiceUnavailable, + ) + except ServiceUnavailable: + logger.exception( + "tasks.backfill_seer_grouping_records.seer_service_unavailable", + extra={"project_id": project_id}, + ) + raise + + return seer_response + + @sentry_sdk.tracing.trace 
@metrics.wraps(f"{BACKFILL_NAME}.send_group_and_stacktrace_to_seer", sample_rate=1.0) def send_group_and_stacktrace_to_seer( - groups_to_backfill_with_no_embedding_has_snuba_row_and_nodestore_row, nodestore_results + groups_to_backfill_with_no_embedding_has_snuba_row_and_nodestore_row, + nodestore_results, + project_id, ): - seer_response = post_bulk_grouping_records( + return _make_seer_call( CreateGroupingRecordsRequest( group_id_list=groups_to_backfill_with_no_embedding_has_snuba_row_and_nodestore_row, data=nodestore_results["data"], stacktrace_list=nodestore_results["stacktrace_list"], - ) + ), + project_id, ) - return seer_response @sentry_sdk.tracing.trace @metrics.wraps(f"{BACKFILL_NAME}.send_group_and_stacktrace_to_seer", sample_rate=1.0) def send_group_and_stacktrace_to_seer_multithreaded( - groups_to_backfill_with_no_embedding_has_snuba_row_and_nodestore_row, nodestore_results + groups_to_backfill_with_no_embedding_has_snuba_row_and_nodestore_row, + nodestore_results, + project_id, ): def process_chunk(chunk_data, chunk_stacktrace): - return post_bulk_grouping_records( + return _make_seer_call( CreateGroupingRecordsRequest( group_id_list=chunk_data["group_ids"], data=chunk_data["data"], stacktrace_list=chunk_stacktrace, - ) + ), + project_id, ) chunk_size = options.get("similarity.backfill_seer_chunk_size") From caa6473354eb8a3cca41c7f2567dc7969b6dc5a4 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 16:04:34 -0400 Subject: [PATCH 107/126] ref: avoid clobbering base class do_request (#74881) mypy 1.11 points out the inconsistent signatures here --- .../api/endpoints/test_organization_traces.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/sentry/api/endpoints/test_organization_traces.py b/tests/sentry/api/endpoints/test_organization_traces.py index a7b5149bc054d4..e7ed69fa34dd80 100644 --- a/tests/sentry/api/endpoints/test_organization_traces.py +++ b/tests/sentry/api/endpoints/test_organization_traces.py @@ -859,7 +859,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): class OrganizationTraceSpansEndpointTest(OrganizationTracesEndpointTestBase): view = "sentry-api-0-organization-trace-spans" - def do_request(self, trace_id, query, features=None, **kwargs): + def _do_request(self, trace_id, query, features=None, **kwargs): if features is None: features = ["organizations:performance-trace-explorer"] with self.feature(features): @@ -880,18 +880,18 @@ def test_no_feature(self): query = { "project": [self.project.id], } - response = self.do_request(uuid4().hex, query, features=[]) + response = self._do_request(uuid4().hex, query, features=[]) assert response.status_code == 404, response.data def test_no_project(self): - response = self.do_request(uuid4().hex, {}) + response = self._do_request(uuid4().hex, {}) assert response.status_code == 404, response.data def test_bad_params_missing_field(self): query = { "project": [self.project.id], } - response = self.do_request(uuid4().hex, query) + response = self._do_request(uuid4().hex, query) assert response.status_code == 400, response.data assert response.data == { "field": [ @@ -916,7 +916,7 @@ def test_get_spans_for_trace(self): "sort": "id", } - response = self.do_request(trace_id, query) + response = self._do_request(trace_id, query) assert response.status_code == 200, response.data assert response.data["meta"] == { "dataset": "unknown", @@ -955,7 +955,7 @@ def test_get_spans_for_trace_matching_tags(self): 
"query": user_query, } - response = self.do_request(trace_id, query) + response = self._do_request(trace_id, query) assert response.status_code == 200, response.data assert response.data["meta"] == { "dataset": "unknown", @@ -1010,7 +1010,7 @@ def test_get_spans_for_trace_matching_tags_metrics(self): if user_query: query["query"] = user_query - response = self.do_request(trace_id, query) + response = self._do_request(trace_id, query) assert response.status_code == 200, response.data assert response.data["meta"] == { "dataset": "unknown", From 5b8bf56e06181ea9b04ae6e0c2eb7e3d64c53fb3 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Wed, 24 Jul 2024 13:17:04 -0700 Subject: [PATCH 108/126] fix(hybrid-cloud): Resubmission: adds defaults to provisioning model fields, lost_password_hash model (#74766) (#74883) --- .../services/control_organization_provisioning/model.py | 4 ++-- src/sentry/services/organization/model.py | 4 ++-- src/sentry/users/services/lost_password_hash/model.py | 7 +++++-- tests/sentry/api/endpoints/test_event_ai_suggested_fix.py | 2 +- tests/sentry/feedback/usecases/test_create_feedback.py | 2 +- 5 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/sentry/hybridcloud/services/control_organization_provisioning/model.py b/src/sentry/hybridcloud/services/control_organization_provisioning/model.py index e5a904059603f4..20e76051f7c38b 100644 --- a/src/sentry/hybridcloud/services/control_organization_provisioning/model.py +++ b/src/sentry/hybridcloud/services/control_organization_provisioning/model.py @@ -1,7 +1,7 @@ -import pydantic +from sentry.hybridcloud.rpc import RpcModel -class RpcOrganizationSlugReservation(pydantic.BaseModel): +class RpcOrganizationSlugReservation(RpcModel): id: int organization_id: int user_id: int | None diff --git a/src/sentry/services/organization/model.py b/src/sentry/services/organization/model.py index 8730c9864c41eb..cb9a171dca9be7 100644 --- a/src/sentry/services/organization/model.py +++ b/src/sentry/services/organization/model.py @@ -13,8 +13,8 @@ class OrganizationOptions(pydantic.BaseModel): class PostProvisionOptions(pydantic.BaseModel): - sentry_options: Any | None # Placeholder for any sentry post-provisioning data - getsentry_options: Any | None # Reserved for getsentry post-provisioning data + sentry_options: Any | None = None # Placeholder for any sentry post-provisioning data + getsentry_options: Any | None = None # Reserved for getsentry post-provisioning data class OrganizationProvisioningOptions(pydantic.BaseModel): diff --git a/src/sentry/users/services/lost_password_hash/model.py b/src/sentry/users/services/lost_password_hash/model.py index 7a6d9c07cf0366..bf2d1f6e5f95f8 100644 --- a/src/sentry/users/services/lost_password_hash/model.py +++ b/src/sentry/users/services/lost_password_hash/model.py @@ -3,7 +3,10 @@ # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. 
-import datetime +from datetime import datetime + +from django.utils import timezone +from pydantic import Field from sentry.hybridcloud.rpc import RpcModel from sentry.models.lostpasswordhash import LostPasswordHash @@ -13,7 +16,7 @@ class RpcLostPasswordHash(RpcModel): id: int = -1 user_id: int = -1 hash: str = "" - date_added = datetime.datetime + date_added: datetime = Field(default_factory=timezone.now) def get_absolute_url(self, mode: str = "recover") -> str: return LostPasswordHash.get_lostpassword_url(self.user_id, self.hash, mode) diff --git a/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py b/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py index b1029c188a3fab..f990f8d5010332 100644 --- a/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py +++ b/tests/sentry/api/endpoints/test_event_ai_suggested_fix.py @@ -34,7 +34,7 @@ def dummy_response(*args, **kwargs): finish_reason="stop", ) ], - created=time.time(), + created=int(time.time()), model="gpt3.5-trubo", object="chat.completion", ) diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index 8ef46e7d19f71a..986a4141e025df 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -64,7 +64,7 @@ def create_dummy_response(*args, **kwargs): finish_reason="stop", ) ], - created=time.time(), + created=int(time.time()), model="gpt3.5-trubo", object="chat.completion", ) From aa70778cd530d2028caa18ac810d15b40cc1b670 Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Wed, 24 Jul 2024 16:37:28 -0400 Subject: [PATCH 109/126] ref(breadcrumbs): Address early feedback (#74785) This PR addresses a wave a feedback for the new breadcrumbs design - Larger, darker text for descriptions - Monospace font and preserving whitespace for description - Show message crumbs as blue - Remove Sort/Filter on issue details - Allow categories to contain upper case letters - Unknown categories will be unchanged - Absolute timestamp is being kept as the default - Respect sort order on summary crumbs, image **todo** - [x] Fix tests - [ ] Fix virtualization (deferring to future PR) - [x] Remove text from control on main page --- .../breadcrumbItemContent.spec.tsx | 3 +- .../breadcrumbs/breadcrumbItemContent.tsx | 22 +++-- .../breadcrumbsDataSection.spec.tsx | 57 +++++------ .../breadcrumbs/breadcrumbsDataSection.tsx | 95 +++++++++---------- .../breadcrumbsDrawerContent.spec.tsx | 25 ++--- .../breadcrumbs/breadcrumbsDrawerContent.tsx | 9 +- .../breadcrumbs/breadcrumbsTimeline.tsx | 51 ++++++---- .../components/events/breadcrumbs/utils.tsx | 92 ++++++++++-------- static/app/components/timeline/index.tsx | 28 ++---- static/app/utils/string/toTitleCase.spec.tsx | 21 ++++ static/app/utils/string/toTitleCase.tsx | 17 +++- 11 files changed, 231 insertions(+), 189 deletions(-) create mode 100644 static/app/utils/string/toTitleCase.spec.tsx diff --git a/static/app/components/events/breadcrumbs/breadcrumbItemContent.spec.tsx b/static/app/components/events/breadcrumbs/breadcrumbItemContent.spec.tsx index e27363b35121c7..6c4449ad1c7d3b 100644 --- a/static/app/components/events/breadcrumbs/breadcrumbItemContent.spec.tsx +++ b/static/app/components/events/breadcrumbs/breadcrumbItemContent.spec.tsx @@ -42,7 +42,8 @@ describe('BreadcrumbItemContent', function () { screen.getByText(`${breadcrumb.data?.method}: [${breadcrumb.data?.status_code}]`) ).toBeInTheDocument(); expect(screen.getByRole('link', {name: 
breadcrumb.data?.url})).toBeInTheDocument(); - expect(screen.getByText('2 items')).toBeInTheDocument(); + expect(screen.getByText('123')).toBeInTheDocument(); + expect(screen.getByText('15080')).toBeInTheDocument(); }); it('renders SQL crumbs with all data', function () { diff --git a/static/app/components/events/breadcrumbs/breadcrumbItemContent.tsx b/static/app/components/events/breadcrumbs/breadcrumbItemContent.tsx index e22e2baff29aab..eaa03f788d4d1b 100644 --- a/static/app/components/events/breadcrumbs/breadcrumbItemContent.tsx +++ b/static/app/components/events/breadcrumbs/breadcrumbItemContent.tsx @@ -37,16 +37,15 @@ export default function BreadcrumbItemContent({ }: BreadcrumbItemContentProps) { const structuredDataProps = { ...DEFAULT_STRUCTURED_DATA_PROPS, - forceDefaultExpand: fullyExpanded, maxDefaultDepth: fullyExpanded ? 10000 : DEFAULT_STRUCTURED_DATA_PROPS.maxDefaultDepth, }; const defaultMessage = defined(bc.message) ? ( - + - + ) : null; const defaultData = defined(bc.data) ? ( @@ -110,7 +109,7 @@ function HTTPCrumbContent({ return ( {children} - + {defined(method) && `${method}: `} {isValidUrl ? ( )} {defined(statusCode) && ` [${statusCode}]`} - + {Object.keys(otherData).length > 0 ? ( @@ -175,10 +174,10 @@ function ExceptionCrumbContent({ const {type, value, ...otherData} = breadcrumb?.data ?? {}; return ( - + {type && type} {type ? value && `: ${value}` : value && value} - + {children} {Object.keys(otherData).length > 0 ? ( @@ -190,7 +189,7 @@ function ExceptionCrumbContent({ } const Link = styled('a')` - color: ${p => p.theme.subText}; + color: ${p => p.theme.textColor}; text-decoration: underline; text-decoration-style: dotted; word-break: break-all; @@ -204,3 +203,10 @@ const LightenTextColor = styled('pre')` font-size: ${p => p.theme.fontSizeSmall}; } `; + +const BreadcrumbText = styled(Timeline.Text)` + white-space: pre-wrap; + font-family: ${p => p.theme.text.familyMono}; + font-size: ${p => p.theme.codeFontSize}; + color: ${p => p.theme.textColor}; +`; diff --git a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx index 0f33ee7b09ac5f..0a064b62e4867a 100644 --- a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx +++ b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.spec.tsx @@ -75,50 +75,43 @@ describe('BreadcrumbsDataSection', function () { ); // From virtual crumb - expect(screen.getByText('0ms')).toBeInTheDocument(); - expect(screen.queryByText('06:01:48.762')).not.toBeInTheDocument(); + expect(screen.getByText('06:01:48.762')).toBeInTheDocument(); + expect(screen.queryByText('0ms')).not.toBeInTheDocument(); // From event breadcrumb - expect(screen.getByText('-1min 2ms')).toBeInTheDocument(); - expect(screen.queryByText('06:00:48.760')).not.toBeInTheDocument(); + expect(screen.getByText('06:00:48.760')).toBeInTheDocument(); + expect(screen.queryByText('-1min 2ms')).not.toBeInTheDocument(); const timeControl = screen.getByRole('button', { name: 'Change Time Format for Breadcrumbs', }); await userEvent.click(timeControl); - expect(screen.queryByText('0ms')).not.toBeInTheDocument(); - expect(screen.getByText('06:01:48.762')).toBeInTheDocument(); - expect(screen.queryByText('-1min 2ms')).not.toBeInTheDocument(); - expect(screen.getByText('06:00:48.760')).toBeInTheDocument(); - - await userEvent.click(timeControl); - expect(screen.getByText('0ms')).toBeInTheDocument(); 
expect(screen.queryByText('06:01:48.762')).not.toBeInTheDocument(); expect(screen.getByText('-1min 2ms')).toBeInTheDocument(); expect(screen.queryByText('06:00:48.760')).not.toBeInTheDocument(); + + await userEvent.click(timeControl); + + expect(screen.queryByText('0ms')).not.toBeInTheDocument(); + expect(screen.getByText('06:01:48.762')).toBeInTheDocument(); + expect(screen.queryByText('-1min 2ms')).not.toBeInTheDocument(); + expect(screen.getByText('06:00:48.760')).toBeInTheDocument(); }); - it.each([ - {action: 'Search', elementRole: 'textbox'}, - {action: 'Filter', elementRole: 'button'}, - {action: 'Sort', elementRole: 'button'}, - ])( - 'opens the drawer, and focuses $action $elementRole when $action button is pressed', - async ({action, elementRole}) => { - render(); + it('opens the drawer and focuses search when the search button is pressed', async function () { + render(); - const control = screen.getByRole('button', {name: `${action} Breadcrumbs`}); - expect(control).toBeInTheDocument(); - await userEvent.click(control); - expect( - screen.getByRole('complementary', {name: 'breadcrumb drawer'}) - ).toBeInTheDocument(); - const drawerControl = screen.getByRole(elementRole, { - name: `${action} All Breadcrumbs`, - }); - expect(drawerControl).toBeInTheDocument(); - expect(drawerControl).toHaveFocus(); - } - ); + const control = screen.getByRole('button', {name: 'Open Breadcrumb Search'}); + expect(control).toBeInTheDocument(); + await userEvent.click(control); + expect( + screen.getByRole('complementary', {name: 'breadcrumb drawer'}) + ).toBeInTheDocument(); + const drawerControl = screen.getByRole('textbox', { + name: 'Search All Breadcrumbs', + }); + expect(drawerControl).toBeInTheDocument(); + expect(drawerControl).toHaveFocus(); + }); }); diff --git a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx index 40bb442e75efd6..558b09435b30be 100644 --- a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx +++ b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx @@ -13,23 +13,26 @@ import { import BreadcrumbsTimeline from 'sentry/components/events/breadcrumbs/breadcrumbsTimeline'; import { BREADCRUMB_TIME_DISPLAY_LOCALSTORAGE_KEY, + BREADCRUMB_TIME_DISPLAY_OPTIONS, BreadcrumbTimeDisplay, getEnhancedBreadcrumbs, getSummaryBreadcrumbs, } from 'sentry/components/events/breadcrumbs/utils'; import {EventDataSection} from 'sentry/components/events/eventDataSection'; +import { + BREADCRUMB_SORT_LOCALSTORAGE_KEY, + BreadcrumbSort, +} from 'sentry/components/events/interfaces/breadcrumbs'; import useFeedbackWidget from 'sentry/components/feedback/widget/useFeedbackWidget'; import useDrawer from 'sentry/components/globalDrawer'; import { IconClock, IconEllipsis, - IconFilter, IconMegaphone, IconSearch, - IconSort, IconTimer, } from 'sentry/icons'; -import {t} from 'sentry/locale'; +import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types/event'; import type {Group} from 'sentry/types/group'; @@ -52,23 +55,27 @@ export default function BreadcrumbsDataSection({ }: BreadcrumbsDataSectionProps) { const {openDrawer} = useDrawer(); const organization = useOrganization(); - // Use the local storage preferences, but allow the drawer to do updates const [timeDisplay, setTimeDisplay] = useLocalStorageState( BREADCRUMB_TIME_DISPLAY_LOCALSTORAGE_KEY, - BreadcrumbTimeDisplay.RELATIVE + BreadcrumbTimeDisplay.ABSOLUTE + ); + // 
Use the local storage preferences, but allow the drawer to do updates + const [sort, _setSort] = useLocalStorageState( + BREADCRUMB_SORT_LOCALSTORAGE_KEY, + BreadcrumbSort.NEWEST ); const enhancedCrumbs = useMemo(() => getEnhancedBreadcrumbs(event), [event]); const summaryCrumbs = useMemo( - () => getSummaryBreadcrumbs(enhancedCrumbs), - [enhancedCrumbs] + () => getSummaryBreadcrumbs(enhancedCrumbs, sort), + [enhancedCrumbs, sort] ); const startTimeString = useMemo( () => timeDisplay === BreadcrumbTimeDisplay.RELATIVE - ? enhancedCrumbs?.at(-1)?.breadcrumb?.timestamp + ? summaryCrumbs?.at(0)?.breadcrumb?.timestamp : undefined, - [enhancedCrumbs, timeDisplay] + [summaryCrumbs, timeDisplay] ); const onViewAllBreadcrumbs = useCallback( @@ -81,23 +88,20 @@ export default function BreadcrumbsDataSection({ ({Header, Body}) => (
- - - - {group.shortId} - - ), - }, - {label: getShortEventId(event.id)}, - {label: t('Breadcrumbs')}, - ]} - /> - - + + + {group.shortId} + + ), + }, + {label: getShortEventId(event.id)}, + {label: t('Breadcrumbs')}, + ]} + />
), - {ariaLabel: 'breadcrumb drawer', closeOnOutsideClick: false} + {ariaLabel: 'breadcrumb drawer'} ); }, [group, event, project, openDrawer, enhancedCrumbs, organization] @@ -117,28 +121,26 @@ export default function BreadcrumbsDataSection({ return null; } + const nextTimeDisplay = + timeDisplay === BreadcrumbTimeDisplay.ABSOLUTE + ? BreadcrumbTimeDisplay.RELATIVE + : BreadcrumbTimeDisplay.ABSOLUTE; + const actions = ( + ); } -const BreadcrumbHeader = styled('div')` - width: 100%; - display: flex; - justify-content: space-between; -`; - const ViewAllContainer = styled('div')` position: relative; display: grid; diff --git a/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.spec.tsx b/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.spec.tsx index 38b8a28970eeb1..98f539b8edbe9e 100644 --- a/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.spec.tsx +++ b/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.spec.tsx @@ -58,7 +58,9 @@ describe('BreadcrumbsDrawerContent', function () { expect(drawerScreen.getByText(level)).toBeInTheDocument(); expect(drawerScreen.getByText(message)).toBeInTheDocument(); } - expect(drawerScreen.getAllByText('-1min 2ms')).toHaveLength(MOCK_BREADCRUMBS.length); + expect(drawerScreen.getAllByText('06:00:48.760')).toHaveLength( + MOCK_BREADCRUMBS.length + ); }); it('allows search to affect displayed crumbs', async function () { @@ -129,24 +131,25 @@ describe('BreadcrumbsDrawerContent', function () { it('allows time display dropdown to change all displayed crumbs', async function () { const drawerScreen = await renderBreadcrumbDrawer(); - expect(drawerScreen.getAllByText('-1min 2ms')).toHaveLength(MOCK_BREADCRUMBS.length); - expect(drawerScreen.queryByText('06:00:48.760')).not.toBeInTheDocument(); - + expect(drawerScreen.getAllByText('06:00:48.760')).toHaveLength( + MOCK_BREADCRUMBS.length + ); + expect(drawerScreen.queryByText('-1min 2ms')).not.toBeInTheDocument(); const timeControl = drawerScreen.getByRole('button', { name: 'Change Time Format for All Breadcrumbs', }); + await userEvent.click(timeControl); + await userEvent.click(drawerScreen.getByRole('option', {name: 'Relative'})); + + expect(drawerScreen.queryByText('06:00:48.760')).not.toBeInTheDocument(); + expect(drawerScreen.getAllByText('-1min 2ms')).toHaveLength(MOCK_BREADCRUMBS.length); + await userEvent.click(timeControl); await userEvent.click(drawerScreen.getByRole('option', {name: 'Absolute'})); - expect(drawerScreen.queryByText('-1min 2ms')).not.toBeInTheDocument(); expect(drawerScreen.getAllByText('06:00:48.760')).toHaveLength( MOCK_BREADCRUMBS.length ); - - await userEvent.click(timeControl); - await userEvent.click(drawerScreen.getByRole('option', {name: 'Relative'})); - - expect(drawerScreen.getAllByText('-1min 2ms')).toHaveLength(MOCK_BREADCRUMBS.length); - expect(drawerScreen.queryByText('06:00:48.760')).not.toBeInTheDocument(); + expect(drawerScreen.queryByText('-1min 2ms')).not.toBeInTheDocument(); }); }); diff --git a/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.tsx b/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.tsx index 30294d3fc216fc..990b4c3bac5e23 100644 --- a/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.tsx +++ b/static/app/components/events/breadcrumbs/breadcrumbsDrawerContent.tsx @@ -70,7 +70,7 @@ export function BreadcrumbsDrawerContent({ const [timeDisplay, setTimeDisplay] = useLocalStorageState( BREADCRUMB_TIME_DISPLAY_LOCALSTORAGE_KEY, - 
BreadcrumbTimeDisplay.RELATIVE + BreadcrumbTimeDisplay.ABSOLUTE ); const filterOptions = useMemo( () => getBreadcrumbFilterOptions(breadcrumbs), @@ -191,10 +191,8 @@ export function BreadcrumbsDrawerContent({ }); }} value={timeDisplay} - options={BREADCRUMB_TIME_DISPLAY_OPTIONS} - > - {null} - + options={Object.values(BREADCRUMB_TIME_DISPLAY_OPTIONS)} + /> ); @@ -226,6 +224,7 @@ export function BreadcrumbsDrawerContent({ )} diff --git a/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx b/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx index 57a686643862ee..708e62df0d7c72 100644 --- a/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx +++ b/static/app/components/events/breadcrumbs/breadcrumbsTimeline.tsx @@ -1,4 +1,4 @@ -import {Fragment, useRef} from 'react'; +import {useRef} from 'react'; import styled from '@emotion/styled'; import {useVirtualizer} from '@tanstack/react-virtual'; import moment from 'moment-timezone'; @@ -18,9 +18,9 @@ import {shouldUse24Hours} from 'sentry/utils/dates'; interface BreadcrumbsTimelineProps { breadcrumbs: EnhancedCrumb[]; /** - * If false, expands the contents of the breadcrumb's data payload, adds padding. + * If true, expands the contents of the breadcrumbs' data payload */ - isCompact?: boolean; + fullyExpanded?: boolean; /** * Shows the line after the last breadcrumbs icon. * Useful for connecting timeline to components rendered after it. @@ -35,7 +35,7 @@ interface BreadcrumbsTimelineProps { export default function BreadcrumbsTimeline({ breadcrumbs, startTimeString, - isCompact = false, + fullyExpanded = false, showLastLine = false, }: BreadcrumbsTimelineProps) { const containerRef = useRef(null); @@ -81,32 +81,34 @@ export default function BreadcrumbsTimeline({ ) : null; return ( - - {title} - {isVirtualCrumb && - {t('This event')}} +
+
+ {title} + {isVirtualCrumb && - {t('This event')}} +
{levelComponent} - +
} colorConfig={colorConfig} icon={iconComponent} timestamp={timestampComponent} // XXX: Only the virtual crumb can be marked as active for breadcrumbs isActive={isVirtualCrumb ?? false} - style={showLastLine ? {background: 'transparent'} : {}} data-index={virtualizedRow.index} + showLastLine={showLastLine} > - + -
+ ); }); @@ -123,6 +125,11 @@ export default function BreadcrumbsTimeline({ ); } +const Header = styled('div')` + display: flex; + justify-content: space-between; +`; + const Subtitle = styled('p')` margin: 0; font-weight: normal; @@ -131,7 +138,7 @@ const Subtitle = styled('p')` `; const Timestamp = styled('div')` - margin: 0 ${space(1)}; + margin-right: ${space(1)}; color: ${p => p.theme.subText}; font-size: ${p => p.theme.fontSizeSmall}; span { @@ -139,6 +146,18 @@ const Timestamp = styled('div')` } `; -const ContentWrapper = styled('div')<{isCompact: boolean}>` - padding-bottom: ${p => space(p.isCompact ? 0.5 : 1.0)}; +const ContentWrapper = styled('div')` + padding-bottom: ${space(1)}; +`; + +const BreadcrumbItem = styled(Timeline.Item)` + border-bottom: 1px solid transparent; + &:not(:last-child) { + border-image: linear-gradient( + to right, + transparent 20px, + ${p => p.theme.translucentInnerBorder} 20px + ) + 100% 1; + } `; diff --git a/static/app/components/events/breadcrumbs/utils.tsx b/static/app/components/events/breadcrumbs/utils.tsx index ceb38d31614e28..79bbb888df115d 100644 --- a/static/app/components/events/breadcrumbs/utils.tsx +++ b/static/app/components/events/breadcrumbs/utils.tsx @@ -1,7 +1,7 @@ import styled from '@emotion/styled'; -import Tag from 'sentry/components/badge/tag'; import type {SelectOption} from 'sentry/components/compactSelect'; +import {BreadcrumbSort} from 'sentry/components/events/interfaces/breadcrumbs'; import type {BreadcrumbMeta} from 'sentry/components/events/interfaces/breadcrumbs/types'; import { convertCrumbType, @@ -41,10 +41,16 @@ export const enum BreadcrumbTimeDisplay { RELATIVE = 'relative', ABSOLUTE = 'absolute', } -export const BREADCRUMB_TIME_DISPLAY_OPTIONS = [ - {label: t('Relative'), value: BreadcrumbTimeDisplay.RELATIVE}, - {label: t('Absolute'), value: BreadcrumbTimeDisplay.ABSOLUTE}, -]; +export const BREADCRUMB_TIME_DISPLAY_OPTIONS = { + [BreadcrumbTimeDisplay.RELATIVE]: { + label: t('Relative'), + value: BreadcrumbTimeDisplay.RELATIVE, + }, + [BreadcrumbTimeDisplay.ABSOLUTE]: { + label: t('Absolute'), + value: BreadcrumbTimeDisplay.ABSOLUTE, + }, +}; export const BREADCRUMB_TIME_DISPLAY_LOCALSTORAGE_KEY = 'event-breadcrumb-time-display'; const Color = styled('span')<{colorConfig: ColorConfig}>` @@ -56,22 +62,25 @@ const Color = styled('span')<{colorConfig: ColorConfig}>` * As of writing this, it just grabs a few, but in the future it may collapse, * or manipulate them in some way for a better summary. */ -export function getSummaryBreadcrumbs(crumbs: EnhancedCrumb[]) { - return [...crumbs].reverse().slice(0, BREADCRUMB_SUMMARY_COUNT); +export function getSummaryBreadcrumbs(crumbs: EnhancedCrumb[], sort: BreadcrumbSort) { + const breadcrumbs = [...crumbs]; + const sortedCrumbs = + sort === BreadcrumbSort.OLDEST ? 
breadcrumbs : breadcrumbs.reverse(); + return sortedCrumbs.slice(0, BREADCRUMB_SUMMARY_COUNT); } export function applyBreadcrumbSearch( search: string, crumbs: EnhancedCrumb[] ): EnhancedCrumb[] { - if (search === '') { + if (!search.trim()) { return crumbs; } return crumbs.filter( ({breadcrumb: c}) => c.type.includes(search) || - c.message?.includes(search) || c.category?.includes(search) || + c.message?.includes(search) || (c.data && JSON.stringify(c.data)?.includes(search)) ); } @@ -148,31 +157,35 @@ export function getEnhancedBreadcrumbs(event: Event): EnhancedCrumb[] { // Add display props return allCrumbs.map(ec => ({ ...ec, - title: getBreadcrumbTitle(ec.breadcrumb.category), + title: getBreadcrumbTitle(ec.breadcrumb), colorConfig: getBreadcrumbColorConfig(ec.breadcrumb.type), filter: getBreadcrumbFilter(ec.breadcrumb.type), iconComponent: , - levelComponent: , + levelComponent: ( + {ec.breadcrumb.level} + ), })); } -function getBreadcrumbTitle(category: RawCrumb['category']) { - switch (category) { +function getBreadcrumbTitle(crumb: RawCrumb) { + if (crumb?.type === BreadcrumbType.DEFAULT) { + return crumb?.category; + } + + switch (crumb?.category) { case 'http': case 'xhr': - return category.toUpperCase(); - case 'httplib': - return t('httplib'); + return crumb?.category.toUpperCase(); case 'ui.click': return t('UI Click'); case 'ui.input': return t('UI Input'); case null: case undefined: - return BREADCRUMB_TITLE_PLACEHOLDER; + return BREADCRUMB_TITLE_PLACEHOLDER.toLocaleLowerCase(); default: - const titleCategory = category.split('.').join(' '); - return toTitleCase(titleCategory); + const titleCategory = crumb?.category.split('.').join(' '); + return toTitleCase(titleCategory, {allowInnerUpperCase: true}); } } @@ -195,8 +208,9 @@ function getBreadcrumbColorConfig(type?: BreadcrumbType): ColorConfig { case BreadcrumbType.DEVICE: case BreadcrumbType.NETWORK: return {title: 'pink400', icon: 'pink400', iconBorder: 'pink200'}; - case BreadcrumbType.DEBUG: case BreadcrumbType.INFO: + return {title: 'blue400', icon: 'blue300', iconBorder: 'blue200'}; + case BreadcrumbType.DEBUG: default: return {title: 'gray400', icon: 'gray300', iconBorder: 'gray200'}; } @@ -232,7 +246,7 @@ function getBreadcrumbFilter(type?: BreadcrumbType) { case BreadcrumbType.NETWORK: return t('Network'); default: - return t('Default'); + return BREADCRUMB_TITLE_PLACEHOLDER; } } @@ -271,24 +285,24 @@ function BreadcrumbIcon({type}: {type?: BreadcrumbType}) { } } -function BreadcrumbLevel({level}: {level: BreadcrumbLevelType}) { - switch (level) { - case BreadcrumbLevelType.ERROR: - case BreadcrumbLevelType.FATAL: - return {level}; - case BreadcrumbLevelType.WARNING: - return {level}; - case BreadcrumbLevelType.DEBUG: - case BreadcrumbLevelType.INFO: - case BreadcrumbLevelType.LOG: - return {level}; - case BreadcrumbLevelType.UNDEFINED: - default: - return null; - } -} - -const StyledTag = styled(Tag)` +const BreadcrumbLevel = styled('div')<{level: BreadcrumbLevelType}>` margin: 0 ${space(1)}; font-weight: normal; + border: 0; + background: none; + color: ${p => { + switch (p.level) { + case BreadcrumbLevelType.ERROR: + case BreadcrumbLevelType.FATAL: + return p.theme.red400; + case BreadcrumbLevelType.WARNING: + return p.theme.yellow400; + default: + case BreadcrumbLevelType.DEBUG: + case BreadcrumbLevelType.INFO: + case BreadcrumbLevelType.LOG: + return p.theme.gray300; + } + }}; + display: ${p => (p.level === BreadcrumbLevelType.UNDEFINED ? 
'none' : 'block')}; `; diff --git a/static/app/components/timeline/index.tsx b/static/app/components/timeline/index.tsx index 011cb4c5e985e0..a7d3aaf40c89a4 100644 --- a/static/app/components/timeline/index.tsx +++ b/static/app/components/timeline/index.tsx @@ -22,6 +22,7 @@ export interface TimelineItemProps { onClick?: React.MouseEventHandler; onMouseEnter?: React.MouseEventHandler; onMouseLeave?: React.MouseEventHandler; + showLastLine?: boolean; style?: CSSProperties; timestamp?: React.ReactNode; } @@ -34,21 +35,13 @@ export const Item = forwardRef(function _Item( colorConfig = {title: 'gray400', icon: 'gray300', iconBorder: 'gray200'}, timestamp, isActive = false, - style, ...props }: TimelineItemProps, ref: React.ForwardedRef ) { const theme = useTheme(); return ( - + {title} {timestamp ??
} - - - {children} - + + {children} ); }); -const Row = styled('div')` +const Row = styled('div')<{showLastLine?: boolean}>` position: relative; color: ${p => p.theme.subText}; display: grid; @@ -87,7 +72,8 @@ const Row = styled('div')` } &:last-child { margin-bottom: 0; - background: ${p => p.theme.background}; + /* Show/hide connecting line from the last element of the timeline */ + background: ${p => (p.showLastLine ? 'transparent' : p.theme.background)}; } `; diff --git a/static/app/utils/string/toTitleCase.spec.tsx b/static/app/utils/string/toTitleCase.spec.tsx new file mode 100644 index 00000000000000..d255e1f7caf58f --- /dev/null +++ b/static/app/utils/string/toTitleCase.spec.tsx @@ -0,0 +1,21 @@ +import {toTitleCase} from 'sentry/utils/string/toTitleCase'; + +describe('toTitleCase', () => { + it('capitalizes the first letter of each word', () => { + expect(toTitleCase('sentry: fix your code')).toEqual('Sentry: Fix Your Code'); + }); + + it('treats non-word characters as the parts of the same word', () => { + expect(toTitleCase('sentry-code-breaks')).toEqual('Sentry-code-breaks'); + }); + + it('flattens words with capitals in the middle', () => { + expect(toTitleCase('seNTRy: fIX youR Code')).toEqual('Sentry: Fix Your Code'); + }); + + it("doesn't flatten inner capitals if specified", () => { + expect(toTitleCase('seNTRy: fIX youR Code', {allowInnerUpperCase: true})).toEqual( + 'SeNTRy: FIX YouR Code' + ); + }); +}); diff --git a/static/app/utils/string/toTitleCase.tsx b/static/app/utils/string/toTitleCase.tsx index 22e9b5031b87d7..962e28dfea1fc9 100644 --- a/static/app/utils/string/toTitleCase.tsx +++ b/static/app/utils/string/toTitleCase.tsx @@ -1,6 +1,15 @@ -export function toTitleCase(str: string): string { - return str.replace( - /\w\S*/g, - txt => txt.charAt(0).toUpperCase() + txt.substring(1).toLowerCase() +interface TitleCaseOptions { + /** + * If true, will allow capital letters in the middle of words. + * E.g. 'my testCase' -> 'My TestCase' + */ + allowInnerUpperCase?: boolean; +} + +export function toTitleCase(str: string, opts?: TitleCaseOptions): string { + return str.replace(/\w\S*/g, txt => + opts?.allowInnerUpperCase + ? 
txt.charAt(0).toUpperCase() + txt.substring(1) + : txt.charAt(0).toUpperCase() + txt.substring(1).toLowerCase() ); } From 7b27718eadc95ce029adcfee117b62b97310306c Mon Sep 17 00:00:00 2001 From: Mia Hsu <55610339+ameliahsu@users.noreply.github.com> Date: Wed, 24 Jul 2024 13:45:12 -0700 Subject: [PATCH 110/126] feat(onboarding): create messaging integration picker modal (#74650) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Created modal that opens when "Connect to messaging" button is selected (see https://github.com/getsentry/sentry/pull/74474) Screenshot 2024-07-22 at 3 45 35 PM Installation buttons have the same behavior as those on the Integrations page in Settings If the modal content can't be loaded, the modal is closed and a small error message appears in the corner: https://github.com/user-attachments/assets/621725cd-8ffd-4c91-af0c-8e91483bfcaa --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- .../rules/issue/addIntegrationRow.spec.tsx | 97 ++++++++++++++ .../alerts/rules/issue/addIntegrationRow.tsx | 124 ++++++++++++++++++ .../issue/messagingIntegrationModal.spec.tsx | 98 ++++++++++++++ .../rules/issue/messagingIntegrationModal.tsx | 69 ++++++++++ .../setupAlertIntegrationButton.spec.tsx | 21 ++- .../issue/setupAlertIntegrationButton.tsx | 29 +++- .../issue/setupAlertIntegrationModal.tsx | 16 --- 7 files changed, 430 insertions(+), 24 deletions(-) create mode 100644 static/app/views/alerts/rules/issue/addIntegrationRow.spec.tsx create mode 100644 static/app/views/alerts/rules/issue/addIntegrationRow.tsx create mode 100644 static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx create mode 100644 static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx delete mode 100644 static/app/views/alerts/rules/issue/setupAlertIntegrationModal.tsx diff --git a/static/app/views/alerts/rules/issue/addIntegrationRow.spec.tsx b/static/app/views/alerts/rules/issue/addIntegrationRow.spec.tsx new file mode 100644 index 00000000000000..059e532d173494 --- /dev/null +++ b/static/app/views/alerts/rules/issue/addIntegrationRow.spec.tsx @@ -0,0 +1,97 @@ +import {GitHubIntegrationProviderFixture} from 'sentry-fixture/githubIntegrationProvider'; +import {OrganizationFixture} from 'sentry-fixture/organization'; +import {ProjectFixture} from 'sentry-fixture/project'; + +import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; + +import AddIntegrationRow from 'sentry/views/alerts/rules/issue/addIntegrationRow'; + +jest.mock('sentry/actionCreators/modal'); + +describe('AddIntegrationRow', function () { + let project, org; + const integrationSlug = 'github'; + const providers = [GitHubIntegrationProviderFixture()]; + + beforeEach(function () { + MockApiClient.clearMockResponses(); + + project = ProjectFixture(); + org = OrganizationFixture(); + + jest.clearAllMocks(); + }); + + const getComponent = () => ( + + ); + + it('renders', async () => { + MockApiClient.addMockResponse({ + url: `/organizations/${org.slug}/config/integrations/?provider_key=${integrationSlug}`, + body: { + providers: providers, + }, + }); + + render(getComponent()); + + const button = await screen.findByRole('button', {name: /add integration/i}); + expect(button).toBeInTheDocument(); + }); + + it('opens the setup dialog on click', async () => { + const focus = jest.fn(); + const open = jest.fn().mockReturnValue({focus, close: jest.fn()}); + // any is needed here because getSentry has different types for 
global + (global as any).open = open; + + const mock1 = MockApiClient.addMockResponse({ + url: `/organizations/${org.slug}/config/integrations/?provider_key=${integrationSlug}`, + body: { + providers: providers, + }, + }); + + render(getComponent()); + + expect(mock1).toHaveBeenCalled(); + const button = await screen.findByRole('button', {name: /add integration/i}); + await userEvent.click(button); + expect(open.mock.calls).toHaveLength(1); + expect(focus.mock.calls).toHaveLength(1); + expect(open.mock.calls[0][2]).toBe( + 'scrollbars=yes,width=100,height=100,top=334,left=462' + ); + }); + + it('handles API error', async () => { + const setHasError = jest.fn(); + + MockApiClient.addMockResponse({ + url: `/organizations/${org.slug}/config/integrations/?provider_key=${integrationSlug}`, + statusCode: 400, + body: {error: 'internal error'}, + }); + + render( + + ); + + await waitFor(() => { + expect(setHasError).toHaveBeenCalled(); + }); + }); +}); diff --git a/static/app/views/alerts/rules/issue/addIntegrationRow.tsx b/static/app/views/alerts/rules/issue/addIntegrationRow.tsx new file mode 100644 index 00000000000000..7add50a25644c5 --- /dev/null +++ b/static/app/views/alerts/rules/issue/addIntegrationRow.tsx @@ -0,0 +1,124 @@ +import {useCallback, useEffect, useState} from 'react'; +import styled from '@emotion/styled'; + +import {Button} from 'sentry/components/button'; +import PluginIcon from 'sentry/plugins/components/pluginIcon'; +import {space} from 'sentry/styles/space'; +import type {IntegrationProvider} from 'sentry/types/integrations'; +import type {Organization} from 'sentry/types/organization'; +import type {Project} from 'sentry/types/project'; +import useApi from 'sentry/utils/useApi'; +import {AddIntegrationButton} from 'sentry/views/settings/organizationIntegrations/addIntegrationButton'; + +type Props = { + onClickHandler: () => void; + organization: Organization; + project: Project; + providerKey: string; + setHasError: (boolean) => void; +}; + +function AddIntegrationRow({ + providerKey, + organization, + project, + onClickHandler, + setHasError, +}: Props) { + const [provider, setProvider] = useState(null); + + const api = useApi(); + const fetchData = useCallback(() => { + if (!providerKey) { + return Promise.resolve(); + } + + const endpoint = `/organizations/${organization.slug}/config/integrations/?provider_key=${providerKey}`; + return api + .requestPromise(endpoint) + .then(integrations => { + setProvider(integrations.providers[0]); + }) + .catch(() => { + setHasError(true); + }); + }, [providerKey, api, organization.slug, setHasError]); + + useEffect(() => { + fetchData(); + }, [fetchData]); + + if (!provider) { + return null; + } + + const {metadata} = provider; + + const buttonProps = { + size: 'sm' as const, + priority: 'primary' as const, + 'data-test-id': 'install-button', + organization, + }; + + const close = () => onClickHandler; + + // TODO(Mia): show request installation button if user does not have necessary permissions + const integrationButton = metadata.aspects.externalInstall ? 
( + close} + external + {...buttonProps} + > + Add Installation + + ) : ( + + ); + + return ( + + + + Connect {provider.name} + + {integrationButton} + + ); +} + +const RowWrapper = styled('div')` + display: flex; + border-radius: 4px; + border: 1px solid ${p => p.theme.gray200}; + justify-content: space-between; + align-items: center; + padding: ${space(3)} ${space(4)}; +`; + +const IconTextWrapper = styled('div')` + display: flex; + align-items: center; + gap: ${space(3)}; +`; + +const NameHeader = styled('h6')` + margin: 0; +`; + +const ExternalButton = styled(Button)` + margin: 0; +`; + +const InternalButton = styled(AddIntegrationButton)` + margin: 0; +`; + +export default AddIntegrationRow; diff --git a/static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx b/static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx new file mode 100644 index 00000000000000..d765fd0d4881e0 --- /dev/null +++ b/static/app/views/alerts/rules/issue/messagingIntegrationModal.spec.tsx @@ -0,0 +1,98 @@ +import {GitHubIntegrationProviderFixture} from 'sentry-fixture/githubIntegrationProvider'; +import {OrganizationFixture} from 'sentry-fixture/organization'; +import {ProjectFixture} from 'sentry-fixture/project'; + +import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; + +import * as indicators from 'sentry/actionCreators/indicator'; +import { + makeClosableHeader, + makeCloseButton, + ModalBody, + ModalFooter, +} from 'sentry/components/globalModal/components'; +import MessagingIntegrationModal from 'sentry/views/alerts/rules/issue/messagingIntegrationModal'; + +jest.mock('sentry/actionCreators/modal'); + +describe('MessagingIntegrationModal', function () { + let project, org; + const providerKeys = ['slack', 'discord', 'msteams']; + const providers = [GitHubIntegrationProviderFixture()]; + + beforeEach(function () { + MockApiClient.clearMockResponses(); + + project = ProjectFixture(); + org = OrganizationFixture({ + features: ['messaging-integration-onboarding'], + }); + + jest.clearAllMocks(); + }); + + const getComponent = (closeModal?, props = {}) => ( + {})} + Body={ModalBody} + headerContent={

        <h1>Connect with a messaging tool</h1>
} + bodyContent={

        <p>Receive alerts and digests right where you work.</p>
} + providerKeys={providerKeys} + organization={org} + project={project} + CloseButton={makeCloseButton(() => {})} + Footer={ModalFooter} + closeModal={closeModal ? closeModal : jest.fn()} + {...props} + /> + ); + + it('renders', async function () { + const mockResponses: jest.Mock[] = []; + providerKeys.forEach(providerKey => { + mockResponses.push( + MockApiClient.addMockResponse({ + url: `/organizations/${org.slug}/config/integrations/?provider_key=${providerKey}`, + body: {providers: providers}, + }) + ); + }); + render(getComponent()); + + mockResponses.forEach(mock => { + expect(mock).toHaveBeenCalled(); + }); + const heading = await screen.findByRole('heading', { + name: /connect with a messaging tool/i, + }); + expect(heading).toBeInTheDocument(); + const buttons = await screen.findAllByRole('button', {name: /add integration/i}); + expect(buttons).toHaveLength(providerKeys.length); + }); + + it('closes on error', async function () { + const closeModal = jest.fn(); + jest.spyOn(indicators, 'addErrorMessage'); + + const mockResponses: jest.Mock[] = []; + providerKeys.forEach(value => { + mockResponses.push( + MockApiClient.addMockResponse({ + url: `/organizations/${org.slug}/config/integrations/?provider_key=${value}`, + statusCode: 400, + body: {error: 'internal error'}, + }) + ); + }); + + render(getComponent(closeModal)); + + mockResponses.forEach(mock => { + expect(mock).toHaveBeenCalled(); + }); + await waitFor(() => { + expect(closeModal).toHaveBeenCalled(); + expect(indicators.addErrorMessage).toHaveBeenCalled(); + }); + }); +}); diff --git a/static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx b/static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx new file mode 100644 index 00000000000000..1fb28054928e7d --- /dev/null +++ b/static/app/views/alerts/rules/issue/messagingIntegrationModal.tsx @@ -0,0 +1,69 @@ +import {Fragment, useEffect, useState} from 'react'; +import styled from '@emotion/styled'; + +import {addErrorMessage} from 'sentry/actionCreators/indicator'; +import type {ModalRenderProps} from 'sentry/actionCreators/modal'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {Project} from 'sentry/types'; +import type {Organization} from 'sentry/types/organization'; +import AddIntegrationRow from 'sentry/views/alerts/rules/issue/addIntegrationRow'; + +type Props = ModalRenderProps & { + headerContent: React.ReactElement; + organization: Organization; + project: Project; + providerKeys: string[]; + bodyContent?: React.ReactElement; +}; + +function MessagingIntegrationModal({ + closeModal, + Header, + Body, + headerContent, + bodyContent, + providerKeys, + organization, + project, +}: Props) { + const [hasError, setHasError] = useState(false); + + useEffect(() => { + if (hasError) { + closeModal(); + addErrorMessage(t('Failed to load integration data')); + } + }, [hasError, closeModal]); + + return ( + +
+      <Header closeButton>{headerContent}</Header>
+ + {bodyContent} + + {providerKeys.map((value: string) => { + return ( + + ); + })} + + +
+ ); +} + +const IntegrationsWrapper = styled('div')` + display: flex; + flex-direction: column; + gap: ${space(2)}; +`; + +export default MessagingIntegrationModal; diff --git a/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.spec.tsx b/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.spec.tsx index 0e88d2253ee7f7..1bf01e34747933 100644 --- a/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.spec.tsx +++ b/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.spec.tsx @@ -1,16 +1,20 @@ import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; -import {render} from 'sentry-test/reactTestingLibrary'; +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import {openModal} from 'sentry/actionCreators/modal'; import SetupAlertIntegrationButton from 'sentry/views/alerts/rules/issue/setupAlertIntegrationButton'; +jest.mock('sentry/actionCreators/modal'); + describe('SetupAlertIntegrationButton', function () { const organization = OrganizationFixture(); const featureOrg = OrganizationFixture({ features: ['messaging-integration-onboarding'], }); const project = ProjectFixture(); + it('renders slack button if no alert integrations when feature flag is off', function () { MockApiClient.addMockResponse({ url: `/projects/${organization.slug}/${project.slug}/?expand=hasAlertIntegration`, @@ -69,4 +73,19 @@ describe('SetupAlertIntegrationButton', function () { ); expect(container).not.toHaveTextContent('Connect to messaging'); }); + it('opens modal when clicked', async () => { + MockApiClient.addMockResponse({ + url: `/projects/${featureOrg.slug}/${project.slug}/?expand=hasAlertIntegration`, + body: { + ...project, + hasAlertIntegrationInstalled: false, + }, + }); + render( + + ); + await userEvent.click(screen.getByLabelText('Connect to messaging')); + + expect(openModal).toHaveBeenCalled(); + }); }); diff --git a/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.tsx b/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.tsx index 4c82137a1a4a76..56e49cda63b993 100644 --- a/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.tsx +++ b/static/app/views/alerts/rules/issue/setupAlertIntegrationButton.tsx @@ -10,7 +10,7 @@ import ConfigStore from 'sentry/stores/configStore'; import {space} from 'sentry/styles/space'; import type {Organization} from 'sentry/types/organization'; import type {Project} from 'sentry/types/project'; -import SetupAlertIntegrationModal from 'sentry/views/alerts/rules/issue/setupAlertIntegrationModal'; +import MessagingIntegrationModal from 'sentry/views/alerts/rules/issue/messagingIntegrationModal'; type Props = DeprecatedAsyncComponent['props'] & { organization: Organization; @@ -51,6 +51,9 @@ export default class SetupAlertIntegrationButton extends DeprecatedAsyncComponen } renderBody(): React.ReactNode { + const headerContent =

      <h1>Connect with a messaging tool</h1>
; + const bodyContent =

      <p>Receive alerts and digests right where you work.</p>
; + const providerKeys = ['slack', 'discord', 'msteams']; const {organization} = this.props; const {detailedProject} = this.state; // don't render anything if we don't have the project yet or if an alert integration @@ -69,15 +72,27 @@ export default class SetupAlertIntegrationButton extends DeprecatedAsyncComponen size="sm" icon={ - - - + {providerKeys.map((value: string) => { + return ; + })} } onClick={() => - openModal(deps => , { - closeEvents: 'escape-key', - }) + openModal( + deps => ( + + ), + { + closeEvents: 'escape-key', + } + ) } > {t('Connect to messaging')} diff --git a/static/app/views/alerts/rules/issue/setupAlertIntegrationModal.tsx b/static/app/views/alerts/rules/issue/setupAlertIntegrationModal.tsx deleted file mode 100644 index 5ccbb8a66ac318..00000000000000 --- a/static/app/views/alerts/rules/issue/setupAlertIntegrationModal.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import {Fragment} from 'react'; - -import type {ModalRenderProps} from 'sentry/actionCreators/modal'; - -function SetupAlertIntegrationModal({Header, Body}: ModalRenderProps) { - return ( - -
-        <h1>Connect with a messaging tool</h1>
-      </Header>
-      <Body>
-        <p>Receive alerts and digests right where you work.</p>
-      </Body>
-    </Fragment>
- ); -} - -export default SetupAlertIntegrationModal; From a6f413df65624a797b9a6fe5c2ec9bafd15af01a Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 24 Jul 2024 13:47:08 -0700 Subject: [PATCH 111/126] chore(seer grouping): Collect metric on HTML stacktraces (#74827) We've been running into trouble in the Seer grouping backfill when confronted with stacktraces full of HTML files, which are token-dense and often meaningless. Here's a recent example of such a stacktrace: ``` "frames": [ { "filename": "index.html?__geo_region=jp&loc=eyjrawqioiiydks4rjniyvrlwekwovb5yxdrrno4iiwiywxnijoirvmyntyifq.eyjzdwiioijhmvpdmevvnuuilcjhdwqioijndxj1z3vydsisimnvdw50cnkioijkucisimnyzwf0zwqioje3mtk4otu0mzisimlzcyi6imcxmjmtyxv0acisimn1cnjlbmn5ijoislbziiwizxhwijoxnzixmjy5nzuylcjyzwdpb24ioijkucisimxhbmcioijqysisimlhdci6mtcymta5njk1miwianrpijoicgnfnelbovpovel1cfrhsllncemyce9lwij9.wefd0fvomovr_gjrcquzatrsmstgrvzqew7uhuyiibajhas7m_hyceqkigikwyybvlsqxhdqrwywsrxqthmjeq&lang=jp&platform=jorp1&mode=0", "function": "t", "context_line": ' str: if not _is_snipped_context_line(frame_dict["context-line"]): found_non_snipped_context_line = True + # Not an exhaustive list of tests we could run to detect HTML, but this is only + # meant to be a temporary, quick-and-dirty metric + # TODO: Don't let this, and the metric below, hang around forever. It's only to + # help us get a sense of whether it's worthwhile trying to more accurately + # detect, and then exclude, frames containing HTML + if ( + frame_dict["filename"].endswith("html") + or "" in frame_dict["context-line"] + ): + html_frame_count += 1 + frame_strings.append( f' File "{frame_dict["filename"]}", function {frame_dict["function"]}\n {frame_dict["context-line"]}\n' ) @@ -97,6 +109,20 @@ def get_stacktrace_string(data: dict[str, Any]) -> str: stacktrace_str += header + "".join(frame_strings) + metrics.incr( + "seer.grouping.html_in_stacktrace", + sample_rate=1.0, + tags={ + "html_frames": ( + "none" + if html_frame_count == 0 + else "all" + if html_frame_count == final_frame_count + else "some" + ) + }, + ) + return stacktrace_str.strip() From f639f73a3b1044c9f331dd8bf88c325c149f705e Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 24 Jul 2024 17:10:09 -0400 Subject: [PATCH 112/126] ref: use base factories create_environment (#74882) --- .../sentry/api/endpoints/test_group_user_reports.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/tests/sentry/api/endpoints/test_group_user_reports.py b/tests/sentry/api/endpoints/test_group_user_reports.py index 325d070814787f..ae4a7a795ea4a1 100644 --- a/tests/sentry/api/endpoints/test_group_user_reports.py +++ b/tests/sentry/api/endpoints/test_group_user_reports.py @@ -1,6 +1,5 @@ from functools import cached_property -from sentry.models.environment import Environment from sentry.models.userreport import UserReport from sentry.testutils.cases import APITestCase, SnubaTestCase from sentry.testutils.helpers.datetime import before_now, iso_format @@ -10,8 +9,8 @@ class GroupUserReport(APITestCase, SnubaTestCase): def setUp(self): super().setUp() self.project = self.create_project() - self.env1 = self.create_environment(self.project, "production") - self.env2 = self.create_environment(self.project, "staging") + self.env1 = self.create_environment(self.project, name="production") + self.env2 = self.create_environment(self.project, name="staging") self.env1_events = self.create_events_for_environment(self.env1, 5) self.env2_events = 
self.create_events_for_environment(self.env2, 5) @@ -29,11 +28,6 @@ def setUp(self): def path(self): return f"/api/0/groups/{self.group.id}/user-feedback/" - def create_environment(self, project, name): - env = Environment.objects.create(organization_id=project.organization_id, name=name) - env.add_project(project) - return env - def create_events_for_environment(self, environment, num_events): return [ self.store_event( @@ -91,7 +85,7 @@ def test_no_environment_does_not_exists(self): def test_no_environment(self): self.login_as(user=self.user) - empty_env = self.create_environment(self.project, "") + empty_env = self.create_environment(self.project, name="") empty_env_events = self.create_events_for_environment(empty_env, 5) userreports = self.create_user_report_for_events( self.project, self.group, empty_env_events, empty_env From de49a91b3d21d55e34a7963df8e7b0bbad2b77fb Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Mon, 24 Jun 2024 16:22:57 -0700 Subject: [PATCH 113/126] Copy tabs dir to draggableTabs dir --- .../components/draggableTabs/draggableTab.tsx | 236 ++++++++++++++ .../draggableTabs/draggableTabList.tsx | 300 ++++++++++++++++++ .../draggableTabs/draggableTabPanels.tsx | 99 ++++++ static/app/components/draggableTabs/index.tsx | 91 ++++++ static/app/components/draggableTabs/item.tsx | 12 + static/app/components/draggableTabs/utils.tsx | 4 + 6 files changed, 742 insertions(+) create mode 100644 static/app/components/draggableTabs/draggableTab.tsx create mode 100644 static/app/components/draggableTabs/draggableTabList.tsx create mode 100644 static/app/components/draggableTabs/draggableTabPanels.tsx create mode 100644 static/app/components/draggableTabs/index.tsx create mode 100644 static/app/components/draggableTabs/item.tsx create mode 100644 static/app/components/draggableTabs/utils.tsx diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx new file mode 100644 index 00000000000000..9f5cd4bfdf1c7b --- /dev/null +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -0,0 +1,236 @@ +import {forwardRef, useCallback} from 'react'; +import type {Theme} from '@emotion/react'; +import styled from '@emotion/styled'; +import type {AriaTabProps} from '@react-aria/tabs'; +import {useTab} from '@react-aria/tabs'; +import {useObjectRef} from '@react-aria/utils'; +import type {TabListState} from '@react-stately/tabs'; +import type {Node, Orientation} from '@react-types/shared'; + +import InteractionStateLayer from 'sentry/components/interactionStateLayer'; +import Link from 'sentry/components/links/link'; +import {space} from 'sentry/styles/space'; + +import {tabsShouldForwardProp} from './utils'; + +interface TabProps extends AriaTabProps { + item: Node; + orientation: Orientation; + /** + * Whether this tab is overflowing the TabList container. If so, the tab + * needs to be visually hidden. Users can instead select it via an overflow + * menu. + */ + overflowing: boolean; + state: TabListState; +} + +/** + * Stops event propagation if the command/ctrl/shift key is pressed, in effect + * preventing any state change. This is useful because when a user + * command/ctrl/shift-clicks on a tab link, the intention is to view the tab + * in a new browser tab/window, not to update the current view. + */ +function handleLinkClick(e: React.PointerEvent) { + if (e.metaKey || e.ctrlKey || e.shiftKey) { + e.stopPropagation(); + } +} + +/** + * Renders a single tab item. 
This should not be imported directly into any + * page/view – it's only meant to be used by . See the correct + * usage in tabs.stories.js + */ +function BaseTab( + {item, state, orientation, overflowing}: TabProps, + forwardedRef: React.ForwardedRef +) { + const ref = useObjectRef(forwardedRef); + + const { + key, + rendered, + props: {to, hidden}, + } = item; + const {tabProps, isSelected} = useTab({key, isDisabled: hidden}, state, ref); + + const InnerWrap = useCallback( + ({children}) => + to ? ( + + {children} + + ) : ( + {children} + ), + [to, orientation] + ); + + return ( + + ); +} + +export const Tab = forwardRef(BaseTab); + +const TabWrap = styled('li', {shouldForwardProp: tabsShouldForwardProp})<{ + overflowing: boolean; + selected: boolean; +}>` + color: ${p => (p.selected ? p.theme.activeText : p.theme.textColor)}; + white-space: nowrap; + cursor: pointer; + + &:hover { + color: ${p => (p.selected ? p.theme.activeText : p.theme.headingColor)}; + } + + &:focus { + outline: none; + } + + &[aria-disabled], + &[aria-disabled]:hover { + color: ${p => p.theme.subText}; + pointer-events: none; + cursor: default; + } + + ${p => + p.overflowing && + ` + opacity: 0; + pointer-events: none; + `} +`; + +const innerWrapStyles = ({ + theme, + orientation, +}: { + orientation: Orientation; + theme: Theme; +}) => ` + display: flex; + align-items: center; + position: relative; + height: calc( + ${theme.form.sm.height}px + + ${orientation === 'horizontal' ? space(0.75) : '0px'} + ); + border-radius: ${theme.borderRadius}; + transform: translateY(1px); + + ${ + orientation === 'horizontal' + ? ` + /* Extra padding + negative margin trick, to expand click area */ + padding: ${space(0.75)} ${space(1)} ${space(1.5)}; + margin-left: -${space(1)}; + margin-right: -${space(1)}; + ` + : `padding: ${space(0.75)} ${space(2)};` + }; +`; + +const TabLink = styled(Link)<{orientation: Orientation}>` + ${innerWrapStyles} + + &, + &:hover { + color: inherit; + } +`; + +const TabInnerWrap = styled('span')<{orientation: Orientation}>` + ${innerWrapStyles} +`; + +const StyledInteractionStateLayer = styled(InteractionStateLayer)<{ + orientation: Orientation; +}>` + position: absolute; + width: auto; + height: auto; + transform: none; + left: 0; + right: 0; + top: 0; + bottom: ${p => (p.orientation === 'horizontal' ? space(0.75) : 0)}; +`; + +const FocusLayer = styled('div')<{orientation: Orientation}>` + position: absolute; + left: 0; + right: 0; + top: 0; + bottom: ${p => (p.orientation === 'horizontal' ? space(0.75) : 0)}; + + pointer-events: none; + border-radius: inherit; + z-index: 0; + transition: box-shadow 0.1s ease-out; + + li:focus-visible & { + box-shadow: + ${p => p.theme.focusBorder} 0 0 0 1px, + inset ${p => p.theme.focusBorder} 0 0 0 1px; + } +`; + +const TabSelectionIndicator = styled('div')<{ + orientation: Orientation; + selected: boolean; +}>` + position: absolute; + border-radius: 2px; + pointer-events: none; + background: ${p => (p.selected ? p.theme.active : 'transparent')}; + transition: background 0.1s ease-out; + + li[aria-disabled='true'] & { + background: ${p => (p.selected ? p.theme.subText : 'transparent')}; + } + + ${p => + p.orientation === 'horizontal' + ? 
` + width: calc(100% - ${space(2)}); + height: 3px; + + bottom: 0; + left: 50%; + transform: translateX(-50%); + ` + : ` + width: 3px; + height: 50%; + + left: 0; + top: 50%; + transform: translateY(-50%); + `}; +`; diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx new file mode 100644 index 00000000000000..2ef4d52a21c287 --- /dev/null +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -0,0 +1,300 @@ +import {useContext, useEffect, useMemo, useRef, useState} from 'react'; +import styled from '@emotion/styled'; +import type {AriaTabListOptions} from '@react-aria/tabs'; +import {useTabList} from '@react-aria/tabs'; +import {useCollection} from '@react-stately/collections'; +import {ListCollection} from '@react-stately/list'; +import type {TabListStateOptions} from '@react-stately/tabs'; +import {useTabListState} from '@react-stately/tabs'; +import type {Node, Orientation} from '@react-types/shared'; + +import type {SelectOption} from 'sentry/components/compactSelect'; +import {CompactSelect} from 'sentry/components/compactSelect'; +import DropdownButton from 'sentry/components/dropdownButton'; +import {IconEllipsis} from 'sentry/icons'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {browserHistory} from 'sentry/utils/browserHistory'; + +import {Tab} from './draggableTab'; +import {TabsContext} from './index'; +import type {TabListItemProps} from './item'; +import {Item} from './item'; +import {tabsShouldForwardProp} from './utils'; + +/** + * Uses IntersectionObserver API to detect overflowing tabs. Returns an array + * containing of keys of overflowing tabs. + */ +function useOverflowTabs({ + tabListRef, + tabItemsRef, + tabItems, +}: { + tabItems: TabListItemProps[]; + tabItemsRef: React.RefObject>; + tabListRef: React.RefObject; +}) { + const [overflowTabs, setOverflowTabs] = useState>([]); + + useEffect(() => { + const options = { + root: tabListRef.current, + // Nagative right margin to account for overflow menu's trigger button + rootMargin: `0px -42px 1px ${space(1)}`, + // Use 0.95 rather than 1 because of a bug in Edge (Windows) where the intersection + // ratio may unexpectedly drop to slightly below 1 (0.999…) on page scroll. + threshold: 0.95, + }; + + const callback: IntersectionObserverCallback = entries => { + entries.forEach(entry => { + const {target} = entry; + const {key} = (target as HTMLElement).dataset; + if (!key) { + return; + } + + if (!entry.isIntersecting) { + setOverflowTabs(prev => prev.concat([key])); + return; + } + + setOverflowTabs(prev => prev.filter(k => k !== key)); + }); + }; + + const observer = new IntersectionObserver(callback, options); + Object.values(tabItemsRef.current ?? 
{}).forEach( + element => element && observer.observe(element) + ); + + return () => observer.disconnect(); + }, [tabListRef, tabItemsRef]); + + const tabItemKeyToHiddenMap = tabItems.reduce( + (acc, next) => ({ + ...acc, + [next.key]: next.hidden, + }), + {} + ); + + // Tabs that are hidden will be rendered with display: none so won't intersect, + // but we don't want to show them in the overflow menu + return overflowTabs.filter(tabKey => !tabItemKeyToHiddenMap[tabKey]); +} + +export interface TabListProps + extends AriaTabListOptions, + TabListStateOptions { + className?: string; + hideBorder?: boolean; + outerWrapStyles?: React.CSSProperties; +} + +interface BaseTabListProps extends TabListProps { + items: TabListItemProps[]; +} + +function BaseTabList({ + hideBorder = false, + className, + outerWrapStyles, + ...props +}: BaseTabListProps) { + const tabListRef = useRef(null); + const {rootProps, setTabListState} = useContext(TabsContext); + const { + value, + defaultValue, + onChange, + disabled, + orientation = 'horizontal', + keyboardActivation = 'manual', + ...otherRootProps + } = rootProps; + + // Load up list state + const ariaProps = { + selectedKey: value, + defaultSelectedKey: defaultValue, + onSelectionChange: key => { + onChange?.(key); + + // If the newly selected tab is a tab link, then navigate to the specified link + const linkTo = [...(props.items ?? [])].find(item => item.key === key)?.to; + if (!linkTo) { + return; + } + browserHistory.push(linkTo); + }, + isDisabled: disabled, + keyboardActivation, + ...otherRootProps, + ...props, + }; + + const state = useTabListState(ariaProps); + const {tabListProps} = useTabList({orientation, ...ariaProps}, state, tabListRef); + useEffect(() => { + setTabListState(state); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [state.disabledKeys, state.selectedItem, state.selectedKey, props.children]); + + // Detect tabs that overflow from the wrapper and put them in an overflow menu + const tabItemsRef = useRef>({}); + const overflowTabs = useOverflowTabs({ + tabListRef, + tabItemsRef, + tabItems: props.items, + }); + + const overflowMenuItems = useMemo(() => { + // Sort overflow items in the order that they appear in TabList + const sortedKeys = [...state.collection].map(item => item.key); + const sortedOverflowTabs = overflowTabs.sort( + (a, b) => sortedKeys.indexOf(a) - sortedKeys.indexOf(b) + ); + + return sortedOverflowTabs.flatMap>(key => { + const item = state.collection.getItem(key); + + if (!item) { + return []; + } + + return { + value: key, + label: item.props.children, + disabled: item.props.disabled, + textValue: item.textValue, + }; + }); + }, [state.collection, overflowTabs]); + + return ( + + + {[...state.collection].map(item => ( + (tabItemsRef.current[item.key] = element)} + /> + ))} + + + {orientation === 'horizontal' && overflowMenuItems.length > 0 && ( + + state.setSelectedKey(opt.value)} + disabled={disabled} + position="bottom-end" + size="sm" + offset={4} + trigger={triggerProps => ( + } + aria-label={t('More tabs')} + /> + )} + /> + + )} + + ); +} + +const collectionFactory = (nodes: Iterable>) => new ListCollection(nodes); + +/** + * To be used as a direct child of the component. See example usage + * in tabs.stories.js + */ +export function TabList({items, ...props}: TabListProps) { + /** + * Initial, unfiltered list of tab items. 
+ */ + const collection = useCollection({items, ...props}, collectionFactory); + + const parsedItems = useMemo( + () => [...collection].map(({key, props: itemProps}) => ({key, ...itemProps})), + [collection] + ); + + /** + * List of keys of disabled items (those with a `disbled` prop) to be passed + * into `BaseTabList`. + */ + const disabledKeys = useMemo( + () => parsedItems.filter(item => item.disabled).map(item => item.key), + [parsedItems] + ); + + return ( + + {item => } + + ); +} + +TabList.Item = Item; + +const TabListOuterWrap = styled('div')` + position: relative; +`; + +const TabListWrap = styled('ul', {shouldForwardProp: tabsShouldForwardProp})<{ + hideBorder: boolean; + orientation: Orientation; +}>` + position: relative; + display: grid; + padding: 0; + margin: 0; + list-style-type: none; + flex-shrink: 0; + + ${p => + p.orientation === 'horizontal' + ? ` + grid-auto-flow: column; + justify-content: start; + gap: ${space(2)}; + ${!p.hideBorder && `border-bottom: solid 1px ${p.theme.border};`} + ` + : ` + height: 100%; + grid-auto-flow: row; + align-content: start; + gap: 1px; + padding-right: ${space(2)}; + ${!p.hideBorder && `border-right: solid 1px ${p.theme.border};`} + `}; +`; + +const TabListOverflowWrap = styled('div')` + position: absolute; + right: 0; + bottom: ${space(0.75)}; +`; +const OverflowMenuTrigger = styled(DropdownButton)` + padding-left: ${space(1)}; + padding-right: ${space(1)}; +`; diff --git a/static/app/components/draggableTabs/draggableTabPanels.tsx b/static/app/components/draggableTabs/draggableTabPanels.tsx new file mode 100644 index 00000000000000..38741ed667a49b --- /dev/null +++ b/static/app/components/draggableTabs/draggableTabPanels.tsx @@ -0,0 +1,99 @@ +import {useContext, useRef} from 'react'; +import styled from '@emotion/styled'; +import type {AriaTabPanelProps} from '@react-aria/tabs'; +import {useTabPanel} from '@react-aria/tabs'; +import {useCollection} from '@react-stately/collections'; +import {ListCollection} from '@react-stately/list'; +import type {TabListState} from '@react-stately/tabs'; +import type {CollectionBase, Node, Orientation} from '@react-types/shared'; + +import {TabsContext} from './index'; +import {Item} from './item'; +import {tabsShouldForwardProp} from './utils'; + +const collectionFactory = (nodes: Iterable>) => new ListCollection(nodes); + +interface TabPanelsProps extends AriaTabPanelProps, CollectionBase { + className?: string; +} + +/** + * To be used as a direct child of the component. See example usage + * in tabs.stories.js + */ +export function TabPanels(props: TabPanelsProps) { + const { + rootProps: {orientation, items}, + tabListState, + } = useContext(TabsContext); + + // Parse child tab panels from props and identify the selected panel + const collection = useCollection({items, ...props}, collectionFactory, { + suppressTextValueWarning: true, + }); + const selectedPanel = tabListState + ? 
collection.getItem(tabListState.selectedKey) + : null; + + if (!tabListState) { + return null; + } + + return ( + + {selectedPanel?.props.children} + + ); +} + +TabPanels.Item = Item; + +interface TabPanelProps extends AriaTabPanelProps { + state: TabListState; + children?: React.ReactNode; + className?: string; + orientation?: Orientation; +} + +function TabPanel({ + state, + orientation = 'horizontal', + className, + children, + ...props +}: TabPanelProps) { + const ref = useRef(null); + const {tabPanelProps} = useTabPanel(props, state, ref); + + return ( + + {children} + + ); +} + +const TabPanelWrap = styled('div', {shouldForwardProp: tabsShouldForwardProp})<{ + orientation: Orientation; +}>` + border-radius: ${p => p.theme.borderRadius}; + + ${p => (p.orientation === 'horizontal' ? `height: 100%;` : `width: 100%;`)}; + + &:focus-visible { + outline: none; + box-shadow: + inset ${p => p.theme.focusBorder} 0 0 0 1px, + ${p => p.theme.focusBorder} 0 0 0 1px; + z-index: 1; + } +`; diff --git a/static/app/components/draggableTabs/index.tsx b/static/app/components/draggableTabs/index.tsx new file mode 100644 index 00000000000000..7ded52b54ed766 --- /dev/null +++ b/static/app/components/draggableTabs/index.tsx @@ -0,0 +1,91 @@ +import 'intersection-observer'; // polyfill + +import {createContext, useState} from 'react'; +import styled from '@emotion/styled'; +import type {AriaTabListOptions} from '@react-aria/tabs'; +import type {TabListState, TabListStateOptions} from '@react-stately/tabs'; +import type {Orientation} from '@react-types/shared'; + +import {tabsShouldForwardProp} from './utils'; + +export interface TabsProps + extends Omit< + AriaTabListOptions, + 'selectedKey' | 'defaultSelectedKey' | 'onSelectionChange' | 'isDisabled' + >, + Omit< + TabListStateOptions, + | 'children' + | 'selectedKey' + | 'defaultSelectedKey' + | 'onSelectionChange' + | 'isDisabled' + > { + children?: React.ReactNode; + className?: string; + /** + * [Uncontrolled] Default selected tab. Must match the `key` prop on the + * selected tab item. + */ + defaultValue?: T; + disabled?: boolean; + /** + * Callback when the selected tab changes. + */ + onChange?: (key: T) => void; + /** + * [Controlled] Selected tab . Must match the `key` prop on the selected tab + * item. + */ + value?: T; +} + +interface TabContext { + rootProps: Omit, 'children' | 'className'>; + setTabListState: (state: TabListState) => void; + tabListState?: TabListState; +} + +export const TabsContext = createContext({ + rootProps: {orientation: 'horizontal'}, + setTabListState: () => {}, +}); + +/** + * Root tabs component. Provides the necessary data (via React context) for + * child components (TabList and TabPanels) to work together. See example + * usage in tabs.stories.js + */ +export function Tabs({ + orientation = 'horizontal', + className, + children, + ...props +}: TabsProps) { + const [tabListState, setTabListState] = useState>(); + + return ( + + + {children} + + + ); +} + +const TabsWrap = styled('div', {shouldForwardProp: tabsShouldForwardProp})<{ + orientation: Orientation; +}>` + display: flex; + flex-direction: ${p => (p.orientation === 'horizontal' ? 
'column' : 'row')}; + flex-grow: 1; + + ${p => + p.orientation === 'vertical' && + ` + height: 100%; + align-items: stretch; + `}; +`; diff --git a/static/app/components/draggableTabs/item.tsx b/static/app/components/draggableTabs/item.tsx new file mode 100644 index 00000000000000..a8aac0aa157cd9 --- /dev/null +++ b/static/app/components/draggableTabs/item.tsx @@ -0,0 +1,12 @@ +import {Item as _Item} from '@react-stately/collections'; +import type {ItemProps} from '@react-types/shared'; +import type {LocationDescriptor} from 'history'; + +export interface TabListItemProps extends ItemProps { + key: string | number; + disabled?: boolean; + hidden?: boolean; + to?: LocationDescriptor; +} + +export const Item = _Item as (props: TabListItemProps) => JSX.Element; diff --git a/static/app/components/draggableTabs/utils.tsx b/static/app/components/draggableTabs/utils.tsx new file mode 100644 index 00000000000000..f8c644fe4b5ea6 --- /dev/null +++ b/static/app/components/draggableTabs/utils.tsx @@ -0,0 +1,4 @@ +import isPropValid from '@emotion/is-prop-valid'; + +export const tabsShouldForwardProp = (prop: string) => + typeof prop === 'string' && isPropValid(prop) && prop !== 'orientation'; From 67febcf6a9897e5b76af87d9129c1d7899b0fecc Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Fri, 28 Jun 2024 14:09:46 -0700 Subject: [PATCH 114/126] feat(): Add new draggable tabs component to storybook (#73239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **Note:** This PR merges into the `msun/draggableTabsComponent`, not `master`. The `msun/draggableTabsComponent` branch contains the `components/draggableTabs` directory with with the files from `components/tabs` (except for the test file). This is so that the diff for this PR is easier to read and more intuitive – it effectively shows all upgrades to the existing tabs components to make the tabs draggable. This PR implements an MVP for drag and drop tabs. Visually, these tabs are identical to the current tabs component, but you can drag them and drop them within the TabList, and a vertical line appears to indicate the drop position for a tab that is currently being dragged. You can play around with this component in the vercel deployment's storybook. 
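For reference, a minimal usage sketch of the new component (it mirrors the storybook and spec files added below; the `Demo` wrapper is illustrative only, not part of this PR):

```tsx
import {DraggableTabBar} from 'sentry/components/draggableTabs';

// Each tab needs a unique key, a label, and the panel content to render.
const TABS = [
  {key: 'one', label: 'Tab One', content: 'First panel content'},
  {key: 'two', label: 'Tab Two', content: 'Second panel content'},
  {key: 'three', label: 'Tab Three', content: 'Third panel content'},
];

function Demo() {
  // Tabs can be dragged and reordered within the tab list; a vertical line
  // indicates the drop position while a tab is being dragged.
  return <DraggableTabBar tabs={TABS} />;
}
```

Tab order is kept in the component's internal state and is updated when a tab is dropped at a new position.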
Quick Demo: https://github.com/getsentry/sentry/assets/55160142/d36b8fd6-4d99-4a1a-9aa9-04e293cfef21 Known issues: - Tabs seem to not drag unless you focus on them first - The drop indicator seems to stutter between the first and second positions sometimes (visible in the screen recording above) --- .../components/draggableTabs/draggableTab.tsx | 311 ++++++------------ .../draggableTabs/draggableTabList.tsx | 226 ++++++------- .../draggableTabs/draggableTabPanels.tsx | 99 ------ .../components/draggableTabs/index.spec.tsx | 59 ++++ .../draggableTabs/index.stories.tsx | 31 ++ static/app/components/draggableTabs/index.tsx | 105 ++---- static/app/components/draggableTabs/item.tsx | 4 +- static/app/components/tabs/tab.tsx | 148 ++++++--- static/app/components/tabs/tabList.tsx | 54 +-- 9 files changed, 455 insertions(+), 582 deletions(-) delete mode 100644 static/app/components/draggableTabs/draggableTabPanels.tsx create mode 100644 static/app/components/draggableTabs/index.spec.tsx create mode 100644 static/app/components/draggableTabs/index.stories.tsx diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index 9f5cd4bfdf1c7b..b536be5f145662 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -1,19 +1,24 @@ -import {forwardRef, useCallback} from 'react'; -import type {Theme} from '@emotion/react'; +import type React from 'react'; +import {forwardRef, Fragment, useRef} from 'react'; import styled from '@emotion/styled'; +import {useButton} from '@react-aria/button'; +import { + type DropIndicatorProps, + useDrag, + useDropIndicator, + useDroppableItem, +} from '@react-aria/dnd'; import type {AriaTabProps} from '@react-aria/tabs'; import {useTab} from '@react-aria/tabs'; -import {useObjectRef} from '@react-aria/utils'; +import {mergeProps, useObjectRef} from '@react-aria/utils'; +import type {DroppableCollectionState} from '@react-stately/dnd'; import type {TabListState} from '@react-stately/tabs'; import type {Node, Orientation} from '@react-types/shared'; -import InteractionStateLayer from 'sentry/components/interactionStateLayer'; -import Link from 'sentry/components/links/link'; -import {space} from 'sentry/styles/space'; +import {BaseTab} from 'sentry/components/tabs/tab'; -import {tabsShouldForwardProp} from './utils'; - -interface TabProps extends AriaTabProps { +interface DraggableTabProps extends AriaTabProps { + dropState: DroppableCollectionState; item: Node; orientation: Orientation; /** @@ -25,16 +30,48 @@ interface TabProps extends AriaTabProps { state: TabListState; } -/** - * Stops event propagation if the command/ctrl/shift key is pressed, in effect - * preventing any state change. This is useful because when a user - * command/ctrl/shift-clicks on a tab link, the intention is to view the tab - * in a new browser tab/window, not to update the current view. 
- */ -function handleLinkClick(e: React.PointerEvent) { - if (e.metaKey || e.ctrlKey || e.shiftKey) { - e.stopPropagation(); +interface BaseDropIndicatorProps { + dropState: DroppableCollectionState; + target: DropIndicatorProps['target']; +} + +function TabDropIndicator(props: BaseDropIndicatorProps) { + const ref = useRef(null); + const {dropIndicatorProps, isHidden} = useDropIndicator(props, props.dropState, ref); + if (isHidden) { + return null; } + + return ; +} + +interface DraggableProps { + children: React.ReactNode; + item: Node; + onTabClick: () => void; +} + +function Draggable({item, children, onTabClick}: DraggableProps) { + // TODO(msun): Implement the "preview" parameter in this useDrag hook + const {dragProps, dragButtonProps} = useDrag({ + getAllowedDropOperations: () => ['move'], + getItems() { + return [ + { + tab: JSON.stringify({key: item.key, value: children}), + }, + ]; + }, + }); + + const ref = useRef(null); + const {buttonProps} = useButton({...dragButtonProps, elementType: 'div'}, ref); + + return ( +
+ {children} +
+ ); } /** @@ -42,195 +79,61 @@ function handleLinkClick(e: React.PointerEvent) { * page/view – it's only meant to be used by . See the correct * usage in tabs.stories.js */ -function BaseTab( - {item, state, orientation, overflowing}: TabProps, - forwardedRef: React.ForwardedRef -) { - const ref = useObjectRef(forwardedRef); - - const { - key, - rendered, - props: {to, hidden}, - } = item; - const {tabProps, isSelected} = useTab({key, isDisabled: hidden}, state, ref); - - const InnerWrap = useCallback( - ({children}) => - to ? ( - + ) => { + const ref = useObjectRef(forwardedRef); + + const { + key, + rendered, + props: {to, hidden}, + } = item; + const {tabProps, isSelected} = useTab({key, isDisabled: hidden}, state, ref); + + const {dropProps} = useDroppableItem( + { + target: {type: 'item', key: item.key, dropPosition: 'on'}, + }, + dropState, + ref + ); + + return ( + + + - ) : ( - {children} - ), - [to, orientation] - ); - - return ( - - ); -} - -export const Tab = forwardRef(BaseTab); - -const TabWrap = styled('li', {shouldForwardProp: tabsShouldForwardProp})<{ - overflowing: boolean; - selected: boolean; -}>` - color: ${p => (p.selected ? p.theme.activeText : p.theme.textColor)}; - white-space: nowrap; - cursor: pointer; - - &:hover { - color: ${p => (p.selected ? p.theme.activeText : p.theme.headingColor)}; - } - - &:focus { - outline: none; + state.setSelectedKey(item.key)} item={item}> + {rendered} + + + {state.collection.getKeyAfter(item.key) == null && ( + + )} + + ); } +); - &[aria-disabled], - &[aria-disabled]:hover { - color: ${p => p.theme.subText}; - pointer-events: none; - cursor: default; - } - - ${p => - p.overflowing && - ` - opacity: 0; - pointer-events: none; - `} -`; - -const innerWrapStyles = ({ - theme, - orientation, -}: { - orientation: Orientation; - theme: Theme; -}) => ` - display: flex; - align-items: center; - position: relative; - height: calc( - ${theme.form.sm.height}px + - ${orientation === 'horizontal' ? space(0.75) : '0px'} - ); - border-radius: ${theme.borderRadius}; - transform: translateY(1px); - - ${ - orientation === 'horizontal' - ? ` - /* Extra padding + negative margin trick, to expand click area */ - padding: ${space(0.75)} ${space(1)} ${space(1.5)}; - margin-left: -${space(1)}; - margin-right: -${space(1)}; - ` - : `padding: ${space(0.75)} ${space(2)};` - }; -`; - -const TabLink = styled(Link)<{orientation: Orientation}>` - ${innerWrapStyles} - - &, - &:hover { - color: inherit; - } -`; - -const TabInnerWrap = styled('span')<{orientation: Orientation}>` - ${innerWrapStyles} -`; - -const StyledInteractionStateLayer = styled(InteractionStateLayer)<{ - orientation: Orientation; -}>` - position: absolute; - width: auto; - height: auto; - transform: none; - left: 0; - right: 0; - top: 0; - bottom: ${p => (p.orientation === 'horizontal' ? space(0.75) : 0)}; -`; - -const FocusLayer = styled('div')<{orientation: Orientation}>` - position: absolute; - left: 0; - right: 0; - top: 0; - bottom: ${p => (p.orientation === 'horizontal' ? space(0.75) : 0)}; - - pointer-events: none; - border-radius: inherit; - z-index: 0; - transition: box-shadow 0.1s ease-out; - - li:focus-visible & { - box-shadow: - ${p => p.theme.focusBorder} 0 0 0 1px, - inset ${p => p.theme.focusBorder} 0 0 0 1px; - } -`; - -const TabSelectionIndicator = styled('div')<{ - orientation: Orientation; - selected: boolean; -}>` - position: absolute; - border-radius: 2px; - pointer-events: none; - background: ${p => (p.selected ? 
p.theme.active : 'transparent')}; - transition: background 0.1s ease-out; - - li[aria-disabled='true'] & { - background: ${p => (p.selected ? p.theme.subText : 'transparent')}; - } - - ${p => - p.orientation === 'horizontal' - ? ` - width: calc(100% - ${space(2)}); - height: 3px; - - bottom: 0; - left: 50%; - transform: translateX(-50%); - ` - : ` - width: 3px; - height: 50%; - - left: 0; - top: 50%; - transform: translateY(-50%); - `}; +const TabSeparator = styled('li')` + height: 80%; + width: 2px; + background-color: ${p => p.theme.gray200}; `; diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 2ef4d52a21c287..66d69b31837a1e 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -1,108 +1,47 @@ -import {useContext, useEffect, useMemo, useRef, useState} from 'react'; +import {useContext, useEffect, useMemo, useRef} from 'react'; import styled from '@emotion/styled'; +import {ListDropTargetDelegate, useDroppableCollection} from '@react-aria/dnd'; +import {ListKeyboardDelegate} from '@react-aria/selection'; import type {AriaTabListOptions} from '@react-aria/tabs'; import {useTabList} from '@react-aria/tabs'; +import {mergeProps} from '@react-aria/utils'; import {useCollection} from '@react-stately/collections'; +import { + type DroppableCollectionStateOptions, + useDroppableCollectionState, +} from '@react-stately/dnd'; import {ListCollection} from '@react-stately/list'; import type {TabListStateOptions} from '@react-stately/tabs'; import {useTabListState} from '@react-stately/tabs'; -import type {Node, Orientation} from '@react-types/shared'; +import type { + DroppableCollectionInsertDropEvent, + Node, + Orientation, + TextDropItem, +} from '@react-types/shared'; import type {SelectOption} from 'sentry/components/compactSelect'; -import {CompactSelect} from 'sentry/components/compactSelect'; -import DropdownButton from 'sentry/components/dropdownButton'; -import {IconEllipsis} from 'sentry/icons'; -import {t} from 'sentry/locale'; +import type {Tab} from 'sentry/components/draggableTabs'; +import {TabsContext} from 'sentry/components/tabs'; +import {OverflowMenu, useOverflowTabs} from 'sentry/components/tabs/tabList'; import {space} from 'sentry/styles/space'; import {browserHistory} from 'sentry/utils/browserHistory'; -import {Tab} from './draggableTab'; -import {TabsContext} from './index'; -import type {TabListItemProps} from './item'; +import {DraggableTab} from './draggableTab'; +import type {DraggableTabListItemProps} from './item'; import {Item} from './item'; import {tabsShouldForwardProp} from './utils'; -/** - * Uses IntersectionObserver API to detect overflowing tabs. Returns an array - * containing of keys of overflowing tabs. - */ -function useOverflowTabs({ - tabListRef, - tabItemsRef, - tabItems, -}: { - tabItems: TabListItemProps[]; - tabItemsRef: React.RefObject>; - tabListRef: React.RefObject; -}) { - const [overflowTabs, setOverflowTabs] = useState>([]); - - useEffect(() => { - const options = { - root: tabListRef.current, - // Nagative right margin to account for overflow menu's trigger button - rootMargin: `0px -42px 1px ${space(1)}`, - // Use 0.95 rather than 1 because of a bug in Edge (Windows) where the intersection - // ratio may unexpectedly drop to slightly below 1 (0.999…) on page scroll. 
- threshold: 0.95, - }; - - const callback: IntersectionObserverCallback = entries => { - entries.forEach(entry => { - const {target} = entry; - const {key} = (target as HTMLElement).dataset; - if (!key) { - return; - } - - if (!entry.isIntersecting) { - setOverflowTabs(prev => prev.concat([key])); - return; - } - - setOverflowTabs(prev => prev.filter(k => k !== key)); - }); - }; - - const observer = new IntersectionObserver(callback, options); - Object.values(tabItemsRef.current ?? {}).forEach( - element => element && observer.observe(element) - ); - - return () => observer.disconnect(); - }, [tabListRef, tabItemsRef]); - - const tabItemKeyToHiddenMap = tabItems.reduce( - (acc, next) => ({ - ...acc, - [next.key]: next.hidden, - }), - {} - ); - - // Tabs that are hidden will be rendered with display: none so won't intersect, - // but we don't want to show them in the overflow menu - return overflowTabs.filter(tabKey => !tabItemKeyToHiddenMap[tabKey]); +interface BaseDraggableTabListProps extends DraggableTabListProps { + items: DraggableTabListItemProps[]; } -export interface TabListProps - extends AriaTabListOptions, - TabListStateOptions { - className?: string; - hideBorder?: boolean; - outerWrapStyles?: React.CSSProperties; -} - -interface BaseTabListProps extends TabListProps { - items: TabListItemProps[]; -} - -function BaseTabList({ +function BaseDraggableTabList({ hideBorder = false, className, outerWrapStyles, ...props -}: BaseTabListProps) { +}: BaseDraggableTabListProps) { const tabListRef = useRef(null); const {rootProps, setTabListState} = useContext(TabsContext); const { @@ -136,12 +75,34 @@ function BaseTabList({ }; const state = useTabListState(ariaProps); + const {tabListProps} = useTabList({orientation, ...ariaProps}, state, tabListRef); useEffect(() => { setTabListState(state); // eslint-disable-next-line react-hooks/exhaustive-deps }, [state.disabledKeys, state.selectedItem, state.selectedKey, props.children]); + const dropState = useDroppableCollectionState({ + ...props, + collection: state.collection, + selectionManager: state.selectionManager, + }); + + const {collectionProps} = useDroppableCollection( + { + ...props, + // Provide drop targets for keyboard and pointer-based drag and drop. + keyboardDelegate: new ListKeyboardDelegate( + state.collection, + state.disabledKeys, + tabListRef + ), + dropTargetDelegate: new ListDropTargetDelegate(state.collection, tabListRef), + }, + dropState, + tabListRef + ); + // Detect tabs that overflow from the wrapper and put them in an overflow menu const tabItemsRef = useRef>({}); const overflowTabs = useOverflowTabs({ @@ -176,46 +137,31 @@ function BaseTabList({ return ( {[...state.collection].map(item => ( - (tabItemsRef.current[item.key] = element)} /> ))} {orientation === 'horizontal' && overflowMenuItems.length > 0 && ( - - state.setSelectedKey(opt.value)} - disabled={disabled} - position="bottom-end" - size="sm" - offset={4} - trigger={triggerProps => ( - } - aria-label={t('More tabs')} - /> - )} - /> - + )} ); @@ -223,14 +169,49 @@ function BaseTabList({ const collectionFactory = (nodes: Iterable>) => new ListCollection(nodes); +export interface DraggableTabListProps + extends AriaTabListOptions, + TabListStateOptions, + Omit { + setTabs: (tabs: Tab[]) => void; + tabs: Tab[]; + className?: string; + hideBorder?: boolean; + outerWrapStyles?: React.CSSProperties; +} + /** * To be used as a direct child of the component. 
See example usage * in tabs.stories.js */ -export function TabList({items, ...props}: TabListProps) { - /** - * Initial, unfiltered list of tab items. - */ +export function DraggableTabList({ + items, + tabs, + setTabs, + ...props +}: DraggableTabListProps) { + const onInsert = async (e: DroppableCollectionInsertDropEvent) => { + const dropItem = e.items[0] as TextDropItem; + const eventTab: {key: string; value: string} = JSON.parse( + await dropItem.getText('tab') + ); + const draggedTab = tabs.find(tab => tab.key === eventTab.key); + if (!draggedTab || e.target.key === draggedTab.key) { + return; // Do nothing if the dragged tab is dropped on itself + } + + const updatedTabs = tabs.filter(tab => tab.key !== draggedTab.key); + const targetIdx = updatedTabs.findIndex(tab => tab.key === e.target.key); + if (targetIdx > -1) { + if (e.target.dropPosition === 'before') { + updatedTabs.splice(targetIdx, 0, draggedTab); + } else if (e.target.dropPosition === 'after') { + updatedTabs.splice(targetIdx + 1, 0, draggedTab); + } + setTabs(updatedTabs); + } + }; + const collection = useCollection({items, ...props}, collectionFactory); const parsedItems = useMemo( @@ -248,13 +229,20 @@ export function TabList({items, ...props}: TabListProps) { ); return ( - + {item => } - + ); } -TabList.Item = Item; +DraggableTabList.Item = Item; const TabListOuterWrap = styled('div')` position: relative; @@ -288,13 +276,3 @@ const TabListWrap = styled('ul', {shouldForwardProp: tabsShouldForwardProp})<{ ${!p.hideBorder && `border-right: solid 1px ${p.theme.border};`} `}; `; - -const TabListOverflowWrap = styled('div')` - position: absolute; - right: 0; - bottom: ${space(0.75)}; -`; -const OverflowMenuTrigger = styled(DropdownButton)` - padding-left: ${space(1)}; - padding-right: ${space(1)}; -`; diff --git a/static/app/components/draggableTabs/draggableTabPanels.tsx b/static/app/components/draggableTabs/draggableTabPanels.tsx deleted file mode 100644 index 38741ed667a49b..00000000000000 --- a/static/app/components/draggableTabs/draggableTabPanels.tsx +++ /dev/null @@ -1,99 +0,0 @@ -import {useContext, useRef} from 'react'; -import styled from '@emotion/styled'; -import type {AriaTabPanelProps} from '@react-aria/tabs'; -import {useTabPanel} from '@react-aria/tabs'; -import {useCollection} from '@react-stately/collections'; -import {ListCollection} from '@react-stately/list'; -import type {TabListState} from '@react-stately/tabs'; -import type {CollectionBase, Node, Orientation} from '@react-types/shared'; - -import {TabsContext} from './index'; -import {Item} from './item'; -import {tabsShouldForwardProp} from './utils'; - -const collectionFactory = (nodes: Iterable>) => new ListCollection(nodes); - -interface TabPanelsProps extends AriaTabPanelProps, CollectionBase { - className?: string; -} - -/** - * To be used as a direct child of the component. See example usage - * in tabs.stories.js - */ -export function TabPanels(props: TabPanelsProps) { - const { - rootProps: {orientation, items}, - tabListState, - } = useContext(TabsContext); - - // Parse child tab panels from props and identify the selected panel - const collection = useCollection({items, ...props}, collectionFactory, { - suppressTextValueWarning: true, - }); - const selectedPanel = tabListState - ? 
collection.getItem(tabListState.selectedKey) - : null; - - if (!tabListState) { - return null; - } - - return ( - - {selectedPanel?.props.children} - - ); -} - -TabPanels.Item = Item; - -interface TabPanelProps extends AriaTabPanelProps { - state: TabListState; - children?: React.ReactNode; - className?: string; - orientation?: Orientation; -} - -function TabPanel({ - state, - orientation = 'horizontal', - className, - children, - ...props -}: TabPanelProps) { - const ref = useRef(null); - const {tabPanelProps} = useTabPanel(props, state, ref); - - return ( - - {children} - - ); -} - -const TabPanelWrap = styled('div', {shouldForwardProp: tabsShouldForwardProp})<{ - orientation: Orientation; -}>` - border-radius: ${p => p.theme.borderRadius}; - - ${p => (p.orientation === 'horizontal' ? `height: 100%;` : `width: 100%;`)}; - - &:focus-visible { - outline: none; - box-shadow: - inset ${p => p.theme.focusBorder} 0 0 0 1px, - ${p => p.theme.focusBorder} 0 0 0 1px; - z-index: 1; - } -`; diff --git a/static/app/components/draggableTabs/index.spec.tsx b/static/app/components/draggableTabs/index.spec.tsx new file mode 100644 index 00000000000000..f34eb40a81ec6a --- /dev/null +++ b/static/app/components/draggableTabs/index.spec.tsx @@ -0,0 +1,59 @@ +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; + +import {DraggableTabBar} from 'sentry/components/draggableTabs'; + +const TABS = [ + {key: 'one', label: 'Tab One', content: 'So by colonel hearted ferrars.'}, + { + key: 'two', + label: 'Tab Two', + content: 'This is tab two', + }, + { + key: 'three', + label: 'Tab Three', + content: 'Is inquiry no he several excited am.', + }, + { + key: 'four', + label: 'Tab Four', + content: 'Called though excuse length ye needed it he having.', + }, +]; + +describe('DraggableTabs', () => { + it('renders draggable tabs list', () => { + render(); + + expect(screen.getByRole('tablist')).toHaveAttribute('aria-orientation', 'horizontal'); + expect(screen.getAllByRole('tab')).toHaveLength(TABS.length); + TABS.forEach(tab => { + expect(screen.getByRole('tab', {name: tab.label})).toBeInTheDocument(); + }); + + // The first tab item is selected and its content visible + expect(screen.getByRole('tab', {name: TABS[0].label})).toHaveAttribute( + 'aria-selected', + 'true' + ); + expect(screen.getByText(TABS[0].content)).toBeInTheDocument(); + }); + + it('changes tabs using keyboard navigation', async () => { + render(); + + // Focus on tab list + await userEvent.tab(); + expect(screen.getByRole('tab', {name: 'Tab One'})).toHaveFocus(); + + // Press Arrow Right to select the next tab to the right (Tab Two) + await userEvent.keyboard('{arrowRight}{enter}'); + + // The Second tab is selected and its contents rendered + expect(screen.getByRole('tab', {name: 'Tab Two'})).toHaveAttribute( + 'aria-selected', + 'true' + ); + expect(screen.getByText(TABS[1].content)).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/draggableTabs/index.stories.tsx b/static/app/components/draggableTabs/index.stories.tsx new file mode 100644 index 00000000000000..df37291d3c4f03 --- /dev/null +++ b/static/app/components/draggableTabs/index.stories.tsx @@ -0,0 +1,31 @@ +import {Fragment} from 'react'; + +import {DraggableTabBar} from 'sentry/components/draggableTabs'; +import JSXNode from 'sentry/components/stories/jsxNode'; +import SizingWindow from 'sentry/components/stories/sizingWindow'; +import storyBook from 'sentry/stories/storyBook'; + +export default storyBook(DraggableTabBar, story => { + const TABS = [ 
+ {key: 'one', label: 'Tab One', content: 'This is the first Panel.'}, + {key: 'two', label: 'Tab Two', content: 'This is the second panel'}, + {key: 'three', label: 'Tab Three', content: 'This is the third panel'}, + ]; + + story('Default', () => ( + +

+ You should be using all of , ,{' '} + , and + components. +

+

+ This will give you all kinds of accessibility and state tracking out of the box. + But you will have to render all tab content, including hooks, upfront. +

+ + + +
+ )); +}); diff --git a/static/app/components/draggableTabs/index.tsx b/static/app/components/draggableTabs/index.tsx index 7ded52b54ed766..a7da1bbecbde53 100644 --- a/static/app/components/draggableTabs/index.tsx +++ b/static/app/components/draggableTabs/index.tsx @@ -1,91 +1,40 @@ import 'intersection-observer'; // polyfill -import {createContext, useState} from 'react'; -import styled from '@emotion/styled'; -import type {AriaTabListOptions} from '@react-aria/tabs'; -import type {TabListState, TabListStateOptions} from '@react-stately/tabs'; -import type {Orientation} from '@react-types/shared'; +import {useEffect, useState} from 'react'; +import type {Key} from '@react-types/shared'; -import {tabsShouldForwardProp} from './utils'; +import {DraggableTabList} from 'sentry/components/draggableTabs/draggableTabList'; +import {TabPanels, Tabs} from 'sentry/components/tabs'; -export interface TabsProps - extends Omit< - AriaTabListOptions, - 'selectedKey' | 'defaultSelectedKey' | 'onSelectionChange' | 'isDisabled' - >, - Omit< - TabListStateOptions, - | 'children' - | 'selectedKey' - | 'defaultSelectedKey' - | 'onSelectionChange' - | 'isDisabled' - > { - children?: React.ReactNode; - className?: string; - /** - * [Uncontrolled] Default selected tab. Must match the `key` prop on the - * selected tab item. - */ - defaultValue?: T; - disabled?: boolean; - /** - * Callback when the selected tab changes. - */ - onChange?: (key: T) => void; - /** - * [Controlled] Selected tab . Must match the `key` prop on the selected tab - * item. - */ - value?: T; +export interface Tab { + content: React.ReactNode; + key: Key; + label: string; } -interface TabContext { - rootProps: Omit, 'children' | 'className'>; - setTabListState: (state: TabListState) => void; - tabListState?: TabListState; +export interface DragAndDropTabBarProps { + tabs: Tab[]; } -export const TabsContext = createContext({ - rootProps: {orientation: 'horizontal'}, - setTabListState: () => {}, -}); +export function DraggableTabBar(props: DragAndDropTabBarProps) { + const [tabs, setTabs] = useState(props.tabs); -/** - * Root tabs component. Provides the necessary data (via React context) for - * child components (TabList and TabPanels) to work together. See example - * usage in tabs.stories.js - */ -export function Tabs({ - orientation = 'horizontal', - className, - children, - ...props -}: TabsProps) { - const [tabListState, setTabListState] = useState>(); + useEffect(() => { + setTabs(props.tabs); + }, [props.tabs]); return ( - - - {children} - - + + + {tabs.map(tab => ( + {tab.label} + ))} + + + {tabs.map(tab => ( + {tab.content} + ))} + + ); } - -const TabsWrap = styled('div', {shouldForwardProp: tabsShouldForwardProp})<{ - orientation: Orientation; -}>` - display: flex; - flex-direction: ${p => (p.orientation === 'horizontal' ? 
'column' : 'row')}; - flex-grow: 1; - - ${p => - p.orientation === 'vertical' && - ` - height: 100%; - align-items: stretch; - `}; -`; diff --git a/static/app/components/draggableTabs/item.tsx b/static/app/components/draggableTabs/item.tsx index a8aac0aa157cd9..00cd6e94e91c38 100644 --- a/static/app/components/draggableTabs/item.tsx +++ b/static/app/components/draggableTabs/item.tsx @@ -2,11 +2,11 @@ import {Item as _Item} from '@react-stately/collections'; import type {ItemProps} from '@react-types/shared'; import type {LocationDescriptor} from 'history'; -export interface TabListItemProps extends ItemProps { +export interface DraggableTabListItemProps extends ItemProps { key: string | number; disabled?: boolean; hidden?: boolean; to?: LocationDescriptor; } -export const Item = _Item as (props: TabListItemProps) => JSX.Element; +export const Item = _Item as (props: DraggableTabListItemProps) => JSX.Element; diff --git a/static/app/components/tabs/tab.tsx b/static/app/components/tabs/tab.tsx index 9f5cd4bfdf1c7b..eba78720b264d2 100644 --- a/static/app/components/tabs/tab.tsx +++ b/static/app/components/tabs/tab.tsx @@ -3,9 +3,14 @@ import type {Theme} from '@emotion/react'; import styled from '@emotion/styled'; import type {AriaTabProps} from '@react-aria/tabs'; import {useTab} from '@react-aria/tabs'; -import {useObjectRef} from '@react-aria/utils'; +import {mergeProps, useObjectRef} from '@react-aria/utils'; import type {TabListState} from '@react-stately/tabs'; -import type {Node, Orientation} from '@react-types/shared'; +import type { + DOMAttributes, + FocusableElement, + Node, + Orientation, +} from '@react-types/shared'; import InteractionStateLayer from 'sentry/components/interactionStateLayer'; import Link from 'sentry/components/links/link'; @@ -37,64 +42,101 @@ function handleLinkClick(e: React.PointerEvent) { } } +interface BaseTabProps { + children: React.ReactNode; + hidden: boolean; + isSelected: boolean; + orientation: Orientation; + overflowing: boolean; + tabProps: DOMAttributes; + /** + * Additional props to be merged with `tabProps`. This is used + * by to pass in props used for drag-and-drop functionality. + */ + additionalProps?: React.HTMLAttributes; + to?: string; +} + +export const BaseTab = forwardRef( + (props: BaseTabProps, forwardedRef: React.ForwardedRef) => { + const {to, orientation, overflowing, tabProps, hidden, isSelected, additionalProps} = + props; + + const ref = useObjectRef(forwardedRef); + const InnerWrap = useCallback( + ({children}) => + to ? ( + + {children} + + ) : ( + {children} + ), + [to, orientation] + ); + + return ( + + ); + } +); + /** * Renders a single tab item. This should not be imported directly into any * page/view – it's only meant to be used by . See the correct * usage in tabs.stories.js */ -function BaseTab( - {item, state, orientation, overflowing}: TabProps, - forwardedRef: React.ForwardedRef -) { - const ref = useObjectRef(forwardedRef); - - const { - key, - rendered, - props: {to, hidden}, - } = item; - const {tabProps, isSelected} = useTab({key, isDisabled: hidden}, state, ref); - - const InnerWrap = useCallback( - ({children}) => - to ? 
( - - {children} - - ) : ( - {children} - ), - [to, orientation] - ); +export const Tab = forwardRef( + ( + {item, state, orientation, overflowing}: TabProps, + forwardedRef: React.ForwardedRef + ) => { + const ref = useObjectRef(forwardedRef); - return ( - - ); -} + const { + key, + rendered, + props: {to, hidden}, + } = item; + const {tabProps, isSelected} = useTab({key, isDisabled: hidden}, state, ref); -export const Tab = forwardRef(BaseTab); + return ( + + ); + } +); const TabWrap = styled('li', {shouldForwardProp: tabsShouldForwardProp})<{ overflowing: boolean; diff --git a/static/app/components/tabs/tabList.tsx b/static/app/components/tabs/tabList.tsx index 253a32ccf53577..84e462c7e0fd7e 100644 --- a/static/app/components/tabs/tabList.tsx +++ b/static/app/components/tabs/tabList.tsx @@ -26,7 +26,7 @@ import {tabsShouldForwardProp} from './utils'; * Uses IntersectionObserver API to detect overflowing tabs. Returns an array * containing of keys of overflowing tabs. */ -function useOverflowTabs({ +export function useOverflowTabs({ tabListRef, tabItemsRef, tabItems, @@ -85,6 +85,32 @@ function useOverflowTabs({ return overflowTabs.filter(tabKey => !tabItemKeyToHiddenMap[tabKey]); } +export function OverflowMenu({state, overflowMenuItems, disabled}) { + return ( + + state.setSelectedKey(opt.value)} + disabled={disabled} + position="bottom-end" + size="sm" + offset={4} + trigger={triggerProps => ( + } + aria-label={t('More tabs')} + /> + )} + /> + + ); +} + export interface TabListProps extends AriaTabListOptions, TabListStateOptions { @@ -195,27 +221,11 @@ function BaseTabList({ {orientation === 'horizontal' && overflowMenuItems.length > 0 && ( - - state.setSelectedKey(opt.value)} - disabled={disabled} - position="bottom-end" - size="sm" - offset={4} - trigger={triggerProps => ( - } - aria-label={t('More tabs')} - /> - )} - /> - + )} ); From 49d178f108934dd156ad59e99c78cf6146ff6fb1 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Thu, 27 Jun 2024 17:50:13 -0700 Subject: [PATCH 115/126] Add query count badge and dropdown menu button --- .../components/draggableTabs/draggableTab.tsx | 85 ++++++++++++++++++- .../draggableTabs/draggableTabMenuButton.tsx | 58 +++++++++++++ .../draggableTabs/index.stories.tsx | 18 +++- static/app/components/dropdownMenu/item.tsx | 2 + 4 files changed, 155 insertions(+), 8 deletions(-) create mode 100644 static/app/components/draggableTabs/draggableTabMenuButton.tsx diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index b536be5f145662..d18521fa4d1f6c 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -15,7 +15,13 @@ import type {DroppableCollectionState} from '@react-stately/dnd'; import type {TabListState} from '@react-stately/tabs'; import type {Node, Orientation} from '@react-types/shared'; +import Badge from 'sentry/components/badge/badge'; +import {DraggableTabMenuButton} from 'sentry/components/draggableTabs/draggableTabMenuButton'; +import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; +import QueryCount from 'sentry/components/queryCount'; import {BaseTab} from 'sentry/components/tabs/tab'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; interface DraggableTabProps extends AriaTabProps { dropState: DroppableCollectionState; @@ -35,6 +41,31 @@ interface BaseDropIndicatorProps { target: DropIndicatorProps['target']; } +const TAB_MENU_OPTIONS: 
MenuItemProps[] = [ + { + key: 'save-changes', + label: t('Save Changes'), + }, + { + key: 'discard-changes', + label: t('Discard Changes'), + }, + { + key: 'rename-tab', + label: t('Rename'), + showDivider: true, + }, + { + key: 'duplicate-tab', + label: t('Duplicate'), + }, + { + key: 'delete-tab', + label: t('Delete'), + priority: 'danger', + }, +]; + function TabDropIndicator(props: BaseDropIndicatorProps) { const ref = useRef(null); const {dropIndicatorProps, isHidden} = useDropIndicator(props, props.dropState, ref); @@ -58,7 +89,7 @@ function Draggable({item, children, onTabClick}: DraggableProps) { getItems() { return [ { - tab: JSON.stringify({key: item.key, value: children}), + tab: JSON.stringify({key: item.key}), }, ]; }, @@ -107,7 +138,7 @@ export const DraggableTab = forwardRef( target={{type: 'item', key: item.key, dropPosition: 'before'}} dropState={dropState} /> - state.setSelectedKey(item.key)} item={item}> - {rendered} + + {rendered} + + + + , + style: {padding: 0}, + }} + items={TAB_MENU_OPTIONS} + offset={[-10, 5]} + /> + - + + {/* {state.selectedKey !== item.key && state.collection.getLastKey() !== item.key && ( + + )} */} {state.collection.getKeyAfter(item.key) == null && ( p.theme.gray200}; + opacity: 0px; +`; + +const TabContentWrap = styled('span')` + display: flex; + align-items: center; + flex-direction: row; + gap: 6px; +`; + +const StyledBadge = styled(Badge)` + display: flex; + height: 16px; + align-items: center; + border-radius: 10px; + background: transparent; + border: 1px solid ${p => p.theme.gray200}; + color: ${p => p.theme.gray300}; + margin-left: ${space(0)}; +`; + const TabSeparator = styled('li')` height: 80%; width: 2px; diff --git a/static/app/components/draggableTabs/draggableTabMenuButton.tsx b/static/app/components/draggableTabs/draggableTabMenuButton.tsx new file mode 100644 index 00000000000000..ac4df2a72bfe29 --- /dev/null +++ b/static/app/components/draggableTabs/draggableTabMenuButton.tsx @@ -0,0 +1,58 @@ +import styled from '@emotion/styled'; + +import {Button} from 'sentry/components/button'; +import {SvgIcon} from 'sentry/icons/svgIcon'; +import {space} from 'sentry/styles/space'; + +interface DraggableTabMenuButtonProps { + triggerProps?: Omit, 'children'>; +} + +export function DraggableTabMenuButton({triggerProps}: DraggableTabMenuButtonProps) { + return ( + + } + /> + + + ); +} + +function IconCompactEllipsis() { + return ( + + + + + + ); +} + +export const ChangedAndUnsavedIndicator = styled('div')` + width: 6px; + height: 6px; + border-radius: 50%; + background: ${p => p.theme.active}; + border: solid 1px ${p => p.theme.background}; + position: absolute; + top: -${space(0.25)}; + right: -${space(0.25)}; +`; + +const StyledDropdownButton = styled(Button)` + width: 18px; + height: 16px; + border: 1px solid ${p => p.theme.gray200}; + gap: 5px; + border-radius: 4px; +`; +const TriggerIconWrap = styled('div')` + position: relative; + display: flex; + align-items: center; +`; diff --git a/static/app/components/draggableTabs/index.stories.tsx b/static/app/components/draggableTabs/index.stories.tsx index df37291d3c4f03..770af3e1576c4e 100644 --- a/static/app/components/draggableTabs/index.stories.tsx +++ b/static/app/components/draggableTabs/index.stories.tsx @@ -1,4 +1,5 @@ import {Fragment} from 'react'; +import styled from '@emotion/styled'; import {DraggableTabBar} from 'sentry/components/draggableTabs'; import JSXNode from 'sentry/components/stories/jsxNode'; @@ -7,9 +8,9 @@ import storyBook from 'sentry/stories/storyBook'; export 
default storyBook(DraggableTabBar, story => { const TABS = [ - {key: 'one', label: 'Tab One', content: 'This is the first Panel.'}, - {key: 'two', label: 'Tab Two', content: 'This is the second panel'}, - {key: 'three', label: 'Tab Three', content: 'This is the third panel'}, + {key: 'one', label: 'Inbox', content: 'This is the Inbox view.'}, + {key: 'two', label: 'For Review', content: 'This is the For Review view'}, + {key: 'three', label: 'Regressed', content: 'This is the Regressed view'}, ]; story('Default', () => ( @@ -24,8 +25,17 @@ export default storyBook(DraggableTabBar, story => { But you will have to render all tab content, including hooks, upfront.

- + + + )); }); + +const TabBarContainer = styled('div')` + display: flex; + justify-content: start; + width: 90%; + height: 300px; +`; diff --git a/static/app/components/dropdownMenu/item.tsx b/static/app/components/dropdownMenu/item.tsx index e06e6b5b360aa9..fcba7689802e24 100644 --- a/static/app/components/dropdownMenu/item.tsx +++ b/static/app/components/dropdownMenu/item.tsx @@ -53,6 +53,7 @@ export interface MenuItemProps extends MenuListItemProps { * item's key is passed as an argument. */ onAction?: (key: MenuItemProps['key']) => void; + showDivider?: boolean; /** * Passed as the `menuTitle` prop onto the associated sub-menu (applicable * if `children` is defined and `isSubmenu` is true) @@ -63,6 +64,7 @@ export interface MenuItemProps extends MenuListItemProps { * filtering and keyboard select (quick-focusing on options by typing the first letter). */ textValue?: string; + /** * Destination if this menu item is a link. */ From 0eda4d60c66d6a7ebf0ddbf3d35f9795ad183970 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Fri, 28 Jun 2024 12:27:07 -0700 Subject: [PATCH 116/126] Update baseTab component with new designs (will need to be decoupled from this PR) --- .../components/draggableTabs/draggableTab.tsx | 46 +---------- .../draggableTabs/draggableTabList.tsx | 3 +- .../draggableTabs/draggableTabMenuButton.tsx | 77 +++++++++++++++---- .../draggableTabs/index.stories.tsx | 24 +++++- static/app/components/tabs/tab.tsx | 49 +++++++++--- 5 files changed, 128 insertions(+), 71 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index d18521fa4d1f6c..5ad8d0073f280d 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -17,14 +17,13 @@ import type {Node, Orientation} from '@react-types/shared'; import Badge from 'sentry/components/badge/badge'; import {DraggableTabMenuButton} from 'sentry/components/draggableTabs/draggableTabMenuButton'; -import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; import QueryCount from 'sentry/components/queryCount'; import {BaseTab} from 'sentry/components/tabs/tab'; -import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; interface DraggableTabProps extends AriaTabProps { dropState: DroppableCollectionState; + isChanged: boolean; item: Node; orientation: Orientation; /** @@ -41,31 +40,6 @@ interface BaseDropIndicatorProps { target: DropIndicatorProps['target']; } -const TAB_MENU_OPTIONS: MenuItemProps[] = [ - { - key: 'save-changes', - label: t('Save Changes'), - }, - { - key: 'discard-changes', - label: t('Discard Changes'), - }, - { - key: 'rename-tab', - label: t('Rename'), - showDivider: true, - }, - { - key: 'duplicate-tab', - label: t('Duplicate'), - }, - { - key: 'delete-tab', - label: t('Delete'), - priority: 'danger', - }, -]; - function TabDropIndicator(props: BaseDropIndicatorProps) { const ref = useRef(null); const {dropIndicatorProps, isHidden} = useDropIndicator(props, props.dropState, ref); @@ -112,7 +86,7 @@ function Draggable({item, children, onTabClick}: DraggableProps) { */ export const DraggableTab = forwardRef( ( - {item, state, orientation, overflowing, dropState}: DraggableTabProps, + {item, state, orientation, overflowing, dropState, isChanged}: DraggableTabProps, forwardedRef: React.ForwardedRef ) => { const ref = useObjectRef(forwardedRef); @@ -154,24 +128,10 @@ export const DraggableTab = forwardRef( - , - style: {padding: 0}, - }} - 
items={TAB_MENU_OPTIONS} - offset={[-10, 5]} - /> + {state.selectedKey === item.key && } - {/* {state.selectedKey !== item.key && state.collection.getLastKey() !== item.key && ( - - )} */} {state.collection.getKeyAfter(item.key) == null && ( p.orientation === 'horizontal' ? ` grid-auto-flow: column; justify-content: start; - gap: ${space(2)}; + gap: ${space(0.5)}; ${!p.hideBorder && `border-bottom: solid 1px ${p.theme.border};`} ` : ` diff --git a/static/app/components/draggableTabs/draggableTabMenuButton.tsx b/static/app/components/draggableTabs/draggableTabMenuButton.tsx index ac4df2a72bfe29..617a1f46a5bffe 100644 --- a/static/app/components/draggableTabs/draggableTabMenuButton.tsx +++ b/static/app/components/draggableTabs/draggableTabMenuButton.tsx @@ -1,24 +1,71 @@ +import {Fragment} from 'react'; import styled from '@emotion/styled'; import {Button} from 'sentry/components/button'; +import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; import {SvgIcon} from 'sentry/icons/svgIcon'; -import {space} from 'sentry/styles/space'; +import {t} from 'sentry/locale'; + +const TAB_MENU_OPTIONS: MenuItemProps[] = [ + { + key: 'save-changes', + label: t('Save Changes'), + priority: 'primary', + }, + { + key: 'discard-changes', + label: t('Discard Changes'), + }, + { + key: 'rename-tab', + label: t('Rename'), + showDivider: true, + }, + { + key: 'duplicate-tab', + label: t('Duplicate'), + }, + { + key: 'delete-tab', + label: t('Delete'), + priority: 'danger', + }, +]; interface DraggableTabMenuButtonProps { + isChanged: boolean; triggerProps?: Omit, 'children'>; } -export function DraggableTabMenuButton({triggerProps}: DraggableTabMenuButtonProps) { +export function DraggableTabMenuButton({ + triggerProps, + isChanged, +}: DraggableTabMenuButtonProps) { return ( - } + + } + /> + {isChanged && } + + ), + style: {width: '18px', height: '16px'}, + }} + items={TAB_MENU_OPTIONS} + offset={[-10, 5]} /> - ); } @@ -33,15 +80,19 @@ function IconCompactEllipsis() { ); } +const StyledDropdownMenu = styled(DropdownMenu)` + font-weight: ${p => p.theme.fontWeightNormal}; +`; + export const ChangedAndUnsavedIndicator = styled('div')` - width: 6px; - height: 6px; + width: 7px; + height: 7px; border-radius: 50%; background: ${p => p.theme.active}; border: solid 1px ${p => p.theme.background}; position: absolute; - top: -${space(0.25)}; - right: -${space(0.25)}; + top: -3px; + right: -3px; `; const StyledDropdownButton = styled(Button)` diff --git a/static/app/components/draggableTabs/index.stories.tsx b/static/app/components/draggableTabs/index.stories.tsx index 770af3e1576c4e..7fd3aeee13f78f 100644 --- a/static/app/components/draggableTabs/index.stories.tsx +++ b/static/app/components/draggableTabs/index.stories.tsx @@ -6,11 +6,29 @@ import JSXNode from 'sentry/components/stories/jsxNode'; import SizingWindow from 'sentry/components/stories/sizingWindow'; import storyBook from 'sentry/stories/storyBook'; +const TabPanelContainer = styled('div')` + width: 90%; + height: 250px; + background-color: white; +`; + export default storyBook(DraggableTabBar, story => { const TABS = [ - {key: 'one', label: 'Inbox', content: 'This is the Inbox view.'}, - {key: 'two', label: 'For Review', content: 'This is the For Review view'}, - {key: 'three', label: 'Regressed', content: 'This is the Regressed view'}, + { + key: 'one', + label: 'Inbox', + content: This is the Inbox view, + }, + { + key: 'two', + label: 'For Review', + content: This is the For Review view, + }, + { + key: 'three', + label: 
'Regressed', + content: This is the Regressed view, + }, ]; story('Default', () => ( diff --git a/static/app/components/tabs/tab.tsx b/static/app/components/tabs/tab.tsx index eba78720b264d2..854d95e0d2bc05 100644 --- a/static/app/components/tabs/tab.tsx +++ b/static/app/components/tabs/tab.tsx @@ -82,23 +82,15 @@ export const BaseTab = forwardRef( ); return ( - + {props.children} + ); } ); @@ -138,6 +130,41 @@ export const Tab = forwardRef( } ); +const NewTabWrap = styled('li', {shouldForwardProp: tabsShouldForwardProp})<{ + overflowing: boolean; + selected: boolean; +}>` + ${p => + p.selected + ? ` + border-radius: 6px 6px 1px 1px; + border-top: 1px solid ${p.theme.border}; + border-left: 1px solid ${p.theme.border}; + border-right: 1px solid ${p.theme.border}; + background-color: ${p.theme.white}; + color: ${p.theme.fontWeightBold}; + font-weight: 600; + ` + : ``} + transform: translateY(1px); + padding: 5px 10px; + + opacity: 0px; + + cursor: pointer; + + &:focus { + outline: none; + } + + ${p => + p.overflowing && + ` + opacity: 0; + pointer-events: none; + `} +`; + const TabWrap = styled('li', {shouldForwardProp: tabsShouldForwardProp})<{ overflowing: boolean; selected: boolean; From dd76bae13170dbb85ec454f3081a3d41b2f7b0c8 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Fri, 28 Jun 2024 14:52:45 -0700 Subject: [PATCH 117/126] Fix minor unused variable errors --- static/app/components/draggableTabs/draggableTab.tsx | 4 +++- static/app/components/draggableTabs/draggableTabList.tsx | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index 5ad8d0073f280d..2e7c11ed348abf 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -128,7 +128,9 @@ export const DraggableTab = forwardRef( - {state.selectedKey === item.key && } + {state.selectedKey === item.key && ( + + )} diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 450bcbc2fb8670..0c7ff0736348cd 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -152,6 +152,7 @@ function BaseDraggableTabList({ overflowing={orientation === 'horizontal' && overflowTabs.includes(item.key)} dropState={dropState} ref={element => (tabItemsRef.current[item.key] = element)} + isChanged /> ))} From 6037ea3f898d919a72b0175ad9f6d59e5044d36f Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Fri, 28 Jun 2024 14:57:57 -0700 Subject: [PATCH 118/126] Add newVariant to tabs component --- .../components/draggableTabs/draggableTab.tsx | 1 + static/app/components/tabs/tab.tsx | 33 +++++++++++++++++-- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index 2e7c11ed348abf..7aa9d3e4753c76 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -121,6 +121,7 @@ export const DraggableTab = forwardRef( orientation={orientation} overflowing={overflowing} ref={ref} + newVariant > state.setSelectedKey(item.key)} item={item}> diff --git a/static/app/components/tabs/tab.tsx b/static/app/components/tabs/tab.tsx index 854d95e0d2bc05..a47bed7efc12f8 100644 --- a/static/app/components/tabs/tab.tsx +++ b/static/app/components/tabs/tab.tsx @@ 
-54,13 +54,22 @@ interface BaseTabProps { * by to pass in props used for drag-and-drop functionality. */ additionalProps?: React.HTMLAttributes; + newVariant?: boolean; to?: string; } export const BaseTab = forwardRef( (props: BaseTabProps, forwardedRef: React.ForwardedRef) => { - const {to, orientation, overflowing, tabProps, hidden, isSelected, additionalProps} = - props; + const { + to, + orientation, + overflowing, + tabProps, + hidden, + isSelected, + additionalProps, + newVariant = false, + } = props; const ref = useObjectRef(forwardedRef); const InnerWrap = useCallback( @@ -81,7 +90,7 @@ export const BaseTab = forwardRef( [to, orientation] ); - return ( + return newVariant ? ( + ) : ( + ); } ); From bcc1fab4e79bcbb49a21c82c26eec5611728a0e7 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Fri, 28 Jun 2024 15:24:35 -0700 Subject: [PATCH 119/126] Add 'Add View' button and tab dividers --- .../components/draggableTabs/draggableTab.tsx | 20 +++++++++++++++++-- .../draggableTabs/draggableTabList.tsx | 12 +++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index 7aa9d3e4753c76..21cfe5fa44fc8b 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -123,7 +123,7 @@ export const DraggableTab = forwardRef( ref={ref} newVariant > - state.setSelectedKey(item.key)} item={item}> + state.setSelectedKey(item.key)} item={item}> {rendered} @@ -133,8 +133,10 @@ export const DraggableTab = forwardRef( )} - + + {state.selectedKey !== item.key && + state.collection.getKeyAfter(item.key) !== state.selectedKey && } {state.collection.getKeyAfter(item.key) == null && ( p.theme.gray200}; + margin: auto; +`; const StyledBaseTab = styled(BaseTab)` padding: 2px 12px 2px 12px; diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 0c7ff0736348cd..a904fdfa5dffdf 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -20,10 +20,12 @@ import type { TextDropItem, } from '@react-types/shared'; +import {Button} from 'sentry/components/button'; import type {SelectOption} from 'sentry/components/compactSelect'; import type {Tab} from 'sentry/components/draggableTabs'; import {TabsContext} from 'sentry/components/tabs'; import {OverflowMenu, useOverflowTabs} from 'sentry/components/tabs/tabList'; +import {IconAdd} from 'sentry/icons'; import {space} from 'sentry/styles/space'; import {browserHistory} from 'sentry/utils/browserHistory'; @@ -155,6 +157,10 @@ function BaseDraggableTabList({ isChanged /> ))} + + + Add View + {orientation === 'horizontal' && overflowMenuItems.length > 0 && ( @@ -245,6 +251,12 @@ export function DraggableTabList({ DraggableTabList.Item = Item; +const AddViewButton = styled(Button)` + color: ${p => p.theme.gray300}; + margin: auto; + font-weight: normal; +`; + const TabListOuterWrap = styled('div')` position: relative; `; From d0b0694a98461b0114c0bd5cb2747c5ca052d83f Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Fri, 28 Jun 2024 16:53:28 -0700 Subject: [PATCH 120/126] Separate sections for tab menu options, revert change to dropdownmenu item --- .../draggableTabs/draggableTabMenuButton.tsx | 17 +++++++++++++++-- static/app/components/dropdownMenu/item.tsx | 1 - 2 files changed, 15 insertions(+), 3 deletions(-) diff --git 
a/static/app/components/draggableTabs/draggableTabMenuButton.tsx b/static/app/components/draggableTabs/draggableTabMenuButton.tsx index 617a1f46a5bffe..df2ee5e3d44d43 100644 --- a/static/app/components/draggableTabs/draggableTabMenuButton.tsx +++ b/static/app/components/draggableTabs/draggableTabMenuButton.tsx @@ -6,7 +6,7 @@ import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; import {SvgIcon} from 'sentry/icons/svgIcon'; import {t} from 'sentry/locale'; -const TAB_MENU_OPTIONS: MenuItemProps[] = [ +const TAB_MENU_OPTIONS_CHANGED: MenuItemProps[] = [ { key: 'save-changes', label: t('Save Changes'), @@ -16,10 +16,12 @@ const TAB_MENU_OPTIONS: MenuItemProps[] = [ key: 'discard-changes', label: t('Discard Changes'), }, +]; + +const TAB_MENU_OPTIONS_DEFAULT: MenuItemProps[] = [ { key: 'rename-tab', label: t('Rename'), - showDivider: true, }, { key: 'duplicate-tab', @@ -32,6 +34,17 @@ const TAB_MENU_OPTIONS: MenuItemProps[] = [ }, ]; +const TAB_MENU_OPTIONS: MenuItemProps[] = [ + { + key: 'changed', + children: TAB_MENU_OPTIONS_CHANGED, + }, + { + key: 'default', + children: TAB_MENU_OPTIONS_DEFAULT, + }, +]; + interface DraggableTabMenuButtonProps { isChanged: boolean; triggerProps?: Omit, 'children'>; diff --git a/static/app/components/dropdownMenu/item.tsx b/static/app/components/dropdownMenu/item.tsx index fcba7689802e24..4087ecf81197ac 100644 --- a/static/app/components/dropdownMenu/item.tsx +++ b/static/app/components/dropdownMenu/item.tsx @@ -53,7 +53,6 @@ export interface MenuItemProps extends MenuListItemProps { * item's key is passed as an argument. */ onAction?: (key: MenuItemProps['key']) => void; - showDivider?: boolean; /** * Passed as the `menuTitle` prop onto the associated sub-menu (applicable * if `children` is defined and `isSubmenu` is true) From 678c721f795c36e308e207fc3355a4db8bc1c62f Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Mon, 1 Jul 2024 10:31:54 -0700 Subject: [PATCH 121/126] Add styles for temp tab, fix text baseline changing on selection state change --- .../components/draggableTabs/draggableTab.tsx | 21 +++++++++-------- .../draggableTabs/draggableTabList.tsx | 23 ++++++++++++++++--- .../draggableTabs/index.stories.tsx | 5 ++++ static/app/components/tabs/tab.tsx | 14 +++++++---- 4 files changed, 47 insertions(+), 16 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index 21cfe5fa44fc8b..c37ab47a5d1fdb 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -116,6 +116,7 @@ export const DraggableTab = forwardRef( additionalProps={dropProps} tabProps={tabProps} isSelected={isSelected} + isTempTab={state.collection.getLastKey() === item.key} to={to} hidden={hidden} orientation={orientation} @@ -127,7 +128,7 @@ export const DraggableTab = forwardRef( {rendered} - + {state.selectedKey === item.key && ( @@ -147,21 +148,22 @@ export const DraggableTab = forwardRef( ); } ); -const StyledDraggable = styled(Draggable)` - display: flex; - align-items: center; - flex-direction: row; - gap: 6px; -`; -const TabDivider = styled('div')` +export const TabDivider = styled('div')` height: 50%; - width: 2px; + width: 1px; border-radius: 6px; background-color: ${p => p.theme.gray200}; margin: auto; `; +const StyledDraggable = styled(Draggable)` + display: flex; + align-items: center; + flex-direction: row; + gap: 6px; +`; + const StyledBaseTab = styled(BaseTab)` padding: 2px 12px 2px 12px; gap: 
8px; @@ -181,6 +183,7 @@ const StyledBadge = styled(Badge)` display: flex; height: 16px; align-items: center; + justify-content: center; border-radius: 10px; background: transparent; border: 1px solid ${p => p.theme.gray200}; diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index a904fdfa5dffdf..74b9f24be0bbdd 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -29,7 +29,7 @@ import {IconAdd} from 'sentry/icons'; import {space} from 'sentry/styles/space'; import {browserHistory} from 'sentry/utils/browserHistory'; -import {DraggableTab} from './draggableTab'; +import {DraggableTab, TabDivider} from './draggableTab'; import type {DraggableTabListItemProps} from './item'; import {Item} from './item'; import {tabsShouldForwardProp} from './utils'; @@ -140,12 +140,13 @@ function BaseDraggableTabList({ - {[...state.collection].map(item => ( + {[...state.collection].slice(0, -1).map(item => ( Add View + + {[...state.collection].slice(-1).map(item => ( + (tabItemsRef.current[item.key] = element)} + isChanged + /> + ))} {orientation === 'horizontal' && overflowMenuItems.length > 0 && ( @@ -253,6 +267,7 @@ DraggableTabList.Item = Item; const AddViewButton = styled(Button)` color: ${p => p.theme.gray300}; + padding-right: ${space(0.5)}; margin: auto; font-weight: normal; `; @@ -264,6 +279,7 @@ const TabListOuterWrap = styled('div')` const TabListWrap = styled('ul', {shouldForwardProp: tabsShouldForwardProp})<{ hideBorder: boolean; orientation: Orientation; + tempViewSelected: boolean; }>` position: relative; display: grid; @@ -279,7 +295,8 @@ const TabListWrap = styled('ul', {shouldForwardProp: tabsShouldForwardProp})<{ grid-auto-flow: column; justify-content: start; gap: ${space(0.5)}; - ${!p.hideBorder && `border-bottom: solid 1px ${p.theme.border};`} + ${!p.hideBorder && `border-bottom: ${p.tempViewSelected ? `dashed 1px` : `solid 1px`} ${p.theme.border};`} + stroke-dasharray: 4, 3; ` : ` height: 100%; diff --git a/static/app/components/draggableTabs/index.stories.tsx b/static/app/components/draggableTabs/index.stories.tsx index 7fd3aeee13f78f..0e313962823067 100644 --- a/static/app/components/draggableTabs/index.stories.tsx +++ b/static/app/components/draggableTabs/index.stories.tsx @@ -29,6 +29,11 @@ export default storyBook(DraggableTabBar, story => { label: 'Regressed', content: This is the Regressed view, }, + { + key: 'four', + label: 'Unsaved', + content: This is an Unsaved view, + }, ]; story('Default', () => ( diff --git a/static/app/components/tabs/tab.tsx b/static/app/components/tabs/tab.tsx index a47bed7efc12f8..f5b5ed04b75556 100644 --- a/static/app/components/tabs/tab.tsx +++ b/static/app/components/tabs/tab.tsx @@ -54,6 +54,7 @@ interface BaseTabProps { * by to pass in props used for drag-and-drop functionality. 
*/ additionalProps?: React.HTMLAttributes; + isTempTab?: boolean; newVariant?: boolean; to?: string; } @@ -69,6 +70,7 @@ export const BaseTab = forwardRef( isSelected, additionalProps, newVariant = false, + isTempTab = false, } = props; const ref = useObjectRef(forwardedRef); @@ -96,6 +98,7 @@ export const BaseTab = forwardRef( hidden={hidden} selected={isSelected} overflowing={overflowing} + isTempTab={isTempTab} ref={ref} > {props.children} @@ -158,6 +161,7 @@ export const Tab = forwardRef( ); const NewTabWrap = styled('li', {shouldForwardProp: tabsShouldForwardProp})<{ + isTempTab: boolean; overflowing: boolean; selected: boolean; }>` @@ -165,14 +169,16 @@ const NewTabWrap = styled('li', {shouldForwardProp: tabsShouldForwardProp})<{ p.selected ? ` border-radius: 6px 6px 1px 1px; - border-top: 1px solid ${p.theme.border}; - border-left: 1px solid ${p.theme.border}; - border-right: 1px solid ${p.theme.border}; + border-top: 1px ${p.isTempTab && p.selected ? `dashed` : `solid`} ${p.theme.border}; + border-left: 1px ${p.isTempTab && p.selected ? `dashed` : `solid`} ${p.theme.border}; + border-right: 1px ${p.isTempTab && p.selected ? `dashed` : `solid`} ${p.theme.border}; background-color: ${p.theme.white}; color: ${p.theme.fontWeightBold}; font-weight: 600; ` - : ``} + : ` + border-top: 1px solid transparent; + `}; transform: translateY(1px); padding: 5px 10px; From 8d70df54a3695206b0d19c1dcc477fc25b137ad1 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Tue, 2 Jul 2024 11:21:45 -0700 Subject: [PATCH 122/126] Use framer motion for dragging animations. Implement some more functionality with temp tabs --- .../components/draggableTabs/draggableTab.tsx | 165 ++++++------------ .../draggableTabs/draggableTabList.tsx | 130 +++++++++----- .../draggableTabs/draggableTabMenuButton.tsx | 101 ++++++----- .../draggableTabs/index.stories.tsx | 12 +- static/app/components/draggableTabs/index.tsx | 20 ++- static/app/components/draggableTabs/item.tsx | 1 + 6 files changed, 222 insertions(+), 207 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index c37ab47a5d1fdb..8afaacb74c1c80 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -1,16 +1,10 @@ import type React from 'react'; -import {forwardRef, Fragment, useRef} from 'react'; +import {forwardRef} from 'react'; import styled from '@emotion/styled'; -import {useButton} from '@react-aria/button'; -import { - type DropIndicatorProps, - useDrag, - useDropIndicator, - useDroppableItem, -} from '@react-aria/dnd'; +import {useDroppableItem} from '@react-aria/dnd'; import type {AriaTabProps} from '@react-aria/tabs'; import {useTab} from '@react-aria/tabs'; -import {mergeProps, useObjectRef} from '@react-aria/utils'; +import {useObjectRef} from '@react-aria/utils'; import type {DroppableCollectionState} from '@react-stately/dnd'; import type {TabListState} from '@react-stately/tabs'; import type {Node, Orientation} from '@react-types/shared'; @@ -33,50 +27,12 @@ interface DraggableTabProps extends AriaTabProps { */ overflowing: boolean; state: TabListState; -} - -interface BaseDropIndicatorProps { - dropState: DroppableCollectionState; - target: DropIndicatorProps['target']; -} - -function TabDropIndicator(props: BaseDropIndicatorProps) { - const ref = useRef(null); - const {dropIndicatorProps, isHidden} = useDropIndicator(props, props.dropState, ref); - if (isHidden) { - return null; - } - - return ; -} 
- -interface DraggableProps { - children: React.ReactNode; - item: Node; - onTabClick: () => void; -} - -function Draggable({item, children, onTabClick}: DraggableProps) { - // TODO(msun): Implement the "preview" parameter in this useDrag hook - const {dragProps, dragButtonProps} = useDrag({ - getAllowedDropOperations: () => ['move'], - getItems() { - return [ - { - tab: JSON.stringify({key: item.key}), - }, - ]; - }, - }); - - const ref = useRef(null); - const {buttonProps} = useButton({...dragButtonProps, elementType: 'div'}, ref); - - return ( -
- {children} -
- ); + isTempTab?: boolean; + onDelete?: () => void; + onDiscard?: () => void; + onDuplicate?: () => void; + onRename?: () => void; + onSave?: () => void; } /** @@ -86,7 +42,20 @@ function Draggable({item, children, onTabClick}: DraggableProps) { */ export const DraggableTab = forwardRef( ( - {item, state, orientation, overflowing, dropState, isChanged}: DraggableTabProps, + { + item, + state, + orientation, + overflowing, + dropState, + isChanged, + isTempTab = false, + onDelete, + onDiscard, + onDuplicate, + onRename, + onSave, + }: DraggableTabProps, forwardedRef: React.ForwardedRef ) => { const ref = useObjectRef(forwardedRef); @@ -107,63 +76,39 @@ export const DraggableTab = forwardRef( ); return ( - - - - {state.selectedKey !== item.key && - state.collection.getKeyAfter(item.key) !== state.selectedKey && } - {state.collection.getKeyAfter(item.key) == null && ( - - )} - + ); } ); -export const TabDivider = styled('div')` - height: 50%; - width: 1px; - border-radius: 6px; - background-color: ${p => p.theme.gray200}; - margin: auto; -`; - -const StyledDraggable = styled(Draggable)` - display: flex; - align-items: center; - flex-direction: row; - gap: 6px; -`; - const StyledBaseTab = styled(BaseTab)` padding: 2px 12px 2px 12px; gap: 8px; @@ -190,9 +135,3 @@ const StyledBadge = styled(Badge)` color: ${p => p.theme.gray300}; margin-left: ${space(0)}; `; - -const TabSeparator = styled('li')` - height: 80%; - width: 2px; - background-color: ${p => p.theme.gray200}; -`; diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 74b9f24be0bbdd..7311814f989bfe 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -1,4 +1,4 @@ -import {useContext, useEffect, useMemo, useRef} from 'react'; +import {useContext, useEffect, useMemo, useRef, useState} from 'react'; import styled from '@emotion/styled'; import {ListDropTargetDelegate, useDroppableCollection} from '@react-aria/dnd'; import {ListKeyboardDelegate} from '@react-aria/selection'; @@ -19,6 +19,7 @@ import type { Orientation, TextDropItem, } from '@react-types/shared'; +import {Reorder} from 'framer-motion'; import {Button} from 'sentry/components/button'; import type {SelectOption} from 'sentry/components/compactSelect'; @@ -29,19 +30,23 @@ import {IconAdd} from 'sentry/icons'; import {space} from 'sentry/styles/space'; import {browserHistory} from 'sentry/utils/browserHistory'; -import {DraggableTab, TabDivider} from './draggableTab'; +import {DraggableTab} from './draggableTab'; import type {DraggableTabListItemProps} from './item'; import {Item} from './item'; import {tabsShouldForwardProp} from './utils'; interface BaseDraggableTabListProps extends DraggableTabListProps { items: DraggableTabListItemProps[]; + setTabs: (tabs: Tab[]) => void; + tabs: Tab[]; } function BaseDraggableTabList({ hideBorder = false, className, outerWrapStyles, + tabs, + setTabs, ...props }: BaseDraggableTabListProps) { const tabListRef = useRef(null); @@ -77,6 +82,7 @@ function BaseDraggableTabList({ }; const state = useTabListState(ariaProps); + const [isTempTabVisible, setIsTempTabVisible] = useState(false); const {tabListProps} = useTabList({orientation, ...ariaProps}, state, tabListRef); useEffect(() => { @@ -136,46 +142,78 @@ function BaseDraggableTabList({ }); }, [state.collection, overflowTabs]); + const persistentTabs = [...state.collection].filter( + item => item.key !== 'temporary-tab' + ); + 
const tempTab = [...state.collection].find(item => item.key === 'temporary-tab'); + return ( - { + setTabs(newOrder); + }} + as="div" > - {[...state.collection].slice(0, -1).map(item => ( - (tabItemsRef.current[item.key] = element)} - isChanged - /> - ))} - - - Add View - - - {[...state.collection].slice(-1).map(item => ( - (tabItemsRef.current[item.key] = element)} - isChanged - /> - ))} - + + {persistentTabs.map(item => ( + tab.key === item.key)} + style={{display: 'flex', flexDirection: 'row'}} + > + { + const updatedTabs = tabs.filter(tab => tab.key !== item.key); + setTabs(updatedTabs); + }} + ref={element => (tabItemsRef.current[item.key] = element)} + isChanged + /> + {state.selectedKey !== item.key && + state.collection.getKeyAfter(item.key) !== state.selectedKey && ( + + )} + + ))} + setIsTempTabVisible(true)}> + + Add View + + {isTempTabVisible && } + {isTempTabVisible && tempTab && ( + setIsTempTabVisible(false)} + ref={element => (tabItemsRef.current[tempTab.key] = element)} + isChanged + isTempTab + /> + )} + + {orientation === 'horizontal' && overflowMenuItems.length > 0 && ( p.theme.gray200}; + margin: 9px auto; +`; + const AddViewButton = styled(Button)` color: ${p => p.theme.gray300}; padding-right: ${space(0.5)}; - margin: auto; + margin-top: 3px; font-weight: normal; `; @@ -276,7 +322,9 @@ const TabListOuterWrap = styled('div')` position: relative; `; -const TabListWrap = styled('ul', {shouldForwardProp: tabsShouldForwardProp})<{ +const TabListWrap = styled('ul', { + shouldForwardProp: tabsShouldForwardProp, +})<{ hideBorder: boolean; orientation: Orientation; tempViewSelected: boolean; diff --git a/static/app/components/draggableTabs/draggableTabMenuButton.tsx b/static/app/components/draggableTabs/draggableTabMenuButton.tsx index df2ee5e3d44d43..b51fac5248995b 100644 --- a/static/app/components/draggableTabs/draggableTabMenuButton.tsx +++ b/static/app/components/draggableTabs/draggableTabMenuButton.tsx @@ -6,54 +6,71 @@ import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; import {SvgIcon} from 'sentry/icons/svgIcon'; import {t} from 'sentry/locale'; -const TAB_MENU_OPTIONS_CHANGED: MenuItemProps[] = [ - { - key: 'save-changes', - label: t('Save Changes'), - priority: 'primary', - }, - { - key: 'discard-changes', - label: t('Discard Changes'), - }, -]; - -const TAB_MENU_OPTIONS_DEFAULT: MenuItemProps[] = [ - { - key: 'rename-tab', - label: t('Rename'), - }, - { - key: 'duplicate-tab', - label: t('Duplicate'), - }, - { - key: 'delete-tab', - label: t('Delete'), - priority: 'danger', - }, -]; - -const TAB_MENU_OPTIONS: MenuItemProps[] = [ - { - key: 'changed', - children: TAB_MENU_OPTIONS_CHANGED, - }, - { - key: 'default', - children: TAB_MENU_OPTIONS_DEFAULT, - }, -]; - interface DraggableTabMenuButtonProps { - isChanged: boolean; + isChanged?: boolean; + onDelete?: () => void; + onDiscard?: () => void; + onDuplicate?: () => void; + onRename?: () => void; + onSave?: () => void; triggerProps?: Omit, 'children'>; } export function DraggableTabMenuButton({ triggerProps, - isChanged, + isChanged = false, + onDelete, + onDiscard, + onDuplicate, + onRename, + onSave, }: DraggableTabMenuButtonProps) { + const TAB_MENU_OPTIONS_CHANGED: MenuItemProps[] = [ + { + key: 'save-changes', + label: t('Save Changes'), + priority: 'primary', + onAction: onSave, + }, + { + key: 'discard-changes', + label: t('Discard Changes'), + onAction: onDiscard, + }, + ]; + + const TAB_MENU_OPTIONS_DEFAULT: MenuItemProps[] = [ + { + key: 'rename-tab', + label: t('Rename'), + 
onAction: onRename, + }, + { + key: 'duplicate-tab', + label: t('Duplicate'), + onAction: onDuplicate, + }, + { + key: 'delete-tab', + label: t('Delete'), + priority: 'danger', + onAction: onDelete, + }, + ]; + + const menuOptions = isChanged + ? [ + { + key: 'changed', + children: TAB_MENU_OPTIONS_CHANGED, + }, + { + key: 'default', + children: TAB_MENU_OPTIONS_DEFAULT, + }, + ] + : TAB_MENU_OPTIONS_DEFAULT; + return ( diff --git a/static/app/components/draggableTabs/index.stories.tsx b/static/app/components/draggableTabs/index.stories.tsx index 0e313962823067..bc20c4f36e74b0 100644 --- a/static/app/components/draggableTabs/index.stories.tsx +++ b/static/app/components/draggableTabs/index.stories.tsx @@ -29,11 +29,6 @@ export default storyBook(DraggableTabBar, story => { label: 'Regressed', content: This is the Regressed view, }, - { - key: 'four', - label: 'Unsaved', - content: This is an Unsaved view, - }, ]; story('Default', () => ( @@ -49,7 +44,12 @@ export default storyBook(DraggableTabBar, story => {

- + This is a temporary tab + } + /> diff --git a/static/app/components/draggableTabs/index.tsx b/static/app/components/draggableTabs/index.tsx index a7da1bbecbde53..d2b405c8ae0598 100644 --- a/static/app/components/draggableTabs/index.tsx +++ b/static/app/components/draggableTabs/index.tsx @@ -10,24 +10,34 @@ export interface Tab { content: React.ReactNode; key: Key; label: string; + queryCount?: number; } export interface DragAndDropTabBarProps { tabs: Tab[]; + tempTabContent: React.ReactNode; } export function DraggableTabBar(props: DragAndDropTabBarProps) { - const [tabs, setTabs] = useState(props.tabs); + const [tabs, setTabs] = useState([ + ...props.tabs, + {key: 'temporary-tab', label: 'Unsaved', content: props.tempTabContent}, + ]); useEffect(() => { - setTabs(props.tabs); - }, [props.tabs]); + setTabs([ + ...props.tabs, + {key: 'temporary-tab', label: 'Unsaved', content: props.tempTabContent}, + ]); + }, [props.tabs, props.tempTabContent]); return ( - + {tabs.map(tab => ( - {tab.label} + + {tab.label} + ))} diff --git a/static/app/components/draggableTabs/item.tsx b/static/app/components/draggableTabs/item.tsx index 00cd6e94e91c38..b97b156552cf39 100644 --- a/static/app/components/draggableTabs/item.tsx +++ b/static/app/components/draggableTabs/item.tsx @@ -6,6 +6,7 @@ export interface DraggableTabListItemProps extends ItemProps { key: string | number; disabled?: boolean; hidden?: boolean; + queryCount?: number; to?: LocationDescriptor; } From 0744aa970af71d1f137de50ccb96e417312172b8 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Tue, 2 Jul 2024 11:31:57 -0700 Subject: [PATCH 123/126] Minor style changes --- .../components/draggableTabs/draggableTabList.tsx | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 7311814f989bfe..1adb692c0a0a37 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -149,14 +149,7 @@ function BaseDraggableTabList({ return ( - { - setTabs(newOrder); - }} - as="div" - > + { - const updatedTabs = tabs.filter(tab => tab.key !== item.key); - setTabs(updatedTabs); + setTabs(tabs.filter(tab => tab.key !== item.key)); }} ref={element => (tabItemsRef.current[item.key] = element)} isChanged From 094ff3e96380cd50088d3554d97f3bbcf129760a Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Tue, 2 Jul 2024 15:48:29 -0700 Subject: [PATCH 124/126] Fix tab divider left of Add View not showing up when temp view selected --- static/app/components/draggableTabs/draggableTabList.tsx | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 1adb692c0a0a37..762fba4ee736a7 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -179,10 +179,11 @@ function BaseDraggableTabList({ ref={element => (tabItemsRef.current[item.key] = element)} isChanged /> - {state.selectedKey !== item.key && - state.collection.getKeyAfter(item.key) !== state.selectedKey && ( - - )} + {(isTempTabVisible || + (state.selectedKey !== item.key && + state.collection.getKeyAfter(item.key) !== state.selectedKey)) && ( + + )} ))} setIsTempTabVisible(true)}> From 2a7ed44240a278d2e017abfd83e421fbf34110c3 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Tue, 2 Jul 2024 
16:14:52 -0700 Subject: [PATCH 125/126] Fix menu options for temporary tabs --- .../components/draggableTabs/draggableTab.tsx | 10 ++- .../draggableTabs/draggableTabList.tsx | 2 +- .../draggableTabs/draggableTabMenuButton.tsx | 88 +++++++++++++++---- 3 files changed, 79 insertions(+), 21 deletions(-) diff --git a/static/app/components/draggableTabs/draggableTab.tsx b/static/app/components/draggableTabs/draggableTab.tsx index 8afaacb74c1c80..77cacdffd55e20 100644 --- a/static/app/components/draggableTabs/draggableTab.tsx +++ b/static/app/components/draggableTabs/draggableTab.tsx @@ -29,7 +29,8 @@ interface DraggableTabProps extends AriaTabProps { state: TabListState; isTempTab?: boolean; onDelete?: () => void; - onDiscard?: () => void; + onDiscardChanges?: () => void; + onDiscardTempView?: () => void; onDuplicate?: () => void; onRename?: () => void; onSave?: () => void; @@ -51,7 +52,8 @@ export const DraggableTab = forwardRef( isChanged, isTempTab = false, onDelete, - onDiscard, + onDiscardChanges, + onDiscardTempView, onDuplicate, onRename, onSave, @@ -96,11 +98,13 @@ export const DraggableTab = forwardRef( {state.selectedKey === item.key && ( )}
diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 762fba4ee736a7..959d46ac10a818 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -199,7 +199,7 @@ function BaseDraggableTabList({ orientation={orientation} overflowing={false} dropState={dropState} - onDelete={() => setIsTempTabVisible(false)} + onDiscardTempView={() => setIsTempTabVisible(false)} ref={element => (tabItemsRef.current[tempTab.key] = element)} isChanged isTempTab diff --git a/static/app/components/draggableTabs/draggableTabMenuButton.tsx b/static/app/components/draggableTabs/draggableTabMenuButton.tsx index b51fac5248995b..54c26866f93c4c 100644 --- a/static/app/components/draggableTabs/draggableTabMenuButton.tsx +++ b/static/app/components/draggableTabs/draggableTabMenuButton.tsx @@ -8,24 +8,59 @@ import {t} from 'sentry/locale'; interface DraggableTabMenuButtonProps { isChanged?: boolean; + isTempTab?: boolean; + + /** + * Callback function to be called when user clicks the `Delete` button (for persistent tabs) + * Note: The `Delete` button only appears when `isTempTab=false` (persistent tabs) + */ onDelete?: () => void; - onDiscard?: () => void; + + /** + * Callback function to be called when user clicks on the `Discard Changes` button + * Note: The `Discard Changes` button only appears for persistent tabs when `isChanged=true` + */ + onDiscardChanges?: () => void; + + /** + * Callback function to be called when use clicks on the `Discard View` button + * Note: The `Discard View` button only appears when `isTempTab=true` (temporary tabs) + */ + onDiscardTempView?: () => void; + + /** + * Callback function to be called when user clicks the 'Duplicate' Button + * Note: The `Duplicate` button only appears when `isTempTab=false` (persistent tabs) + */ onDuplicate?: () => void; + + /** + * Callback function to be called when user clicks the 'Rename' Button + * Note: The `Rename` button only appears when `isTempTab=false` (persistent tabs) + * @returns + */ onRename?: () => void; + + /** + * Callback function to be called when user clicks the 'Save' button. + * Note: The `Save` button only appears for persistent tabs when `isChanged=true`, or when `isTempTab=true` + */ onSave?: () => void; triggerProps?: Omit, 'children'>; } export function DraggableTabMenuButton({ + isTempTab, triggerProps, isChanged = false, onDelete, - onDiscard, + onDiscardChanges, + onDiscardTempView, onDuplicate, onRename, onSave, }: DraggableTabMenuButtonProps) { - const TAB_MENU_OPTIONS_CHANGED: MenuItemProps[] = [ + const changedMenuOptions: MenuItemProps[] = [ { key: 'save-changes', label: t('Save Changes'), @@ -35,11 +70,11 @@ export function DraggableTabMenuButton({ { key: 'discard-changes', label: t('Discard Changes'), - onAction: onDiscard, + onAction: onDiscardChanges, }, ]; - const TAB_MENU_OPTIONS_DEFAULT: MenuItemProps[] = [ + const defaultMenuOptions: MenuItemProps[] = [ { key: 'rename-tab', label: t('Rename'), @@ -58,18 +93,37 @@ export function DraggableTabMenuButton({ }, ]; - const menuOptions = isChanged - ? 
[ - { - key: 'changed', - children: TAB_MENU_OPTIONS_CHANGED, - }, - { - key: 'default', - children: TAB_MENU_OPTIONS_DEFAULT, - }, - ] - : TAB_MENU_OPTIONS_DEFAULT; + const tempTabMenuOptions: MenuItemProps[] = [ + { + key: 'save-changes', + label: t('Save View'), + priority: 'primary', + onAction: onSave, + }, + { + key: 'discard-temp-view', + label: t('Discard'), + priority: 'danger', + onAction: onDiscardTempView, + }, + ]; + let menuOptions: MenuItemProps[] = []; + if (isTempTab) { + menuOptions = tempTabMenuOptions; + } else if (isChanged) { + menuOptions = [ + { + key: 'changed', + children: changedMenuOptions, + }, + { + key: 'default', + children: defaultMenuOptions, + }, + ]; + } else { + menuOptions = defaultMenuOptions; + } return ( From d09858e88a91c2b33ee0b03d2523cabe4f6b75c5 Mon Sep 17 00:00:00 2001 From: MichaelSun48 Date: Tue, 9 Jul 2024 10:22:25 -0400 Subject: [PATCH 126/126] Turn iconAdd into styled component --- static/app/components/draggableTabs/draggableTabList.tsx | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx index 959d46ac10a818..6d7f53525efd2a 100644 --- a/static/app/components/draggableTabs/draggableTabList.tsx +++ b/static/app/components/draggableTabs/draggableTabList.tsx @@ -187,7 +187,7 @@ function BaseDraggableTabList({ ))} setIsTempTabVisible(true)}> - + Add View {isTempTabVisible && } @@ -296,6 +296,11 @@ export function DraggableTabList({ DraggableTabList.Item = Item; +const StlyedIconAdd = styled(IconAdd)` + margin-right: ${space(0.5)}; + margin-left: ${space(0.5)}; +`; + const TabDivider = styled('div')` height: 50%; width: 1px;
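// For context, a minimal usage sketch of the DraggableTabBar component this patch
// series builds up. It is illustrative only, not part of any commit above: it assumes
// the props declared in static/app/components/draggableTabs/index.tsx (`tabs` and
// `tempTabContent`) and that the component is exported from
// 'sentry/components/draggableTabs'; the tab keys, labels, contents, and query counts
// below are made up for the example.
import {DraggableTabBar} from 'sentry/components/draggableTabs';

function ExampleIssueViews() {
  // One persistent tab per entry; per index.tsx above, the bar appends an extra
  // 'Unsaved' temporary tab whose panel renders `tempTabContent`.
  const tabs = [
    {key: 'prioritized', label: 'Prioritized', content: <p>Prioritized issues</p>, queryCount: 12},
    {key: 'for-review', label: 'For Review', content: <p>Issues for review</p>},
  ];
  return (
    <DraggableTabBar tabs={tabs} tempTabContent={<p>This is a temporary tab</p>} />
  );
}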