From e69c79bc420dec175dce222bdd00c1b0f6ed0424 Mon Sep 17 00:00:00 2001 From: Kyle Mumma Date: Mon, 5 Feb 2024 11:26:43 -0800 Subject: [PATCH 001/357] ref(snuba): add typing to tsdb/snuba and remove from ignore list (#64422) This PR is associated with [SNS-2588](https://getsentry.atlassian.net/browse/SNS-2588) The goal of this PR is to fully type utils/snuba.py to the typing standard defined in the codebase's pyproject.toml, and remove it from the do-not-typecheck list. [SNS-2588]: https://getsentry.atlassian.net/browse/SNS-2588?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ --- pyproject.toml | 1 - src/sentry/tsdb/base.py | 16 ++++++++++------ src/sentry/tsdb/snuba.py | 18 ++++++++++-------- src/sentry/utils/snuba.py | 24 +++++++++++++----------- 4 files changed, 33 insertions(+), 26 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fc39386796e49c..dce7d516c2d5d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -511,7 +511,6 @@ module = [ "sentry.tsdb.dummy", "sentry.tsdb.inmemory", "sentry.tsdb.redis", - "sentry.tsdb.snuba", "sentry.types.integrations", "sentry.utils.audit", "sentry.utils.auth", diff --git a/src/sentry/tsdb/base.py b/src/sentry/tsdb/base.py index 40da89ec5df980..90dd053db3106a 100644 --- a/src/sentry/tsdb/base.py +++ b/src/sentry/tsdb/base.py @@ -212,7 +212,7 @@ def normalize_ts_to_rollup(self, epoch, seconds): """ return int(epoch / seconds) - def get_optimal_rollup(self, start_timestamp, end_timestamp): + def get_optimal_rollup(self, start_timestamp, end_timestamp) -> int: """ Identify the lowest granularity rollup available within the given time range. @@ -237,7 +237,9 @@ def get_optimal_rollup(self, start_timestamp, end_timestamp): # lowest resolution interval. return list(self.rollups)[-1] - def get_optimal_rollup_series(self, start, end=None, rollup=None): + def get_optimal_rollup_series( + self, start, end: datetime | None = None, rollup: int | None = None + ) -> tuple[int, list[int]]: if end is None: end = timezone.now() @@ -260,9 +262,11 @@ def get_active_series(self, start=None, end=None, timestamp=None): rollups = {} for rollup, samples in self.rollups.items(): _, series = self.get_optimal_rollup_series( - start - if start is not None - else to_datetime(self.get_earliest_timestamp(rollup, timestamp=timestamp)), + ( + start + if start is not None + else to_datetime(self.get_earliest_timestamp(rollup, timestamp=timestamp)) + ), end, rollup=rollup, ) @@ -418,7 +422,7 @@ def rollup(self, values, rollup): for key, points in values.items(): result[key] = [] last_new_ts = None - for (ts, count) in points: + for ts, count in points: new_ts = normalize_ts_to_epoch(ts, rollup) if new_ts == last_new_ts: result[key][-1][1] += count diff --git a/src/sentry/tsdb/snuba.py b/src/sentry/tsdb/snuba.py index 83af715fd420dc..6293451a721933 100644 --- a/src/sentry/tsdb/snuba.py +++ b/src/sentry/tsdb/snuba.py @@ -211,7 +211,7 @@ class SnubaTSDB(BaseTSDB): def __init__(self, **options): super().__init__(**options) - def __manual_group_on_time_aggregation(self, rollup, time_column_alias) -> Sequence[Any]: + def __manual_group_on_time_aggregation(self, rollup, time_column_alias) -> list[Any]: """ Explicitly builds an aggregation expression in-place of using a `TimeSeriesProcessor` on the snuba entity.
Older tables and queries that target that table had syntactic sugar on the `time` column and would apply @@ -320,7 +320,7 @@ def get_data( def __get_data_snql( self, model: TSDBModel, - keys: Sequence[Any], + keys: Sequence | Set | Mapping, start: datetime | None, end: datetime | None, rollup: int | None = None, @@ -350,8 +350,8 @@ def __get_data_snql( model_dataset = model_query_settings.dataset columns = (model_query_settings.groupby, model_query_settings.aggregate) - keys_map = dict(zip(columns, self.flatten_keys(keys))) - keys_map = {k: v for k, v in keys_map.items() if k is not None and v is not None} + keys_map_tmp = dict(zip(columns, self.flatten_keys(keys))) + keys_map = {k: v for k, v in keys_map_tmp.items() if k is not None and v is not None} if environment_ids is not None: keys_map["environment"] = environment_ids @@ -400,7 +400,7 @@ def __get_data_snql( orderby.append(OrderBy(Column(model_group), Direction.ASC)) # build up where conditions - conditions = conditions if conditions is not None else [] + conditions = list(conditions) if conditions is not None else [] if model_query_settings.conditions is not None: conditions += model_query_settings.conditions @@ -436,7 +436,9 @@ def __get_data_snql( app_id="tsdb.get_data", query=Query( match=Entity(model_dataset.value), - select=(model_query_settings.selected_columns or []) + aggregations, + select=list( + itertools.chain((model_query_settings.selected_columns or []), aggregations) + ), where=where_conds, groupby=[Column(g) for g in groupby] if groupby else None, orderby=orderby, @@ -907,7 +909,7 @@ def get_frequency_totals( tenant_ids=tenant_ids, ) - def flatten_keys(self, items): + def flatten_keys(self, items: Mapping | Sequence | Set) -> tuple[list, Sequence | None]: """ Returns a normalized set of keys based on the various formats accepted by TSDB methods. The input is either just a plain list of keys for the @@ -920,6 +922,6 @@ def flatten_keys(self, items): list(set.union(*(set(v) for v in items.values())) if items else []), ) elif isinstance(items, (Sequence, Set)): - return (items, None) + return (list(items), None) else: raise ValueError("Unsupported type: %s" % (type(items))) diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 683f054cd0f8b9..1a57e851e04973 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1516,9 +1516,9 @@ def get_snuba_translators(filter_keys, is_grouprelease=False): ) )(col, fwd_map) rev = ( - lambda col, trans: lambda row: replace(row, col, trans[row[col]]) - if col in row - else row + lambda col, trans: lambda row: ( + replace(row, col, trans[row[col]]) if col in row else row + ) )(col, rev_map) if fwd is not None: @@ -1529,18 +1529,20 @@ def get_snuba_translators(filter_keys, is_grouprelease=False): # Extra reverse translator for time column. reverse = compose( reverse, - lambda row: replace(row, "time", int(to_timestamp(parse_datetime(row["time"])))) - if "time" in row - else row, + lambda row: ( + replace(row, "time", int(to_timestamp(parse_datetime(row["time"])))) + if "time" in row + else row + ), ) # Extra reverse translator for bucketed_end column. 
reverse = compose( reverse, lambda row: ( replace(row, "bucketed_end", int(to_timestamp(parse_datetime(row["bucketed_end"])))) if "bucketed_end" in row else row ), ) return (forward, reverse) From 3b97429a2a1a6cbd5f8b8e44c87c5ec65b29001e Mon Sep 17 00:00:00 2001 From: Jonas Date: Mon, 5 Feb 2024 14:35:25 -0500 Subject: [PATCH 002/357] ref(focus-visible): drop focus-visible (#64562) [:focus-visible support](https://caniuse.com/?search=focus-visible) has come a long way, and according to our browserslist definition we no longer support IE11, which was the last browser that required the polyfill. I would love for someone to double-check the renaming of the CSS selectors and ensure styling still applies correctly. I've gone through some of the stories and didn't notice a difference. --- package.json | 1 - static/app/bootstrap/commonInitialization.tsx | 2 -- static/app/components/alertLink.tsx | 2 +- static/app/components/button.tsx | 4 ++-- static/app/components/checkbox.tsx | 2 +- static/app/components/codeSnippet.tsx | 2 +- .../app/components/compactSelect/control.tsx | 2 +- .../app/components/compactSelect/styles.tsx | 2 +- static/app/components/footer.tsx | 2 +- .../forms/controls/rangeSlider/slider.tsx | 6 +++--- static/app/components/input.tsx | 2 +- static/app/components/links/styles.tsx | 2 +- static/app/components/radio.tsx | 2 +- static/app/components/sidebar/sidebarItem.tsx | 4 ++-- .../components/sidebar/sidebarMenuItem.tsx | 2 +- static/app/components/switchButton.tsx | 2 +- static/app/components/tabs/tabPanels.tsx | 2 +- .../timeRangeSelector/timePicker.tsx | 2 +- static/app/styles/deprecatedInput.tsx | 2 +- .../app/views/discover/table/cellAction.tsx | 2 +- static/app/views/integrationPipeline/init.tsx | 2 -- .../views/replays/replayTable/tableCell.tsx | 2 +- .../settings/components/settingsNavItem.tsx | 2 +- .../organizationTeams/allTeamsRow.tsx | 2 +- static/less/shared-components.less | 19 +++++++++++++++++-- yarn.lock | 5 ----- 26 files changed, 42 insertions(+), 37 deletions(-) diff --git a/package.json b/package.json index 6fb37a406daa6a..f007ffb3fdccbc 100644 --- a/package.json +++ b/package.json @@ -114,7 +114,6 @@ "echarts-for-react": "3.0.2", "esbuild": "^0.19.10", "focus-trap": "^7.3.1", - "focus-visible": "^5.2.0", "fork-ts-checker-webpack-plugin": "^8.0.0", "framer-motion": "^6.2.8", "fuse.js": "^6.6.2", diff --git a/static/app/bootstrap/commonInitialization.tsx b/static/app/bootstrap/commonInitialization.tsx index 4f0f94e2f2f16f..163b7763df0b6d 100644 --- a/static/app/bootstrap/commonInitialization.tsx +++ b/static/app/bootstrap/commonInitialization.tsx @@ -1,5 +1,3 @@ -import 'focus-visible'; - import {NODE_ENV, UI_DEV_ENABLE_PROFILING} from 'sentry/constants'; import ConfigStore from 'sentry/stores/configStore'; import type {Config} from 'sentry/types'; diff --git a/static/app/components/alertLink.tsx b/static/app/components/alertLink.tsx index a73acc8723b8c3..2cd7ffae4821df 100644 --- a/static/app/components/alertLink.tsx +++ b/static/app/components/alertLink.tsx @@ -90,7 +90,7 @@ const StyledLink = styled( border-radius: 0.25em; transition: 0.2s border-color; - &.focus-visible { + &:focus-visible { outline: none; box-shadow: ${p => p.theme.alert[p.priority].border}7f 0 0 0 2px; } diff --git a/static/app/components/button.tsx b/static/app/components/button.tsx index ce8ad7d4075903..d51595a4b2251f 100644 ---
a/static/app/components/button.tsx +++ b/static/app/components/button.tsx @@ -442,14 +442,14 @@ const getColors = ({ border-color: ${borderless || priority === 'link' ? 'transparent' : borderActive}; } - &.focus-visible { + &:focus-visible { color: ${colorActive || color}; border-color: ${borderActive}; } ` } - &.focus-visible { + &:focus-visible { ${getFocusState()} z-index: 1; } diff --git a/static/app/components/checkbox.tsx b/static/app/components/checkbox.tsx index 2321d4f2b0df08..256fa6a203da60 100644 --- a/static/app/components/checkbox.tsx +++ b/static/app/components/checkbox.tsx @@ -124,7 +124,7 @@ const HiddenInput = styled('input')` padding: 0; cursor: pointer; - &.focus-visible + * { + &:focus-visible + * { ${p => p.checked ? ` diff --git a/static/app/components/codeSnippet.tsx b/static/app/components/codeSnippet.tsx index 013147b717b2b3..18cdcd9136687b 100644 --- a/static/app/components/codeSnippet.tsx +++ b/static/app/components/codeSnippet.tsx @@ -250,7 +250,7 @@ const CopyButton = styled(Button)<{isAlwaysVisible: boolean}>` opacity: 0; div:hover > div > &, /* if Wrapper is hovered */ - &.focus-visible { + &:focus-visible { opacity: 1; } &:hover { diff --git a/static/app/components/compactSelect/control.tsx b/static/app/components/compactSelect/control.tsx index 5c237d3f50cfc7..68c31a6df20710 100644 --- a/static/app/components/compactSelect/control.tsx +++ b/static/app/components/compactSelect/control.tsx @@ -641,7 +641,7 @@ const SearchInput = styled('input')<{visualSize: FormSize}>` } &:focus, - &.focus-visible { + &:focus-visible { outline: none; border-color: ${p => p.theme.focusBorder}; box-shadow: ${p => p.theme.focusBorder} 0 0 0 1px; diff --git a/static/app/components/compactSelect/styles.tsx b/static/app/components/compactSelect/styles.tsx index 35b63ae2551d91..01ef3eda11c99e 100644 --- a/static/app/components/compactSelect/styles.tsx +++ b/static/app/components/compactSelect/styles.tsx @@ -111,7 +111,7 @@ export const SectionToggleButton = styled(Button)<{visible: boolean}>` color: ${p => p.theme.subText}; transition: opacity 0.1s; - &.focus-visible { + &:focus-visible { opacity: 1; pointer-events: all; } diff --git a/static/app/components/footer.tsx b/static/app/components/footer.tsx index ee29184ad59417..6faf0feaf97978 100644 --- a/static/app/components/footer.tsx +++ b/static/app/components/footer.tsx @@ -91,7 +91,7 @@ const RightLinks = styled('div')` const FooterLink = styled(ExternalLink)` color: ${p => p.theme.subText}; - &.focus-visible { + &:focus-visible { outline: none; box-shadow: ${p => p.theme.blue300} 0 2px 0; } diff --git a/static/app/components/forms/controls/rangeSlider/slider.tsx b/static/app/components/forms/controls/rangeSlider/slider.tsx index f0be85c7c339ba..c1b804d2e5fec5 100644 --- a/static/app/components/forms/controls/rangeSlider/slider.tsx +++ b/static/app/components/forms/controls/rangeSlider/slider.tsx @@ -150,19 +150,19 @@ const Slider = styled('input')<{hasLabel: boolean}>` } &:focus::-webkit-slider-thumb, - &.focus-visible::-webkit-slider-thumb { + &:focus-visible::-webkit-slider-thumb { box-shadow: ${p => p.theme.background} 0 0 0 3px, ${p => p.theme.focus} 0 0 0 6px; } &:focus::-moz-range-thumb, - &.focus-visible::-moz-range-thumb { + &:focus-visible::-moz-range-thumb { box-shadow: ${p => p.theme.background} 0 0 0 3px, ${p => p.theme.focus} 0 0 0 6px; } &:focus::-ms-thumb, - &.focus-visible::-ms-thumb { + &:focus-visible::-ms-thumb { box-shadow: ${p => p.theme.background} 0 0 0 3px, ${p => p.theme.focus} 0 0 0 6px; diff --git 
a/static/app/components/input.tsx b/static/app/components/input.tsx index 1b1ca375fa7eb0..9964b8d9435668 100644 --- a/static/app/components/input.tsx +++ b/static/app/components/input.tsx @@ -49,7 +49,7 @@ export const inputStyles = (p: InputStylesProps & {theme: Theme}) => css` } &:focus, - &.focus-visible { + &:focus-visible { outline: none; border-color: ${p.theme.focusBorder}; box-shadow: ${p.theme.focusBorder} 0 0 0 1px; diff --git a/static/app/components/links/styles.tsx b/static/app/components/links/styles.tsx index 50f828035eb784..3893f4a6f26c9f 100644 --- a/static/app/components/links/styles.tsx +++ b/static/app/components/links/styles.tsx @@ -3,7 +3,7 @@ import type {Theme} from '@emotion/react'; export const linkStyles = ({disabled, theme}: {theme: Theme; disabled?: boolean}) => ` border-radius: ${theme.linkBorderRadius}; - &.focus-visible { + &:focus-visible { box-shadow: ${theme.linkFocus} 0 0 0 2px; text-decoration: none; outline: none; diff --git a/static/app/components/radio.tsx b/static/app/components/radio.tsx index e3ff9ba7fc3ce4..0789e626cc8772 100644 --- a/static/app/components/radio.tsx +++ b/static/app/components/radio.tsx @@ -40,7 +40,7 @@ const Radio = styled('input')` margin: 0 !important; &:focus, - &.focus-visible { + &:focus-visible { outline: none; border-color: ${p => p.theme.focusBorder}; box-shadow: ${p => p.theme.focusBorder} 0 0 0 1px; diff --git a/static/app/components/sidebar/sidebarItem.tsx b/static/app/components/sidebar/sidebarItem.tsx index 3020947d55017b..2768a89ce10773 100644 --- a/static/app/components/sidebar/sidebarItem.tsx +++ b/static/app/components/sidebar/sidebarItem.tsx @@ -327,7 +327,7 @@ const StyledSidebarItem = styled(Link, { } &:hover, - &.focus-visible { + &:focus-visible { color: ${p => p.theme.white}; } @@ -335,7 +335,7 @@ const StyledSidebarItem = styled(Link, { outline: none; } - &.focus-visible { + &:focus-visible { outline: none; box-shadow: 0 0 0 2px ${p => p.theme.purple300}; } diff --git a/static/app/components/sidebar/sidebarMenuItem.tsx b/static/app/components/sidebar/sidebarMenuItem.tsx index bb29a3427d391b..26884808829be7 100644 --- a/static/app/components/sidebar/sidebarMenuItem.tsx +++ b/static/app/components/sidebar/sidebarMenuItem.tsx @@ -33,7 +33,7 @@ const menuItemStyles = ( &:hover, &:active, - &.focus-visible { + &:focus-visible { background: ${p.theme.backgroundSecondary}; color: ${p.theme.textColor}; outline: none; diff --git a/static/app/components/switchButton.tsx b/static/app/components/switchButton.tsx index fa965dfeff7e1e..0959b1cd6fb6f0 100644 --- a/static/app/components/switchButton.tsx +++ b/static/app/components/switchButton.tsx @@ -84,7 +84,7 @@ const SwitchButton = styled('button')` } &:focus, - &.focus-visible { + &:focus-visible { outline: none; border-color: ${p => p.theme.focusBorder}; box-shadow: ${p => p.theme.focusBorder} 0 0 0 1px; diff --git a/static/app/components/tabs/tabPanels.tsx b/static/app/components/tabs/tabPanels.tsx index fda28493d69412..38741ed667a49b 100644 --- a/static/app/components/tabs/tabPanels.tsx +++ b/static/app/components/tabs/tabPanels.tsx @@ -89,7 +89,7 @@ const TabPanelWrap = styled('div', {shouldForwardProp: tabsShouldForwardProp})<{ ${p => (p.orientation === 'horizontal' ? 
`height: 100%;` : `width: 100%;`)}; - &.focus-visible { + &:focus-visible { outline: none; box-shadow: inset ${p => p.theme.focusBorder} 0 0 0 1px, diff --git a/static/app/components/timeRangeSelector/timePicker.tsx b/static/app/components/timeRangeSelector/timePicker.tsx index 735f9c72497132..7e4701b22fa441 100644 --- a/static/app/components/timeRangeSelector/timePicker.tsx +++ b/static/app/components/timeRangeSelector/timePicker.tsx @@ -122,7 +122,7 @@ const Input = styled('input')` box-shadow: none; font-variant-numeric: tabular-nums; - &&.focus-visible { + &&:focus-visible { outline: none; border-color: ${p => p.theme.focusBorder}; box-shadow: 0 0 0 1px ${p => p.theme.focusBorder}; diff --git a/static/app/styles/deprecatedInput.tsx b/static/app/styles/deprecatedInput.tsx index d6174d24830fd8..3ce44d5142992a 100644 --- a/static/app/styles/deprecatedInput.tsx +++ b/static/app/styles/deprecatedInput.tsx @@ -75,7 +75,7 @@ const inputStyles = (props: Props) => css` } &:focus, - &.focus-visible { + &:focus-visible { outline: none; border-color: ${props.theme.focusBorder}; box-shadow: ${props.theme.focusBorder} 0 0 0 1px; diff --git a/static/app/views/discover/table/cellAction.tsx b/static/app/views/discover/table/cellAction.tsx index b1374b66f39780..63ee5551521529 100644 --- a/static/app/views/discover/table/cellAction.tsx +++ b/static/app/views/discover/table/cellAction.tsx @@ -312,7 +312,7 @@ const ActionMenuTrigger = styled(Button)` opacity: 0; transition: opacity 0.1s; - &.focus-visible, + &:focus-visible, &[aria-expanded='true'], ${Container}:hover & { opacity: 1; diff --git a/static/app/views/integrationPipeline/init.tsx b/static/app/views/integrationPipeline/init.tsx index 86cc0cd4becfed..d7ecef5addfdeb 100644 --- a/static/app/views/integrationPipeline/init.tsx +++ b/static/app/views/integrationPipeline/init.tsx @@ -1,5 +1,3 @@ -import 'focus-visible'; - import {initializePipelineView} from 'sentry/bootstrap/initializePipelineView'; export function init() { diff --git a/static/app/views/replays/replayTable/tableCell.tsx b/static/app/views/replays/replayTable/tableCell.tsx index 844bd5a7850f38..7d1a0e72efb895 100644 --- a/static/app/views/replays/replayTable/tableCell.tsx +++ b/static/app/views/replays/replayTable/tableCell.tsx @@ -680,7 +680,7 @@ const ActionMenuTrigger = styled(Button)` align-items: center; opacity: 0; transition: opacity 0.1s; - &.focus-visible, + &:focus-visible, &[aria-expanded='true'], ${Container}:hover & { opacity: 1; diff --git a/static/app/views/settings/components/settingsNavItem.tsx b/static/app/views/settings/components/settingsNavItem.tsx index 969c1a6f1bcb99..ff4baec32fbf94 100644 --- a/static/app/views/settings/components/settingsNavItem.tsx +++ b/static/app/views/settings/components/settingsNavItem.tsx @@ -73,7 +73,7 @@ const StyledNavItem = styled(RouterLink)` outline: none; } - &.focus-visible { + &:focus-visible { outline: none; background: ${p => p.theme.backgroundSecondary}; padding-left: 15px; diff --git a/static/app/views/settings/organizationTeams/allTeamsRow.tsx b/static/app/views/settings/organizationTeams/allTeamsRow.tsx index 7c45f259c26fbd..e05ce136927224 100644 --- a/static/app/views/settings/organizationTeams/allTeamsRow.tsx +++ b/static/app/views/settings/organizationTeams/allTeamsRow.tsx @@ -270,7 +270,7 @@ class AllTeamsRow extends Component { const TeamLink = styled(Link)` display: inline-block; - &.focus-visible { + &:focus-visible { margin: -${space(1)}; padding: ${space(1)}; background: #f2eff5; diff --git 
a/static/less/shared-components.less b/static/less/shared-components.less index 205d9cdf8badfc..e32f2b1a5db5ec 100644 --- a/static/less/shared-components.less +++ b/static/less/shared-components.less @@ -1,12 +1,14 @@ dl.flat { margin-bottom: 1em; } + dl.flat dt { float: left; clear: left; width: 70px; padding-bottom: 5px; } + dl.flat dd { padding-bottom: 5px; } @@ -23,12 +25,15 @@ dl.flat dd { padding-left: 30px; padding-right: 30px; } + &.flex-justify-right { justify-content: flex-end; } + &.flex-vertically-centered { align-items: center; } + > * { flex-grow: 1; } @@ -361,6 +366,7 @@ table.table.key-value { } } } + /** * Avatar * ============================================================================ @@ -437,16 +443,19 @@ table.table.key-value { -webkit-transform: rotate(0deg); transform: rotate(0deg); } + 100% { -webkit-transform: rotate(360deg); transform: rotate(360deg); } } + @keyframes loading { 0% { -webkit-transform: rotate(0deg); transform: rotate(0deg); } + 100% { -webkit-transform: rotate(360deg); transform: rotate(360deg); @@ -550,6 +559,7 @@ table.table.key-value { a { color: @gray-dark; + &:hover { color: @gray-darker; } @@ -665,6 +675,7 @@ table.table.key-value { height: auto; font-size: 14px; } + .with-padding { padding: 10px 10px 0; } @@ -742,6 +753,7 @@ header + .alert { p:last-child { margin: 0; } + .icon { float: left; margin: 3px 8px 0 3px; @@ -808,6 +820,7 @@ header + .alert { .alert-block ul { padding-left: 20px; } + // ugh this is awful .alert-block p + ul, .alert-block p + p, @@ -865,7 +878,9 @@ header + .alert { top: 32px; border: none; border-radius: 2px; - box-shadow: 0 0 0 1px rgba(52, 60, 69, 0.2), 0 1px 3px rgba(70, 82, 98, 0.25); + box-shadow: + 0 0 0 1px rgba(52, 60, 69, 0.2), + 0 1px 3px rgba(70, 82, 98, 0.25); -webkit-background-clip: padding-box; -moz-background-clip: padding; background-clip: padding-box; @@ -1081,7 +1096,7 @@ header + .alert { color: @gray-darker; } - &.focus-visible { + &:focus-visible { text-decoration: underline; outline: none; } diff --git a/yarn.lock b/yarn.lock index d0027ce8b5ef85..f53eb6889aa57e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6635,11 +6635,6 @@ focus-trap@^7.3.1: dependencies: tabbable "^6.1.1" -focus-visible@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/focus-visible/-/focus-visible-5.2.0.tgz#3a9e41fccf587bd25dcc2ef045508284f0a4d6b3" - integrity sha512-Rwix9pBtC1Nuy5wysTmKy+UjbDJpIfg8eHjw0rjZ1mX4GNLz1Bmd16uDpI3Gk1i70Fgcs8Csg2lPm8HULFg9DQ== - follow-redirects@^1.0.0, follow-redirects@^1.15.4: version "1.15.5" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.5.tgz#54d4d6d062c0fa7d9d17feb008461550e3ba8020" From f8ad4ec1d5b90e74381ca50987dc436fb5c8cbf1 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 5 Feb 2024 14:01:06 -0600 Subject: [PATCH 003/357] feat(ddm): Add support for spans correlation source (#64503) This implements the simplest approach to correlate the span duration and span exclusive time metrics to sample events by looking up the segment id in the spans entity and then fetching the segment info. 
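Roughly, the lookup amounts to one Snuba query over the spans entity, with the rows then bucketed by their segment (transaction) id in Python. Below is a condensed sketch of that idea, not verbatim code from this patch; variable names like `project_ids`, `start`, `end`, and `org_id` are assumed to be resolved by the caller, and it shows only the `duration` column (the `exclusive_time` case is symmetric):

# Sketch only; see _get_segments_spans in the diff below for the real thing.
from snuba_sdk import Column, Condition, Entity, Limit, Op, Query, Request

query = Query(
    match=Entity(EntityKey.Spans.value),
    # select the segment (transaction) id plus the per-span value we correlate on
    select=[
        Column("transaction_id"),
        Column("span_id"),
        Column("duration"),
        Column("timestamp"),
    ],
    where=[
        Condition(Column("project_id"), Op.IN, project_ids),
        Condition(Column("timestamp"), Op.GTE, start),
        Condition(Column("timestamp"), Op.LT, end),
    ],
    limit=Limit(SNUBA_QUERY_LIMIT),
)
request = Request(
    dataset=Dataset.SpansIndexed.value,
    app_id="metrics",
    query=query,
    tenant_ids={"organization_id": org_id},
)
data = raw_snql_query(request, Referrer.API_DDM_FETCH_SPANS.value, use_cache=True)["data"]

# bucket the span rows by segment so each segment can later be enriched
# with its per-span metric summaries and span details
segments_spans = {}
for row in data:
    segments_spans.setdefault(row["transaction_id"], []).append(
        (row["span_id"], row["duration"], row["timestamp"])
    )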
--- .../querying/metadata/metrics_correlations.py | 123 ++++++++++++++++++ src/sentry/snuba/metrics/naming_layer/mri.py | 2 +- src/sentry/testutils/cases.py | 3 +- .../endpoints/test_organization_ddm_meta.py | 44 +++++++ .../metadata/test_metrics_correlations.py | 91 +++++++++++++ 5 files changed, 261 insertions(+), 2 deletions(-) create mode 100644 tests/sentry/sentry_metrics/querying/metadata/test_metrics_correlations.py diff --git a/src/sentry/sentry_metrics/querying/metadata/metrics_correlations.py b/src/sentry/sentry_metrics/querying/metadata/metrics_correlations.py index 53ab91025a7072..dfc850b560380e 100644 --- a/src/sentry/sentry_metrics/querying/metadata/metrics_correlations.py +++ b/src/sentry/sentry_metrics/querying/metadata/metrics_correlations.py @@ -40,6 +40,7 @@ from sentry.snuba.dataset import Dataset, EntityKey from sentry.snuba.metrics.naming_layer.mri import ( ParsedMRI, + SpanMRI, TransactionMRI, is_measurement, is_mri, @@ -498,6 +499,127 @@ def _get_segments( ) +class SpansDurationCorrelationsSource(CorrelationsSource): + @classmethod + def supports(cls, metric_mri: str) -> bool: + return cls.get_span_column(metric_mri) is not None + + @classmethod + def get_span_column(cls, metric_mri: str) -> Column | None: + if metric_mri == SpanMRI.SELF_TIME.value: + return Column("exclusive_time") + + if metric_mri == SpanMRI.DURATION.value: + return Column("duration") + + return None + + def _get_segments( + self, + metric_mri: str, + conditions: QueryConditions, + start: datetime, + end: datetime, + min_value: float | None, + max_value: float | None, + ) -> Sequence[Segment]: + segments_spans = self._get_segments_spans( + metric_mri, conditions, start, end, min_value, max_value + ) + + if segments_spans: + segments = _get_segments( + where=[Condition(Column("transaction_id"), Op.IN, list(segments_spans.keys()))], + start=start, + end=end, + organization=self.organization, + projects=self.projects, + ) + else: + segments = [] + + extended_segments = [] + for segment in segments: + metric_summaries = [] + spans_details = [] + for span_id, duration, timestamp in segments_spans.get(segment.segment_id, []): + # the span duration and self time metric happens once per span, so we can + # hard code what the metric summary object here + metric_summaries.append( + MetricSummary( + span_id=span_id, + min=duration, + max=duration, + sum=duration, + count=1, + ) + ) + + spans_details.append( + SpanDetail(span_id=span_id, span_duration=duration, span_timestamp=timestamp) + ) + + extended_segments.append( + segment.add_metric_summaries(metric_summaries).add_spans_details(spans_details) + ) + + return extended_segments + + def _get_segments_spans( + self, + metric_mri, + conditions: QueryConditions, + start: datetime, + end: datetime, + min_value: float | None, + max_value: float | None, + ) -> Mapping[str, Sequence[tuple[str, int, datetime]]]: + column = self.get_span_column(metric_mri) + assert column is not None + + where: list[QueryCondition] = [ + Condition(Column("project_id"), Op.IN, [project.id for project in self.projects]), + Condition(Column("timestamp"), Op.GTE, start), + Condition(Column("timestamp"), Op.LT, end), + ] + + where.extend(conditions.get()) + + if min_value: + where += [Condition(column, Op.GTE, min_value)] + if max_value: + where += [Condition(column, Op.LTE, max_value)] + + query = Query( + match=Entity(EntityKey.Spans.value), + select=[ + Column("transaction_id"), + Column("span_id"), + column, + Column("timestamp"), + ], + where=where, + 
limit=Limit(SNUBA_QUERY_LIMIT), + ) + + request = Request( + dataset=Dataset.SpansIndexed.value, + app_id="metrics", + query=query, + tenant_ids={"organization_id": self.organization.id}, + ) + + data = raw_snql_query(request, Referrer.API_DDM_FETCH_SPANS.value, use_cache=True)["data"] + + segments_spans: dict[str, list[tuple[str, int, datetime]]] = {} + for value in data: + segments_spans.setdefault(value["transaction_id"], []).append( + (value["span_id"], value[column.name], value["timestamp"]) + ) + + return segments_spans + + def _get_segments_aggregates_query( where: ConditionGroup | None, start: datetime, @@ -657,6 +779,7 @@ def _get_segments( CORRELATIONS_SOURCES = [ MeasurementsCorrelationsSource, TransactionDurationCorrelationsSource, + SpansDurationCorrelationsSource, MetricsSummariesCorrelationsSource, ] diff --git a/src/sentry/snuba/metrics/naming_layer/mri.py b/src/sentry/snuba/metrics/naming_layer/mri.py index 337c5660e614c3..33f40a35322e78 100644 --- a/src/sentry/snuba/metrics/naming_layer/mri.py +++ b/src/sentry/snuba/metrics/naming_layer/mri.py @@ -325,7 +325,7 @@ def is_custom_measurement(parsed_mri: ParsedMRI) -> bool: and parsed_mri.name.startswith("measurements.") and # Iterate through the transaction MRI and check that this parsed_mri isn't in there - parsed_mri.mri_string not in [mri.value for mri in TransactionMRI.__members__.values()] + all(parsed_mri.mri_string != mri.value for mri in TransactionMRI.__members__.values()) ) diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 1118eac3becaff..7302b92d3e28fb 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -1438,6 +1438,7 @@ def store_segment( profile_id: str | None = None, transaction: str | None = None, duration: int = 10, + exclusive_time: int = 5, tags: Mapping[str, Any] | None = None, measurements: Mapping[str, int | float] | None = None, timestamp: datetime | None = None, @@ -1452,7 +1453,7 @@ def store_segment( "span_id": span_id, "trace_id": trace_id, "duration_ms": int(duration), - "exclusive_time_ms": 5, + "exclusive_time_ms": int(exclusive_time), "is_segment": True, "received": datetime.now(tz=timezone.utc).timestamp(), "start_timestamp_ms": int(timestamp.timestamp() * 1000), diff --git a/tests/sentry/api/endpoints/test_organization_ddm_meta.py b/tests/sentry/api/endpoints/test_organization_ddm_meta.py index 3131ccd59d841b..1185881760a740 100644 --- a/tests/sentry/api/endpoints/test_organization_ddm_meta.py +++ b/tests/sentry/api/endpoints/test_organization_ddm_meta.py @@ -1088,3 +1088,47 @@ def test_get_metric_spans_with_measurement_with_zero_edge_case(self): metric_spans = response.data["metricSpans"] # We expect to only have returned that span with that measurement, even if the value is 0. assert len(metric_spans) == 1 + + def test_get_metric_span_self_time(self): + mri = "d:spans/exclusive_time@millisecond" + + self.store_segment( + project_id=self.project.id, + timestamp=before_now(minutes=5), + trace_id=uuid.uuid4().hex, + transaction_id=uuid.uuid4().hex, + ) + + response = self.get_success_response( + self.organization.slug, + metric=[mri], + project=[self.project.id], + statsPeriod="1d", + metricSpans="true", + ) + + metric_spans = response.data["metricSpans"] + # We expect to only have returned that span with that measurement, even if the value is 0. 
+ assert len(metric_spans) == 1 + + def test_get_metric_span_duration(self): + mri = "d:spans/duration@millisecond" + + self.store_segment( + project_id=self.project.id, + timestamp=before_now(minutes=5), + trace_id=uuid.uuid4().hex, + transaction_id=uuid.uuid4().hex, + ) + + response = self.get_success_response( + self.organization.slug, + metric=[mri], + project=[self.project.id], + statsPeriod="1d", + metricSpans="true", + ) + + metric_spans = response.data["metricSpans"] + # We expect to only have returned that span with that measurement, even if the value is 0. + assert len(metric_spans) == 1 diff --git a/tests/sentry/sentry_metrics/querying/metadata/test_metrics_correlations.py b/tests/sentry/sentry_metrics/querying/metadata/test_metrics_correlations.py new file mode 100644 index 00000000000000..3bd06174145dc5 --- /dev/null +++ b/tests/sentry/sentry_metrics/querying/metadata/test_metrics_correlations.py @@ -0,0 +1,91 @@ +import pytest + +from sentry.sentry_metrics.querying.metadata.metrics_correlations import ( + MeasurementsCorrelationsSource, + MetricsSummariesCorrelationsSource, + SpansDurationCorrelationsSource, + TransactionDurationCorrelationsSource, +) +from sentry.snuba.metrics.naming_layer.mri import ( + ErrorsMRI, + SessionMRI, + SpanMRI, + TransactionMRI, + parse_mri, +) + + +def assign_correlation_source_for_transaction_mri(mri): + if not isinstance(mri, TransactionMRI): + raise ValueError(f"Non TransactionMRI: {mri.value}") + + if mri == TransactionMRI.DURATION: + return TransactionDurationCorrelationsSource + + parsed_mri = parse_mri(mri.value) + if parsed_mri is None: + raise ValueError(f"Illegal MRI: {mri.value}") + + if parsed_mri.name.startswith("measurements."): + return MeasurementsCorrelationsSource + + if parsed_mri.namespace == "spans": + if mri == TransactionMRI.SPAN_SELF_TIME: + return SpansDurationCorrelationsSource + if mri == TransactionMRI.SPAN_DURATION: + return SpansDurationCorrelationsSource + return None + + return MetricsSummariesCorrelationsSource + + +@pytest.mark.parametrize( + ["correlation_source"], + [ + pytest.param(MeasurementsCorrelationsSource, id="measurements"), + pytest.param(SpansDurationCorrelationsSource, id="span duration"), + pytest.param(TransactionDurationCorrelationsSource, id="transaction duration"), + ], +) +@pytest.mark.parametrize( + ["mri", "expected_source"], + [ + # ========== SessionMRI ========== + *[pytest.param(mri.value, MetricsSummariesCorrelationsSource) for mri in SessionMRI], + # ========== Transaction MRI ========== + *[ + pytest.param( + mri.value, + assign_correlation_source_for_transaction_mri(mri), + ) + for mri in TransactionMRI + ], + # ========== Span MRI ========== + *[ + pytest.param( + mri.value, + SpansDurationCorrelationsSource + if mri is SpanMRI.SELF_TIME or mri is SpanMRI.DURATION + else MetricsSummariesCorrelationsSource, + marks=pytest.mark.skipif( + mri.value.startswith("e:spans_light/"), + reason="Unexpected namespace: spans_light", + ), + ) + for mri in SpanMRI + ], + # ========== Error MRI ========== + pytest.param(ErrorsMRI.EVENT_INGESTED.value, MetricsSummariesCorrelationsSource), + # ========== Custom MRI ========== + pytest.param( + "d:custom/sentry.process_profile.track_outcome@second", + MetricsSummariesCorrelationsSource, + ), + ], +) +def test_correlation_source_supports_mri(correlation_source, mri, expected_source): + supported = expected_source is correlation_source + assert correlation_source.supports(mri) == supported + + # Treat metrics summary as a fallback, ie it should support 
all valid MRIs + assert MetricsSummariesCorrelationsSource.supports(mri) From 63e1baee95df770dff0e14f6c7c38cbf55e55e61 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Mon, 5 Feb 2024 12:36:55 -0800 Subject: [PATCH 004/357] chore(staff): Let staff access user permission endpoints (#64440) Let staff access endpoints to manage user permissions Requires https://github.com/getsentry/sentry/pull/64429 --- .../api/endpoints/user_permission_details.py | 4 +- .../endpoints/test_user_permission_details.py | 165 +++++++++++++----- 2 files changed, 127 insertions(+), 42 deletions(-) diff --git a/src/sentry/api/endpoints/user_permission_details.py b/src/sentry/api/endpoints/user_permission_details.py index 82be3fd625e16b..d174f15e03135c 100644 --- a/src/sentry/api/endpoints/user_permission_details.py +++ b/src/sentry/api/endpoints/user_permission_details.py @@ -10,7 +10,7 @@ from sentry.api.base import control_silo_endpoint from sentry.api.bases.user import UserEndpoint from sentry.api.decorators import sudo_required -from sentry.api.permissions import SuperuserPermission +from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission from sentry.models.userpermission import UserPermission audit_logger = logging.getLogger("sentry.audit.user") @@ -24,7 +24,7 @@ class UserPermissionDetailsEndpoint(UserEndpoint): "POST": ApiPublishStatus.PRIVATE, } owner = ApiOwner.ENTERPRISE - permission_classes = (SuperuserPermission,) + permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,) def get(self, request: Request, user, permission_name) -> Response: # XXX(dcramer): we may decide to relax "view" permission over time, but being more restrictive by default diff --git a/tests/sentry/api/endpoints/test_user_permission_details.py b/tests/sentry/api/endpoints/test_user_permission_details.py index 6594accdea789a..26449ccab845cf 100644 --- a/tests/sentry/api/endpoints/test_user_permission_details.py +++ b/tests/sentry/api/endpoints/test_user_permission_details.py @@ -1,5 +1,9 @@ +from unittest.mock import patch + +from sentry.api.permissions import StaffPermission from sentry.models.userpermission import UserPermission from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers import with_feature from sentry.testutils.silo import control_silo_test @@ -9,68 +13,149 @@ class UserDetailsTest(APITestCase): def setUp(self): super().setUp() - self.user = self.create_user(is_superuser=True) - self.login_as(user=self.user, superuser=True) - self.add_user_permission(self.user, "users.admin") + self.superuser = self.create_user(is_superuser=True) + self.add_user_permission(self.superuser, "users.admin") - def test_fails_without_superuser(self): - self.user = self.create_user(is_superuser=False) - self.login_as(self.user) + self.staff_user = self.create_user(is_staff=True) + self.add_user_permission(self.staff_user, "users.admin") - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 403 + self.normal_user = self.create_user(is_superuser=False, is_staff=False) - self.user.update(is_superuser=True) - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 403 + # For each request method testcase, ensure regular users fail + def test_fails_without_superuser_or_staff(self): + self.login_as(self.normal_user) + response = self.get_response("me", "broadcasts.admin") + assert response.status_code == 403 + # For each request method testcase, ensure superuser+staff without users.admin fail def 
test_fails_without_users_admin_permission(self): - self.user = self.create_user(is_superuser=True) - self.login_as(self.user, superuser=True) - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 403 + self.superuser_and_staff = self.create_user(is_superuser=True, is_staff=True) + self.login_as(self.superuser_and_staff, superuser=True, staff=True) + + # We are active superuser and staff but lack the users.admin permission + response = self.get_response("me", "broadcasts.admin", status_code=403) + assert response.status_code == 403 class UserPermissionDetailsGetTest(UserDetailsTest): - def test_with_permission(self): - UserPermission.objects.create(user=self.user, permission="broadcasts.admin") - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 204 + method = "GET" + + def test_superuser_with_permission(self): + self.login_as(self.superuser, superuser=True) + self.add_user_permission(self.superuser, "broadcasts.admin") + self.get_success_response("me", "broadcasts.admin", status_code=204) + + def test_superuser_without_permission(self): + self.login_as(self.superuser, superuser=True) + self.get_error_response("me", "broadcasts.admin", status_code=404) + + @with_feature("auth:enterprise-staff-cookie") + @patch.object(StaffPermission, "has_permission", wraps=StaffPermission().has_permission) + def test_staff_with_permission(self, mock_has_permission): + self.login_as(self.staff_user, staff=True) + self.add_user_permission(self.staff_user, "broadcasts.admin") + + self.get_success_response("me", "broadcasts.admin", status_code=204) + # ensure we fail the scope check and call is_active_staff + assert mock_has_permission.call_count == 1 + + @with_feature("auth:enterprise-staff-cookie") + @patch.object(StaffPermission, "has_permission", wraps=StaffPermission().has_permission) + def test_staff_without_permission(self, mock_has_permission): + self.login_as(self.staff_user, staff=True) - def test_without_permission(self): - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 404 + self.get_error_response("me", "broadcasts.admin", status_code=404) + # ensure we fail the scope check and call is_active_staff + assert mock_has_permission.call_count == 1 class UserPermissionDetailsPostTest(UserDetailsTest): method = "POST" - def test_with_permission(self): - UserPermission.objects.create(user=self.user, permission="broadcasts.admin") - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 410 - assert UserPermission.objects.filter(user=self.user, permission="broadcasts.admin").exists() + def test_superuser_with_permission(self): + self.login_as(self.superuser, superuser=True) - def test_without_permission(self): - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 201 - assert UserPermission.objects.filter(user=self.user, permission="broadcasts.admin").exists() + self.get_success_response("me", "broadcasts.admin", status_code=201) + assert UserPermission.objects.filter( + user=self.superuser, permission="broadcasts.admin" + ).exists() + + def test_superuser_duplicate_permission(self): + self.login_as(self.superuser, superuser=True) + self.add_user_permission(self.superuser, "broadcasts.admin") + + self.get_error_response("me", "broadcasts.admin", status_code=410) + assert UserPermission.objects.filter( + user=self.superuser, permission="broadcasts.admin" + ).exists() + + @with_feature("auth:enterprise-staff-cookie") + @patch.object(StaffPermission, "has_permission", 
wraps=StaffPermission().has_permission) + def test_staff_with_permission(self, mock_has_permission): + self.login_as(self.staff_user, staff=True) + + self.get_success_response("me", "broadcasts.admin", status_code=201) + assert UserPermission.objects.filter( + user=self.staff_user, permission="broadcasts.admin" + ).exists() + # ensure we fail the scope check and call is_active_staff + assert mock_has_permission.call_count == 1 + + @with_feature("auth:enterprise-staff-cookie") + @patch.object(StaffPermission, "has_permission", wraps=StaffPermission().has_permission) + def test_staff_duplicate_permission(self, mock_has_permission): + self.login_as(self.staff_user, staff=True) + self.add_user_permission(self.staff_user, "broadcasts.admin") + + self.get_error_response("me", "broadcasts.admin", status_code=410) + assert UserPermission.objects.filter( + user=self.staff_user, permission="broadcasts.admin" + ).exists() + # ensure we fail the scope check and call is_active_staff + assert mock_has_permission.call_count == 1 class UserPermissionDetailsDeleteTest(UserDetailsTest): method = "DELETE" - def test_with_permission(self): - UserPermission.objects.create(user=self.user, permission="broadcasts.admin") - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 204 + def test_superuser_with_permission(self): + self.login_as(self.superuser, superuser=True) + self.add_user_permission(self.superuser, "broadcasts.admin") + + self.get_success_response("me", "broadcasts.admin", status_code=204) + assert not UserPermission.objects.filter( + user=self.superuser, permission="broadcasts.admin" + ).exists() + + def test_superuser_without_permission(self): + self.login_as(self.superuser, superuser=True) + + self.get_error_response("me", "broadcasts.admin", status_code=404) + assert not UserPermission.objects.filter( + user=self.superuser, permission="broadcasts.admin" + ).exists() + + @with_feature("auth:enterprise-staff-cookie") + @patch.object(StaffPermission, "has_permission", wraps=StaffPermission().has_permission) + def test_staff_with_permission(self, mock_has_permission): + self.login_as(self.staff_user, staff=True) + self.add_user_permission(self.staff_user, "broadcasts.admin") + + self.get_success_response("me", "broadcasts.admin", status_code=204) assert not UserPermission.objects.filter( - user=self.user, permission="broadcasts.admin" + user=self.staff_user, permission="broadcasts.admin" ).exists() + # ensure we fail the scope check and call is_active_staff + assert mock_has_permission.call_count == 1 + + @with_feature("auth:enterprise-staff-cookie") + @patch.object(StaffPermission, "has_permission", wraps=StaffPermission().has_permission) + def test_staff_without_permission(self, mock_has_permission): + self.login_as(self.staff_user, staff=True) - def test_without_permission(self): - resp = self.get_response("me", "broadcasts.admin") - assert resp.status_code == 404 + self.get_error_response("me", "broadcasts.admin", status_code=404) assert not UserPermission.objects.filter( - user=self.user, permission="broadcasts.admin" + user=self.staff_user, permission="broadcasts.admin" ).exists() + # ensure we fail the scope check and call is_active_staff + assert mock_has_permission.call_count == 1 From c1ef4e5d601eacaaa6cd70bf6b43b18a59ac1ed5 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Mon, 5 Feb 2024 13:01:00 -0800 Subject: [PATCH 005/357] fix(replays): use issue platform for rage click issues (#64594) Within the replay counts logic, this changes the conditional so we only use
discover for error issue types and issue platform for everything else --- .../views/issueDetails/groupReplays/useReplaysFromIssue.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/app/views/issueDetails/groupReplays/useReplaysFromIssue.tsx b/static/app/views/issueDetails/groupReplays/useReplaysFromIssue.tsx index cf2fb4c6441ccb..a815da9d8a4af6 100644 --- a/static/app/views/issueDetails/groupReplays/useReplaysFromIssue.tsx +++ b/static/app/views/issueDetails/groupReplays/useReplaysFromIssue.tsx @@ -26,8 +26,9 @@ export default function useReplayFromIssue({ const [fetchError, setFetchError] = useState(); + // use Discover for errors and Issue Platform for everything else const dataSource = - group.issueCategory === IssueCategory.PERFORMANCE ? 'search_issues' : 'discover'; + group.issueCategory === IssueCategory.ERROR ? 'discover' : 'search_issues'; const fetchReplayIds = useCallback(async () => { try { From 1be2440a4c5c63283cd2139c8c5ed9c6ccfe23d1 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Mon, 5 Feb 2024 16:03:44 -0500 Subject: [PATCH 006/357] fix(ui): Use preloaded data instead of redownloading org details (#64492) Because we initialized the useRef() to false, we never used the preloaded data that was fetched. Org details, teams, and projects were going to control silo servers, resulting in increased latency. With these changes OrganizationContext will now always use preloaded data if it has resolved. Additional requests will still be made if the org slugs differ or the preloaded data hasn't resolved. --- static/app/actionCreators/organization.tsx | 20 +++++++++----------- static/app/utils/getPreloadedData.ts | 4 ++-- static/app/views/organizationContext.tsx | 5 +---- 3 files changed, 12 insertions(+), 17 deletions(-) diff --git a/static/app/actionCreators/organization.tsx b/static/app/actionCreators/organization.tsx index 84961440895b79..c65194358b6232 100644 --- a/static/app/actionCreators/organization.tsx +++ b/static/app/actionCreators/organization.tsx @@ -19,7 +19,7 @@ import parseLinkHeader from 'sentry/utils/parseLinkHeader'; async function fetchOrg( api: Client, slug: string, - isInitialFetch?: boolean + usePreload?: boolean ): Promise { const [org] = await getPreloadedDataPromise( 'organization', @@ -31,7 +31,7 @@ async function fetchOrg( includeAllArgs: true, query: {detailed: 0}, }), - isInitialFetch + usePreload ); if (!org) { @@ -53,7 +53,7 @@ async function fetchOrg( async function fetchProjectsAndTeams( slug: string, - isInitialFetch?: boolean + usePreload?: boolean ): Promise< [ [Project[], string | undefined, XMLHttpRequest | ResponseMeta | undefined], @@ -76,7 +76,7 @@ async function fetchProjectsAndTeams( collapse: 'latestDeploys', }, }), - isInitialFetch + usePreload ); const teamsPromise = getPreloadedDataPromise( @@ -88,7 +88,7 @@ async function fetchProjectsAndTeams( uncancelableApi.requestPromise(`/organizations/${slug}/teams/`, { includeAllArgs: true, }), - isInitialFetch + usePreload ); try { @@ -117,12 +117,13 @@ async function fetchProjectsAndTeams( * @param slug The organization slug * @param silent Should we silently update the organization (do not clear the * current organization in the store) + * @param usePreload Should the preloaded data be used if available?
*/ export function fetchOrganizationDetails( api: Client, slug: string, silent: boolean, - isInitialFetch?: boolean + usePreload?: boolean ) { if (!silent) { OrganizationStore.reset(); @@ -133,7 +134,7 @@ export function fetchOrganizationDetails( const loadOrganization = async () => { try { - await fetchOrg(api, slug, isInitialFetch); + await fetchOrg(api, slug, usePreload); } catch (err) { if (!err) { return; @@ -161,10 +162,7 @@ export function fetchOrganizationDetails( }; const loadTeamsAndProjects = async () => { - const [[projects], [teams, , resp]] = await fetchProjectsAndTeams( - slug, - isInitialFetch - ); + const [[projects], [teams, , resp]] = await fetchProjectsAndTeams(slug, usePreload); ProjectsStore.loadInitialData(projects ?? []); diff --git a/static/app/utils/getPreloadedData.ts b/static/app/utils/getPreloadedData.ts index 4600209cbed46d..934e52d3a07dc8 100644 --- a/static/app/utils/getPreloadedData.ts +++ b/static/app/utils/getPreloadedData.ts @@ -2,12 +2,12 @@ export async function getPreloadedDataPromise( name: string, slug: string, fallback: () => Promise, - isInitialFetch?: boolean + usePreload?: boolean ) { try { const data = (window as any).__sentry_preload; if ( - !isInitialFetch || + !usePreload || !data || !data.orgSlug || data.orgSlug.toLowerCase() !== slug.toLowerCase() || diff --git a/static/app/views/organizationContext.tsx b/static/app/views/organizationContext.tsx index 2e8b7528f7ab52..03475b9a473615 100644 --- a/static/app/views/organizationContext.tsx +++ b/static/app/views/organizationContext.tsx @@ -83,8 +83,6 @@ export function OrganizationContextProvider({children}: Props) { const {organizations} = useLegacyStore(OrganizationsStore); const {organization, error} = useLegacyStore(OrganizationStore); - const hasMadeFirstFetch = useRef(false); - const lastOrganizationSlug: string | null = configStore.lastOrganization ?? organizations[0]?.slug ?? null; @@ -110,8 +108,7 @@ export function OrganizationContextProvider({children}: Props) { } metric.mark({name: 'organization-details-fetch-start'}); - fetchOrganizationDetails(api, orgSlug, false, hasMadeFirstFetch.current); - hasMadeFirstFetch.current = true; + fetchOrganizationDetails(api, orgSlug, false, true); }, [api, orgSlug, organization]); // Take a measurement for when organization details are done loading and the From 86a07c0cbb8d7ad88b6df0984d9a6e232ca5073a Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Mon, 5 Feb 2024 16:11:23 -0500 Subject: [PATCH 007/357] ref(plugins): Remove old component annotation plugin (#64597) Remove the old plugin for annotating components, as we will be adding our own plugin to do this. getsentry PR: https://github.com/getsentry/getsentry/pull/12837 --- babel.config.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/babel.config.ts b/babel.config.ts index 0266dfcf4131cc..f7cebd18e6a222 100644 --- a/babel.config.ts +++ b/babel.config.ts @@ -44,14 +44,12 @@ const config: TransformOptions = { }, ], ['babel-plugin-add-react-displayname'], - ['@fullstory/babel-plugin-annotate-react'], ], }, development: { plugins: [ '@emotion/babel-plugin', '@babel/plugin-transform-react-jsx-source', - ['@fullstory/babel-plugin-annotate-react'], ...(process.env.SENTRY_UI_HOT_RELOAD ? 
['react-refresh/babel'] : []), ], }, From 6f3dd03c8297e77592fe0ce4112895513ba6c3f7 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Mon, 5 Feb 2024 13:15:04 -0800 Subject: [PATCH 008/357] ref(superuser): switch UserPermission to use superuser_has_permission (#64444) --- src/sentry/api/bases/user.py | 11 +- src/sentry/auth/superuser.py | 15 ++- tests/sentry/api/bases/test_user.py | 35 ++++++ tests/sentry/auth/test_superuser.py | 164 ++++++++++++++++------------ 4 files changed, 151 insertions(+), 74 deletions(-) diff --git a/src/sentry/api/bases/user.py b/src/sentry/api/bases/user.py index b9ea0b564123ea..b038a3e18461ea 100644 --- a/src/sentry/api/bases/user.py +++ b/src/sentry/api/bases/user.py @@ -7,12 +7,13 @@ from sentry.api.base import Endpoint from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.permissions import SentryPermission, StaffPermissionMixin -from sentry.auth.superuser import is_active_superuser +from sentry.auth.superuser import is_active_superuser, superuser_has_permission from sentry.auth.system import is_system_auth from sentry.models.organization import OrganizationStatus from sentry.models.organizationmapping import OrganizationMapping from sentry.models.organizationmembermapping import OrganizationMemberMapping from sentry.models.user import User +from sentry.services.hybrid_cloud.access.service import access_service from sentry.services.hybrid_cloud.organization import organization_service from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service @@ -26,8 +27,14 @@ def has_object_permission(self, request: Request, view, user: User | RpcUser | N return True if request.auth: return False + if is_active_superuser(request): - return True + # collect admin level permissions (only used when a user is active superuser) + permissions = access_service.get_permissions_for_user(request.user.id) + + if superuser_has_permission(request, permissions): + return True + return False diff --git a/src/sentry/auth/superuser.py b/src/sentry/auth/superuser.py index 85c62aaac02136..b0a05897e19bc4 100644 --- a/src/sentry/auth/superuser.py +++ b/src/sentry/auth/superuser.py @@ -87,12 +87,17 @@ def get_superuser_scopes(auth_state: RpcAuthState, user: Any): return superuser_scopes -def superuser_has_permission(request: HttpRequest | Request) -> bool: +def superuser_has_permission( + request: HttpRequest | Request, permissions: frozenset[str] | None = None +) -> bool: """ This is used in place of is_active_superuser() in APIs / permission classes. Checks if superuser has permission for the request. Superuser read-only is restricted to GET and OPTIONS requests. These checks do not affect self-hosted. + + The `permissions` arg is passed in and used when request.access is not populated, + e.g. 
in UserPermission """ if not is_active_superuser(request): return False @@ -104,8 +109,14 @@ def superuser_has_permission(request: HttpRequest | Request) -> bool: if not features.has("auth:enterprise-superuser-read-write", actor=request.user): return True + # either request.access or permissions must exist + assert getattr(request, "access", None) or permissions is not None + # superuser write can access all requests - if request.access.has_permission("superuser.write"): + if getattr(request, "access", None) and request.access.has_permission("superuser.write"): + return True + + elif permissions is not None and "superuser.write" in permissions: return True # superuser read-only can only hit GET and OPTIONS (pre-flight) requests diff --git a/tests/sentry/api/bases/test_user.py b/tests/sentry/api/bases/test_user.py index 389eb4aabaaf53..ee349483068cc0 100644 --- a/tests/sentry/api/bases/test_user.py +++ b/tests/sentry/api/bases/test_user.py @@ -3,6 +3,7 @@ from unittest.mock import patch import pytest +from django.test import override_settings from sentry.api.bases.user import ( RegionSiloUserEndpoint, @@ -13,6 +14,7 @@ from sentry.api.exceptions import ResourceDoesNotExist from sentry.auth.staff import is_active_staff from sentry.testutils.cases import DRFPermissionTestCase +from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import all_silo_test, control_silo_test, region_silo_test @@ -32,13 +34,46 @@ def test_allows_current_user(self): self.make_request(self.normal_user), None, self.normal_user ) + @override_settings(SUPERUSER_ORG_ID=1000) def test_allows_active_superuser(self): # The user passed in and the user on the request must be different to # check superuser. + self.create_organization(owner=self.superuser_user, id=1000) assert self.user_permission.has_object_permission( self.superuser_request, None, self.normal_user ) + with self.settings(SENTRY_SELF_HOSTED=False): + assert self.user_permission.has_object_permission( + self.superuser_request, None, self.normal_user + ) + + @override_settings(SENTRY_SELF_HOSTED=False, SUPERUSER_ORG_ID=1000) + @with_feature("auth:enterprise-superuser-read-write") + def test_active_superuser_read(self): + # superuser read can hit GET + request = self.make_request(user=self.superuser_user, is_superuser=True, method="GET") + self.create_organization(owner=self.superuser_user, id=1000) + assert self.user_permission.has_object_permission(request, None, self.normal_user) + + # superuser read cannot hit POST + request.method = "POST" + assert not self.user_permission.has_object_permission(request, None, self.normal_user) + + @override_settings(SENTRY_SELF_HOSTED=False, SUPERUSER_ORG_ID=1000) + @with_feature("auth:enterprise-superuser-read-write") + def test_active_superuser_write(self): + # superuser write can hit GET + self.add_user_permission(self.superuser_user, "superuser.write") + self.create_organization(owner=self.superuser_user, id=1000) + + request = self.make_request(user=self.superuser_user, is_superuser=True, method="GET") + assert self.user_permission.has_object_permission(request, None, self.normal_user) + + # superuser write can hit POST + request.method = "POST" + assert self.user_permission.has_object_permission(request, None, self.normal_user) + def test_rejects_active_staff(self): # The user passed in and the user on the request must be different to # check staff. 
diff --git a/tests/sentry/auth/test_superuser.py b/tests/sentry/auth/test_superuser.py index bcbfe3f65c691f..4aa977d2f34142 100644 --- a/tests/sentry/auth/test_superuser.py +++ b/tests/sentry/auth/test_superuser.py @@ -5,6 +5,7 @@ import pytest from django.contrib.auth.models import AnonymousUser from django.core import signing +from django.test import override_settings from django.utils import timezone as django_timezone from sentry.auth.superuser import ( @@ -174,47 +175,44 @@ def test_idle_expired(self): superuser = Superuser(request, allowed_ips=()) assert superuser.is_active is False + @override_settings(SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True) @mock.patch("sentry.auth.superuser.logger") def test_su_access_logs(self, logger): - with self.settings( - SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True - ): - user = User(is_superuser=True, email="test@sentry.io") - request = self.make_request(user=user, method="PUT") - request._body = json.dumps( - { - "superuserAccessCategory": "for_unit_test", - "superuserReason": "Edit organization settings", - "isSuperuserModal": True, - } - ).encode() - - superuser = Superuser(request, org_id=None) - superuser.set_logged_in(request.user) - assert superuser.is_active is True - assert logger.info.call_count == 2 - logger.info.assert_any_call( - "superuser.superuser_access", - extra={ - "superuser_token_id": superuser.token, - "user_id": user.id, - "user_email": user.email, - "su_access_category": "for_unit_test", - "reason_for_su": "Edit organization settings", - }, - ) + user = User(is_superuser=True, email="test@sentry.io") + request = self.make_request(user=user, method="PUT") + request._body = json.dumps( + { + "superuserAccessCategory": "for_unit_test", + "superuserReason": "Edit organization settings", + "isSuperuserModal": True, + } + ).encode() + + superuser = Superuser(request, org_id=None) + superuser.set_logged_in(request.user) + assert superuser.is_active is True + assert logger.info.call_count == 2 + logger.info.assert_any_call( + "superuser.superuser_access", + extra={ + "superuser_token_id": superuser.token, + "user_id": user.id, + "user_email": user.email, + "su_access_category": "for_unit_test", + "reason_for_su": "Edit organization settings", + }, + ) + @override_settings(SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True) def test_su_access_no_request(self): user = User(is_superuser=True) request = self.make_request(user=user, method="PUT") superuser = Superuser(request, org_id=None) - with self.settings( - SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True - ): - with pytest.raises(EmptySuperuserAccessForm): - superuser.set_logged_in(request.user) - assert superuser.is_active is False + + with pytest.raises(EmptySuperuserAccessForm): + superuser.set_logged_in(request.user) + assert superuser.is_active is False @freeze_time(BASETIME + OUTSIDE_PRIVILEGE_ACCESS_EXPIRE_TIME) def test_not_expired_check_org_in_request(self): @@ -250,6 +248,7 @@ def test_max_time_org_change_time_expired(self, logger): extra={"superuser_token": "abcdefghjiklmnog"}, ) + @override_settings(SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True) @mock.patch("sentry.auth.superuser.logger") def test_su_access_no_request_user_missing_info(self, logger): user = User(is_superuser=True) @@ -263,12 +262,11 @@ def test_su_access_no_request_user_missing_info(self, logger): del request.user.id superuser = Superuser(request, 
org_id=None) - with self.settings( - SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True - ): - superuser.set_logged_in(request.user) - logger.exception.assert_any_call("superuser.superuser_access.missing_user_info") + superuser.set_logged_in(request.user) + logger.exception.assert_any_call("superuser.superuser_access.missing_user_info") + + @override_settings(SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True) def test_su_access_invalid_request_body( self, ): @@ -277,12 +275,10 @@ def test_su_access_invalid_request_body( request._body = b'{"invalid" "json"}' superuser = Superuser(request, org_id=None) - with self.settings( - SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True - ): - with pytest.raises(SuperuserAccessFormInvalidJson): - superuser.set_logged_in(request.user) - assert superuser.is_active is False + + with pytest.raises(SuperuserAccessFormInvalidJson): + superuser.set_logged_in(request.user) + assert superuser.is_active is False def test_login_saves_session(self): user = self.create_user("foo@example.com", is_superuser=True) @@ -474,6 +470,7 @@ def test_superuser_invalid_serializer(self): == '{"superuserReason":["Ensure this field has no more than 128 characters."]}' ) + @override_settings(SENTRY_SELF_HOSTED=False) def test_superuser_scopes(self): user = self.create_user(is_superuser=True) @@ -482,14 +479,13 @@ def test_superuser_scopes(self): sso_state=RpcMemberSsoState(), permissions=["superuser.write"] ) - with self.settings(SENTRY_SELF_HOSTED=False): - assert get_superuser_scopes(auth_state, user) == SUPERUSER_SCOPES - assert get_superuser_scopes(auth_state_with_write, user) == SUPERUSER_SCOPES + assert get_superuser_scopes(auth_state, user) == SUPERUSER_SCOPES + assert get_superuser_scopes(auth_state_with_write, user) == SUPERUSER_SCOPES - # test scope separation - with self.feature("auth:enterprise-superuser-read-write"): - assert get_superuser_scopes(auth_state, user) == SUPERUSER_READONLY_SCOPES - assert get_superuser_scopes(auth_state_with_write, user) == SUPERUSER_SCOPES + # test scope separation + with self.feature("auth:enterprise-superuser-read-write"): + assert get_superuser_scopes(auth_state, user) == SUPERUSER_READONLY_SCOPES + assert get_superuser_scopes(auth_state_with_write, user) == SUPERUSER_SCOPES def test_superuser_scopes_self_hosted(self): # self hosted always has superuser write scopes @@ -508,16 +504,16 @@ def test_superuser_scopes_self_hosted(self): assert get_superuser_scopes(auth_state, user) == SUPERUSER_SCOPES assert get_superuser_scopes(auth_state_with_write, user) == SUPERUSER_SCOPES + @override_settings(SENTRY_SELF_HOSTED=False) def test_superuser_has_permission(self): request = self.build_request() - with self.settings(SENTRY_SELF_HOSTED=False): - assert not superuser_has_permission(request) + assert not superuser_has_permission(request) - # logging in gives permission - request.superuser = Superuser(request) - request.superuser._is_active = True - assert superuser_has_permission(request) + # logging in gives permission + request.superuser = Superuser(request) + request.superuser._is_active = True + assert superuser_has_permission(request) def test_superuser_has_permission_self_hosted(self): request = self.build_request() @@ -527,6 +523,7 @@ def test_superuser_has_permission_self_hosted(self): assert superuser_has_permission(request) + @override_settings(SENTRY_SELF_HOSTED=False) @with_feature("auth:enterprise-superuser-read-write") def 
test_superuser_has_permission_read_write_get(self):
         request = self.build_request(method="GET")

@@ -534,14 +531,14 @@ def test_superuser_has_permission_read_write_get(self):
         request.superuser = Superuser(request)
         request.superuser._is_active = True

-        with self.settings(SENTRY_SELF_HOSTED=False):
-            # all superusers have permission to hit GET
-            request.access = self.create_request_access()
-            assert superuser_has_permission(request)
+        # all superusers have permission to hit GET
+        request.access = self.create_request_access()
+        assert superuser_has_permission(request)

-            request.access = self.create_request_access(permissions=["superuser.write"])
-            assert superuser_has_permission(request)
+        request.access = self.create_request_access(permissions=["superuser.write"])
+        assert superuser_has_permission(request)

+    @override_settings(SENTRY_SELF_HOSTED=False)
     @with_feature("auth:enterprise-superuser-read-write")
     def test_superuser_has_permission_read_write_post(self):
         request = self.build_request(method="POST")
@@ -549,11 +546,38 @@ def test_superuser_has_permission_read_write_post(self):
         request.superuser = Superuser(request)
         request.superuser._is_active = True

-        with self.settings(SENTRY_SELF_HOSTED=False):
-            # superuser without superuser.write does not have permission
-            request.access = self.create_request_access()
-            assert not superuser_has_permission(request)
+        # superuser without superuser.write does not have permission
+        request.access = self.create_request_access()
+        assert not superuser_has_permission(request)
+
+        # superuser with superuser.write has permission
+        request.access = self.create_request_access(permissions=["superuser.write"])
+        assert superuser_has_permission(request)
+
+    @override_settings(SENTRY_SELF_HOSTED=False)
+    @with_feature("auth:enterprise-superuser-read-write")
+    def test_superuser_has_permission_read_write_no_request_access(self):
+        request = self.build_request(method="GET")
+
+        request.superuser = Superuser(request)
+        request.superuser._is_active = True
+
+        # no request.access and no permissions passed in
+        with pytest.raises(AssertionError):
+            superuser_has_permission(request)
+
+        # no request.access with permissions passed in
+        # all superusers have permission for GET
+        assert superuser_has_permission(request, frozenset())
+        assert superuser_has_permission(request, frozenset(["superuser.write"]))
+
+        request.method = "POST"
+
+        # no request.access and no permissions passed in
+        with pytest.raises(AssertionError):
+            superuser_has_permission(request)

-        # superuser with superuser.write has permission
-        request.access = self.create_request_access(permissions=["superuser.write"])
-        assert superuser_has_permission(request)
+        # no request.access with permissions passed in
+        # only superuser write has permissions for POST
+        assert not superuser_has_permission(request, frozenset())
+        assert superuser_has_permission(request, frozenset(["superuser.write"]))

From 332cfc7702bd160d2f3f759d4f661abb44aaad62 Mon Sep 17 00:00:00 2001
From: Josh Ferge
Date: Mon, 5 Feb 2024 13:29:57 -0800
Subject: [PATCH 009/357] feat(replays): add replays rage click issue creation
 toggle (#64241)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- [x] creates replay subsection on project settings, under 'processing'
- [x] adds a toggle to control a project option for rage click issue creation

Depends on creating the project option that the toggle hooks up to. Creating the PR now so we can iterate on copy.
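A rough sketch of how the backend could consult the new option once it exists (the option name `sentry:replay_rage_click_issues` comes from the form definition below; the helper itself is hypothetical):

```python
from sentry.models.project import Project


def rage_click_issues_enabled(project: Project) -> bool:
    # Assumed behavior: get_option returns the stored value, falling back to
    # the option's default when the project has never toggled it.
    return bool(project.get_option("sentry:replay_rage_click_issues"))
```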
Also need to start thinking about what the docs section for rage click issues could look like. for now just links to [here](https://docs.sentry.io/product/session-replay/replay-page-and-filters/) which mentions rage clicks. Screenshot 2024-01-30 at 3 10 19 PM --- static/app/data/forms/replay.tsx | 22 +++++ static/app/routes.tsx | 5 ++ .../project/navigationConfiguration.tsx | 5 ++ .../views/settings/project/projectReplays.tsx | 80 +++++++++++++++++++ 4 files changed, 112 insertions(+) create mode 100644 static/app/data/forms/replay.tsx create mode 100644 static/app/views/settings/project/projectReplays.tsx diff --git a/static/app/data/forms/replay.tsx b/static/app/data/forms/replay.tsx new file mode 100644 index 00000000000000..687bc63bcae179 --- /dev/null +++ b/static/app/data/forms/replay.tsx @@ -0,0 +1,22 @@ +import type {JsonFormObject} from 'sentry/components/forms/types'; + +export const route = '/settings/:orgId/projects/:projectId/replays/'; + +const formGroups: JsonFormObject[] = [ + { + title: 'Settings', + fields: [ + { + name: 'sentry:replay_rage_click_issues', + type: 'boolean', + + // additional data/props that is related to rendering of form field rather than data + label: 'Create Rage Click Issues', + help: 'Toggles whether or not to create Session Replay Rage Click Issues', + getData: data => ({options: data}), + }, + ], + }, +]; + +export default formGroups; diff --git a/static/app/routes.tsx b/static/app/routes.tsx index d5a2203469ec29..98fda7d5d2840a 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -580,6 +580,11 @@ function buildRoutes() { )} /> + import('sentry/views/settings/project/projectReplays'))} + /> !!organization?.features?.includes('session-replay-ui'), + }, ], }, { diff --git a/static/app/views/settings/project/projectReplays.tsx b/static/app/views/settings/project/projectReplays.tsx new file mode 100644 index 00000000000000..e17af590e18bc3 --- /dev/null +++ b/static/app/views/settings/project/projectReplays.tsx @@ -0,0 +1,80 @@ +import type {RouteComponentProps} from 'react-router'; +import styled from '@emotion/styled'; + +import Access from 'sentry/components/acl/access'; +import {Button} from 'sentry/components/button'; +import Form from 'sentry/components/forms/form'; +import JsonForm from 'sentry/components/forms/jsonForm'; +import formGroups from 'sentry/data/forms/replay'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {Organization, Project} from 'sentry/types'; +import routeTitleGen from 'sentry/utils/routeTitle'; +import DeprecatedAsyncView from 'sentry/views/deprecatedAsyncView'; +import SettingsPageHeader from 'sentry/views/settings/components/settingsPageHeader'; +import PermissionAlert from 'sentry/views/settings/project/permissionAlert'; + +type RouteParams = { + projectId: string; +}; +type Props = RouteComponentProps & { + organization: Organization; + project: Project; +}; + +class ProjectUserFeedbackSettings extends DeprecatedAsyncView { + submitTimeout: number | undefined = undefined; + + getEndpoints(): ReturnType { + const {organization} = this.props; + const {projectId} = this.props.params; + return [['project', `/projects/${organization.slug}/${projectId}/`]]; + } + + getTitle(): string { + const {projectId} = this.props.params; + return routeTitleGen(t('Replays'), projectId, false); + } + + renderBody() { + const {organization, project} = this.props; + const {projectId} = this.props.params; + + return ( +
+      <div>
+        <SettingsPageHeader
+          title={t('Replays')}
+          action={
+            <ButtonList>
+              <Button
+                external
+                href="https://docs.sentry.io/product/session-replay/replay-page-and-filters/"
+              >
+                {t('Read the Docs')}
+              </Button>
+            </ButtonList>
+          }
+        />
+        <PermissionAlert project={project} />
+        <Form
+          saveOnBlur
+          apiMethod="PUT"
+          apiEndpoint={`/projects/${organization.slug}/${projectId}/`}
+          initialData={this.state.project.options}
+        >
+          <Access access={['project:write']} project={project}>
+            {({hasAccess}) => <JsonForm disabled={!hasAccess} forms={formGroups} />}
+          </Access>
+        </Form>
+      </div>
+ ); + } +} + +export default ProjectUserFeedbackSettings; + +const ButtonList = styled('div')` + display: inline-grid; + grid-auto-flow: column; + gap: ${space(1)}; +`; From 9df5a2f407cb673247860fe796281b1204c13d6a Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 5 Feb 2024 13:42:06 -0800 Subject: [PATCH 010/357] chore(API): Add 404 logging to `ReleaseDeploysEndpoint.post` (#64595) As reported in https://github.com/getsentry/sentry/issues/63816, creating a deploy using `sentry-cli` (or anything that uses `sentry-cli` under the hood) is failing intermittently with a 404. This adds logging to both help us differentiate between the reasons for the endpoint to 404 and give us request info for debugging purposes. --- src/sentry/api/endpoints/release_deploys.py | 29 +++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/src/sentry/api/endpoints/release_deploys.py b/src/sentry/api/endpoints/release_deploys.py index 371422a46fc3f1..64dc07a2b51fe7 100644 --- a/src/sentry/api/endpoints/release_deploys.py +++ b/src/sentry/api/endpoints/release_deploys.py @@ -1,3 +1,5 @@ +import logging + from django.db.models import F from django.utils import timezone from rest_framework import serializers @@ -17,6 +19,8 @@ from sentry.models.releaseprojectenvironment import ReleaseProjectEnvironment from sentry.signals import deploy_created +logger = logging.getLogger(__name__) + class DeploySerializer(serializers.Serializer): name = serializers.CharField(max_length=64, required=False, allow_blank=True, allow_null=True) @@ -90,12 +94,37 @@ def post(self, request: Request, organization, version) -> Response: the deploy ended. If not provided, the current time is used. """ + logging_info = { + "org_slug": organization.slug, + "org_id": organization.id, + "version": version, + } + try: release = Release.objects.get(version=version, organization=organization) except Release.DoesNotExist: + logger.info( + "create_release_deploy.release_not_found", + extra=logging_info, + ) raise ResourceDoesNotExist if not self.has_release_permission(request, organization, release): + # Logic here copied from `has_release_permission` (lightly edited for results to be more + # human-readable) + auth = None + if getattr(request, "user", None) and request.user.id: + auth = f"user.id: {request.user.id}" + elif getattr(request, "auth", None) and getattr(request.auth, "id", None): + auth = f"auth.id: {request.auth.id}" # type: ignore + elif getattr(request, "auth", None) and getattr(request.auth, "entity_id", None): + auth = f"auth.entity_id: {request.auth.entity_id}" # type: ignore + if auth is not None: + logging_info.update({"auth": auth}) + logger.info( + "create_release_deploy.no_release_permission", + extra=logging_info, + ) raise ResourceDoesNotExist serializer = DeploySerializer( From 11b7707dc937780a28a07824da9da92b2dfc5bb9 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Mon, 5 Feb 2024 13:43:04 -0800 Subject: [PATCH 011/357] ref(superuser): switch EventAttachmentDetailsPermission to use superuser_has_permission (#64500) --- .../api/endpoints/event_attachment_details.py | 4 +- .../test_event_attachment_details.py | 79 +++++++++++++------ 2 files changed, 57 insertions(+), 26 deletions(-) diff --git a/src/sentry/api/endpoints/event_attachment_details.py b/src/sentry/api/endpoints/event_attachment_details.py index 134e29c67d2df0..7f77139455c19f 100644 --- a/src/sentry/api/endpoints/event_attachment_details.py +++ b/src/sentry/api/endpoints/event_attachment_details.py @@ 
-10,7 +10,7 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint, ProjectPermission from sentry.api.serializers import serialize -from sentry.auth.superuser import is_active_superuser +from sentry.auth.superuser import superuser_has_permission from sentry.auth.system import is_system_auth from sentry.constants import ATTACHMENTS_ROLE_DEFAULT from sentry.models.eventattachment import EventAttachment @@ -24,7 +24,7 @@ def has_object_permission(self, request: Request, view, project): if not result: return result - if is_system_auth(request.auth) or is_active_superuser(request): + if is_system_auth(request.auth) or superuser_has_permission(request): return True if not request.user.is_authenticated: diff --git a/tests/sentry/api/endpoints/test_event_attachment_details.py b/tests/sentry/api/endpoints/test_event_attachment_details.py index 7724e1f4c30a70..d57c5258167222 100644 --- a/tests/sentry/api/endpoints/test_event_attachment_details.py +++ b/tests/sentry/api/endpoints/test_event_attachment_details.py @@ -1,7 +1,10 @@ +from django.test import override_settings + from sentry.attachments.base import CachedAttachment from sentry.models.eventattachment import EventAttachment from sentry.testutils.cases import APITestCase, PermissionTestCase from sentry.testutils.helpers.datetime import before_now, iso_format +from sentry.testutils.helpers.features import with_feature from sentry.testutils.helpers.response import close_streaming_response from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba @@ -52,28 +55,28 @@ def create_attachment(self, content: bytes | None = None): @region_silo_test class EventAttachmentDetailsTest(APITestCase, CreateAttachmentMixin): + @with_feature("organizations:event-attachments") def test_simple(self): self.login_as(user=self.user) self.create_attachment() path = f"/api/0/projects/{self.organization.slug}/{self.project.slug}/events/{self.event.event_id}/attachments/{self.attachment.id}/" - with self.feature("organizations:event-attachments"): - response = self.client.get(path) + response = self.client.get(path) assert response.status_code == 200, response.content assert response.data["id"] == str(self.attachment.id) assert response.data["mimetype"] == "image/png" assert response.data["event_id"] == self.event.event_id + @with_feature("organizations:event-attachments") def test_download(self): self.login_as(user=self.user) self.create_attachment() path1 = f"/api/0/projects/{self.organization.slug}/{self.project.slug}/events/{self.event.event_id}/attachments/{self.attachment.id}/?download" - with self.feature("organizations:event-attachments"): - response = self.client.get(path1) + response = self.client.get(path1) assert response.status_code == 200, response.content assert response.get("Content-Disposition") == 'attachment; filename="hello.png"' @@ -91,8 +94,7 @@ def test_download(self): path2 = f"/api/0/projects/{self.organization.slug}/{self.project.slug}/events/{self.event.event_id}/attachments/{self.attachment.id}/?download" assert path1 is not path2 - with self.feature("organizations:event-attachments"): - response = self.client.get(path2) + response = self.client.get(path2) assert response.status_code == 200, response.content assert response.get("Content-Disposition") == 'attachment; filename="hello.png"' @@ -100,6 +102,7 @@ def test_download(self): assert response.get("Content-Type") == "image/png" assert close_streaming_response(response) == ATTACHMENT_CONTENT + 
@with_feature("organizations:event-attachments") def test_zero_sized_attachment(self): self.login_as(user=self.user) @@ -107,8 +110,7 @@ def test_zero_sized_attachment(self): path = f"/api/0/projects/{self.organization.slug}/{self.project.slug}/events/{self.event.event_id}/attachments/{self.attachment.id}/" - with self.feature("organizations:event-attachments"): - response = self.client.get(path) + response = self.client.get(path) assert response.status_code == 200, response.content assert response.data["id"] == str(self.attachment.id) @@ -118,8 +120,7 @@ def test_zero_sized_attachment(self): path = f"{path}?download" - with self.feature("organizations:event-attachments"): - response = self.client.get(path) + response = self.client.get(path) assert response.status_code == 200, response.content assert response.get("Content-Disposition") == 'attachment; filename="hello.png"' @@ -127,14 +128,14 @@ def test_zero_sized_attachment(self): assert response.get("Content-Type") == "image/png" assert close_streaming_response(response) == b"" + @with_feature("organizations:event-attachments") def test_delete(self): self.login_as(user=self.user) self.create_attachment() path = f"/api/0/projects/{self.organization.slug}/{self.project.slug}/events/{self.event.event_id}/attachments/{self.attachment.id}/" - with self.feature("organizations:event-attachments"): - response = self.client.delete(path) + response = self.client.delete(path) assert response.status_code == 204, response.content assert EventAttachment.objects.count() == 0 @@ -147,35 +148,65 @@ def setUp(self): self.create_attachment() self.path = f"/api/0/projects/{self.organization.slug}/{self.project.slug}/events/{self.event.event_id}/attachments/{self.attachment.id}/?download" + @with_feature("organizations:event-attachments") def test_member_can_access_by_default(self): - with self.feature("organizations:event-attachments"): - close_streaming_response(self.assert_member_can_access(self.path)) - close_streaming_response(self.assert_can_access(self.owner, self.path)) + close_streaming_response(self.assert_member_can_access(self.path)) + close_streaming_response(self.assert_can_access(self.owner, self.path)) + @with_feature("organizations:event-attachments") def test_member_cannot_access_for_owner_role(self): self.organization.update_option("sentry:attachments_role", "owner") - with self.feature("organizations:event-attachments"): - self.assert_member_cannot_access(self.path) - close_streaming_response(self.assert_can_access(self.owner, self.path)) + self.assert_member_cannot_access(self.path) + close_streaming_response(self.assert_can_access(self.owner, self.path)) + @with_feature("organizations:event-attachments") def test_member_on_owner_team_can_access_for_owner_role(self): self.organization.update_option("sentry:attachments_role", "owner") owner_team = self.create_team(organization=self.organization, org_role="owner") user = self.create_user() self.create_member(organization=self.organization, user=user, teams=[owner_team, self.team]) - with self.feature("organizations:event-attachments"): - close_streaming_response(self.assert_can_access(user, self.path)) + close_streaming_response(self.assert_can_access(user, self.path)) + @with_feature("organizations:event-attachments") def test_random_user_cannot_access(self): self.organization.update_option("sentry:attachments_role", "owner") user = self.create_user() - with self.feature("organizations:event-attachments"): - self.assert_cannot_access(user, self.path) + self.assert_cannot_access(user, 
self.path) + @with_feature("organizations:event-attachments") def test_superuser_can_access(self): self.organization.update_option("sentry:attachments_role", "owner") superuser = self.create_user(is_superuser=True) - with self.feature("organizations:event-attachments"): - close_streaming_response(self.assert_can_access(superuser, self.path)) + close_streaming_response(self.assert_can_access(superuser, self.path)) + + with self.settings(SENTRY_SELF_HOSTED=False): + self.assert_can_access(superuser, self.path) + self.assert_can_access(superuser, self.path, method="DELETE") + + @with_feature( + {"organizations:event-attachments": True, "auth:enterprise-superuser-read-write": True} + ) + @override_settings(SENTRY_SELF_HOSTED=False) + def test_superuser_read_access(self): + self.organization.update_option("sentry:attachments_role", "owner") + superuser = self.create_user(is_superuser=True) + + close_streaming_response(self.assert_can_access(superuser, self.path)) + + self.assert_cannot_access(superuser, self.path, method="DELETE") + + @with_feature( + {"organizations:event-attachments": True, "auth:enterprise-superuser-read-write": True} + ) + @override_settings(SENTRY_SELF_HOSTED=False) + def test_superuser_write_can_access(self): + self.organization.update_option("sentry:attachments_role", "owner") + superuser = self.create_user(is_superuser=True) + + self.add_user_permission(superuser, "superuser.write") + + close_streaming_response(self.assert_can_access(superuser, self.path)) + + self.assert_can_access(superuser, self.path, method="DELETE") From 71979f699b652bcd923f007e374c170cdfe1c167 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Mon, 5 Feb 2024 13:43:17 -0800 Subject: [PATCH 012/357] ref(superuser): only allow superuser write to set team role (#64485) --- .../endpoints/organization_member/__init__.py | 6 +-- .../test_organization_member_team_details.py | 41 +++++++++++++++++++ 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/src/sentry/api/endpoints/organization_member/__init__.py b/src/sentry/api/endpoints/organization_member/__init__.py index 37cb847b32a54e..032079bb43624b 100644 --- a/src/sentry/api/endpoints/organization_member/__init__.py +++ b/src/sentry/api/endpoints/organization_member/__init__.py @@ -9,7 +9,7 @@ from sentry import roles from sentry.api.exceptions import SentryAPIException from sentry.auth.access import Access -from sentry.auth.superuser import is_active_superuser +from sentry.auth.superuser import is_active_superuser, superuser_has_permission from sentry.locks import locks from sentry.models.organization import Organization from sentry.models.organizationmember import OrganizationMember @@ -65,11 +65,11 @@ def can_set_team_role(request: Request, team: Team, new_role: TeamRole) -> bool: """ User can set a team role: - * If they are an active superuser + * If they are an active superuser (with the feature flag, they must be superuser write) * If they are an org owner/manager/admin * If they are a team admin on the team """ - if is_active_superuser(request): + if superuser_has_permission(request): return True access: Access = request.access diff --git a/tests/sentry/api/endpoints/test_organization_member_team_details.py b/tests/sentry/api/endpoints/test_organization_member_team_details.py index 6a33cd13e86a02..87c6dc1d943f98 100644 --- a/tests/sentry/api/endpoints/test_organization_member_team_details.py +++ b/tests/sentry/api/endpoints/test_organization_member_team_details.py @@ -1,5 +1,6 @@ from 
functools import cached_property

+from django.test import override_settings
 from rest_framework import status

 from sentry.api.endpoints.organization_member.team_details import ERR_INSUFFICIENT_ROLE
@@ -788,6 +789,46 @@ def test_superuser_can_promote_member(self):
         )
         assert updated_omt.role == "admin"

+        with self.settings(SENTRY_SELF_HOSTED=False):
+            resp = self.get_response(
+                self.org.slug, self.member_on_team.id, self.team.slug, teamRole="admin"
+            )
+            assert resp.status_code == 200
+
+            updated_omt = OrganizationMemberTeam.objects.get(
+                team=self.team, organizationmember=self.member_on_team
+            )
+            assert updated_omt.role == "admin"
+
+    @with_feature({"organizations:team-roles": True, "auth:enterprise-superuser-read-write": True})
+    @override_settings(SENTRY_SELF_HOSTED=False)
+    def test_superuser_read_cannot_promote_member(self):
+        superuser = self.create_user(is_superuser=True)
+        self.login_as(superuser, superuser=True)
+
+        resp = self.get_response(
+            self.org.slug, self.member_on_team.id, self.team.slug, teamRole="admin"
+        )
+        assert resp.status_code == 400
+        assert resp.data["detail"] == ERR_INSUFFICIENT_ROLE
+
+    @with_feature({"organizations:team-roles": True, "auth:enterprise-superuser-read-write": True})
+    @override_settings(SENTRY_SELF_HOSTED=False)
+    def test_superuser_write_can_promote_member(self):
+        superuser = self.create_user(is_superuser=True)
+        self.login_as(superuser, superuser=True)
+
+        self.add_user_permission(superuser, "superuser.write")
+        resp = self.get_response(
+            self.org.slug, self.member_on_team.id, self.team.slug, teamRole="admin"
+        )
+        assert resp.status_code == 200
+
+        updated_omt = OrganizationMemberTeam.objects.get(
+            team=self.team, organizationmember=self.member_on_team
+        )
+        assert updated_omt.role == "admin"
+
     @with_feature("organizations:team-roles")
     def test_admin_can_promote_member(self):
         self.login_as(self.admin_on_team)

From a65831b26cb8efeb1915c4ba79caeda7c6cd4ca1 Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Mon, 5 Feb 2024 16:50:12 -0500
Subject: [PATCH 013/357] feat(metrics-extraction): Add date_modified to
 dashboard widget query (#64601)

### Summary
We need a date_modified on the widget query since, if a query gets edited, it can be out of sync with the on-demand row until the task runs again.
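To make the intent concrete, a sketch of the staleness check this column enables (the on-demand row's `date_modified` exists per the comparator change below; the function and argument names are assumptions):

```python
def on_demand_spec_is_stale(widget_query, on_demand_row) -> bool:
    # The widget query was edited after its on-demand extraction row was last
    # updated, so the extraction spec may no longer match the query.
    return widget_query.date_modified > on_demand_row.date_modified
```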
--- migrations_lockfile.txt | 2 +- src/sentry/backup/comparators.py | 1 + ...ate_modified_col_dashboard_widget_query.py | 47 ++ src/sentry/models/dashboard_widget.py | 1 + .../ReleaseTests/test_at_head.pysnap | 581 +++++++++--------- 5 files changed, 341 insertions(+), 291 deletions(-) create mode 100644 src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index f78a146dcc5783..a09fa42184032f 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0009_make_user_id_optional_for_slug_reservation_replica nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0642_index_together_release +sentry: 0643_add_date_modified_col_dashboard_widget_query social_auth: 0002_default_auto_field diff --git a/src/sentry/backup/comparators.py b/src/sentry/backup/comparators.py index db2453b8746d83..318f5f053aa9d1 100644 --- a/src/sentry/backup/comparators.py +++ b/src/sentry/backup/comparators.py @@ -782,6 +782,7 @@ def get_default_comparators(): HashObfuscatingComparator("token_hashed", "token_last_characters") ], "sentry.dashboardwidgetqueryondemand": [DateUpdatedComparator("date_modified")], + "sentry.dashboardwidgetquery": [DateUpdatedComparator("date_modified")], "sentry.organization": [AutoSuffixComparator("slug")], "sentry.organizationintegration": [DateUpdatedComparator("date_updated")], "sentry.organizationmember": [ diff --git a/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py b/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py new file mode 100644 index 00000000000000..fccfbcfbaac471 --- /dev/null +++ b/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py @@ -0,0 +1,47 @@ +# Generated by Django 4.2.8 on 2024-02-05 21:00 + +import django.utils.timezone +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. 
+ is_dangerous = False + + dependencies = [ + ("sentry", "0642_index_together_release"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + sql=""" + ALTER TABLE "sentry_dashboardwidgetquery" ADD COLUMN "date_modified" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP; + """, + reverse_sql=""" + ALTER TABLE "sentry_dashboardwidgetquery" DROP COLUMN "date_modified" + """, + hints={"tables": ["sentry_dashboardwidgetquery"]}, + ), + ], + state_operations=[ + migrations.AddField( + model_name="dashboardwidgetquery", + name="date_modified", + field=models.DateTimeField(default=django.utils.timezone.now), + ), + ], + ) + ] diff --git a/src/sentry/models/dashboard_widget.py b/src/sentry/models/dashboard_widget.py index 26dbab2057bfa4..791c4528e0c9cf 100644 --- a/src/sentry/models/dashboard_widget.py +++ b/src/sentry/models/dashboard_widget.py @@ -108,6 +108,7 @@ class DashboardWidgetQuery(Model): # Order of the widget query in the widget. order = BoundedPositiveIntegerField() date_added = models.DateTimeField(default=timezone.now) + date_modified = models.DateTimeField(default=timezone.now) class Meta: app_label = "sentry" diff --git a/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap b/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap index 465324b15b535c..e810ac354d3cfc 100644 --- a/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap +++ b/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap @@ -1,18 +1,18 @@ --- -created: '2024-01-30T20:38:39.899646Z' +created: '2024-02-05T21:02:22.011281Z' creator: sentry source: tests/sentry/backup/test_releases.py --- - fields: key: bar - last_updated: '2024-01-30T20:38:39.581Z' + last_updated: '2024-02-05T21:02:21.749Z' last_updated_by: unknown value: '"b"' model: sentry.controloption pk: 1 - fields: - date_added: '2024-01-30T20:38:39.269Z' - date_updated: '2024-01-30T20:38:39.269Z' + date_added: '2024-02-05T21:02:21.347Z' + date_updated: '2024-02-05T21:02:21.347Z' external_id: slack:test-org metadata: {} name: Slack for test-org @@ -22,13 +22,13 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: key: foo - last_updated: '2024-01-30T20:38:39.580Z' + last_updated: '2024-02-05T21:02:21.748Z' last_updated_by: unknown value: '"a"' model: sentry.option pk: 1 - fields: - date_added: '2024-01-30T20:38:38.991Z' + date_added: '2024-02-05T21:02:20.998Z' default_role: member flags: '1' is_test: false @@ -36,92 +36,92 @@ source: tests/sentry/backup/test_releases.py slug: test-org status: 0 model: sentry.organization - pk: 4553410180153344 + pk: 4553444247207936 - fields: - date_added: '2024-01-30T20:38:39.365Z' + date_added: '2024-02-05T21:02:21.499Z' default_role: member flags: '1' is_test: false - name: Better Sailfish - slug: better-sailfish + name: Capable Hornet + slug: capable-hornet status: 0 model: sentry.organization - pk: 4553410180218883 + pk: 4553444247273475 - fields: config: hello: hello - date_added: '2024-01-30T20:38:39.270Z' - date_updated: '2024-01-30T20:38:39.270Z' + date_added: '2024-02-05T21:02:21.348Z' + date_updated: '2024-02-05T21:02:21.348Z' default_auth_id: null grace_period_end: null integration: 1 - organization_id: 4553410180153344 + organization_id: 4553444247207936 status: 0 model: sentry.organizationintegration pk: 1 - fields: key: sentry:account-rate-limit - organization: 4553410180153344 + organization: 4553444247207936 value: 0 model: sentry.organizationoption pk: 1 - fields: - date_added: 
'2024-01-30T20:38:39.169Z' + date_added: '2024-02-05T21:02:21.225Z' first_event: null flags: '10' forced_color: null name: project-test-org - organization: 4553410180153344 + organization: 4553444247207936 platform: null public: false slug: project-test-org status: 0 model: sentry.project - pk: 4553410180218881 + pk: 4553444247273473 - fields: - date_added: '2024-01-30T20:38:39.291Z' + date_added: '2024-02-05T21:02:21.371Z' first_event: null flags: '10' forced_color: null name: other-project-test-org - organization: 4553410180153344 + organization: 4553444247207936 platform: null public: false slug: other-project-test-org status: 0 model: sentry.project - pk: 4553410180218882 + pk: 4553444247273474 - fields: - date_added: '2024-01-30T20:38:39.409Z' + date_added: '2024-02-05T21:02:21.556Z' first_event: null flags: '10' forced_color: null - name: Fast Louse - organization: 4553410180153344 + name: Dear Finch + organization: 4553444247207936 platform: null public: false - slug: fast-louse + slug: dear-finch status: 0 model: sentry.project - pk: 4553410180218884 + pk: 4553444247273476 - fields: - date_added: '2024-01-30T20:38:39.536Z' + date_added: '2024-02-05T21:02:21.706Z' first_event: null flags: '10' forced_color: null - name: Close Swift - organization: 4553410180153344 + name: Sought Grouper + organization: 4553444247207936 platform: null public: false - slug: close-swift + slug: sought-grouper status: 0 model: sentry.project - pk: 4553410180218885 + pk: 4553444247273477 - fields: config: hello: hello integration_id: 1 - project: 4553410180218881 + project: 4553444247273473 model: sentry.projectintegration pk: 1 - fields: @@ -129,14 +129,14 @@ source: tests/sentry/backup/test_releases.py dynamicSdkLoaderOptions: hasPerformance: true hasReplay: true - date_added: '2024-01-30T20:38:39.180Z' + date_added: '2024-02-05T21:02:21.240Z' label: Default - project: 4553410180218881 - public_key: 6f9b9507fb9d2d30dfa0d0db789e60b4 + project: 4553444247273473 + public_key: e97b38e31d413821f534b881b5fcf2b4 rate_limit_count: null rate_limit_window: null roles: '1' - secret_key: dcca774a5d5706e0d256d76e499946b2 + secret_key: 421281bcc8f3fe441c47ee2ce30a5f9f status: 0 model: sentry.projectkey pk: 1 @@ -145,14 +145,14 @@ source: tests/sentry/backup/test_releases.py dynamicSdkLoaderOptions: hasPerformance: true hasReplay: true - date_added: '2024-01-30T20:38:39.301Z' + date_added: '2024-02-05T21:02:21.387Z' label: Default - project: 4553410180218882 - public_key: 7e3aa38227a10c2716192d8e0754f812 + project: 4553444247273474 + public_key: ca5ede3659d38d33362ad12075957941 rate_limit_count: null rate_limit_window: null roles: '1' - secret_key: ac9de56a513189a295c976a2985d28a2 + secret_key: b2b0a42a038200354c6dcfc832cfafb5 status: 0 model: sentry.projectkey pk: 2 @@ -161,14 +161,14 @@ source: tests/sentry/backup/test_releases.py dynamicSdkLoaderOptions: hasPerformance: true hasReplay: true - date_added: '2024-01-30T20:38:39.423Z' + date_added: '2024-02-05T21:02:21.572Z' label: Default - project: 4553410180218884 - public_key: 24f9aca9aced60a8619ed86b401f143a + project: 4553444247273476 + public_key: 8fa690ff6a0d75ebcd8786c7920a18cb rate_limit_count: null rate_limit_window: null roles: '1' - secret_key: 405af46039d4138cf67a4b41bb27c0ad + secret_key: 4dd83afc8158e553129881273b921424 status: 0 model: sentry.projectkey pk: 3 @@ -177,97 +177,97 @@ source: tests/sentry/backup/test_releases.py dynamicSdkLoaderOptions: hasPerformance: true hasReplay: true - date_added: '2024-01-30T20:38:39.549Z' + date_added: 
'2024-02-05T21:02:21.722Z' label: Default - project: 4553410180218885 - public_key: 2d58452d1db0094962cea3899731bc72 + project: 4553444247273477 + public_key: c1b09c30b928e4f1ecb5aff135e88820 rate_limit_count: null rate_limit_window: null roles: '1' - secret_key: 0fa2202383e945ae6d8c71b744542370 + secret_key: 06f46392e2cd71da5213ae2e478367ef status: 0 model: sentry.projectkey pk: 4 - fields: key: sentry:relay-rev - project: 4553410180218881 - value: '"7e7d67346f3a4670ba9af01305e3ffbf"' + project: 4553444247273473 + value: '"0b33c68d2560422fb4fea8f982a72ffc"' model: sentry.projectoption pk: 1 - fields: key: sentry:relay-rev-lastchange - project: 4553410180218881 - value: '"2024-01-30T20:38:39.183423Z"' + project: 4553444247273473 + value: '"2024-02-05T21:02:21.243777Z"' model: sentry.projectoption pk: 2 - fields: key: sentry:option-epoch - project: 4553410180218881 + project: 4553444247273473 value: 11 model: sentry.projectoption pk: 3 - fields: key: sentry:relay-rev - project: 4553410180218882 - value: '"9f019375a39f4e3e96e1bb256311244b"' + project: 4553444247273474 + value: '"2703ecc1615143349ced3c2a145ed96d"' model: sentry.projectoption pk: 4 - fields: key: sentry:relay-rev-lastchange - project: 4553410180218882 - value: '"2024-01-30T20:38:39.304647Z"' + project: 4553444247273474 + value: '"2024-02-05T21:02:21.390972Z"' model: sentry.projectoption pk: 5 - fields: key: sentry:option-epoch - project: 4553410180218882 + project: 4553444247273474 value: 11 model: sentry.projectoption pk: 6 - fields: key: sentry:relay-rev - project: 4553410180218884 - value: '"5d08f5315e6b4413a58e9f02dc31b005"' + project: 4553444247273476 + value: '"c0b5e716a92849c499718e2c1b3a98ab"' model: sentry.projectoption pk: 7 - fields: key: sentry:relay-rev-lastchange - project: 4553410180218884 - value: '"2024-01-30T20:38:39.426335Z"' + project: 4553444247273476 + value: '"2024-02-05T21:02:21.576491Z"' model: sentry.projectoption pk: 8 - fields: key: sentry:option-epoch - project: 4553410180218884 + project: 4553444247273476 value: 11 model: sentry.projectoption pk: 9 - fields: key: sentry:relay-rev - project: 4553410180218885 - value: '"597d999763804f73b8c56eedfb8c3f4d"' + project: 4553444247273477 + value: '"cdadbdb62ebf45eda233a1ebbcff934b"' model: sentry.projectoption pk: 10 - fields: key: sentry:relay-rev-lastchange - project: 4553410180218885 - value: '"2024-01-30T20:38:39.552263Z"' + project: 4553444247273477 + value: '"2024-02-05T21:02:21.725799Z"' model: sentry.projectoption pk: 11 - fields: key: sentry:option-epoch - project: 4553410180218885 + project: 4553444247273477 value: 11 model: sentry.projectoption pk: 12 - fields: auto_assignment: true codeowners_auto_sync: true - date_created: '2024-01-30T20:38:39.196Z' + date_created: '2024-02-05T21:02:21.256Z' fallthrough: true is_active: true - last_updated: '2024-01-30T20:38:39.196Z' - project: 4553410180218881 + last_updated: '2024-02-05T21:02:21.256Z' + project: 4553444247273473 raw: '{"hello":"hello"}' schema: hello: hello @@ -275,9 +275,9 @@ source: tests/sentry/backup/test_releases.py model: sentry.projectownership pk: 1 - fields: - date_added: '2024-01-30T20:38:39.200Z' - organization: 4553410180153344 - project: 4553410180218881 + date_added: '2024-02-05T21:02:21.259Z' + organization: 4553444247207936 + project: 4553444247273473 redirect_slug: project_slug_in_test-org model: sentry.projectredirect pk: 1 @@ -285,26 +285,26 @@ source: tests/sentry/backup/test_releases.py first_seen: null is_internal: true last_seen: null - public_key: 
itjcEvHT7ywPDqyuOLJhMvGrQf0gOTwpMnuBf-Md3fM - relay_id: 7c6c12a4-6695-4cda-b6b9-7899827b7896 + public_key: I3-cTVtOE0J9pIPGLCbZgDVnh08x9NuO6sVN7UO5P0s + relay_id: bf7fbddd-9e98-4e1a-9b4d-2a0f121c81fe model: sentry.relay pk: 1 - fields: - first_seen: '2024-01-30T20:38:39.579Z' - last_seen: '2024-01-30T20:38:39.579Z' - public_key: itjcEvHT7ywPDqyuOLJhMvGrQf0gOTwpMnuBf-Md3fM - relay_id: 7c6c12a4-6695-4cda-b6b9-7899827b7896 + first_seen: '2024-02-05T21:02:21.748Z' + last_seen: '2024-02-05T21:02:21.748Z' + public_key: I3-cTVtOE0J9pIPGLCbZgDVnh08x9NuO6sVN7UO5P0s + relay_id: bf7fbddd-9e98-4e1a-9b4d-2a0f121c81fe version: 0.0.1 model: sentry.relayusage pk: 1 - fields: config: {} - date_added: '2024-01-30T20:38:39.357Z' + date_added: '2024-02-05T21:02:21.489Z' external_id: null integration_id: 1 languages: '[]' name: getsentry/getsentry - organization_id: 4553410180153344 + organization_id: 4553444247207936 provider: integrations:github status: 0 url: https://github.com/getsentry/getsentry @@ -312,19 +312,19 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: actor: 1 - date_added: '2024-01-30T20:38:39.126Z' + date_added: '2024-02-05T21:02:21.174Z' idp_provisioned: false name: test_team_in_test-org org_role: null - organization: 4553410180153344 + organization: 4553444247207936 slug: test_team_in_test-org status: 0 model: sentry.team - pk: 4553410180218880 + pk: 4553444247273472 - fields: avatar_type: 0 avatar_url: null - date_joined: '2024-01-30T20:38:38.938Z' + date_joined: '2024-02-05T21:02:20.808Z' email: owner flags: '0' is_active: true @@ -334,11 +334,11 @@ source: tests/sentry/backup/test_releases.py is_staff: true is_superuser: true is_unclaimed: false - last_active: '2024-01-30T20:38:38.938Z' + last_active: '2024-02-05T21:02:20.808Z' last_login: null - last_password_change: '2024-01-30T20:38:38.938Z' + last_password_change: '2024-02-05T21:02:20.808Z' name: '' - password: md5$LWK0TRmwMnrMtvUURX0hKw$5574cc4d5ceed216e1bf8f28804680c3 + password: md5$4yBodc7vqHrUWXpIjNvI9B$d684d018f8769e68931ebf934d0809a4 session_nonce: null username: owner model: sentry.user @@ -346,7 +346,7 @@ source: tests/sentry/backup/test_releases.py - fields: avatar_type: 0 avatar_url: null - date_joined: '2024-01-30T20:38:38.979Z' + date_joined: '2024-02-05T21:02:20.984Z' email: member flags: '0' is_active: true @@ -356,11 +356,11 @@ source: tests/sentry/backup/test_releases.py is_staff: false is_superuser: false is_unclaimed: false - last_active: '2024-01-30T20:38:38.979Z' + last_active: '2024-02-05T21:02:20.984Z' last_login: null - last_password_change: '2024-01-30T20:38:38.979Z' + last_password_change: '2024-02-05T21:02:20.984Z' name: '' - password: md5$RcHcqiQPdWqwZLX5NjO0iI$75844f80bae63f220dc9d4b5fe432fb0 + password: md5$8U8ABCfbDLvmN1dQqG34RV$659690d03011a1f033a8f426e365962a session_nonce: null username: member model: sentry.user @@ -368,7 +368,7 @@ source: tests/sentry/backup/test_releases.py - fields: avatar_type: 0 avatar_url: null - date_joined: '2024-01-30T20:38:39.325Z' + date_joined: '2024-02-05T21:02:21.415Z' email: admin@localhost flags: '0' is_active: true @@ -378,11 +378,11 @@ source: tests/sentry/backup/test_releases.py is_staff: true is_superuser: true is_unclaimed: false - last_active: '2024-01-30T20:38:39.325Z' + last_active: '2024-02-05T21:02:21.415Z' last_login: null - last_password_change: '2024-01-30T20:38:39.325Z' + last_password_change: '2024-02-05T21:02:21.415Z' name: '' - password: md5$W4OZbZrNhw8hQvwbhnj7Zj$2dc58bc3afa18272dfbf5edde289be7d + password: 
md5$Ck8AJD3A5IlyJX4zTPsolr$513d7446b169f8f63d1e49a803777a32 session_nonce: null username: admin@localhost model: sentry.user @@ -390,8 +390,8 @@ source: tests/sentry/backup/test_releases.py - fields: avatar_type: 0 avatar_url: null - date_joined: '2024-01-30T20:38:39.358Z' - email: ad96a3b6286240c2a088df3a5a7388dd@example.com + date_joined: '2024-02-05T21:02:21.490Z' + email: 79c8ec0075e04ee58ba1667e1b0a84f4@example.com flags: '0' is_active: true is_managed: false @@ -400,19 +400,19 @@ source: tests/sentry/backup/test_releases.py is_staff: true is_superuser: false is_unclaimed: false - last_active: '2024-01-30T20:38:39.358Z' + last_active: '2024-02-05T21:02:21.490Z' last_login: null - last_password_change: '2024-01-30T20:38:39.358Z' + last_password_change: '2024-02-05T21:02:21.490Z' name: '' - password: md5$VCYTwKqc7vo63L5Sl7iU3C$90be637465d7d426da50d7ceb16b495e + password: md5$Sc7cxoKE8te1mI67AdBTUF$992e0e5ab42ab480be7a703440e33a7e session_nonce: null - username: ad96a3b6286240c2a088df3a5a7388dd@example.com + username: 79c8ec0075e04ee58ba1667e1b0a84f4@example.com model: sentry.user pk: 4 - fields: avatar_type: 0 avatar_url: null - date_joined: '2024-01-30T20:38:39.400Z' + date_joined: '2024-02-05T21:02:21.545Z' email: '' flags: '0' is_active: true @@ -422,20 +422,20 @@ source: tests/sentry/backup/test_releases.py is_staff: false is_superuser: false is_unclaimed: false - last_active: '2024-01-30T20:38:39.400Z' + last_active: '2024-02-05T21:02:21.545Z' last_login: null last_password_change: null name: '' password: '' session_nonce: null - username: test-app-58b3b7b6-cfb5-47d9-ae44-c2a07f552df2 + username: test-app-0186fe86-f0ed-49c7-a5e2-755b6c267143 model: sentry.user pk: 5 - fields: avatar_type: 0 avatar_url: null - date_joined: '2024-01-30T20:38:39.528Z' - email: f5c736575fd3438b84f9b3ee2e866bcd@example.com + date_joined: '2024-02-05T21:02:21.692Z' + email: bfe930dcc5214168a60cae09e61aa2d3@example.com flags: '0' is_active: true is_managed: false @@ -444,13 +444,13 @@ source: tests/sentry/backup/test_releases.py is_staff: true is_superuser: false is_unclaimed: false - last_active: '2024-01-30T20:38:39.528Z' + last_active: '2024-02-05T21:02:21.692Z' last_login: null - last_password_change: '2024-01-30T20:38:39.528Z' + last_password_change: '2024-02-05T21:02:21.693Z' name: '' - password: md5$EifcXhDm79ylWRTdVaNVCc$8291bd2b42aee640f03921914fe9d987 + password: md5$7ynRVk5zCu8w5n8zkEIxJh$6cb94a4b8ebe9643d68b1bd6e1e94520 session_nonce: null - username: f5c736575fd3438b84f9b3ee2e866bcd@example.com + username: bfe930dcc5214168a60cae09e61aa2d3@example.com model: sentry.user pk: 6 - fields: @@ -493,24 +493,24 @@ source: tests/sentry/backup/test_releases.py model: sentry.userpermission pk: 1 - fields: - date_added: '2024-01-30T20:38:38.957Z' - date_updated: '2024-01-30T20:38:38.957Z' + date_added: '2024-02-05T21:02:20.829Z' + date_updated: '2024-02-05T21:02:20.829Z' name: test-admin-role permissions: '[]' model: sentry.userrole pk: 1 - fields: - date_added: '2024-01-30T20:38:38.960Z' - date_updated: '2024-01-30T20:38:38.960Z' + date_added: '2024-02-05T21:02:20.833Z' + date_updated: '2024-02-05T21:02:20.833Z' role: 1 user: 1 model: sentry.userroleuser pk: 1 - fields: - date_added: '2024-01-30T20:38:39.352Z' + date_added: '2024-02-05T21:02:21.482Z' is_global: false name: Saved query for test-org - organization: 4553410180153344 + organization: 4553444247207936 owner_id: null query: saved query for test-org sort: date @@ -519,9 +519,9 @@ source: tests/sentry/backup/test_releases.py model: 
sentry.savedsearch pk: 1 - fields: - date_added: '2024-01-30T20:38:39.351Z' - last_seen: '2024-01-30T20:38:39.351Z' - organization: 4553410180153344 + date_added: '2024-02-05T21:02:21.481Z' + last_seen: '2024-02-05T21:02:21.481Z' + organization: 4553444247207936 query: some query for test-org query_hash: 7c69362cd42207b83f80087bc15ebccb type: 0 @@ -529,42 +529,42 @@ source: tests/sentry/backup/test_releases.py model: sentry.recentsearch pk: 1 - fields: - project: 4553410180218881 - team: 4553410180218880 + project: 4553444247273473 + team: 4553444247273472 model: sentry.projectteam pk: 1 - fields: - project: 4553410180218882 - team: 4553410180218880 + project: 4553444247273474 + team: 4553444247273472 model: sentry.projectteam pk: 2 - fields: - date_added: '2024-01-30T20:38:39.195Z' - project: 4553410180218881 + date_added: '2024-02-05T21:02:21.255Z' + project: 4553444247273473 user_id: 1 model: sentry.projectbookmark pk: 1 - fields: created_by: null - date_added: '2024-01-30T20:38:39.254Z' + date_added: '2024-02-05T21:02:21.329Z' date_deactivated: null date_last_used: null name: token 1 for test-org - organization_id: 4553410180153344 - project_last_used_id: 4553410180218881 + organization_id: 4553444247207936 + project_last_used_id: 4553444247273473 scope_list: '[''org:ci'']' token_hashed: ABCDEFtest-org token_last_characters: xyz1 model: sentry.orgauthtoken pk: 1 - fields: - date_added: '2024-01-30T20:38:39.055Z' + date_added: '2024-02-05T21:02:21.042Z' email: null flags: '0' has_global_access: true invite_status: 0 inviter_id: null - organization: 4553410180153344 + organization: 4553444247207936 role: owner token: null token_expires_at: null @@ -575,13 +575,13 @@ source: tests/sentry/backup/test_releases.py model: sentry.organizationmember pk: 1 - fields: - date_added: '2024-01-30T20:38:39.089Z' + date_added: '2024-02-05T21:02:21.128Z' email: null flags: '0' has_global_access: true invite_status: 0 inviter_id: null - organization: 4553410180153344 + organization: 4553444247207936 role: member token: null token_expires_at: null @@ -594,106 +594,106 @@ source: tests/sentry/backup/test_releases.py - fields: member: 2 requester_id: null - team: 4553410180218880 + team: 4553444247273472 model: sentry.organizationaccessrequest pk: 1 - fields: config: schedule: '* * * * *' schedule_type: 1 - date_added: '2024-01-30T20:38:39.287Z' - guid: c45eec76-ec27-4bf5-b729-9ba360eddd50 + date_added: '2024-02-05T21:02:21.368Z' + guid: 94ea3996-d88a-4b6f-8a3d-a6e7553b2d28 is_muted: false name: '' - organization_id: 4553410180153344 - project_id: 4553410180218881 - slug: a9d97d22847b + organization_id: 4553444247207936 + project_id: 4553444247273473 + slug: 723032bdc1f7 status: 0 type: 3 model: sentry.monitor pk: 1 - fields: - date_added: '2024-01-30T20:38:39.285Z' - name: wholly legal shad - organization_id: 4553410180153344 + date_added: '2024-02-05T21:02:21.366Z' + name: steadily coherent crane + organization_id: 4553444247207936 model: sentry.environment pk: 1 - fields: - date_added: '2024-01-30T20:38:38.942Z' + date_added: '2024-02-05T21:02:20.812Z' email: owner model: sentry.email pk: 1 - fields: - date_added: '2024-01-30T20:38:38.981Z' + date_added: '2024-02-05T21:02:20.987Z' email: member model: sentry.email pk: 2 - fields: - date_added: '2024-01-30T20:38:39.328Z' + date_added: '2024-02-05T21:02:21.420Z' email: admin@localhost model: sentry.email pk: 3 - fields: - date_added: '2024-01-30T20:38:39.361Z' - email: ad96a3b6286240c2a088df3a5a7388dd@example.com + date_added: '2024-02-05T21:02:21.494Z' + 
email: 79c8ec0075e04ee58ba1667e1b0a84f4@example.com model: sentry.email pk: 4 - fields: - date_added: '2024-01-30T20:38:39.402Z' + date_added: '2024-02-05T21:02:21.548Z' email: '' model: sentry.email pk: 5 - fields: - date_added: '2024-01-30T20:38:39.531Z' - email: f5c736575fd3438b84f9b3ee2e866bcd@example.com + date_added: '2024-02-05T21:02:21.696Z' + email: bfe930dcc5214168a60cae09e61aa2d3@example.com model: sentry.email pk: 6 - fields: - date_added: '2024-01-30T20:38:39.351Z' - organization: 4553410180153344 + date_added: '2024-02-05T21:02:21.480Z' + organization: 4553444247207936 slug: test-tombstone-in-test-org model: sentry.dashboardtombstone pk: 1 - fields: created_by_id: 1 - date_added: '2024-01-30T20:38:39.348Z' + date_added: '2024-02-05T21:02:21.471Z' filters: null - last_visited: '2024-01-30T20:38:39.348Z' - organization: 4553410180153344 + last_visited: '2024-02-05T21:02:21.471Z' + organization: 4553444247207936 title: Dashboard 1 for test-org visits: 1 model: sentry.dashboard pk: 1 - fields: condition: '{"op":"equals","name":"environment","value":"prod"}' - condition_hash: cc456c813650daf91a0d48d5ef215f05b917e221 + condition_hash: 47bde0216fd464a56033392714b49ad4e929fb35 created_by_id: null - date_added: '2024-01-30T20:38:39.281Z' - end_date: '2024-01-30T21:38:39.278Z' + date_added: '2024-02-05T21:02:21.361Z' + end_date: '2024-02-05T22:02:21.358Z' is_active: true is_org_level: false notification_sent: false num_samples: 100 - organization: 4553410180153344 + organization: 4553444247207936 query: environment:prod event.type:transaction rule_id: 1 sample_rate: 0.5 - start_date: '2024-01-30T20:38:39.278Z' + start_date: '2024-02-05T21:02:21.358Z' model: sentry.customdynamicsamplingrule pk: 1 - fields: - project: 4553410180218881 + project: 4553444247273473 value: 1 model: sentry.counter pk: 1 - fields: config: {} - date_added: '2024-01-30T20:38:39.220Z' + date_added: '2024-02-05T21:02:21.286Z' default_global_access: true default_role: 50 flags: '0' last_sync: null - organization_id: 4553410180153344 + organization_id: 4553444247207936 provider: sentry sync_time: null model: sentry.authprovider @@ -709,16 +709,16 @@ source: tests/sentry/backup/test_releases.py - 3 key4: nested_key: nested_value - date_added: '2024-01-30T20:38:39.237Z' + date_added: '2024-02-05T21:02:21.306Z' ident: 123456789test-org - last_synced: '2024-01-30T20:38:39.237Z' - last_verified: '2024-01-30T20:38:39.237Z' + last_synced: '2024-02-05T21:02:21.306Z' + last_verified: '2024-02-05T21:02:21.306Z' user: 1 model: sentry.authidentity pk: 1 - fields: config: '""' - created_at: '2024-01-30T20:38:38.950Z' + created_at: '2024-02-05T21:02:20.821Z' last_used_at: null type: 1 user: 1 @@ -726,7 +726,7 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: config: '""' - created_at: '2024-01-30T20:38:38.988Z' + created_at: '2024-02-05T21:02:20.995Z' last_used_at: null type: 1 user: 2 @@ -734,10 +734,10 @@ source: tests/sentry/backup/test_releases.py pk: 2 - fields: allowed_origins: null - date_added: '2024-01-30T20:38:39.205Z' - key: 2d0b84e64b2c4ff1a87286bcc7b14713 + date_added: '2024-02-05T21:02:21.266Z' + key: 10dfbbd9b89d4605937fdc4448a38d72 label: Default - organization_id: 4553410180153344 + organization_id: 4553444247207936 scope_list: '[]' scopes: '0' status: 0 @@ -745,11 +745,11 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: allowed_origins: '' - client_id: 702d46e2139ccc9a79a3db9fb2a98294d6dbd40aae6129fa7a94a3dfddafc130 - client_secret: 
c8f2d111f7f3d1b1f92e58b9a42aefea5598862f23dbf927063482553751ec4e - date_added: '2024-01-30T20:38:39.406Z' + client_id: 832a73141ddced5ac9e4ca762e6152d15bbe9c9efe3da77979296a8c9af25e3c + client_secret: 3864157aa1232924d8cd3162ace717bc9a578f3b994990ffcbe3afb923e5b4d3 + date_added: '2024-02-05T21:02:21.552Z' homepage_url: null - name: Settled Sturgeon + name: Wanted Amoeba owner: 5 privacy_url: null redirect_uris: '' @@ -758,63 +758,63 @@ source: tests/sentry/backup/test_releases.py model: sentry.apiapplication pk: 1 - fields: - team: 4553410180218880 + team: 4553444247273472 type: 0 user_id: null model: sentry.actor pk: 1 - fields: - date_hash_added: '2024-01-30T20:38:38.940Z' + date_hash_added: '2024-02-05T21:02:20.810Z' email: owner is_verified: true user: 1 - validation_hash: uZTQrlfDVSrPoyj8Lt1mvUq9mdhWCJsb + validation_hash: XoscmeILdKfgIHXNwRoaZOOxQBfTmpCb model: sentry.useremail pk: 1 - fields: - date_hash_added: '2024-01-30T20:38:38.980Z' + date_hash_added: '2024-02-05T21:02:20.986Z' email: member is_verified: true user: 2 - validation_hash: XVbcz1nD1oC8Z7v5DUehLFeATWCeohIy + validation_hash: DiKGxasiGZRoShBen1XOfokjgrJY0ySq model: sentry.useremail pk: 2 - fields: - date_hash_added: '2024-01-30T20:38:39.326Z' + date_hash_added: '2024-02-05T21:02:21.418Z' email: admin@localhost is_verified: true user: 3 - validation_hash: QkxFw67l2mdmdULUyGEVFlls2m3dnfr1 + validation_hash: kpgygAhxye7Lb71YVLZGUlKEmzs5Xz7z model: sentry.useremail pk: 3 - fields: - date_hash_added: '2024-01-30T20:38:39.360Z' - email: ad96a3b6286240c2a088df3a5a7388dd@example.com + date_hash_added: '2024-02-05T21:02:21.492Z' + email: 79c8ec0075e04ee58ba1667e1b0a84f4@example.com is_verified: true user: 4 - validation_hash: VPFjYiHSKn1NhOJkHPCXXt5H6Zr0wFBL + validation_hash: kSkn5YlR9Z6EHQdAxinOI7YEO9gNYOS0 model: sentry.useremail pk: 4 - fields: - date_hash_added: '2024-01-30T20:38:39.401Z' + date_hash_added: '2024-02-05T21:02:21.547Z' email: '' is_verified: false user: 5 - validation_hash: 0csBkRCW3F4yCLlXdtORxurCWRtYennJ + validation_hash: EYcaZDvedEQ56rSTX7omNfyKdll6GK0E model: sentry.useremail pk: 5 - fields: - date_hash_added: '2024-01-30T20:38:39.530Z' - email: f5c736575fd3438b84f9b3ee2e866bcd@example.com + date_hash_added: '2024-02-05T21:02:21.694Z' + email: bfe930dcc5214168a60cae09e61aa2d3@example.com is_verified: true user: 6 - validation_hash: zokGk6cryVtotNHHJX60hEvElmPr4jYx + validation_hash: 2uhjRv0DJjAfP04KD5MM2WvSFc39ooQL model: sentry.useremail pk: 6 - fields: aggregate: count() dataset: events - date_added: '2024-01-30T20:38:39.314Z' + date_added: '2024-02-05T21:02:21.402Z' environment: null query: level:error resolution: 60 @@ -825,7 +825,7 @@ source: tests/sentry/backup/test_releases.py - fields: aggregate: count() dataset: events - date_added: '2024-01-30T20:38:39.334Z' + date_added: '2024-02-05T21:02:21.436Z' environment: null query: test query resolution: 60 @@ -836,18 +836,18 @@ source: tests/sentry/backup/test_releases.py - fields: application: 1 author: A Company - creator_label: ad96a3b6286240c2a088df3a5a7388dd@example.com + creator_label: 79c8ec0075e04ee58ba1667e1b0a84f4@example.com creator_user: 4 - date_added: '2024-01-30T20:38:39.407Z' + date_added: '2024-02-05T21:02:21.553Z' date_deleted: null date_published: null - date_updated: '2024-01-30T20:38:39.503Z' + date_updated: '2024-02-05T21:02:21.664Z' events: '[]' is_alertable: false metadata: {} name: test app overview: null - owner_id: 4553410180153344 + owner_id: 4553444247207936 popularity: 1 proxy_user: 5 redirect_url: null @@ -888,26 
+888,26 @@ source: tests/sentry/backup/test_releases.py scopes: '0' slug: test-app status: 0 - uuid: 774a6972-ea58-494e-b64d-f7b8d225a523 + uuid: 81abc958-a9ab-479f-bc1d-b9f31ff0f9e7 verify_install: true webhook_url: https://example.com/webhook model: sentry.sentryapp pk: 1 - fields: data: '{"conditions":[{"id":"sentry.rules.conditions.first_seen_event.FirstSeenEventCondition"},{"id":"sentry.rules.conditions.every_event.EveryEventCondition"}],"action_match":"all","filter_match":"all","actions":[{"id":"sentry.rules.actions.notify_event.NotifyEventAction"},{"id":"sentry.rules.actions.notify_event_service.NotifyEventServiceAction","service":"mail"}]}' - date_added: '2024-01-30T20:38:39.274Z' + date_added: '2024-02-05T21:02:21.353Z' environment_id: null label: '' owner: null - project: 4553410180218881 + project: 4553444247273473 source: 0 status: 0 model: sentry.rule pk: 1 - fields: - date_added: '2024-01-30T20:38:39.318Z' - date_updated: '2024-01-30T20:38:39.318Z' - project: 4553410180218881 + date_added: '2024-02-05T21:02:21.407Z' + date_updated: '2024-02-05T21:02:21.407Z' + project: 4553444247273473 snuba_query: 1 status: 1 subscription_id: null @@ -915,9 +915,9 @@ source: tests/sentry/backup/test_releases.py model: sentry.querysubscription pk: 1 - fields: - date_added: '2024-01-30T20:38:39.336Z' - date_updated: '2024-01-30T20:38:39.336Z' - project: 4553410180218881 + date_added: '2024-02-05T21:02:21.443Z' + date_updated: '2024-02-05T21:02:21.443Z' + project: 4553444247273473 snuba_query: 2 status: 1 subscription_id: null @@ -925,9 +925,9 @@ source: tests/sentry/backup/test_releases.py model: sentry.querysubscription pk: 2 - fields: - date_added: '2024-01-30T20:38:39.412Z' - date_updated: '2024-01-30T20:38:39.412Z' - project: 4553410180218884 + date_added: '2024-02-05T21:02:21.559Z' + date_updated: '2024-02-05T21:02:21.559Z' + project: 4553444247273476 snuba_query: 1 status: 1 subscription_id: null @@ -935,9 +935,9 @@ source: tests/sentry/backup/test_releases.py model: sentry.querysubscription pk: 3 - fields: - date_added: '2024-01-30T20:38:39.539Z' - date_updated: '2024-01-30T20:38:39.539Z' - project: 4553410180218885 + date_added: '2024-02-05T21:02:21.708Z' + date_updated: '2024-02-05T21:02:21.708Z' + project: 4553444247273477 snuba_query: 1 status: 1 subscription_id: null @@ -948,12 +948,12 @@ source: tests/sentry/backup/test_releases.py is_active: true organizationmember: 1 role: null - team: 4553410180218880 + team: 4553444247273472 model: sentry.organizationmemberteam pk: 1 - fields: integration_id: null - organization: 4553410180153344 + organization: 4553444247207936 sentry_app_id: null target_display: Sentry User target_identifier: '1' @@ -964,7 +964,7 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: integration_id: null - organization: 4553410180153344 + organization: 4553444247207936 sentry_app_id: 1 target_display: Sentry User target_identifier: '1' @@ -974,23 +974,23 @@ source: tests/sentry/backup/test_releases.py model: sentry.notificationaction pk: 2 - fields: - disable_date: '2024-01-30T20:38:39.277Z' + disable_date: '2024-02-05T21:02:21.357Z' opted_out: false - organization: 4553410180153344 + organization: 4553444247207936 rule: 1 - sent_final_email_date: '2024-01-30T20:38:39.277Z' - sent_initial_email_date: '2024-01-30T20:38:39.277Z' + sent_final_email_date: '2024-02-05T21:02:21.357Z' + sent_initial_email_date: '2024-02-05T21:02:21.357Z' model: sentry.neglectedrule pk: 1 - fields: environment: 1 is_hidden: null - project: 4553410180218881 + project: 
4553444247273473 model: sentry.environmentproject pk: 1 - fields: dashboard: 1 - date_added: '2024-01-30T20:38:39.348Z' + date_added: '2024-02-05T21:02:21.473Z' description: null detail: null display_type: 0 @@ -1004,51 +1004,51 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: custom_dynamic_sampling_rule: 1 - project: 4553410180218881 + project: 4553444247273473 model: sentry.customdynamicsamplingruleproject pk: 1 - fields: application: 1 - date_added: '2024-01-30T20:38:39.470Z' - expires_at: '2024-01-31T04:38:39.470Z' + date_added: '2024-02-05T21:02:21.627Z' + expires_at: '2024-02-06T05:02:21.627Z' name: null - refresh_token: 2f01a33333a676dc1838f7eddaccf6f259de051633d197fd048ad2719b74da10 + refresh_token: e77fe089037acd0b21edfbfd967eb0dfe2d14e8f293bfe25ab2622b42e73f9b0 scope_list: '[]' scopes: '0' - token: e320c2900c066f88521633e2ed951d6e067196206ce8770e087196a06d0bcee0 - token_last_characters: cee0 + token: 86cd2ba0e404edefc0aa96192546168c73c9c4fb4e887498315d0ac2b840c70c + token_last_characters: c70c user: 5 model: sentry.apitoken pk: 1 - fields: application: 1 - date_added: '2024-01-30T20:38:39.510Z' + date_added: '2024-02-05T21:02:21.672Z' expires_at: null name: create_exhaustive_sentry_app - refresh_token: eb19419c7dd79395948c5a6d8f9159f360122f6fc97702b7f485efb82e0d6970 + refresh_token: ec1dd41b6541042b3eb715b4368131bc0c4af64e7e74b303a04a347a7b98039e scope_list: '[]' scopes: '0' - token: 4a92f822edc653d2c2275fa2a2a397fc0469e872f1194ec916696401c10ef872 - token_last_characters: f872 + token: 1bd2069581c0f2a600dec0dcaaabac23bbe5f7026128c773160eb9405b88f460 + token_last_characters: f460 user: 1 model: sentry.apitoken pk: 2 - fields: application: null - date_added: '2024-01-30T20:38:39.583Z' + date_added: '2024-02-05T21:02:21.751Z' expires_at: null name: create_exhaustive_global_configs - refresh_token: 6be73e5cdec9bab0823d68548a2b2f6d0e740184c10b525a6553b4dd96b5b97b + refresh_token: cba038aeacd263f5d13cb15655b11b06046a17a383fe1e9fe61075553476cb15 scope_list: '[]' scopes: '0' - token: 7fd229e4a3c36bef5c7fa9dcfe47f47adb377bc782d3a8d87f9efd498abd4e46 - token_last_characters: 4e46 + token: 1fa548f2ea9c799042836e9e4a1256adfdf309719f4f5caf00ca8a1e3256d0a9 + token_last_characters: d0a9 user: 1 model: sentry.apitoken pk: 3 - fields: application: 1 - code: 430df068f25f1f318f4d2aaa0c1e088899e78789282c977c595c0f1157475850 + code: 63ee9cbb386624cbf09eefc4185641eb563e52a28a63cc7ff24675e1ac093322 expires_at: '2022-01-01T11:11:00.000Z' redirect_uri: https://example.com scope_list: '[''openid'', ''profile'', ''email'']' @@ -1058,7 +1058,7 @@ source: tests/sentry/backup/test_releases.py pk: 2 - fields: application: 1 - date_added: '2024-01-30T20:38:39.509Z' + date_added: '2024-02-05T21:02:21.671Z' scope_list: '[]' scopes: '0' user: 1 @@ -1066,7 +1066,7 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: application: null - date_added: '2024-01-30T20:38:39.582Z' + date_added: '2024-02-05T21:02:21.750Z' scope_list: '[]' scopes: '0' user: 1 @@ -1074,11 +1074,11 @@ source: tests/sentry/backup/test_releases.py pk: 2 - fields: comparison_delta: null - date_added: '2024-01-30T20:38:39.316Z' - date_modified: '2024-01-30T20:38:39.316Z' + date_added: '2024-02-05T21:02:21.404Z' + date_modified: '2024-02-05T21:02:21.404Z' include_all_projects: true - name: Intimate Swine - organization: 4553410180153344 + name: Nice Pangolin + organization: 4553444247207936 owner: null resolve_threshold: null snuba_query: 1 @@ -1091,11 +1091,11 @@ source: tests/sentry/backup/test_releases.py pk: 1 - 
fields: comparison_delta: null - date_added: '2024-01-30T20:38:39.336Z' - date_modified: '2024-01-30T20:38:39.336Z' + date_added: '2024-02-05T21:02:21.440Z' + date_modified: '2024-02-05T21:02:21.440Z' include_all_projects: false - name: Enhanced Polliwog - organization: 4553410180153344 + name: Ace Hagfish + organization: 4553444247207936 owner: null resolve_threshold: null snuba_query: 2 @@ -1119,13 +1119,13 @@ source: tests/sentry/backup/test_releases.py - fields: api_grant: null api_token: 1 - date_added: '2024-01-30T20:38:39.437Z' + date_added: '2024-02-05T21:02:21.589Z' date_deleted: null - date_updated: '2024-01-30T20:38:39.455Z' - organization_id: 4553410180153344 + date_updated: '2024-02-05T21:02:21.610Z' + organization_id: 4553444247207936 sentry_app: 1 status: 1 - uuid: 61a84478-97ff-4fe0-b6fb-da2e6c225ae6 + uuid: 82da1b77-e836-4dd1-b226-a9eaa19e4025 model: sentry.sentryappinstallation pk: 1 - fields: @@ -1163,12 +1163,12 @@ source: tests/sentry/backup/test_releases.py type: alert-rule-action sentry_app: 1 type: alert-rule-action - uuid: 55e78dbf-e4a7-47c5-b14f-0debdcd8a618 + uuid: 0aba9566-dca0-4b46-ba53-d37017ad710c model: sentry.sentryappcomponent pk: 1 - fields: alert_rule: null - date_added: '2024-01-30T20:38:39.276Z' + date_added: '2024-02-05T21:02:21.355Z' owner_id: 1 rule: 1 until: null @@ -1176,7 +1176,7 @@ source: tests/sentry/backup/test_releases.py model: sentry.rulesnooze pk: 1 - fields: - date_added: '2024-01-30T20:38:39.275Z' + date_added: '2024-02-05T21:02:21.354Z' rule: 1 type: 1 user_id: null @@ -1184,26 +1184,26 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: action: 1 - project: 4553410180218881 + project: 4553444247273473 model: sentry.notificationactionproject pk: 1 - fields: action: 2 - project: 4553410180218881 + project: 4553444247273473 model: sentry.notificationactionproject pk: 2 - fields: alert_rule: 2 - date_added: '2024-01-30T20:38:39.339Z' + date_added: '2024-02-05T21:02:21.454Z' date_closed: null - date_detected: '2024-01-30T20:38:39.338Z' - date_started: '2024-01-30T20:38:39.338Z' + date_detected: '2024-02-05T21:02:21.451Z' + date_started: '2024-02-05T21:02:21.451Z' detection_uuid: null identifier: 1 - organization: 4553410180153344 + organization: 4553444247207936 status: 1 status_method: 3 - title: Allowed Foxhound + title: Wise Fowl type: 2 model: sentry.incident pk: 1 @@ -1211,7 +1211,8 @@ source: tests/sentry/backup/test_releases.py aggregates: null columns: null conditions: '' - date_added: '2024-01-30T20:38:39.349Z' + date_added: '2024-02-05T21:02:21.475Z' + date_modified: '2024-02-05T21:02:21.475Z' field_aliases: null fields: '[]' name: Test Query for test-org @@ -1223,21 +1224,21 @@ source: tests/sentry/backup/test_releases.py - fields: alert_rule: 1 alert_threshold: 100.0 - date_added: '2024-01-30T20:38:39.323Z' - label: Coherent Mosquito + date_added: '2024-02-05T21:02:21.412Z' + label: Becoming Moose resolve_threshold: null threshold_type: null model: sentry.alertruletrigger pk: 1 - fields: alert_rule: 1 - date_added: '2024-01-30T20:38:39.317Z' - project: 4553410180218882 + date_added: '2024-02-05T21:02:21.405Z' + project: 4553444247273474 model: sentry.alertruleexcludedprojects pk: 1 - fields: alert_rule: 1 - date_added: '2024-01-30T20:38:39.319Z' + date_added: '2024-02-05T21:02:21.408Z' previous_alert_rule: null type: 1 user_id: null @@ -1245,30 +1246,30 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: alert_rule: 2 - date_added: '2024-01-30T20:38:39.337Z' + date_added: '2024-02-05T21:02:21.445Z' 
previous_alert_rule: null type: 1 user_id: null model: sentry.alertruleactivity pk: 2 - fields: - date_added: '2024-01-30T20:38:39.343Z' - end: '2024-01-30T20:38:39.343Z' + date_added: '2024-02-05T21:02:21.464Z' + end: '2024-02-05T21:02:21.464Z' period: 1 - start: '2024-01-29T20:38:39.343Z' + start: '2024-02-04T21:02:21.464Z' values: '[[1.0, 2.0, 3.0], [1.5, 2.5, 3.5]]' model: sentry.timeseriessnapshot pk: 1 - fields: actor_id: 1 application_id: 1 - date_added: '2024-01-30T20:38:39.453Z' + date_added: '2024-02-05T21:02:21.608Z' events: '[]' - guid: c8208ac6bd3e4f32853c778cbea3d939 + guid: 305e27bb9e1d4ac7b0171027d292312d installation_id: 1 - organization_id: 4553410180153344 + organization_id: 4553444247207936 project_id: null - secret: 9e53f3cdf1c065af8cd71225474e06c82c2c5015d10edd673f0bcdc5d23b2881 + secret: a74196d854eee100f4096c2dd8021c12ba6995b39ab8975c10044adb223a494d status: 0 url: https://example.com/webhook version: 0 @@ -1277,40 +1278,40 @@ source: tests/sentry/backup/test_releases.py - fields: actor_id: 6 application_id: 1 - date_added: '2024-01-30T20:38:39.560Z' + date_added: '2024-02-05T21:02:21.734Z' events: '[''event.created'']' - guid: 0820190211c0440aaf94be74bb8a30ca + guid: 5e134f0841ce41549e9e702602ab2c97 installation_id: 1 - organization_id: 4553410180153344 - project_id: 4553410180218885 - secret: 04b0333736bff527fc15c9db1ea2050ced381ce62da1d20b0ce9194da6e99d4d + organization_id: 4553444247207936 + project_id: 4553444247273477 + secret: e3629a34f9182e68b9d95df45a4659d8a52cce2255f2826cd539fed9555b77fc status: 0 url: https://example.com/sentry/webhook version: 0 model: sentry.servicehook pk: 2 - fields: - date_added: '2024-01-30T20:38:39.347Z' + date_added: '2024-02-05T21:02:21.469Z' incident: 1 - target_run_date: '2024-01-31T00:38:39.347Z' + target_run_date: '2024-02-06T01:02:21.469Z' model: sentry.pendingincidentsnapshot pk: 1 - fields: alert_rule_trigger: 1 - date_added: '2024-01-30T20:38:39.345Z' - date_modified: '2024-01-30T20:38:39.345Z' + date_added: '2024-02-05T21:02:21.468Z' + date_modified: '2024-02-05T21:02:21.468Z' incident: 1 status: 1 model: sentry.incidenttrigger pk: 1 - fields: - date_added: '2024-01-30T20:38:39.345Z' + date_added: '2024-02-05T21:02:21.466Z' incident: 1 user_id: 1 model: sentry.incidentsubscription pk: 1 - fields: - date_added: '2024-01-30T20:38:39.344Z' + date_added: '2024-02-05T21:02:21.465Z' event_stats_snapshot: 1 incident: 1 total_events: 1 @@ -1319,7 +1320,7 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: comment: hello test-org - date_added: '2024-01-30T20:38:39.342Z' + date_added: '2024-02-05T21:02:21.463Z' incident: 1 notification_uuid: null previous_value: null @@ -1330,8 +1331,8 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: dashboard_widget_query: 1 - date_added: '2024-01-30T20:38:39.350Z' - date_modified: '2024-01-30T20:38:39.350Z' + date_added: '2024-02-05T21:02:21.478Z' + date_modified: '2024-02-05T21:02:21.478Z' extraction_state: disabled:not-applicable spec_hashes: '[]' spec_version: null @@ -1339,13 +1340,13 @@ source: tests/sentry/backup/test_releases.py pk: 1 - fields: alert_rule_trigger: 1 - date_added: '2024-01-30T20:38:39.323Z' + date_added: '2024-02-05T21:02:21.413Z' query_subscription: 1 model: sentry.alertruletriggerexclusion pk: 1 - fields: alert_rule_trigger: 1 - date_added: '2024-01-30T20:38:39.333Z' + date_added: '2024-02-05T21:02:21.433Z' integration_id: null sentry_app_config: null sentry_app_id: null From 5265c5bba81ccb0675f4f8fd4966e02a204d4fc8 Mon Sep 17 00:00:00 2001 
From: Ryan Skonnord Date: Mon, 5 Feb 2024 13:59:35 -0800 Subject: [PATCH 014/357] fix(hc): Escape file and function names in regex (#64607) --- src/sentry/silo/patches/silo_aware_transaction_patch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/silo/patches/silo_aware_transaction_patch.py b/src/sentry/silo/patches/silo_aware_transaction_patch.py index b703eb207674b7..d710e86f5bcec1 100644 --- a/src/sentry/silo/patches/silo_aware_transaction_patch.py +++ b/src/sentry/silo/patches/silo_aware_transaction_patch.py @@ -78,7 +78,7 @@ def is_in_test_case_body() -> bool: def seek(module_path: str, function_name: str) -> bool: """Check whether the named function has been called in the current stack.""" - pattern = re.compile(rf"/{module_path}\b.*\b{function_name}>$") + pattern = re.compile(rf"/{re.escape(module_path)}\b.*\b{re.escape(function_name)}>$") return any(pattern.search(frame) for frame in frames) return seek("_pytest/runner.py", "pytest_runtest_call") and not ( From 0e69fa2c76a122c998ab3a29277651a4be34b21a Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Mon, 5 Feb 2024 17:22:15 -0500 Subject: [PATCH 015/357] ref(deps): Remove old component annotate plugin dependency (#64606) Removes this dependency, as it is being replaced with our own plugin. The replacement plugin to be added in a followup PR getsentry PR: https://github.com/getsentry/getsentry/pull/12838 --- package.json | 1 - yarn.lock | 5 ----- 2 files changed, 6 deletions(-) diff --git a/package.json b/package.json index f007ffb3fdccbc..37a1e7367bfcb6 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,6 @@ "@emotion/css": "^11.10.5", "@emotion/react": "^11.10.5", "@emotion/styled": "^11.10.5", - "@fullstory/babel-plugin-annotate-react": "^2.3.0", "@monaco-editor/react": "^4.4.5", "@popperjs/core": "^2.11.5", "@react-aria/button": "^3.9.1", diff --git a/yarn.lock b/yarn.lock index f53eb6889aa57e..ace868ee7d4397 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1643,11 +1643,6 @@ dependencies: tslib "^2.4.0" -"@fullstory/babel-plugin-annotate-react@^2.3.0": - version "2.3.0" - resolved "https://registry.yarnpkg.com/@fullstory/babel-plugin-annotate-react/-/babel-plugin-annotate-react-2.3.0.tgz#ab4df27dbecaa3771a1b353b898ccf887876e9fb" - integrity sha512-gYLUL6Tu0exbvTIhK9nSCaztmqBlQAm07Fvtl/nKTc+lxwFkcX9vR8RrdTbyjJZKbPaA5EMlExQ6GeLCXkfm5g== - "@humanwhocodes/config-array@^0.11.11": version "0.11.11" resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.11.tgz#88a04c570dbbc7dd943e4712429c3df09bc32844" From ce4bc4a750edb931bad1dc397f27a33a39924d10 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:22:28 -0800 Subject: [PATCH 016/357] chore: Restrict user permission details endpoint to session auth (#64608) --- src/sentry/api/endpoints/user_permission_details.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sentry/api/endpoints/user_permission_details.py b/src/sentry/api/endpoints/user_permission_details.py index d174f15e03135c..44a96401e8ba60 100644 --- a/src/sentry/api/endpoints/user_permission_details.py +++ b/src/sentry/api/endpoints/user_permission_details.py @@ -2,6 +2,7 @@ from django.conf import settings from django.db import IntegrityError, router, transaction +from rest_framework.authentication import SessionAuthentication from rest_framework.request import Request from rest_framework.response import Response @@ -24,6 +25,7 @@ class 
UserPermissionDetailsEndpoint(UserEndpoint): "POST": ApiPublishStatus.PRIVATE, } owner = ApiOwner.ENTERPRISE + authentication_classes = (SessionAuthentication,) permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,) def get(self, request: Request, user, permission_name) -> Response: From 8b49c80eb6ff9b5aa4815e2e3ec73afd1850720b Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 5 Feb 2024 14:22:46 -0800 Subject: [PATCH 017/357] fix(issues): Increase trace timeline tooltip target (#64598) --- .../traceTimeline/traceTimeline.tsx | 1 + .../traceTimeline/traceTimelineEvents.tsx | 22 +++++++++---------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index 5c18d4a1306926..ba6ae051831a28 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -68,6 +68,7 @@ const TimelineOutline = styled('div')` height: 8px; border: 1px solid ${p => p.theme.innerBorder}; border-radius: ${p => p.theme.borderRadius}; + background-color: ${p => p.theme.backgroundSecondary}; `; /** diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index 4556f239884f38..6686fa1081d8a3 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -50,7 +50,7 @@ export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { return ( {/* Add padding to the total columns, 1 column of padding on each side */} - + {Array.from(eventsByColumn.entries()).map(([column, colEvents]) => { // Calculate the timestamp range that this column represents const timeRange = getChunkTimeRange( @@ -105,7 +105,7 @@ export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { * ... * */ -const TimelineColumns = styled('ul')<{totalColumns: number}>` +const TimelineColumns = styled('div')` /* Reset defaults for
    */ list-style: none; margin: 0; @@ -113,7 +113,6 @@ const TimelineColumns = styled('ul')<{totalColumns: number}>` /* Layout of the lines */ display: grid; - grid-template-columns: repeat(${p => p.totalColumns}, 1fr); margin-top: -1px; height: 0; `; @@ -163,7 +162,7 @@ function NodeGroup({ return ( - + {Array.from(eventsByColumn.entries()).map(([column, groupEvents]) => { const isCurrentNode = groupEvents.some(e => e.id === currentEventId); return ( @@ -186,7 +185,7 @@ function NodeGroup({ ); })} - + } overlayStyle={{ @@ -199,6 +198,7 @@ function NodeGroup({ 1 ? `${minColumn} / ${maxColumn}` : columns[0], + width: 8 * columns.length, }} data-test-id={`trace-timeline-tooltip-${currentColumn}`} /> @@ -208,7 +208,7 @@ function NodeGroup({ ); } -const EventColumn = styled('li')` +const EventColumn = styled('div')` place-items: stretch; display: grid; align-items: center; @@ -230,6 +230,7 @@ const IconNode = styled('div')` box-shadow: ${p => p.theme.dropShadowLight}; user-select: none; background-color: ${p => color(p.theme.red200).alpha(0.3).string()}; + margin-left: -8px; `; const PerformanceIconNode = styled(IconNode)` @@ -252,7 +253,7 @@ const CurrentNodeRing = styled('div')` border-radius: 100%; position: absolute; top: -4px; - left: -4px; + left: -12px; animation: pulse 1s ease-out infinite; @keyframes pulse { @@ -277,9 +278,8 @@ const CurrentIconNode = styled(IconNode)` `; const TooltipHelper = styled('span')` - height: 8px; - margin-top: -4px; - margin-right: -2px; - min-width: 8px; + height: 12px; + margin-left: -8px; + margin-top: -6px; z-index: 1; `; From 4979394aa96ce45db60f44d24ec2a1402ee98d7e Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:26:40 -0800 Subject: [PATCH 018/357] fix(issue-alerts): Fix issue alert options on small screens (#64588) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit this pr updates the issue alert options shown on project creation to look better on small screens. previously, you wouldn't be able to see the boxes on small screen sizes. 
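the gist of the fix, sketched below with the same emotion `styled` + `space` helpers the component already uses (a simplified stand-in, not the full component): grid tracks sized with `max-content` can never shrink below their content, so a `repeat(5, max-content)` row overflows narrow viewports, while a wrapping flex container reflows the same controls onto extra rows.

```tsx
import styled from '@emotion/styled';

import {space} from 'sentry/styles/space';

// Before: five fixed-width tracks. The row is as wide as the sum of the
// controls and clips on screens narrower than that, with no way to reflow.
const RigidRow = styled('div')`
  display: grid;
  grid-template-columns: repeat(5, max-content);
  gap: ${space(1)};
  align-items: center;
`;

// After: flex with wrapping. Controls that no longer fit move to the next
// line, so everything stays visible on small screens.
const WrappingRow = styled('div')`
  display: flex;
  flex-wrap: wrap;
  gap: ${space(1)};
  align-items: center;
`;
```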
before: ![Screenshot 2024-02-05 at 10 24 52 AM](https://github.com/getsentry/sentry/assets/46740234/dcaca98c-c1d0-4d41-a125-ff26ece17887) after: ![Screenshot 2024-02-05 at 10 40 13 AM](https://github.com/getsentry/sentry/assets/46740234/c4aa1885-2ed6-48bd-875b-7a37f127bd12) --- static/app/views/projectInstall/issueAlertOptions.tsx | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/static/app/views/projectInstall/issueAlertOptions.tsx b/static/app/views/projectInstall/issueAlertOptions.tsx index 7748a6463d861f..94d0e768748ec6 100644 --- a/static/app/views/projectInstall/issueAlertOptions.tsx +++ b/static/app/views/projectInstall/issueAlertOptions.tsx @@ -136,7 +136,7 @@ class IssueAlertOptions extends DeprecatedAsyncComponent { ): [string, string | React.ReactElement][] { const customizedAlertOption: [string, React.ReactNode] = [ RuleAction.CUSTOMIZED_ALERTS.toString(), - { // XXX(epurkhiser): The `e.preventDefault` here is needed to stop @@ -173,7 +173,7 @@ class IssueAlertOptions extends DeprecatedAsyncComponent { }))} onChange={interval => this.setStateAndUpdateParents({interval: interval.value})} /> - , + , ]; const default_label = this.shouldUseNewDefaultSetting() @@ -331,10 +331,10 @@ const Content = styled('div')` padding-bottom: ${space(4)}; `; -const CustomizeAlertsGrid = styled('div')` - display: grid; - grid-template-columns: repeat(5, max-content); +const CustomizeAlert = styled('div')` + display: flex; gap: ${space(1)}; + flex-wrap: wrap; align-items: center; `; From c8fa3fd419302330facbea9505f89b8df5afc3f2 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:26:59 -0800 Subject: [PATCH 019/357] feat(integrations): Add project creation option to Vercel integration (#64585) this pr adds in an option to create a project from the Vercel integration page. 
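the dropdown gets a sentinel entry whose value can never collide with a real project id, and selecting it opens the creation modal instead of performing a mapping. a condensed sketch of that pattern (`sentryProjects` and the state setter are assumed to be in scope; the icon choice is illustrative):

```tsx
import {openProjectCreationModal} from 'sentry/actionCreators/modal';
import {IconAdd} from 'sentry/icons';
import {t} from 'sentry/locale';

// Sentinel option: -1 is never a real Sentry project id.
const projectOptions = [
  {label: t('Create a Project'), value: -1, leadingItems: <IconAdd isCircled />},
  ...sentryProjects.map(({slug, id}) => ({label: slug, value: id})),
];

const handleSelectProject = ({value}: {value: number}) => {
  if (value === -1) {
    // Divert to the creation flow rather than treating -1 as a mapping target.
    openProjectCreationModal({defaultCategory: 'popular'});
  } else {
    selectSentryProject(value); // stand-in for the component's state update
  }
};
```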
closes https://github.com/getsentry/sentry/issues/60063 --- .../components/forms/fields/projectMapperField.tsx | 14 ++++++++++++-- .../configureIntegration.tsx | 6 ++++++ 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/static/app/components/forms/fields/projectMapperField.tsx b/static/app/components/forms/fields/projectMapperField.tsx index 090292a5aaedc0..c7989342f2349a 100644 --- a/static/app/components/forms/fields/projectMapperField.tsx +++ b/static/app/components/forms/fields/projectMapperField.tsx @@ -2,6 +2,7 @@ import {Component, Fragment} from 'react'; import {components} from 'react-select'; import styled from '@emotion/styled'; +import {openProjectCreationModal} from 'sentry/actionCreators/modal'; import {Button} from 'sentry/components/button'; import SelectControl from 'sentry/components/forms/controls/selectControl'; import FormField from 'sentry/components/forms/formField'; @@ -108,12 +109,17 @@ export class RenderField extends Component { ); }; - const projectOptions = sentryProjects.map(({slug, id}) => ({ + const sentryProjectOptions = sentryProjects.map(({slug, id}) => ({ label: slug, value: id, leadingItems: renderIdBadge({id, hideName: true}), })); + const projectOptions = [ + {label: t('Create a Project'), value: -1, leadingItems: }, + ...sentryProjectOptions, + ]; + const mappedItemsToShow = mappedDropdownItems.filter( item => !mappedValuesUsed.has(item.value) ); @@ -123,7 +129,11 @@ export class RenderField extends Component { })); const handleSelectProject = ({value}: {value: number}) => { - this.setState({selectedSentryProjectId: value}); + if (value === -1) { + openProjectCreationModal({defaultCategory: 'popular'}); + } else { + this.setState({selectedSentryProjectId: value}); + } }; const handleSelectMappedValue = ({value}: {value: MappedValue}) => { diff --git a/static/app/views/settings/organizationIntegrations/configureIntegration.tsx b/static/app/views/settings/organizationIntegrations/configureIntegration.tsx index 030711feab38a6..d8af26c49c52c0 100644 --- a/static/app/views/settings/organizationIntegrations/configureIntegration.tsx +++ b/static/app/views/settings/organizationIntegrations/configureIntegration.tsx @@ -31,6 +31,7 @@ import useRouteAnalyticsEventNames from 'sentry/utils/routeAnalytics/useRouteAna import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; import useApi from 'sentry/utils/useApi'; import useOrganization from 'sentry/utils/useOrganization'; +import useProjects from 'sentry/utils/useProjects'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import BreadcrumbTitle from 'sentry/views/settings/components/settingsBreadcrumb/breadcrumbTitle'; import SettingsPageHeader from 'sentry/views/settings/components/settingsPageHeader'; @@ -109,6 +110,7 @@ function ConfigureIntegration({params, router, routes, location}: Props) { }); const provider = config.providers.find(p => p.key === integration?.provider.key); + const {projects} = useProjects(); useRouteAnalyticsEventNames( 'integrations.details_viewed', @@ -123,6 +125,10 @@ function ConfigureIntegration({params, router, routes, location}: Props) { : {} ); + useEffect(() => { + refetchIntegration(); + }, [projects, refetchIntegration]); + useEffect(() => { // This page should not be accessible by members (unless its github or gitlab) const allowMemberConfiguration = ['github', 'gitlab'].includes(providerKey); From 0e4d585702646e4265a28a27f8bdb275c155ec48 Mon Sep 17 00:00:00 2001 From: Michelle Zhang 
<56095982+michellewzhang@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:30:43 -0800 Subject: [PATCH 020/357] ref(issues/replay): update issue details replay CTA (#64478) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update the Replay CTA (both frontend & web backend) on Issue Details: - Standardize Frontend & Web Backend CTA design. - Instead of show/hide details in the top right, the Replay CTA is now dismissible and snoozable with a dropdown menu "X" in the top right corner - Dismissing or snoozing will stick around for user & project level - After dismissing/snoozing, the whole Session Replay section goes away. - On prod it will show the "View sample replay" button as well. https://github.com/getsentry/sentry/assets/56095982/a73e6c54-96e3-4ed5-bbe6-d1b4783d4cac On larger screens: Screenshot 2024-02-02 at 11 24 50 AM SCR-20240202-kmgy On smaller screens we don't display the image because the display gets wonky: SCR-20240202-kjwc Closes https://github.com/getsentry/sentry/issues/64311 --- .../events/eventReplay/index.spec.tsx | 27 +- .../components/events/eventReplay/index.tsx | 25 +- .../replayInlineOnboardingPanel.spec.tsx | 44 +- .../replayInlineOnboardingPanel.tsx | 234 ++++++---- .../replayInlineOnboardingPanelBackend.tsx | 76 ---- .../utils/analytics/issueAnalyticsEvents.tsx | 2 + .../spot/replay-inline-onboarding-v2.svg | 408 ++++++++++++++++++ 7 files changed, 597 insertions(+), 219 deletions(-) delete mode 100644 static/app/components/events/eventReplay/replayInlineOnboardingPanelBackend.tsx create mode 100644 static/images/spot/replay-inline-onboarding-v2.svg diff --git a/static/app/components/events/eventReplay/index.spec.tsx b/static/app/components/events/eventReplay/index.spec.tsx index c7504dfcf180fa..33dffacd4c392e 100644 --- a/static/app/components/events/eventReplay/index.spec.tsx +++ b/static/app/components/events/eventReplay/index.spec.tsx @@ -145,30 +145,9 @@ describe('EventReplay', function () { }); render(, {organization}); - expect(await screen.findByText('Configure Session Replay')).toBeInTheDocument(); - }); - - it('should not render the replay inline onboarding component when the project is not JS', function () { - MockUseHasOrganizationSentAnyReplayEvents.mockReturnValue({ - hasOrgSentReplays: false, - fetching: false, - }); - MockUseReplayOnboardingSidebarPanel.mockReturnValue({ - activateSidebar: jest.fn(), - }); - render( - , - {organization} - ); - - expect(screen.queryByText('Configure Session Replay')).not.toBeInTheDocument(); - expect(screen.queryByTestId('player-container')).not.toBeInTheDocument(); + expect( + await screen.findByText('Watch the errors and latency issues your users face') + ).toBeInTheDocument(); }); it('should render a replay when there is a replayId from tags', async function () { diff --git a/static/app/components/events/eventReplay/index.tsx b/static/app/components/events/eventReplay/index.tsx index 8f360cc698d683..bdc09fa261945f 100644 --- a/static/app/components/events/eventReplay/index.tsx +++ b/static/app/components/events/eventReplay/index.tsx @@ -30,9 +30,8 @@ function EventReplayContent({ const organization = useOrganization(); const {hasOrgSentReplays, fetching} = useHasOrganizationSentAnyReplayEvents(); - const onboardingPanel = useCallback(() => import('./replayInlineOnboardingPanel'), []); - const onboardingPanelBackend = useCallback( - () => import('./replayInlineOnboardingPanelBackend'), + const replayOnboardingPanel = useCallback( + () => 
import('./replayInlineOnboardingPanel'), [] ); const replayPreview = useCallback(() => import('./replayPreview'), []); @@ -46,20 +45,18 @@ function EventReplayContent({ return null; } - if (!hasOrgSentReplays) { - return ( - - - - ); - } + const platform = group?.project.platform ?? group?.platform ?? 'other'; + const projectId = group?.project.id ?? event.projectID ?? ''; - const platform = group?.project.platform ?? 'other'; - if (!replayId && replayBackendPlatforms.includes(platform)) { - // if backend project, show new onboarding panel + // frontend or backend platforms + if (!hasOrgSentReplays || (!replayId && replayBackendPlatforms.includes(platform))) { return ( - + ); } diff --git a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.spec.tsx b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.spec.tsx index 0df9b4fc94d41c..afce672b4588c9 100644 --- a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.spec.tsx +++ b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.spec.tsx @@ -1,28 +1,44 @@ import {render, screen} from 'sentry-test/reactTestingLibrary'; -import localStorage from 'sentry/utils/localStorage'; +import useDismissAlertImport from 'sentry/utils/useDismissAlert'; import ReplayInlineOnboardingPanel from './replayInlineOnboardingPanel'; jest.mock('sentry/utils/localStorage'); - -const TEN_SECONDS = 10 * 1000; +jest.mock('sentry/utils/useDismissAlert'); +const useDismissAlert = jest.mocked(useDismissAlertImport); describe('replayInlineOnboardingPanel', () => { - it('should render by default', async () => { - render(); - expect(await screen.findByText('Configure Session Replay')).toBeInTheDocument(); + beforeEach(() => { + jest.clearAllMocks(); + useDismissAlert.mockClear(); }); - it('should not render if hideUntil is set', async () => { - localStorage.getItem = jest.fn().mockReturnValue(Date.now() + TEN_SECONDS); - render(); - expect(await screen.queryByText('Configure Session Replay')).not.toBeInTheDocument(); + it('should render if not dismissed', async () => { + const dismiss = jest.fn(); + useDismissAlert.mockImplementation(() => { + return { + dismiss, + isDismissed: false, + }; + }); + render(); + expect( + await screen.findByText('Watch the errors and latency issues your users face') + ).toBeInTheDocument(); }); - it('should clear the hideUntil time if it has expired', async () => { - localStorage.getItem = jest.fn().mockReturnValue(Date.now() - TEN_SECONDS); - render(); - expect(await screen.findByText('Configure Session Replay')).toBeInTheDocument(); + it('should not render if dismissed', async () => { + const dismiss = jest.fn(); + useDismissAlert.mockImplementation(() => { + return { + dismiss, + isDismissed: true, + }; + }); + render(); + expect( + await screen.queryByText('Watch the errors and latency issues your users face') + ).not.toBeInTheDocument(); }); }); diff --git a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx index d6d99331635798..fddae1095a54be 100644 --- a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx +++ b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx @@ -1,126 +1,178 @@ -import {useState} from 'react'; import styled from '@emotion/styled'; -import replaysInlineOnboarding from 'sentry-images/spot/replay-inline-onboarding.svg'; +import replayInlineOnboarding from 'sentry-images/spot/replay-inline-onboarding-v2.svg'; import 
{Button} from 'sentry/components/button'; -import ButtonBar from 'sentry/components/buttonBar'; +import {DropdownMenu} from 'sentry/components/dropdownMenu'; import {EventReplaySection} from 'sentry/components/events/eventReplay/eventReplaySection'; -import {t} from 'sentry/locale'; +import HookOrDefault from 'sentry/components/hookOrDefault'; +import platforms, {otherPlatform} from 'sentry/data/platforms'; +import {IconClose} from 'sentry/icons'; +import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import localStorage from 'sentry/utils/localStorage'; +import type {PlatformKey} from 'sentry/types'; +import {trackAnalytics} from 'sentry/utils/analytics'; import {useReplayOnboardingSidebarPanel} from 'sentry/utils/replays/hooks/useReplayOnboarding'; +import theme from 'sentry/utils/theme'; +import useDismissAlert from 'sentry/utils/useDismissAlert'; +import useMedia from 'sentry/utils/useMedia'; +import useOrganization from 'sentry/utils/useOrganization'; + +type OnboardingCTAProps = { + platform: PlatformKey; + projectId: string; +}; + +const OnboardingCTAButton = HookOrDefault({ + hookName: 'component:replay-onboarding-cta-button', + defaultComponent: null, +}); + +export default function ReplayInlineOnboardingPanel({ + platform, + projectId, +}: OnboardingCTAProps) { + const LOCAL_STORAGE_KEY = `${projectId}:issue-details-replay-onboarding-hide`; + + const {dismiss: snooze, isDismissed: isSnoozed} = useDismissAlert({ + key: LOCAL_STORAGE_KEY, + expirationDays: 7, + }); -const LOCAL_STORAGE_KEY = 'replay-preview-onboarding-hide-until'; -const SNOOZE_TIME = 1000 * 60 * 60 * 24 * 7; // 1 week -const DISMISS_TIME = 1000 * 60 * 60 * 24 * 365; // 1 year - -function getHideUntilTime() { - return Number(localStorage.getItem(LOCAL_STORAGE_KEY)) || 0; -} - -function setHideUntilTime(offset: number) { - localStorage.setItem(LOCAL_STORAGE_KEY, String(Date.now() + offset)); -} - -function clearHideUntilTime() { - localStorage.removeItem(LOCAL_STORAGE_KEY); -} - -export default function ReplayInlineOnboardingPanel() { - const [isHidden, setIsHidden] = useState(() => { - const hideUntilTime = getHideUntilTime(); - if (hideUntilTime && Date.now() < hideUntilTime) { - return true; - } - clearHideUntilTime(); - return false; + const {dismiss, isDismissed} = useDismissAlert({ + key: LOCAL_STORAGE_KEY, + expirationDays: 365, }); + const {activateSidebar} = useReplayOnboardingSidebarPanel(); - if (isHidden) { + const platformKey = platforms.find(p => p.id === platform) ?? otherPlatform; + const platformName = platformKey === otherPlatform ? '' : platformKey.name; + const isScreenSmall = useMedia(`(max-width: ${theme.breakpoints.small})`); + const organization = useOrganization(); + + if (isDismissed || isSnoozed) { return null; } return ( - +
    - {t('Configure Session Replay')} - - {t( - 'Playback your app to identify the root cause of errors and latency issues.' - )} - - - - - - - - +
    - -
    + {!isScreenSmall && } + , + }} + size="xs" + items={[ + { + key: 'dismiss', + label: t('Dismiss'), + onAction: () => { + dismiss(); + trackAnalytics('issue-details.replay-cta-dismiss', { + organization, + type: 'dismiss', + }); + }, + }, + { + key: 'snooze', + label: t('Snooze'), + onAction: () => { + snooze(); + trackAnalytics('issue-details.replay-cta-dismiss', { + organization, + type: 'snooze', + }); + }, + }, + ]} + /> +
    ); } -const StyledOnboardingPanel = styled('div')` - display: flex; - flex-direction: column; - max-width: 600px; - border: 1px dashed ${p => p.theme.border}; - border-radius: ${p => p.theme.borderRadius}; - padding: ${space(3)}; - margin-bottom: ${space(3)}; +const PurpleText = styled('span')` + color: ${p => p.theme.purple300}; + font-weight: bold; +`; - @media (min-width: ${p => p.theme.breakpoints.small}) { - flex-direction: row; - } +const BannerWrapper = styled('div')` + position: relative; + border: 1px solid ${p => p.theme.border}; + border-radius: ${p => p.theme.borderRadius}; + padding: ${space(2)}; + margin: ${space(1)} 0; + background: linear-gradient( + 90deg, + ${p => p.theme.backgroundSecondary}00 0%, + ${p => p.theme.backgroundSecondary}FF 70%, + ${p => p.theme.backgroundSecondary}FF 100% + ); `; -const Heading = styled('h3')` - text-transform: uppercase; - font-size: ${p => p.theme.fontSizeSmall}; - color: ${p => p.theme.gray300}; +const BannerTitle = styled('div')` + font-size: ${p => p.theme.fontSizeExtraLarge}; margin-bottom: ${space(1)}; + font-weight: 600; `; -const Content = styled('p')` - margin-bottom: ${space(2)}; - font-size: ${p => p.theme.fontSizeMedium}; +const BannerDescription = styled('div')` + margin-bottom: ${space(1.5)}; + max-width: 340px; `; -const Illustration = styled('img')` - display: none; +const CloseDropdownMenu = styled(DropdownMenu)` + position: absolute; + display: block; + top: ${space(1)}; + right: ${space(1)}; + color: ${p => p.theme.white}; + cursor: pointer; + z-index: 1; +`; - @media (min-width: ${p => p.theme.breakpoints.small}) { - display: block; - } +const Background = styled('div')<{image: any}>` + display: flex; + justify-self: flex-end; + position: absolute; + top: 0px; + right: 25px; + height: 100%; + width: 100%; + max-width: 250px; + background-image: url(${p => p.image}); + background-repeat: no-repeat; + background-size: contain; `; -const ButtonList = styled('div')` - display: inline-flex; - justify-content: flex-start; - align-items: center; - gap: 0 ${space(1)}; +const ActionButton = styled('div')` + display: flex; + gap: ${space(1)} `; diff --git a/static/app/components/events/eventReplay/replayInlineOnboardingPanelBackend.tsx b/static/app/components/events/eventReplay/replayInlineOnboardingPanelBackend.tsx deleted file mode 100644 index 369c5e4426bb61..00000000000000 --- a/static/app/components/events/eventReplay/replayInlineOnboardingPanelBackend.tsx +++ /dev/null @@ -1,76 +0,0 @@ -import {useState} from 'react'; -import styled from '@emotion/styled'; - -import replaysInlineOnboarding from 'sentry-images/spot/replay-onboarding-backend.svg'; - -import PageBanner from 'sentry/components/alerts/pageBanner'; -import {Button} from 'sentry/components/button'; -import ButtonBar from 'sentry/components/buttonBar'; -import {EventReplaySection} from 'sentry/components/events/eventReplay/eventReplaySection'; -import HookOrDefault from 'sentry/components/hookOrDefault'; -import platforms, {otherPlatform} from 'sentry/data/platforms'; -import {IconBroadcast} from 'sentry/icons/iconBroadcast'; -import {t, tct} from 'sentry/locale'; -import type {PlatformKey} from 'sentry/types'; -import {useReplayOnboardingSidebarPanel} from 'sentry/utils/replays/hooks/useReplayOnboarding'; -import theme from 'sentry/utils/theme'; -import useMedia from 'sentry/utils/useMedia'; -import SectionToggleButton from 'sentry/views/issueDetails/sectionToggleButton'; - -type OnboardingCTAProps = { - platform: PlatformKey; -}; - -const OnboardingCTAButton 
= HookOrDefault({ - hookName: 'component:replay-onboarding-cta-button', - defaultComponent: null, -}); - -export default function ReplayInlineOnboardingPanelBackend({ - platform, -}: OnboardingCTAProps) { - const [isExpanded, setIsExpanded] = useState(true); - const {activateSidebar} = useReplayOnboardingSidebarPanel(); - - const platformName = platforms.find(p => p.id === platform) ?? otherPlatform; - const isScreenSmall = useMedia(`(max-width: ${theme.breakpoints.small})`); - - return ( - - } - > - {isExpanded ? ( - - {!isScreenSmall && } - - - } - description={t('Watch the errors and latency issues your users face')} - heading={tct('Set up your [platform] app now', { - platform: {platformName.name}, - })} - icon={} - image={replaysInlineOnboarding} - title={{t('What’s new')}} - /> - ) : null} - - ); -} - -const PurpleText = styled('span')` - color: ${p => p.theme.purple300}; - font-weight: bold; -`; diff --git a/static/app/utils/analytics/issueAnalyticsEvents.tsx b/static/app/utils/analytics/issueAnalyticsEvents.tsx index 5e586163d4348c..568183526199c6 100644 --- a/static/app/utils/analytics/issueAnalyticsEvents.tsx +++ b/static/app/utils/analytics/issueAnalyticsEvents.tsx @@ -61,6 +61,7 @@ export type IssueEventParameters = { 'integrations.integration_reinstall_clicked': { provider: string; }; + 'issue-details.replay-cta-dismiss': {type: string}; 'issue.search_sidebar_clicked': {}; 'issue.shared_publicly': {}; 'issue_details.copy_event_link_clicked': GroupEventParams; @@ -310,6 +311,7 @@ export const issueEventMap: Record = { 'issue_details.event_dropdown_option_selected': 'Issue Details: Event Dropdown Option Selected', 'issue_details.header_view_replay_clicked': 'Issue Details: Header View Replay Clicked', + 'issue-details.replay-cta-dismiss': 'Issue Details Replay CTA Dismissed', 'issue_group_details.anr_root_cause_detected': 'Detected ANR Root Cause', 'issue_details.external_issue_loaded': 'Issue Details: External Issue Loaded', 'issue_details.external_issue_modal_opened': diff --git a/static/images/spot/replay-inline-onboarding-v2.svg b/static/images/spot/replay-inline-onboarding-v2.svg new file mode 100644 index 00000000000000..3008ac54f24ce7 --- /dev/null +++ b/static/images/spot/replay-inline-onboarding-v2.svg @@ -0,0 +1,408 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From ccffaf0c8b8dbd9c2496df7154a4c27d7bff6aef Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Mon, 5 Feb 2024 14:31:17 -0800 Subject: [PATCH 021/357] fix(clippedbox): Correctly clear max height when window is maximized (#64593) When maximizing a window, the reveal function would be called, but the content height would be the same as the revealed height which means that it would skip the reveal animation. I had erroneously not provided the element as an argument to `clearMaxHeight()` in this scenario. Adding that fixes the behavior, and I also modified the signature to require the argument. 
--- static/app/components/clippedBox.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/components/clippedBox.tsx b/static/app/components/clippedBox.tsx index bbc4aa9fbee57f..e3e8a4f503d398 100644 --- a/static/app/components/clippedBox.tsx +++ b/static/app/components/clippedBox.tsx @@ -50,7 +50,7 @@ function calculateAddedHeight({ } } -function clearMaxHeight(element?: HTMLElement | null) { +function clearMaxHeight(element: HTMLElement | null) { if (element) { element.style.maxHeight = 'none'; } @@ -91,7 +91,7 @@ function revealAndDisconnectObserver({ wrapperRef.current.addEventListener('transitionend', onTransitionEnd); wrapperRef.current.style.maxHeight = `${revealedWrapperHeight}px`; } else { - clearMaxHeight(); + clearMaxHeight(wrapperRef.current); } revealRef.current = true; From 571a45d85093d4f9d87d2c047a751dda6cdda42d Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Mon, 5 Feb 2024 17:57:56 -0500 Subject: [PATCH 022/357] fix(metrics-extraction): Only send exception for old widget (#64602) ### Summary New widgets are expected to have a period where they aren't in on-demand since the task hasn't run yet. This gap gets closed once we add code for updating widgets to set their on-demand row as well. Needs #64601 --- src/sentry/relay/config/metric_extraction.py | 9 +++++++ .../relay/config/test_metric_extraction.py | 26 +++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index 9cd595b3a699ae..d6aa52f9a04d9f 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -488,6 +488,14 @@ def _can_widget_query_use_stateful_extraction( on_demand_entry = on_demand_entries[0] on_demand_hashes = on_demand_entry.spec_hashes + if on_demand_entry.date_modified < widget_query.date_modified: + # On demand entry was updated before the widget_query got updated, meaning it's potentially out of date + metrics.incr( + "on_demand_metrics.on_demand_spec.out_of_date_on_demand", + sample_rate=1.0, + ) + return False + if set(spec_hashes) != set(on_demand_hashes): # Spec hashes should match. 
with sentry_sdk.push_scope() as scope: @@ -501,6 +509,7 @@ def _can_widget_query_use_stateful_extraction( amount=len(metrics_specs), sample_rate=1.0, ) + return False return True diff --git a/tests/sentry/relay/config/test_metric_extraction.py b/tests/sentry/relay/config/test_metric_extraction.py index 63aec2e4b35504..9234c496b8ddd8 100644 --- a/tests/sentry/relay/config/test_metric_extraction.py +++ b/tests/sentry/relay/config/test_metric_extraction.py @@ -1970,3 +1970,29 @@ def test_level_field(default_project: Project) -> None: create_widget([aggr], query, default_project) config = get_metric_extraction_config(default_project) assert config is None + + +@django_db_all +def test_widget_modifed_after_on_demand(default_project: Project) -> None: + duration = 1000 + with Feature( + { + ON_DEMAND_METRICS_WIDGETS: True, + "organizations:on-demand-metrics-query-spec-version-two": True, + } + ): + widget_query = create_widget( + ["epm()"], + f"transaction.duration:>={duration}", + default_project, + columns=["user.id", "release", "count()"], + ) + + with mock.patch("sentry_sdk.capture_exception") as capture_exception: + + process_widget_specs([widget_query.id]) + config = get_metric_extraction_config(default_project) + + assert config and config["metrics"] + + assert capture_exception.call_count == 0 From 952b664bf13f4d58fd28a74b41e92e0bddf5b511 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Mon, 5 Feb 2024 15:02:46 -0800 Subject: [PATCH 023/357] chore(integrations): Remove project creation instructions (#64584) this pr removes the instruction to create a new project now that we have the project creation modal closes https://github.com/getsentry/sentry/issues/64297 --- src/sentry/integrations/vercel/integration.py | 3 --- tests/sentry/integrations/vercel/test_integration.py | 5 ++--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/src/sentry/integrations/vercel/integration.py b/src/sentry/integrations/vercel/integration.py index fab6f2ff642261..4996b8b6adb945 100644 --- a/src/sentry/integrations/vercel/integration.py +++ b/src/sentry/integrations/vercel/integration.py @@ -65,7 +65,6 @@ configure_integration = {"title": _("Connect Your Projects")} -create_project_instruction = _("Don't have a project yet? Click [here]({}) to create one.") install_source_code_integration = _( "Install a [source code integration]({}) and configure your repositories." 
) @@ -98,11 +97,9 @@ def metadata(self): def get_dynamic_display_information(self): qs = urlencode({"category": "source code management"}) source_code_link = absolute_uri(f"/settings/{self.organization.slug}/integrations/?{qs}") - add_project_link = absolute_uri(f"/organizations/{self.organization.slug}/projects/new/") return { "configure_integration": { "instructions": [ - create_project_instruction.format(add_project_link), install_source_code_integration.format(source_code_link), ] } diff --git a/tests/sentry/integrations/vercel/test_integration.py b/tests/sentry/integrations/vercel/test_integration.py index 9022d6f5bfe871..a7074905bfff57 100644 --- a/tests/sentry/integrations/vercel/test_integration.py +++ b/tests/sentry/integrations/vercel/test_integration.py @@ -398,9 +398,8 @@ def test_get_dynamic_display_information(self): installation = integration.get_installation(self.organization.id) dynamic_display_info = installation.get_dynamic_display_information() instructions = dynamic_display_info["configure_integration"]["instructions"] - assert len(instructions) == 2 - assert "Don't have a project yet?" in instructions[0] - assert "configure your repositories." in instructions[1] + assert len(instructions) == 1 + assert "configure your repositories." in instructions[0] @responses.activate def test_uninstall(self): From 40b3ea3472ce36f534ce0bbfa90a7f14354d21e2 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Mon, 5 Feb 2024 15:53:23 -0800 Subject: [PATCH 024/357] ref(slack): Only rm user and event count context for some specific issue types (#64441) Initially in https://github.com/getsentry/sentry/pull/64328 I'd removed tags and context for all non-error issues, but it turns out we only want to remove tags and context in some specific cases. Note that this is a short term solution, there is a longer term solution proposed [here](https://www.notion.so/sentry/Rich-Slack-Notifications-Tech-Spec-867736397b4f4ef29b1d0251f5ae98c8?pvs=4#8c9a43bdc6224fcc8df7007189b268a7). 
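A simplified sketch of the gating this introduces (condensed from the `get_tags`/`get_context` changes below, which additionally adjust the default tag set per issue category):

```python
from sentry.issues.grouptype import (
    GroupCategory,
    PerformanceP95EndpointRegressionGroupType,
    ProfileFunctionRegressionType,
)

REGRESSION_PERFORMANCE_ISSUE_TYPES = [
    PerformanceP95EndpointRegressionGroupType,
    ProfileFunctionRegressionType,
]


def wants_event_and_user_context(group) -> bool:
    # Regression-style performance issues drop the event/user-count context
    # line entirely.
    if group.issue_type in REGRESSION_PERFORMANCE_ISSUE_TYPES:
        return False
    # Crons, user feedback, etc. stay excluded for now; longer term this
    # should be read from config on the grouptype itself.
    return group.issue_category in (GroupCategory.ERROR, GroupCategory.PERFORMANCE)
```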
--- .../slack/message_builder/issues.py | 79 +++++++++++++------ src/sentry/testutils/cases.py | 10 ++- .../slack/notifications/test_issue_alert.py | 61 +++++++++++++- 3 files changed, 125 insertions(+), 25 deletions(-) diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index be005d38c2dc9e..33e265cbf3ff0f 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -34,7 +34,11 @@ ) from sentry.integrations.slack.message_builder.base.block import BlockSlackMessageBuilder from sentry.integrations.slack.utils.escape import escape_slack_text -from sentry.issues.grouptype import GroupCategory +from sentry.issues.grouptype import ( + GroupCategory, + PerformanceP95EndpointRegressionGroupType, + ProfileFunctionRegressionType, +) from sentry.models.actor import ActorTuple from sentry.models.commit import Commit from sentry.models.group import Group, GroupStatus @@ -69,6 +73,11 @@ "integrations:bitbucket", ) +REGRESSION_PERFORMANCE_ISSUE_TYPES = [ + PerformanceP95EndpointRegressionGroupType, + ProfileFunctionRegressionType, +] + logger = logging.getLogger(__name__) @@ -164,6 +173,7 @@ def format_release_tag(value: str, event: GroupEvent | Group): def get_tags( + group: Group, event_for_tags: Any, tags: set[str] | None = None, ) -> Sequence[Mapping[str, str | bool]]: @@ -177,7 +187,23 @@ def get_tags( if tags and isinstance(tags, list): tags = set(tags[0]) - tags = tags | {"level", "release", "handled", "environment"} + default_tags = {"level", "release", "handled", "environment"} + # for performance issues we want to have the default tags _except_ level + if ( + group.issue_category == GroupCategory.PERFORMANCE + and group.issue_type not in REGRESSION_PERFORMANCE_ISSUE_TYPES + ): + default_tags.remove("level") + + # XXX(CEO): in the short term we're not adding these to all issue types (e.g. 
crons, user feedback) + # but in the future we'll read some config from the grouptype + if group.issue_category not in [GroupCategory.ERROR, GroupCategory.PERFORMANCE] or ( + group.issue_category == GroupCategory.PERFORMANCE + and group.issue_type in REGRESSION_PERFORMANCE_ISSUE_TYPES + ): + default_tags = set() + + tags = tags | default_tags if tags: event_tags = event_for_tags.tags if event_for_tags else [] for key, value in event_tags: @@ -196,6 +222,25 @@ def get_tags( return fields +def get_context(group: Group) -> str: + context_text = "" + context = { + "Events": get_group_global_count(group), + "Users Affected": group.count_users_seen(), + "State": SUBSTATUS_TO_STR.get(group.substatus, "").replace("_", " ").title(), + "First Seen": time_since(group.first_seen), + } + if group.issue_type in REGRESSION_PERFORMANCE_ISSUE_TYPES: + # another short term solution for non-error issues notification content + return context_text + + if group.issue_category in [GroupCategory.ERROR, GroupCategory.PERFORMANCE]: + for k, v in context.items(): + if k and v: + context_text += f"{k}: *{v}* " + return context_text + + def get_option_groups(group: Group) -> Sequence[Mapping[str, Any]]: all_members = group.project.get_members_as_rpc_users() members = list({m.id: m for m in all_members}.values()) @@ -591,27 +636,15 @@ def build(self, notification_uuid: str | None = None) -> SlackBlock | SlackAttac if self.actions: blocks.append(self.get_markdown_block(action_text)) - if self.group.issue_category == GroupCategory.ERROR: - # XXX(CEO): in the short term we're not adding these to non-error issues (e.g. crons) - # since they don't make sense, but in the future we'll read some config from the grouptype - - # build tags block - tags = get_tags(event_for_tags, self.tags) - if tags: - blocks.append(self.get_tags_block(tags)) - - # add event count, user count, substate, first seen - context = { - "Events": get_group_global_count(self.group), - "Users Affected": self.group.count_users_seen(), - "State": SUBSTATUS_TO_STR.get(self.group.substatus, "").replace("_", " ").title(), - "First Seen": time_since(self.group.first_seen), - } - context_text = "" - for k, v in context.items(): - if k and v: - context_text += f"{k}: *{v}* " - blocks.append(self.get_context_block(context_text[:-3])) + # build tags block + tags = get_tags(self.group, event_for_tags, self.tags) + if tags: + blocks.append(self.get_tags_block(tags)) + + # add event count, user count, substate, first seen + context = get_context(self.group) + if context: + blocks.append(self.get_context_block(context[:-3])) # build actions actions = [] diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 7302b92d3e28fb..89a63fb5b39822 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -2822,7 +2822,15 @@ def assert_performance_issue_blocks( == "db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21" ) assert ( - blocks[3]["elements"][0]["text"] + blocks[3]["text"]["text"] + == "environment: `production` release: `` " + ) + assert ( + blocks[4]["elements"][0]["text"] + == "Events: *1* State: *Ongoing* First Seen: *10\xa0minutes ago*" + ) + assert ( + blocks[5]["elements"][0]["text"] == f"{project_slug} | production | " ) diff --git a/tests/sentry/integrations/slack/notifications/test_issue_alert.py b/tests/sentry/integrations/slack/notifications/test_issue_alert.py index 840c5f875d368f..a85bb2848debd2 100644 --- 
a/tests/sentry/integrations/slack/notifications/test_issue_alert.py +++ b/tests/sentry/integrations/slack/notifications/test_issue_alert.py @@ -1,4 +1,5 @@ import uuid +from datetime import datetime from unittest import mock from unittest.mock import patch from urllib.parse import parse_qs @@ -9,6 +10,9 @@ from sentry.constants import ObjectStatus from sentry.digests.backends.redis import RedisBackend from sentry.digests.notifications import event_to_record +from sentry.integrations.slack.message_builder.issues import get_tags +from sentry.issues.grouptype import MonitorCheckInFailure +from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from sentry.models.identity import Identity, IdentityStatus from sentry.models.integrations.external_actor import ExternalActor from sentry.models.integrations.organization_integration import OrganizationIntegration @@ -32,10 +36,18 @@ from sentry.testutils.skips import requires_snuba from sentry.types.integrations import ExternalProviders from sentry.utils import json +from sentry.utils.dates import ensure_aware pytestmark = [requires_snuba] +old_get_tags = get_tags + + +def fake_get_tags(group, event_for_tags, tags): + return old_get_tags(group, event_for_tags, None) + + @region_silo_test class SlackIssueAlertNotificationTest(SlackActivityNotificationTest, PerformanceIssueTestCase): def setUp(self): @@ -145,6 +157,7 @@ def test_performance_issue_alert_user(self, occurrence): ) @responses.activate + @mock.patch("sentry.integrations.slack.message_builder.issues.get_tags", new=fake_get_tags) @mock.patch( "sentry.eventstore.models.GroupEvent.occurrence", return_value=TEST_PERF_ISSUE_OCCURRENCE, @@ -158,7 +171,7 @@ def test_performance_issue_alert_user_block(self, occurrence): """ event = self.create_performance_issue() - + # this is a PerformanceNPlusOneGroupType event notification = AlertRuleNotification( Notification(event=event, rule=self.rule), ActionTargetType.MEMBER, self.user.id ) @@ -180,6 +193,52 @@ def test_performance_issue_alert_user_block(self, occurrence): alert_type="alerts", issue_link_extra_params=f"&alert_rule_id={self.rule.id}&alert_type=issue", ) + assert "level" not in blocks[3]["text"]["text"] + assert "release" in blocks[3]["text"]["text"] + + @mock.patch("sentry.integrations.slack.message_builder.issues.get_tags", new=fake_get_tags) + @responses.activate + @with_feature("organizations:slack-block-kit") + def test_crons_issue_alert_user_block(self): + orig_event = self.store_event( + data={"message": "Hello world", "level": "error"}, project_id=self.project.id + ) + event = orig_event.for_group(orig_event.groups[0]) + occurrence = IssueOccurrence( + uuid.uuid4().hex, + self.project.id, + uuid.uuid4().hex, + ["some-fingerprint"], + "something bad happened", + "it was bad", + "1234", + {"Test": 123}, + [ + IssueEvidence("Evidence 1", "Value 1", True), + IssueEvidence("Evidence 2", "Value 2", False), + IssueEvidence("Evidence 3", "Value 3", False), + ], + MonitorCheckInFailure, + ensure_aware(datetime.now()), + "info", + "/api/123", + ) + occurrence.save() + event.occurrence = occurrence + + event.group.type = MonitorCheckInFailure.type_id + notification = AlertRuleNotification( + Notification(event=event, rule=self.rule), ActionTargetType.MEMBER, self.user.id + ) + with self.tasks(): + notification.send() + + blocks, fallback_text = get_blocks_and_fallback_text() + assert ( + fallback_text + == f"Alert triggered " + ) + assert len(blocks) == 5 @responses.activate @mock.patch( From 
670b08f6e5acfef65c084695c9a04270eb17d5fc Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 5 Feb 2024 16:02:12 -0800 Subject: [PATCH 025/357] feat(issues): Remove trace timeline from behind user flag (#64619) --- static/app/views/issueDetails/groupEventCarousel.tsx | 4 +--- .../issueDetails/traceTimeline/traceLink.spec.tsx | 12 ------------ .../traceTimeline/traceTimeline.spec.tsx | 12 ------------ .../issueDetails/traceTimeline/traceTimeline.tsx | 4 +--- .../app/views/issueDetails/traceTimeline/utils.tsx | 12 +++--------- 5 files changed, 5 insertions(+), 39 deletions(-) diff --git a/static/app/views/issueDetails/groupEventCarousel.tsx b/static/app/views/issueDetails/groupEventCarousel.tsx index a9c30959b92e34..6e6dc5a7952508 100644 --- a/static/app/views/issueDetails/groupEventCarousel.tsx +++ b/static/app/views/issueDetails/groupEventCarousel.tsx @@ -39,7 +39,6 @@ import {useLocation} from 'sentry/utils/useLocation'; import useMedia from 'sentry/utils/useMedia'; import useOrganization from 'sentry/utils/useOrganization'; import {useParams} from 'sentry/utils/useParams'; -import {useUser} from 'sentry/utils/useUser'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import EventCreatedTooltip from 'sentry/views/issueDetails/eventCreatedTooltip'; import {TraceLink} from 'sentry/views/issueDetails/traceTimeline/traceLink'; @@ -363,7 +362,6 @@ export function GroupEventActions({event, group, projectSlug}: GroupEventActions export function GroupEventCarousel({event, group, projectSlug}: GroupEventCarouselProps) { const organization = useOrganization(); const location = useLocation(); - const user = useUser(); const latencyThreshold = 30 * 60 * 1000; // 30 minutes const isOverLatencyThreshold = @@ -379,7 +377,7 @@ export function GroupEventCarousel({event, group, projectSlug}: GroupEventCarous text: event.id, }); - const hasTraceTimeline = hasTraceTimelineFeature(organization, user); + const hasTraceTimeline = hasTraceTimelineFeature(organization); return ( diff --git a/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx b/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx index db0ea9e7b9cb76..a7309c4c9bc1d4 100644 --- a/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx @@ -1,11 +1,9 @@ import {EventFixture} from 'sentry-fixture/event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; -import {UserFixture} from 'sentry-fixture/user'; import {render, screen} from 'sentry-test/reactTestingLibrary'; -import ConfigStore from 'sentry/stores/configStore'; import ProjectsStore from 'sentry/stores/projectsStore'; import {TraceLink} from './traceLink'; @@ -55,17 +53,7 @@ describe('TraceLink', () => { }; beforeEach(() => { - // Can be removed with issueDetailsNewExperienceQ42023 ProjectsStore.loadInitialData([project]); - ConfigStore.set( - 'user', - UserFixture({ - options: { - ...UserFixture().options, - issueDetailsNewExperienceQ42023: true, - }, - }) - ); }); it('renders the number of issues', async () => { diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx index e92f7c2566bbc8..f769c79a17b1e8 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx @@ -1,11 +1,9 @@ import {EventFixture} from 'sentry-fixture/event'; 
import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; -import {UserFixture} from 'sentry-fixture/user'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; -import ConfigStore from 'sentry/stores/configStore'; import ProjectsStore from 'sentry/stores/projectsStore'; import {TraceTimeline} from './traceTimeline'; @@ -56,17 +54,7 @@ describe('TraceTimeline', () => { }; beforeEach(() => { - // Can be removed with issueDetailsNewExperienceQ42023 ProjectsStore.loadInitialData([project]); - ConfigStore.set( - 'user', - UserFixture({ - options: { - ...UserFixture().options, - issueDetailsNewExperienceQ42023: true, - }, - }) - ); }); it('renders items and highlights the current event', async () => { diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index ba6ae051831a28..4f8068f0dc4f5e 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -7,7 +7,6 @@ import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types'; import {useDimensions} from 'sentry/utils/useDimensions'; import useOrganization from 'sentry/utils/useOrganization'; -import {useUser} from 'sentry/utils/useUser'; import {hasTraceTimelineFeature} from 'sentry/views/issueDetails/traceTimeline/utils'; import {TraceTimelineEvents} from './traceTimelineEvents'; @@ -20,11 +19,10 @@ interface TraceTimelineProps { } export function TraceTimeline({event}: TraceTimelineProps) { - const user = useUser(); const organization = useOrganization({allowNull: true}); const timelineRef = useRef(null); const {width} = useDimensions({elementRef: timelineRef}); - const hasFeature = hasTraceTimelineFeature(organization, user); + const hasFeature = hasTraceTimelineFeature(organization); const {isError, isLoading, data} = useTraceTimelineEvents({event}, hasFeature); if (!hasFeature || !event.contexts?.trace?.trace_id) { diff --git a/static/app/views/issueDetails/traceTimeline/utils.tsx b/static/app/views/issueDetails/traceTimeline/utils.tsx index 512ab0942a837d..fa66b4e545e375 100644 --- a/static/app/views/issueDetails/traceTimeline/utils.tsx +++ b/static/app/views/issueDetails/traceTimeline/utils.tsx @@ -1,4 +1,4 @@ -import type {Organization, User} from 'sentry/types'; +import type {Organization} from 'sentry/types'; import type {TimelineEvent} from './useTraceTimelineEvents'; @@ -48,12 +48,6 @@ export function getChunkTimeRange( return [Math.floor(chunkStartTimestamp), Math.floor(chunkEndTimestamp) + 1]; } -export function hasTraceTimelineFeature( - organization: Organization | null, - user: User | undefined -) { - const newIssueExperienceEnabled = user?.options?.issueDetailsNewExperienceQ42023; - const hasFeature = organization?.features?.includes('issues-trace-timeline'); - - return !!(newIssueExperienceEnabled && hasFeature); +export function hasTraceTimelineFeature(organization: Organization | null) { + return organization?.features?.includes('issues-trace-timeline'); } From fdc88135245241c8c3a4a52a7952b71c57de52e5 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Mon, 5 Feb 2024 16:04:01 -0800 Subject: [PATCH 026/357] fix(integrations): Project creation modal shows browser platforms for Vercel (#64610) this pr changes the project creation modal so that it shows the browser platform page first for the Vercel 
integration page. --- static/app/components/forms/fields/projectMapperField.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/static/app/components/forms/fields/projectMapperField.tsx b/static/app/components/forms/fields/projectMapperField.tsx index c7989342f2349a..c550c4b67fa8ce 100644 --- a/static/app/components/forms/fields/projectMapperField.tsx +++ b/static/app/components/forms/fields/projectMapperField.tsx @@ -130,7 +130,9 @@ export class RenderField extends Component { const handleSelectProject = ({value}: {value: number}) => { if (value === -1) { - openProjectCreationModal({defaultCategory: 'popular'}); + openProjectCreationModal({ + defaultCategory: iconType === 'vercel' ? 'browser' : 'popular', + }); } else { this.setState({selectedSentryProjectId: value}); } From 64918dc853edf39fb3a2de308f2a58404636fcd5 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Mon, 5 Feb 2024 16:09:03 -0800 Subject: [PATCH 027/357] feat(issue-priority): Add priority badge and dropdown components (#64615) --- .../group/groupPriority.stories.tsx | 29 ++++++ static/app/components/group/groupPriority.tsx | 95 +++++++++++++++++++ static/app/views/stories/storyFile.tsx | 3 + 3 files changed, 127 insertions(+) create mode 100644 static/app/components/group/groupPriority.stories.tsx create mode 100644 static/app/components/group/groupPriority.tsx diff --git a/static/app/components/group/groupPriority.stories.tsx b/static/app/components/group/groupPriority.stories.tsx new file mode 100644 index 00000000000000..990a8f5b47d092 --- /dev/null +++ b/static/app/components/group/groupPriority.stories.tsx @@ -0,0 +1,29 @@ +import {useState} from 'react'; + +import { + GroupPriorityBadge, + GroupPriorityDropdown, +} from 'sentry/components/group/groupPriority'; +import SideBySide from 'sentry/components/stories/sideBySide'; +import storyBook from 'sentry/stories/storyBook'; +import {PriorityLevel} from 'sentry/types'; + +const PRIORITIES = [PriorityLevel.HIGH, PriorityLevel.MEDIUM, PriorityLevel.LOW]; + +export const Badge = storyBook(GroupPriorityBadge, story => { + story('Default', () => ( + + {PRIORITIES.map(priority => ( + + ))} + + )); +}); + +export const Dropdown = storyBook(GroupPriorityDropdown, story => { + story('Default', () => { + const [value, setValue] = useState(PriorityLevel.MEDIUM); + + return ; + }); +}); diff --git a/static/app/components/group/groupPriority.tsx b/static/app/components/group/groupPriority.tsx new file mode 100644 index 00000000000000..9ba8112192dc92 --- /dev/null +++ b/static/app/components/group/groupPriority.tsx @@ -0,0 +1,95 @@ +import {useMemo} from 'react'; +import type {Theme} from '@emotion/react'; +import styled from '@emotion/styled'; + +import {Button} from 'sentry/components/button'; +import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; +import Tag from 'sentry/components/tag'; +import {IconChevron} from 'sentry/icons'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {PriorityLevel} from 'sentry/types'; + +type GroupPriorityDropdownProps = { + onChange: (value: PriorityLevel) => void; + value: PriorityLevel; +}; + +type GroupPriorityBadgeProps = { + priority: PriorityLevel; + children?: React.ReactNode; +}; + +const PRIORITY_KEY_TO_LABEL: Record = { + [PriorityLevel.HIGH]: t('High'), + [PriorityLevel.MEDIUM]: t('Medium'), + [PriorityLevel.LOW]: t('Low'), +}; + +const PRIORITY_OPTIONS = [PriorityLevel.HIGH, PriorityLevel.MEDIUM, PriorityLevel.LOW]; + +function 
getTagTypeForPriority(priority: string): keyof Theme['tag'] { switch (priority) { case PriorityLevel.HIGH: return 'error'; case PriorityLevel.MEDIUM: return 'warning'; case PriorityLevel.LOW: default: return 'default'; } } +export function GroupPriorityBadge({priority, children}: GroupPriorityBadgeProps) { return ( + {PRIORITY_KEY_TO_LABEL[priority] ?? t('Unknown')} {children} + ); } +export function GroupPriorityDropdown({value, onChange}: GroupPriorityDropdownProps) { const options: MenuItemProps[] = useMemo(() => { return PRIORITY_OPTIONS.map(priority => ({ textValue: PRIORITY_KEY_TO_LABEL[priority], key: priority, label: , onAction: () => onChange(priority), })); }, [onChange]); + return ( ( + + + + + )} items={options} /> ); } + +const DropdownButton = styled(Button)` font-weight: normal; border: none; padding: 0; height: unset; border-radius: 10px; `; + +const StyledTag = styled(Tag)` span { display: flex; align-items: center; gap: ${space(0.5)}; } `; diff --git a/static/app/views/stories/storyFile.tsx b/static/app/views/stories/storyFile.tsx index 1167977f437422..83b511523141e7 100644 --- a/static/app/views/stories/storyFile.tsx +++ b/static/app/views/stories/storyFile.tsx @@ -79,6 +79,9 @@ const FlexColumn = styled('section')` const StoryArea = styled('div')` overflow: scroll; + display: flex; + flex-direction: column; + gap: ${space(4)}; `; const Header = styled('h2')` From 7b7bd2103ba32b97528ec2d1e323b543ed63654b Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Mon, 5 Feb 2024 16:14:41 -0800 Subject: [PATCH 028/357] fix(replays): don't provide default on get_option call (#64622) If you pass a default to the `get_option` call, it does not fall back to the default that is registered for the option, so this call was never using the registered default of `True`.
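To make the pitfall concrete, here is a minimal sketch of the behavior being fixed. The names (`REGISTERED_DEFAULTS`, `_STORE`, the standalone `get_option`) are illustrative stand-ins, not Sentry's actual options machinery:

```python
# Hedged sketch of the get_option pitfall; names are stand-ins, not Sentry internals.
REGISTERED_DEFAULTS = {"sentry:replay_rage_click_issues": True}
_STORE: dict = {}  # option values explicitly set on a project

_UNSET = object()


def get_option(key, default=_UNSET):
    """Return the stored value; an explicit default short-circuits the registered one."""
    if key in _STORE:
        return _STORE[key]
    if default is not _UNSET:
        # The explicit default wins, so the registered default never applies.
        return default
    return REGISTERED_DEFAULTS.get(key)


# Before the fix: passing False meant the option could never default to True.
assert get_option("sentry:replay_rage_click_issues", False) is False
# After the fix: with no explicit default, the registered default of True applies.
assert get_option("sentry:replay_rage_click_issues") is True
```

Dropping the explicit `False` lets the call fall through to the registered default, which is why the one-line change below restores rage-click issue creation by default.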
--- src/sentry/replays/usecases/ingest/dom_index.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/replays/usecases/ingest/dom_index.py b/src/sentry/replays/usecases/ingest/dom_index.py index 3a3d82787b216d..1919929ba08e74 100644 --- a/src/sentry/replays/usecases/ingest/dom_index.py +++ b/src/sentry/replays/usecases/ingest/dom_index.py @@ -368,6 +368,6 @@ def _project_has_option_enabled() -> bool: Check if the project has the option enabled, This is controlled by the project owner, and is a permanent setting """ - return project.get_option("sentry:replay_rage_click_issues", False) + return project.get_option("sentry:replay_rage_click_issues") return all([_project_has_feature_enabled(), _project_has_option_enabled()]) From 03ca9619c0715a21369bca330b0cb2094dc14a5c Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Mon, 5 Feb 2024 16:17:14 -0800 Subject: [PATCH 029/357] ref(superuser): only allow superuser write to remove team members (#64609) --- .../organization_member/team_details.py | 4 +- .../test_organization_member_team_details.py | 41 +++++++++++++++++++ 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/src/sentry/api/endpoints/organization_member/team_details.py b/src/sentry/api/endpoints/organization_member/team_details.py index fafc7c1d0f9ba7..45c0a29810d61b 100644 --- a/src/sentry/api/endpoints/organization_member/team_details.py +++ b/src/sentry/api/endpoints/organization_member/team_details.py @@ -26,7 +26,7 @@ from sentry.apidocs.examples.team_examples import TeamExamples from sentry.apidocs.parameters import GlobalParams from sentry.auth.access import Access -from sentry.auth.superuser import is_active_superuser +from sentry.auth.superuser import superuser_has_permission from sentry.models.organization import Organization from sentry.models.organizationaccessrequest import OrganizationAccessRequest from sentry.models.organizationmember import OrganizationMember @@ -135,7 +135,7 @@ def _can_delete( * If they are removing their own membership * If they are a team admin or have global write access """ - if is_active_superuser(request): + if superuser_has_permission(request): return True if not request.user.is_authenticated: diff --git a/tests/sentry/api/endpoints/test_organization_member_team_details.py b/tests/sentry/api/endpoints/test_organization_member_team_details.py index 87c6dc1d943f98..c46652dbcc1e2a 100644 --- a/tests/sentry/api/endpoints/test_organization_member_team_details.py +++ b/tests/sentry/api/endpoints/test_organization_member_team_details.py @@ -551,6 +551,47 @@ def test_admin_on_team_can_remove_members_using_user_token(self): team=self.team, organizationmember=self.member_on_team ).exists() + def test_superuser_can_remove_member(self): + superuser = self.create_user(is_superuser=True) + self.login_as(superuser, superuser=True) + + self.get_success_response( + self.org.slug, self.member_on_team.id, self.team.slug, status_code=200 + ) + + assert not OrganizationMemberTeam.objects.filter( + team=self.team, organizationmember=self.member_on_team + ).exists() + + @override_settings(SENTRY_SELF_HOSTED=False) + @with_feature("auth:enterprise-superuser-read-write") + def test_superuser_read_cannot_remove_member(self): + superuser = self.create_user(is_superuser=True) + self.login_as(superuser, superuser=True) + + self.get_error_response( + self.org.slug, self.member_on_team.id, self.team.slug, status_code=400 + ) + + assert OrganizationMemberTeam.objects.filter( + team=self.team, 
organizationmember=self.member_on_team + ).exists() + + @override_settings(SENTRY_SELF_HOSTED=False) + @with_feature("auth:enterprise-superuser-read-write") + def test_superuser_write_can_remove_member(self): + superuser = self.create_user(is_superuser=True) + self.add_user_permission(superuser, "superuser.write") + self.login_as(superuser, superuser=True) + + self.get_success_response( + self.org.slug, self.member_on_team.id, self.team.slug, status_code=200 + ) + + assert not OrganizationMemberTeam.objects.filter( + team=self.team, organizationmember=self.member_on_team + ).exists() + def test_manager_can_remove_members(self): self.login_as(self.manager_on_team) From 616aa28305ee2f8db18cb44bed8e9550e39db5d8 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 5 Feb 2024 16:42:47 -0800 Subject: [PATCH 030/357] fix(issues): Render one "you are here" node in timeline (#64624) --- .../traceTimeline/traceTimelineEvents.tsx | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index 6686fa1081d8a3..014bf8b8c2fdd2 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -167,20 +167,20 @@ function NodeGroup({ const isCurrentNode = groupEvents.some(e => e.id === currentEventId); return ( - {groupEvents.map(groupEvent => ( - - {isCurrentNode ? ( - - - - - ) : !('event.type' in groupEvent) ? ( - + {isCurrentNode && ( + + + + + )} + {!isCurrentNode && + groupEvents.map(groupEvent => + 'event.type' in groupEvent ? ( + ) : ( - - )} - - ))} + + ) + )} ); })} From 168db4d487cd123e83c647114ea2d74b190e28ad Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Mon, 5 Feb 2024 17:16:19 -0800 Subject: [PATCH 031/357] ref(superuser): only allow superuser write to modify admin options for project perf issue settings (#64589) --- .../project_performance_issue_settings.py | 4 +- ...test_project_performance_issue_settings.py | 246 ++++++++++-------- 2 files changed, 137 insertions(+), 113 deletions(-) diff --git a/src/sentry/api/endpoints/project_performance_issue_settings.py b/src/sentry/api/endpoints/project_performance_issue_settings.py index 23c7ec33d09afc..b1c6ae887209af 100644 --- a/src/sentry/api/endpoints/project_performance_issue_settings.py +++ b/src/sentry/api/endpoints/project_performance_issue_settings.py @@ -9,7 +9,7 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint, ProjectSettingPermission -from sentry.auth.superuser import is_active_superuser +from sentry.auth.superuser import superuser_has_permission from sentry.issues.grouptype import ( GroupType, PerformanceConsecutiveDBQueriesGroupType, @@ -215,7 +215,7 @@ def put(self, request: Request, project) -> Response: ) body_has_admin_options = any([option in request.data for option in internal_only_settings]) - if body_has_admin_options and not is_active_superuser(request): + if body_has_admin_options and not superuser_has_permission(request): return Response( { "detail": { diff --git a/tests/sentry/api/endpoints/test_project_performance_issue_settings.py b/tests/sentry/api/endpoints/test_project_performance_issue_settings.py index 3c57c6668775fb..0c869489989d3e 100644 --- 
a/tests/sentry/api/endpoints/test_project_performance_issue_settings.py +++ b/tests/sentry/api/endpoints/test_project_performance_issue_settings.py @@ -1,18 +1,16 @@ from unittest.mock import MagicMock, patch -from django.urls import reverse +from django.test import override_settings +from rest_framework import status from rest_framework.exceptions import ErrorDetail from sentry.api.endpoints.project_performance_issue_settings import SETTINGS_PROJECT_OPTION_KEY from sentry.testutils.cases import APITestCase from sentry.testutils.helpers import override_options +from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.utils.performance_issues.performance_detection import get_merged_settings -PERFORMANCE_ISSUE_FEATURES = { - "organizations:performance-view": True, -} - @region_silo_test class ProjectPerformanceIssueSettingsTest(APITestCase): @@ -23,20 +21,12 @@ def setUp(self) -> None: self.login_as(user=self.user, superuser=True) self.project = self.create_project() - self.url = reverse( - self.endpoint, - kwargs={ - "organization_slug": self.project.organization.slug, - "project_slug": self.project.slug, - }, - ) - @patch("sentry.models.ProjectOption.objects.get_value") + @with_feature("organizations:performance-view") def test_get_project_options_overrides_detection_defaults(self, get_value): - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.get(self.url, format="json") - - assert response.status_code == 200, response.content + response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) assert response.data["n_plus_one_db_queries_detection_enabled"] assert response.data["slow_db_queries_detection_enabled"] @@ -62,10 +52,9 @@ def test_get_project_options_overrides_detection_defaults(self, get_value): "large_render_blocking_asset_detection_enabled": False, } - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.get(self.url, format="json") - - assert response.status_code == 200, response.content + response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) assert not response.data["n_plus_one_db_queries_detection_enabled"] assert not response.data["slow_db_queries_detection_enabled"] @@ -79,6 +68,7 @@ def test_get_project_options_overrides_detection_defaults(self, get_value): assert not response.data["large_render_blocking_asset_detection_enabled"] @patch("sentry.models.ProjectOption.objects.get_value") + @with_feature("organizations:performance-view") def test_get_project_options_overrides_threshold_defaults(self, get_value): with override_options( { @@ -95,10 +85,9 @@ def test_get_project_options_overrides_threshold_defaults(self, get_value): "performance.issues.consecutive_http.min_time_saved_threshold": 2000, } ): - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.get(self.url, format="json") - - assert response.status_code == 200, response.content + response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) # System and project defaults assert response.data["slow_db_query_duration_threshold"] == 1000 @@ -127,10 +116,9 @@ def test_get_project_options_overrides_threshold_defaults(self, get_value): "consecutive_http_spans_min_time_saved_threshold": 1000, } - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.get(self.url, format="json") - - assert response.status_code == 200, response.content 
+ response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) # Updated project settings assert response.data["slow_db_query_duration_threshold"] == 5000 @@ -146,21 +134,24 @@ def test_get_project_options_overrides_threshold_defaults(self, get_value): assert response.data["consecutive_http_spans_min_time_saved_threshold"] == 1000 def test_get_returns_error_without_feature_enabled(self): - with self.feature({}): - response = self.client.get(self.url, format="json") - assert response.status_code == 404 + self.get_error_response( + self.project.organization.slug, + self.project.slug, + format="json", + status_code=status.HTTP_404_NOT_FOUND, + ) + @with_feature("organizations:performance-view") def test_put_non_super_user_updates_detection_setting(self): self.login_as(user=self.user, superuser=False) - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.put( - self.url, - data={ - "n_plus_one_db_queries_detection_enabled": False, - }, - ) + response = self.get_error_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_queries_detection_enabled=False, + method="put", + status_code=status.HTTP_403_FORBIDDEN, + ) - assert response.status_code == 403, response.content assert response.data == { "detail": { "message": "Passed options are only modifiable internally", @@ -168,109 +159,140 @@ def test_put_non_super_user_updates_detection_setting(self): }, } + @with_feature("organizations:performance-view") def test_put_super_user_updates_detection_setting(self): - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.put( - self.url, - data={ - "n_plus_one_db_queries_detection_enabled": False, - }, - ) + response = self.get_success_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_queries_detection_enabled=False, + method="put", + status_code=status.HTTP_200_OK, + ) - assert response.status_code == 200, response.content assert not response.data["n_plus_one_db_queries_detection_enabled"] - with self.feature(PERFORMANCE_ISSUE_FEATURES): - get_response = self.client.get(self.url, format="json") + get_response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) - assert get_response.status_code == 200, response.content assert not get_response.data["n_plus_one_db_queries_detection_enabled"] + @override_settings(SENTRY_SELF_HOSTED=False) + @with_feature( + {"organizations:performance-view": True, "auth:enterprise-superuser-read-write": True} + ) + def test_put_superuser_read_write_updates_detection_setting(self): + # superuser read-only cannot hit put + self.get_error_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_queries_detection_enabled=False, + method="put", + status_code=status.HTTP_403_FORBIDDEN, + ) + + # superuser with write can hit put + self.add_user_permission(self.user, "superuser.write") + + response = self.get_success_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_queries_detection_enabled=False, + method="put", + status_code=status.HTTP_200_OK, + ) + + assert not response.data["n_plus_one_db_queries_detection_enabled"] + + get_response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) + + assert not get_response.data["n_plus_one_db_queries_detection_enabled"] + + @with_feature("organizations:performance-view") def test_put_update_non_super_user_option(self): self.login_as(user=self.user, 
superuser=False) - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.put( - self.url, - data={ - "n_plus_one_db_duration_threshold": 3000, - }, - ) + response = self.get_success_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_duration_threshold=3000, + method="put", + status_code=status.HTTP_200_OK, + ) - assert response.status_code == 200, response.content assert response.data["n_plus_one_db_duration_threshold"] == 3000 - with self.feature(PERFORMANCE_ISSUE_FEATURES): - get_response = self.client.get(self.url, format="json") + get_response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) - assert get_response.status_code == 200, response.content assert get_response.data["n_plus_one_db_duration_threshold"] == 3000 @patch("sentry.models.ProjectOption.objects.get_value") + @with_feature("organizations:performance-view") def test_put_does_not_update_disabled_option(self, get_value): self.login_as(user=self.user, superuser=False) get_value.return_value = { "n_plus_one_db_queries_detection_enabled": False, } - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.put( - self.url, - data={ - "n_plus_one_db_duration_threshold": 3000, - }, - ) + response = self.get_error_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_duration_threshold=3000, + method="put", + status_code=status.HTTP_403_FORBIDDEN, + ) - assert response.status_code == 403, response.content assert response.data == {"detail": "Disabled options can not be modified"} - with self.feature(PERFORMANCE_ISSUE_FEATURES): - get_response = self.client.get(self.url, format="json") + get_response = self.get_success_response( + self.project.organization.slug, self.project.slug, format="json" + ) - assert get_response.status_code == 200, response.content assert ( get_response.data["n_plus_one_db_duration_threshold"] == get_merged_settings(self.project)["n_plus_one_db_duration_threshold"] ) + @with_feature("organizations:performance-view") def test_update_project_setting_check_validation(self): - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.put( - self.url, - data={ - "n_plus_one_db_queries_detection_enabled": -1, - }, - ) + response = self.get_error_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_queries_detection_enabled=-1, + method="put", + status_code=status.HTTP_400_BAD_REQUEST, + ) - assert response.status_code == 400, response.content assert response.data == { "n_plus_one_db_queries_detection_enabled": [ ErrorDetail(string="Must be a valid boolean.", code="invalid") ] } + @with_feature("organizations:performance-view") def test_update_project_setting_invalid_option(self): - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.put( - self.url, - data={ - "n_plus_one_db_queries_detection_enabled_invalid": 500, - }, - ) + response = self.get_error_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_queries_detection_enabled_invalid=500, + method="put", + status_code=status.HTTP_400_BAD_REQUEST, + ) - assert response.status_code == 400, response.content assert response.data == {"detail": "Invalid settings option"} @patch("sentry.api.base.create_audit_entry") + @with_feature("organizations:performance-view") def test_changing_admin_settings_creates_audit_log(self, create_audit_entry: MagicMock): - - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.put( - 
self.url, - data={ - "n_plus_one_db_queries_detection_enabled": False, - }, - ) - - assert response.status_code == 200, response.content + self.get_success_response( + self.project.organization.slug, + self.project.slug, + n_plus_one_db_queries_detection_enabled=False, + method="put", + status_code=status.HTTP_200_OK, + ) assert create_audit_entry.called ((_, kwargs),) = create_audit_entry.call_args_list @@ -283,6 +305,7 @@ def test_changing_admin_settings_creates_audit_log(self, create_audit_entry: Mag "public": self.project.public, } + @with_feature("organizations:performance-view") def test_delete_resets_enabled_project_settings(self): self.project.update_option( SETTINGS_PROJECT_OPTION_KEY, @@ -304,13 +327,13 @@ def test_delete_resets_enabled_project_settings(self): == 5000 ) - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.delete( - self.url, - data={}, - ) + self.get_success_response( + self.project.organization.slug, + self.project.slug, + method="delete", + status_code=status.HTTP_204_NO_CONTENT, + ) - assert response.status_code == 204, response.content assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ "n_plus_one_db_queries_detection_enabled" ] # admin option should persist @@ -321,6 +344,7 @@ def test_delete_resets_enabled_project_settings(self): SETTINGS_PROJECT_OPTION_KEY ) # removes enabled threshold settings + @with_feature("organizations:performance-view") def test_delete_does_not_resets_enabled_project_settings(self): self.project.update_option( SETTINGS_PROJECT_OPTION_KEY, @@ -342,13 +366,13 @@ def test_delete_does_not_resets_enabled_project_settings(self): == 5000 ) - with self.feature(PERFORMANCE_ISSUE_FEATURES): - response = self.client.delete( - self.url, - data={}, - ) + self.get_success_response( + self.project.organization.slug, + self.project.slug, + method="delete", + status_code=status.HTTP_204_NO_CONTENT, + ) - assert response.status_code == 204, response.content assert not self.project.get_option(SETTINGS_PROJECT_OPTION_KEY)[ "n_plus_one_db_queries_detection_enabled" ] # admin option should persist From 44e42b9a0df2fd1ff709584f9e09ced3b262687c Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Mon, 5 Feb 2024 23:31:55 -0800 Subject: [PATCH 032/357] chore(staff): Let staff access user auth details endpoint (#64123) Let staff access these endpoints. PUT has logic inside the endpoint for removing the last authenticator from a user's account. --- src/sentry/api/bases/user.py | 5 +- .../endpoints/user_authenticator_details.py | 12 ++++- .../test_user_authenticator_details.py | 53 +++++++++++++++++-- 3 files changed, 62 insertions(+), 8 deletions(-) diff --git a/src/sentry/api/bases/user.py b/src/sentry/api/bases/user.py index b038a3e18461ea..d0bbcd6d5f9d24 100644 --- a/src/sentry/api/bases/user.py +++ b/src/sentry/api/bases/user.py @@ -45,13 +45,12 @@ class UserAndStaffPermission(StaffPermissionMixin, UserPermission): """ -class OrganizationUserPermission(UserPermission): +class OrganizationUserPermission(UserAndStaffPermission): scope_map = {"DELETE": ["member:admin"]} def has_org_permission(self, request: Request, user): """ - Org can act on a user account, - if the user is a member of only one org + Org can act on a user account, if the user is a member of only one org e.g. 
reset org member's 2FA """ diff --git a/src/sentry/api/endpoints/user_authenticator_details.py b/src/sentry/api/endpoints/user_authenticator_details.py index 20983a533f0f90..0971eaf0dbfc05 100644 --- a/src/sentry/api/endpoints/user_authenticator_details.py +++ b/src/sentry/api/endpoints/user_authenticator_details.py @@ -4,6 +4,7 @@ from rest_framework.request import Request from rest_framework.response import Response +from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import control_silo_endpoint @@ -11,6 +12,7 @@ from sentry.api.decorators import sudo_required from sentry.api.serializers import serialize from sentry.auth.authenticators.u2f import decode_credential_id +from sentry.auth.staff import is_active_staff from sentry.auth.superuser import is_active_superuser from sentry.models.authenticator import Authenticator from sentry.models.user import User @@ -162,7 +164,15 @@ def delete(self, request: Request, user: User, auth_id, interface_device_id=None ) return Response(status=status.HTTP_204_NO_CONTENT) - if not is_active_superuser(request): + # We should only be able to delete the last auth method through the + # _admin portal, which is indicated by staff. After the feature flag is + # removed, this will only check for is_active_staff. + if features.has("auth:enterprise-staff-cookie"): + check_remaining_auth = not is_active_staff(request) + else: + check_remaining_auth = not is_active_superuser(request) + + if check_remaining_auth: # if the user's organization requires 2fa, # don't delete the last auth method enrolled_methods = Authenticator.objects.all_interfaces_for_user( diff --git a/tests/sentry/api/endpoints/test_user_authenticator_details.py b/tests/sentry/api/endpoints/test_user_authenticator_details.py index e560544236a2ae..8500645213183c 100644 --- a/tests/sentry/api/endpoints/test_user_authenticator_details.py +++ b/tests/sentry/api/endpoints/test_user_authenticator_details.py @@ -15,6 +15,7 @@ from sentry.models.organization import Organization from sentry.models.user import User from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers.features import with_feature from sentry.testutils.helpers.options import override_options from sentry.testutils.silo import control_silo_test @@ -285,7 +286,7 @@ def test_recovery_codes_regenerate(self): assert_security_email_sent("recovery-codes-regenerated") - def test_delete(self): + def test_delete_superuser(self): user = self.create_user(email="a@example.com", is_superuser=True) with override_options({"sms.twilio-account": "twilio-account"}): @@ -304,11 +305,30 @@ def test_delete(self): assert_security_email_sent("mfa-removed") - def test_cannot_delete_without_superuser(self): - user = self.create_user(email="a@example.com", is_superuser=False) + def test_delete_staff(self): + staff_user = self.create_user(email="a@example.com", is_staff=True) + + with override_options({"sms.twilio-account": "twilio-account"}): + auth = Authenticator.objects.create(type=2, user=staff_user) # sms + available_auths = Authenticator.objects.all_interfaces_for_user( + staff_user, ignore_backup=True + ) + + self.assertEqual(len(available_auths), 1) + self.login_as(user=staff_user, staff=True) + + with self.tasks(): + self.get_success_response(staff_user.id, auth.id, method="delete") + + assert not Authenticator.objects.filter(id=auth.id).exists() + + assert_security_email_sent("mfa-removed") + + def 
test_cannot_delete_without_superuser_or_staff(self): + user = self.create_user(email="a@example.com", is_superuser=False, is_staff=False) auth = Authenticator.objects.create(type=3, user=user) # u2f - actor = self.create_user(email="b@example.com", is_superuser=False) + actor = self.create_user(email="b@example.com", is_superuser=False, is_staff=False) self.login_as(user=actor) with self.tasks(): @@ -369,6 +389,31 @@ def test_require_2fa__can_delete_last_auth_superuser(self): assert not Authenticator.objects.filter(id=auth.id).exists() + @with_feature("auth:enterprise-staff-cookie") + def test_require_2fa__can_delete_last_auth_staff(self): + self._require_2fa_for_organization() + + staff_user = self.create_user(email="a@example.com", is_staff=True) + self.login_as(user=staff_user, staff=True) + + with override_options({"sms.twilio-account": "twilio-account"}): + # enroll in one auth method + interface = TotpInterface() + interface.enroll(self.user) + assert interface.authenticator is not None + auth = interface.authenticator + + with self.tasks(): + self.get_success_response( + self.user.id, + auth.id, + method="delete", + status_code=status.HTTP_204_NO_CONTENT, + ) + assert_security_email_sent("mfa-removed") + + assert not Authenticator.objects.filter(id=auth.id).exists() + def test_require_2fa__delete_with_multiple_auth__ok(self): self._require_2fa_for_organization() From 37db951d7840966091bba5b233ed59be7774ac7f Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Tue, 6 Feb 2024 08:35:45 +0100 Subject: [PATCH 033/357] fix(on-demand): Add escaping of glob patterns for meta chars supported by Relay (#64552) --- src/sentry/snuba/metrics/extraction.py | 25 ++++++++++++++++- tests/sentry/snuba/metrics/test_extraction.py | 28 +++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py index 746343650eeebe..0fbadff65faa3a 100644 --- a/src/sentry/snuba/metrics/extraction.py +++ b/src/sentry/snuba/metrics/extraction.py @@ -1531,6 +1531,29 @@ def _get_satisfactory_threshold_and_metric(project: Project) -> tuple[int, str]: return threshold, metric_field +def _escape_wildcard(value: str) -> str: + """ + Escapes all characters in the wildcard which are considered as meta characters in the glob + implementation in Relay, which can be found at: https://docs.rs/globset/latest/globset/#syntax. + + The goal of this function is to only preserve the `*` character as it is the only character that Sentry's + product offers to users to perform wildcard matching. 
+ """ + i, n = 0, len(value) + escaped = "" + + while i < n: + c = value[i] + i = i + 1 + + if c in "[]{}?": + escaped += "\\" + c + else: + escaped += c + + return escaped + + T = TypeVar("T") @@ -1633,7 +1656,7 @@ def _filter(self, token: SearchFilter) -> RuleCondition: condition: RuleCondition = { "op": "glob", "name": _map_field_name(key), - "value": [value], + "value": [_escape_wildcard(value)], } else: # Special case for the `has` and `!has` operators which are parsed as follows: diff --git a/tests/sentry/snuba/metrics/test_extraction.py b/tests/sentry/snuba/metrics/test_extraction.py index 90e0e8baf22df3..0a382b7aca3eb3 100644 --- a/tests/sentry/snuba/metrics/test_extraction.py +++ b/tests/sentry/snuba/metrics/test_extraction.py @@ -16,6 +16,7 @@ to_standard_metrics_query, ) from sentry.testutils.pytest.fixtures import django_db_all +from sentry.utils.glob import glob_match @django_db_all @@ -460,6 +461,33 @@ def test_spec_wildcard() -> None: } +@pytest.mark.parametrize( + "query,title,expected_pattern", + [ + ("title:*[dispatch:*", "backend test [dispatch:something]", "*\\[dispatch:*"), + ("title:*{dispatch:*", "test {dispatch:something]", "*\\{dispatch:*"), + ("title:*dispatch]:*", "backend dispatch]:", "*dispatch\\]:*"), + ("title:*dispatch}:*", "test [dispatch}:", "*dispatch\\}:*"), + ("title:*?dispatch*", "backend ?dispatch", "*\\?dispatch*"), + ], +) +def test_spec_wildcard_escaping(query, title, expected_pattern) -> None: + spec = OnDemandMetricSpec("count()", query) + + assert spec._metric_type == "c" + assert spec.field_to_extract is None + assert spec.op == "sum" + assert spec.condition == { + "name": "event.transaction", + "op": "glob", + "value": [expected_pattern], + } + + # We also validate using Relay's glob implementation to make sure the escaping + # is interpreted correctly. 
+ assert glob_match(title, expected_pattern, ignorecase=True) + + def test_spec_count_if() -> None: spec = OnDemandMetricSpec("count_if(transaction.duration,equals,300)", "") From f02f23d2b0ca71a4bb9527aac4396dddace69980 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Tue, 6 Feb 2024 08:43:34 +0100 Subject: [PATCH 034/357] feat(dashboards): synced cursors (#64554) --- static/app/components/charts/utils.tsx | 10 ++- static/app/views/dashboards/dashboard.tsx | 4 +- static/app/views/dashboards/utils.tsx | 5 ++ .../widgetBuilderDataset.spec.tsx | 13 --- .../app/views/dashboards/widgetCard/chart.tsx | 82 ++++++++++++++++--- .../dashboards/widgetCard/index.spec.tsx | 3 +- .../app/views/dashboards/widgetCard/index.tsx | 3 + .../widgetCard/metricWidgetCard/index.tsx | 2 + .../widgetCard/widgetCardChartContainer.tsx | 5 ++ static/app/views/ddm/chart.tsx | 18 ++-- static/app/views/ddm/widget.tsx | 6 +- .../app/views/starfish/components/chart.tsx | 10 +-- 12 files changed, 113 insertions(+), 48 deletions(-) diff --git a/static/app/components/charts/utils.tsx b/static/app/components/charts/utils.tsx index da1c98bac24731..ae5454546cbe73 100644 --- a/static/app/components/charts/utils.tsx +++ b/static/app/components/charts/utils.tsx @@ -7,7 +7,7 @@ import moment from 'moment'; import {DEFAULT_STATS_PERIOD} from 'sentry/constants'; import type {EventsStats, MultiSeriesEventsStats, PageFilters} from 'sentry/types'; -import type {Series} from 'sentry/types/echarts'; +import type {ReactEchartsRef, Series} from 'sentry/types/echarts'; import {defined, escape} from 'sentry/utils'; import {getFormattedDate, parsePeriodToHours} from 'sentry/utils/dates'; import type {TableDataWithTitle} from 'sentry/utils/discover/discoverQuery'; @@ -405,3 +405,11 @@ export function useEchartsAriaLabels( export function isEmptySeries(series: Series) { return series.data.every(dataPoint => dataPoint.value === 0); } + +/** + * Used to determine which chart in a group is currently hovered. 
+ */ +export function isChartHovered(chartRef: ReactEchartsRef | null) { + const hoveredEchartElement = document.querySelector('.echarts-for-react:hover'); + return hoveredEchartElement === chartRef?.ele; +} diff --git a/static/app/views/dashboards/dashboard.tsx b/static/app/views/dashboards/dashboard.tsx index 8784ac6065cc37..ac4f1fc16af123 100644 --- a/static/app/views/dashboards/dashboard.tsx +++ b/static/app/views/dashboards/dashboard.tsx @@ -52,7 +52,7 @@ import { import SortableWidget from './sortableWidget'; import type {DashboardDetails, Widget} from './types'; import {DashboardWidgetSource, WidgetType} from './types'; -import {getDashboardFiltersFromURL} from './utils'; +import {connectDashboardCharts, getDashboardFiltersFromURL} from './utils'; export const DRAG_HANDLE_CLASS = 'widget-drag'; const DRAG_RESIZE_CLASS = 'widget-resize'; @@ -69,6 +69,7 @@ const BOTTOM_MOBILE_VIEW_POSITION = { const MOBILE_BREAKPOINT = parseInt(theme.breakpoints.small, 10); const BREAKPOINTS = {[MOBILE]: 0, [DESKTOP]: MOBILE_BREAKPOINT}; const COLUMNS = {[MOBILE]: NUM_MOBILE_COLS, [DESKTOP]: NUM_DESKTOP_COLS}; +export const DASHBOARD_CHART_GROUP = 'dashboard-group'; type Props = { api: Client; @@ -115,6 +116,7 @@ class Dashboard extends Component { }, windowWidth: window.innerWidth, }; + connectDashboardCharts(DASHBOARD_CHART_GROUP); } static getDerivedStateFromProps(props, state) { diff --git a/static/app/views/dashboards/utils.tsx b/static/app/views/dashboards/utils.tsx index bf47a1d70a8660..b1599dd6711b76 100644 --- a/static/app/views/dashboards/utils.tsx +++ b/static/app/views/dashboards/utils.tsx @@ -1,4 +1,5 @@ import {browserHistory} from 'react-router'; +import {connect} from 'echarts'; import type {Location, Query} from 'history'; import cloneDeep from 'lodash/cloneDeep'; import isEqual from 'lodash/isEqual'; @@ -663,3 +664,7 @@ export function dashboardFiltersToString( } return dashboardFilterConditions; } + +export function connectDashboardCharts(groupName: string) { + connect?.(groupName); +} diff --git a/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx b/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx index 52d650a45051f1..86cba133bfb7b6 100644 --- a/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx +++ b/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx @@ -33,19 +33,6 @@ const defaultOrgFeatures = [ 'dashboards-rh-widget', ]; -// function mockDashboard(dashboard: Partial): DashboardDetails { -// return { -// id: '1', -// title: 'Dashboard', -// createdBy: undefined, -// dateCreated: '2020-01-01T00:00:00.000Z', -// widgets: [], -// projects: [], -// filters: {}, -// ...dashboard, -// }; -// } - function renderTestComponent({ dashboard, query, diff --git a/static/app/views/dashboards/widgetCard/chart.tsx b/static/app/views/dashboards/widgetCard/chart.tsx index d847da9d68d5af..56d48187a3e977 100644 --- a/static/app/views/dashboards/widgetCard/chart.tsx +++ b/static/app/views/dashboards/widgetCard/chart.tsx @@ -16,7 +16,7 @@ import {LineChart} from 'sentry/components/charts/lineChart'; import SimpleTableChart from 'sentry/components/charts/simpleTableChart'; import TransitionChart from 'sentry/components/charts/transitionChart'; import TransparentLoadingMask from 'sentry/components/charts/transparentLoadingMask'; -import {getSeriesSelection} from 'sentry/components/charts/utils'; +import {getSeriesSelection, isChartHovered} from 'sentry/components/charts/utils'; import LoadingIndicator from 
'sentry/components/loadingIndicator'; import type {PlaceholderProps} from 'sentry/components/placeholder'; import Placeholder from 'sentry/components/placeholder'; @@ -24,7 +24,12 @@ import {Tooltip} from 'sentry/components/tooltip'; import {IconWarning} from 'sentry/icons'; import {space} from 'sentry/styles/space'; import type {Organization, PageFilters} from 'sentry/types'; -import type {EChartDataZoomHandler, EChartEventHandler} from 'sentry/types/echarts'; +import type { + EChartDataZoomHandler, + EChartEventHandler, + ReactEchartsRef, + Series, +} from 'sentry/types/echarts'; import { axisLabelFormatter, axisLabelFormatterUsingAggregateOutputType, @@ -50,6 +55,7 @@ import { } from 'sentry/views/dashboards/datasetConfig/metrics'; import {eventViewFromWidget} from 'sentry/views/dashboards/utils'; +import {getFormatter} from '../../../components/charts/components/tooltip'; import {getDatasetConfig} from '../datasetConfig/base'; import type {Widget} from '../types'; import {DisplayType, WidgetType} from '../types'; @@ -82,6 +88,7 @@ type WidgetCardChartProps = Pick< selection: PageFilters; theme: Theme; widget: Widget; + chartGroup?: string; chartZoomOptions?: DataZoomComponentOption; expandNumbers?: boolean; isMobile?: boolean; @@ -257,6 +264,23 @@ class WidgetCardChart extends Component { }); } + chartRef: ReactEchartsRef | null = null; + + handleRef = (chartRef: ReactEchartsRef): void => { + if (chartRef && !this.chartRef) { + this.chartRef = chartRef; + // add chart to the group so that it has synced cursors + const instance = chartRef.getEchartsInstance?.(); + if (instance && !instance.group && this.props.chartGroup) { + instance.group = this.props.chartGroup; + } + } + + if (!chartRef) { + this.chartRef = null; + } + }; + chartComponent(chartProps): React.ReactNode { const {widget} = this.props; const stacked = widget.queries[0]?.columns.length > 0; @@ -368,6 +392,20 @@ class WidgetCardChart extends Component { const durationUnit = isDurationChart ? timeseriesResults && getDurationUnit(timeseriesResults, legendOptions) : undefined; + const bucketSize = getBucketSize(timeseriesResults); + + const valueFormatter = (value: number, seriesName?: string) => { + if (widget.widgetType === WidgetType.METRICS) { + return formatMetricAxisValue(axisField, value); + } + const aggregateName = seriesName?.split(':').pop()?.trim(); + if (aggregateName) { + return timeseriesResultsTypes + ? tooltipFormatter(value, timeseriesResultsTypes[aggregateName]) + : tooltipFormatter(value, aggregateOutputType(aggregateName)); + } + return tooltipFormatter(value, 'number'); + }; const chartOptions = { autoHeightResize, @@ -382,17 +420,20 @@ class WidgetCardChart extends Component { }, tooltip: { trigger: 'axis', - valueFormatter: (value: number, seriesName: string) => { - if (widget.widgetType === WidgetType.METRICS) { - return formatMetricAxisValue(axisField, value); + formatter: (params, asyncTicket) => { + // tooltip is triggered whenever any chart in the group is hovered, + // so we need to check if the mouse is actually over this chart + if (!isChartHovered(this.chartRef)) { + return ''; } - const aggregateName = seriesName?.split(':').pop()?.trim(); - if (aggregateName) { - return timeseriesResultsTypes - ? 
tooltipFormatter(value, timeseriesResultsTypes[aggregateName]) - : tooltipFormatter(value, aggregateOutputType(aggregateName)); - } - return tooltipFormatter(value, 'number'); + + return getFormatter({ + valueFormatter, + isGroupedByDate: true, + bucketSize, + addSecondsToTimeFormat: false, + showTimeInTooltip: true, + })(params, asyncTicket); }, }, yAxis: { @@ -415,6 +456,11 @@ class WidgetCardChart extends Component { }, minInterval: durationUnit ?? 0, }, + xAxis: { + axisPointer: { + snap: true, + }, + }, }; return ( @@ -469,6 +515,9 @@ class WidgetCardChart extends Component { const seriesStart = series[0]?.data[0]?.name; const seriesEnd = series[0]?.data[series[0].data.length - 1]?.name; + + const forwardedRef = this.props.chartGroup ? this.handleRef : undefined; + return ( @@ -490,6 +539,7 @@ class WidgetCardChart extends Component { legend, series, onLegendSelectChanged, + forwardedRef, }), fixed: , })} @@ -502,6 +552,14 @@ class WidgetCardChart extends Component { } } +const getBucketSize = (series: Series[] | undefined) => { + if (!series || series.length < 2) { + return 0; + } + + return Number(series[0].data[1]?.name) - Number(series[0].data[0]?.name); +}; + export default withTheme(WidgetCardChart); const StyledTransparentLoadingMask = styled(props => ( diff --git a/static/app/views/dashboards/widgetCard/index.spec.tsx b/static/app/views/dashboards/widgetCard/index.spec.tsx index 3dc6b4e7884714..2a6bcfac2e3d81 100644 --- a/static/app/views/dashboards/widgetCard/index.spec.tsx +++ b/static/app/views/dashboards/widgetCard/index.spec.tsx @@ -663,10 +663,9 @@ describe('Dashboards > WidgetCard', function () { }); const {tooltip, yAxis} = spy.mock.calls.pop()?.[0] ?? {}; expect(tooltip).toBeDefined(); + expect(yAxis).toBeDefined(); // @ts-expect-error - expect(tooltip.valueFormatter(24, 'p95(measurements.custom)')).toEqual('24.00ms'); - // @ts-expect-error expect(yAxis.axisLabel.formatter(24, 'p95(measurements.custom)')).toEqual('24ms'); }); diff --git a/static/app/views/dashboards/widgetCard/index.tsx b/static/app/views/dashboards/widgetCard/index.tsx index ced30987abad6d..7d469ec3c5deb7 100644 --- a/static/app/views/dashboards/widgetCard/index.tsx +++ b/static/app/views/dashboards/widgetCard/index.tsx @@ -38,6 +38,7 @@ import withOrganization from 'sentry/utils/withOrganization'; import withPageFilters from 'sentry/utils/withPageFilters'; // eslint-disable-next-line no-restricted-imports import withSentryRouter from 'sentry/utils/withSentryRouter'; +import {DASHBOARD_CHART_GROUP} from 'sentry/views/dashboards/dashboard'; import {MetricWidgetCard} from 'sentry/views/dashboards/widgetCard/metricWidgetCard'; import {Toolbar} from 'sentry/views/dashboards/widgetCard/toolbar'; @@ -358,6 +359,7 @@ class WidgetCard extends Component { windowWidth={windowWidth} onDataFetched={this.setData} dashboardFilters={dashboardFilters} + chartGroup={DASHBOARD_CHART_GROUP} /> ) : ( @@ -373,6 +375,7 @@ class WidgetCard extends Component { windowWidth={windowWidth} onDataFetched={this.setData} dashboardFilters={dashboardFilters} + chartGroup={DASHBOARD_CHART_GROUP} /> )} diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx index 9f34a570677e76..65558b1de108a3 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx @@ -24,6 +24,7 @@ import { toMetricDisplayType, } from '../../../../utils/metrics/dashboard'; import 
{parseField} from '../../../../utils/metrics/mri'; +import {DASHBOARD_CHART_GROUP} from '../../dashboard'; import type {DashboardFilters, Widget} from '../../types'; type Props = { @@ -206,6 +207,7 @@ export function MetricWidgetChartContainer({ query={extendQuery(metricWidgetQueryParams.query, dashboardFilters)} groupBy={metricWidgetQueryParams.groupBy} displayType={toMetricDisplayType(metricWidgetQueryParams.displayType)} + chartGroup={DASHBOARD_CHART_GROUP} /> ); } diff --git a/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx b/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx index 7c84018f8baea2..d6a36d2a4576fb 100644 --- a/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx +++ b/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx @@ -30,6 +30,7 @@ type Props = { organization: Organization; selection: PageFilters; widget: Widget; + chartGroup?: string; chartZoomOptions?: DataZoomComponentOption; dashboardFilters?: DashboardFilters; expandNumbers?: boolean; @@ -73,6 +74,7 @@ export function WidgetCardChartContainer({ showSlider, noPadding, chartZoomOptions, + chartGroup, }: Props) { const location = useLocation(); const router = useRouter(); @@ -143,6 +145,7 @@ export function WidgetCardChartContainer({ showSlider={showSlider} noPadding={noPadding} chartZoomOptions={chartZoomOptions} + chartGroup={chartGroup} />
    ); @@ -185,6 +188,7 @@ export function WidgetCardChartContainer({ showSlider={showSlider} noPadding={noPadding} chartZoomOptions={chartZoomOptions} + chartGroup={chartGroup} /> ); @@ -235,6 +239,7 @@ export function WidgetCardChartContainer({ noPadding={noPadding} chartZoomOptions={chartZoomOptions} timeseriesResultsTypes={timeseriesResultsTypes} + chartGroup={chartGroup} /> ); diff --git a/static/app/views/ddm/chart.tsx b/static/app/views/ddm/chart.tsx index 5be049cb6886e4..3394ffc7da0cf2 100644 --- a/static/app/views/ddm/chart.tsx +++ b/static/app/views/ddm/chart.tsx @@ -18,11 +18,11 @@ import {isCumulativeOp} from 'sentry/utils/metrics'; import {formatMetricsUsingUnitAndOp} from 'sentry/utils/metrics/formatters'; import {MetricDisplayType} from 'sentry/utils/metrics/types'; import useRouter from 'sentry/utils/useRouter'; -import {DDM_CHART_GROUP} from 'sentry/views/ddm/constants'; import type {FocusAreaProps} from 'sentry/views/ddm/context'; import {useFocusArea} from 'sentry/views/ddm/focusArea'; import {getFormatter} from '../../components/charts/components/tooltip'; +import {isChartHovered} from '../../components/charts/utils'; import {useChartSamples} from './useChartSamples'; import type {SamplesProps, ScatterSeries as ScatterSeriesType, Series} from './widget'; @@ -32,6 +32,7 @@ type ChartProps = { series: Series[]; widgetIndex: number; focusArea?: FocusAreaProps; + group?: string; height?: number; operation?: string; scatter?: SamplesProps; @@ -44,7 +45,7 @@ echarts.use(CanvasRenderer); export const MetricChart = forwardRef( ( - {series, displayType, operation, widgetIndex, focusArea, height, scatter}, + {series, displayType, operation, widgetIndex, focusArea, height, scatter, group}, forwardedRef ) => { const router = useRouter(); @@ -70,9 +71,12 @@ export const MetricChart = forwardRef( }); useEffect(() => { + if (!group) { + return; + } const echartsInstance = chartRef?.current?.getEchartsInstance(); if (echartsInstance && !echartsInstance.group) { - echartsInstance.group = DDM_CHART_GROUP; + echartsInstance.group = group; } }); @@ -143,13 +147,7 @@ export const MetricChart = forwardRef( if (focusAreaBrush.isDrawingRef.current) { return ''; } - const hoveredEchartElement = Array.from( - document.querySelectorAll(':hover') - ).find(element => { - return element.classList.contains('echarts-for-react'); - }); - const isThisChartHovered = hoveredEchartElement === chartRef?.current?.ele; - if (!isThisChartHovered) { + if (!isChartHovered(chartRef?.current)) { return ''; } if (params.seriesType === 'scatter') { diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 7c5d6b71e3d413..26a39a41ceb05b 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -38,7 +38,7 @@ import {createChartPalette} from 'sentry/views/ddm/metricsChartPalette'; import {QuerySymbol} from 'sentry/views/ddm/querySymbol'; import {SummaryTable} from 'sentry/views/ddm/summaryTable'; -import {MIN_WIDGET_WIDTH} from './constants'; +import {DDM_CHART_GROUP, MIN_WIDGET_WIDTH} from './constants'; type MetricWidgetProps = { datetime: PageFilters['datetime']; @@ -190,6 +190,7 @@ export const MetricWidget = memo( focusArea={focusArea} samples={samples} chartHeight={300} + chartGroup={DDM_CHART_GROUP} {...widget} /> ) : ( @@ -210,6 +211,7 @@ export const MetricWidget = memo( interface MetricWidgetBodyProps extends MetricWidgetQueryParams { widgetIndex: number; + chartGroup?: string; chartHeight?: number; focusArea?: FocusAreaProps; getChartPalette?: 
(seriesNames: string[]) => Record; @@ -233,6 +235,7 @@ export const MetricWidgetBody = memo( getChartPalette = createChartPalette, focusArea, chartHeight, + chartGroup, samples, ...metricsQuery }: MetricWidgetBodyProps & PageFilters) => { @@ -341,6 +344,7 @@ export const MetricWidgetBody = memo( height={chartHeight} scatter={samples} focusArea={focusArea} + group={chartGroup} /> {metricsQuery.showSummaryTable && ( { - // Kinda jank. Get hovered dom elements and check if any of them are the chart - const hoveredEchartElement = Array.from(document.querySelectorAll(':hover')).find( - element => { - return element.classList.contains('echarts-for-react'); - } - ); - - if (hoveredEchartElement === chartRef?.current?.ele) { + if (isChartHovered(chartRef?.current)) { // Return undefined to use default formatter return getFormatter({ isGroupedByDate: true, From 4b04d98f83291df774ca25edba7cb8351bffb2d1 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Tue, 6 Feb 2024 09:57:14 +0100 Subject: [PATCH 035/357] ref: Remove `start_transaction` from `OrganizationSCIMMemberIndex` (#64582) This PR removes an unnecessary `start_transaction` that leads to undefined behavior, since the call occurs within an endpoint already auto-instrumented by the Python SDK's Django integration. The auto-instrumented transaction can be viewed [here](https://sentry.sentry.io/performance/summary/?project=1&query=http.method%3APOST&referrer=performance-transaction-summary&statsPeriod=24h&transaction=%2Fapi%2F0%2Forganizations%2F%7Borganization_slug%7D%2Fscim%2Fv2%2FUsers&unselectedSeries=p100%28%29&unselectedSeries=avg%28%29). ref #63590 --- src/sentry/scim/endpoints/members.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/sentry/scim/endpoints/members.py b/src/sentry/scim/endpoints/members.py index d02b754f74ab6c..4185619fd22c70 100644 --- a/src/sentry/scim/endpoints/members.py +++ b/src/sentry/scim/endpoints/members.py @@ -530,9 +530,7 @@ def post(self, request: Request, organization) -> Response: """ update_role = False - with sentry_sdk.start_transaction( - name="scim.provision_member", op="scim", sampled=True - ) as txn: + with sentry_sdk.configure_scope() as scope: if "sentryOrgRole" in request.data and request.data["sentryOrgRole"]: role = request.data["sentryOrgRole"].lower() idp_role_restricted = True @@ -540,7 +538,7 @@ def post(self, request: Request, organization) -> Response: else: role = organization.default_role idp_role_restricted = False - txn.set_tag("role_restricted", idp_role_restricted) + scope.set_tag("role_restricted", idp_role_restricted) # Allow any role as long as it doesn't have `org:admin` permissions allowed_roles = {role for role in roles.get_all() if not role.has_scope("org:admin")} @@ -548,10 +546,10 @@ def post(self, request: Request, organization) -> Response: # Check for roles not found # TODO: move this to the serializer verification if role not in {role.id for role in allowed_roles}: - txn.set_tag("invalid_role_selection", True) + scope.set_tag("invalid_role_selection", True) raise SCIMApiError(detail=SCIM_400_INVALID_ORGROLE) - txn.set_tag("invalid_role_selection", False) + scope.set_tag("invalid_role_selection", False) serializer = OrganizationMemberSerializer( data={ "email": request.data.get("userName"), @@ -608,9 +606,11 @@ def post(self, request: Request, organization) -> Response: organization_id=organization.id, target_object=member.id, data=member.get_audit_log_data(), - event=audit_log.get_event_id("MEMBER_INVITE") - if 
settings.SENTRY_ENABLE_INVITES - else audit_log.get_event_id("MEMBER_ADD"), + event=( + audit_log.get_event_id("MEMBER_INVITE") + if settings.SENTRY_ENABLE_INVITES + else audit_log.get_event_id("MEMBER_ADD") + ), ) if settings.SENTRY_ENABLE_INVITES and result.get("sendInvite"): From 1153a9e5fde345669b9ca81b0313f914f2d5eee9 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 6 Feb 2024 11:55:29 +0000 Subject: [PATCH 036/357] Revert "fix(on-demand): Add escaping of glob patterns for meta chars supported by Relay (#64552)" This reverts commit 37db951d7840966091bba5b233ed59be7774ac7f. Co-authored-by: iambriccardo <15819674+iambriccardo@users.noreply.github.com> --- src/sentry/snuba/metrics/extraction.py | 25 +---------------- tests/sentry/snuba/metrics/test_extraction.py | 28 ------------------- 2 files changed, 1 insertion(+), 52 deletions(-) diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py index 0fbadff65faa3a..746343650eeebe 100644 --- a/src/sentry/snuba/metrics/extraction.py +++ b/src/sentry/snuba/metrics/extraction.py @@ -1531,29 +1531,6 @@ def _get_satisfactory_threshold_and_metric(project: Project) -> tuple[int, str]: return threshold, metric_field -def _escape_wildcard(value: str) -> str: - """ - Escapes all characters in the wildcard which are considered as meta characters in the glob - implementation in Relay, which can be found at: https://docs.rs/globset/latest/globset/#syntax. - - The goal of this function is to only preserve the `*` character as it is the only character that Sentry's - product offers to users to perform wildcard matching. - """ - i, n = 0, len(value) - escaped = "" - - while i < n: - c = value[i] - i = i + 1 - - if c in "[]{}?": - escaped += "\\" + c - else: - escaped += c - - return escaped - - T = TypeVar("T") @@ -1656,7 +1633,7 @@ def _filter(self, token: SearchFilter) -> RuleCondition: condition: RuleCondition = { "op": "glob", "name": _map_field_name(key), - "value": [_escape_wildcard(value)], + "value": [value], } else: # Special case for the `has` and `!has` operators which are parsed as follows: diff --git a/tests/sentry/snuba/metrics/test_extraction.py b/tests/sentry/snuba/metrics/test_extraction.py index 0a382b7aca3eb3..90e0e8baf22df3 100644 --- a/tests/sentry/snuba/metrics/test_extraction.py +++ b/tests/sentry/snuba/metrics/test_extraction.py @@ -16,7 +16,6 @@ to_standard_metrics_query, ) from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils.glob import glob_match @django_db_all @@ -461,33 +460,6 @@ def test_spec_wildcard() -> None: } -@pytest.mark.parametrize( - "query,title,expected_pattern", - [ - ("title:*[dispatch:*", "backend test [dispatch:something]", "*\\[dispatch:*"), - ("title:*{dispatch:*", "test {dispatch:something]", "*\\{dispatch:*"), - ("title:*dispatch]:*", "backend dispatch]:", "*dispatch\\]:*"), - ("title:*dispatch}:*", "test [dispatch}:", "*dispatch\\}:*"), - ("title:*?dispatch*", "backend ?dispatch", "*\\?dispatch*"), - ], -) -def test_spec_wildcard_escaping(query, title, expected_pattern) -> None: - spec = OnDemandMetricSpec("count()", query) - - assert spec._metric_type == "c" - assert spec.field_to_extract is None - assert spec.op == "sum" - assert spec.condition == { - "name": "event.transaction", - "op": "glob", - "value": [expected_pattern], - } - - # We also validate using Relay's glob implementation to make sure the escaping - # is interpreted correctly. 
- assert glob_match(title, expected_pattern, ignorecase=True) - - def test_spec_count_if() -> None: spec = OnDemandMetricSpec("count_if(transaction.duration,equals,300)", "") From 6a3bd972fbd88e134121b3b69c3f4aa0f6e6b743 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Tue, 6 Feb 2024 13:23:31 +0100 Subject: [PATCH 037/357] feat(ddm): Improved fog of war (#64650) - closes https://github.com/getsentry/sentry/issues/63106 --- static/app/views/ddm/chart.tsx | 125 ++++++++++++++++++--------------- 1 file changed, 70 insertions(+), 55 deletions(-) diff --git a/static/app/views/ddm/chart.tsx b/static/app/views/ddm/chart.tsx index 3394ffc7da0cf2..79859898ce6e11 100644 --- a/static/app/views/ddm/chart.tsx +++ b/static/app/views/ddm/chart.tsx @@ -1,6 +1,7 @@ import {forwardRef, useCallback, useEffect, useMemo, useRef} from 'react'; import styled from '@emotion/styled'; import * as Sentry from '@sentry/react'; +import Color from 'color'; import * as echarts from 'echarts/core'; import {CanvasRenderer} from 'echarts/renderers'; @@ -80,17 +81,32 @@ export const MetricChart = forwardRef( } }); + // TODO(ddm): This assumes that all series have the same bucket size + const bucketSize = series[0]?.data[1]?.name - series[0]?.data[0]?.name; + const isSubMinuteBucket = bucketSize < 60_000; + const unit = series[0]?.unit; + const fogOfWarBuckets = getWidthFactor(bucketSize); const seriesToShow = useMemo( () => series .filter(s => !s.hidden) - .map(s => ({ - ...s, - silent: true, - })), - [series] + // Split series in two parts, one for the main chart and one for the fog of war + // The order is important as the tooltip will show the first series first (for overlaps) + .flatMap(s => [ + { + ...s, + silent: true, + data: s.data.slice(0, -fogOfWarBuckets), + }, + displayType === MetricDisplayType.BAR ? createFogOfWarBarSeries(s, fogOfWarBuckets) : displayType === MetricDisplayType.LINE ? createFogOfWarLineSeries(s, fogOfWarBuckets) : createFogOfWarAreaSeries(s, fogOfWarBuckets), ]), [series, fogOfWarBuckets, displayType] ); const valueFormatter = useCallback( @@ -110,12 +126,6 @@ export const MetricChart = forwardRef( valueFormatter, }); - // TODO(ddm): This assumes that all series have the same bucket size - const bucketSize = seriesToShow[0]?.data[1]?.name - seriesToShow[0]?.data[0]?.name; - const isSubMinuteBucket = bucketSize < 60_000; - const seriesLength = seriesToShow[0]?.data.length; - const displayFogOfWar = isCumulativeOp(operation); - const chartProps = useMemo(() => { const timeseriesFormatters = { valueFormatter, @@ -134,7 +144,6 @@ export const MetricChart = forwardRef( return { ...heightOptions, ...focusAreaBrush.options, - forwardedRef: mergeRefs([forwardedRef, chartRef]), series: seriesToShow, renderer: seriesToShow.length > 20 ? 
('canvas' as const) : ('svg' as const), @@ -153,6 +162,21 @@ if (params.seriesType === 'scatter') { return getFormatter(samples.formatters)(params, asyncTicket); } + + // The mechanism by which we add the fog of war series to the chart duplicates the series in the chart data, + // so we need to deduplicate the series before showing the tooltip. + // This assumes that the first series is the main series and the second is the fog of war series. + if (Array.isArray(params)) { + const uniqueSeries = new Set(); + const deDupedParams = params.filter(param => { + if (uniqueSeries.has(param.seriesName)) { + return false; + } + uniqueSeries.add(param.seriesName); + return true; + }); + return getFormatter(timeseriesFormatters)(deDupedParams, asyncTicket); + } return getFormatter(timeseriesFormatters)(params, asyncTicket); }, }, @@ -202,9 +226,6 @@ export const MetricChart = forwardRef( displayType={displayType} scatterSeries={samples.series} /> - {displayFogOfWar && ( - - )} ); } @@ -275,31 +296,40 @@ function transformToScatterSeries({ }); } -function FogOfWar({ - bucketSize, - seriesLength, -}: { - bucketSize?: number; - seriesLength?: number; -}) { - if (!bucketSize || !seriesLength) { - return null; - } - - const widthFactor = getWidthFactor(bucketSize); - const fogOfWarWidth = widthFactor * bucketSize + 30_000; - - const seriesWidth = bucketSize * seriesLength; - - // If either of these are undefiend, NaN or 0 the result will be invalid - if (!fogOfWarWidth || !seriesWidth) { - return null; - } - - const width = (fogOfWarWidth / seriesWidth) * 100; - - return ; -} +const createFogOfWarBarSeries = (series: Series, fogBucketCnt = 0) => ({ + ...series, + silent: true, + data: series.data.map((data, index) => ({ + ...data, + // We need to set a value for the non-fog of war buckets so that the stacking still works in echarts value: index < series.data.length - fogBucketCnt ? 
0 : data.value, + })), + itemStyle: { + opacity: 0.5, + }, +}); + +const createFogOfWarLineSeries = (series: Series, fogBucketCnt = 0) => ({ + ...series, + silent: true, + // We include the last non-fog of war bucket so that the line is connected + data: series.data.slice(-fogBucketCnt - 1), + lineStyle: { + type: 'dashed', + }, +}); + +const createFogOfWarAreaSeries = (series: Series, fogBucketCnt = 0) => ({ + ...series, + silent: true, + stack: 'fogOfWar', + // We include the last non-fog of war bucket so that the line is connected + data: series.data.slice(-fogBucketCnt - 1), + lineStyle: { + type: 'dashed', + color: Color(series.color).lighten(0.3).string(), + }, +}); function getWidthFactor(bucketSize: number) { // In general, fog of war should cover the last bucket @@ -321,18 +351,3 @@ const ChartWrapper = styled('div')` position: relative; height: 100%; `; - -const FogOfWarOverlay = styled('div')<{width?: number}>` - height: calc(100% - 29px); - width: ${p => p.width}%; - position: absolute; - right: 0px; - top: 5px; - pointer-events: none; - background: linear-gradient( - 90deg, - ${p => p.theme.background}00 0%, - ${p => p.theme.background}FF 70%, - ${p => p.theme.background}FF 100% - ); -`; From 7e84b0c0376e6a88b288655502bc5f1a95a7b6ae Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Tue, 6 Feb 2024 13:41:30 +0100 Subject: [PATCH 038/357] fix(on-demand): Escape glob patterns meta characters (#64651) --- src/sentry/snuba/metrics/extraction.py | 25 ++++++++++++- .../dispatch@something/@/dispatch@@.pysnap | 6 ++++ .../backend @dispatch/@/@dispatch@.pysnap | 6 ++++ .../@@/backend dispatch/@/@dispatch/@@.pysnap | 6 ++++ .../test /dispatch}@/@dispatch/}@@.pysnap | 6 ++++ .../@/{dispatch@@.pysnap | 6 ++++ tests/sentry/snuba/metrics/test_extraction.py | 36 +++++++++++++++++++ 7 files changed, 90 insertions(+), 1 deletion(-) create mode 100644 tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@/dispatch@@/backend test /dispatch@something/@/dispatch@@.pysnap create mode 100644 tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@@dispatch@/backend @dispatch/@/@dispatch@.pysnap create mode 100644 tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch/@@/backend dispatch/@/@dispatch/@@.pysnap create mode 100644 tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch}@@/test /dispatch}@/@dispatch/}@@.pysnap create mode 100644 tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@{dispatch@@/test {dispatch@something/@/{dispatch@@.pysnap diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py index 746343650eeebe..b53a0c5dbec137 100644 --- a/src/sentry/snuba/metrics/extraction.py +++ b/src/sentry/snuba/metrics/extraction.py @@ -1531,6 +1531,29 @@ def _get_satisfactory_threshold_and_metric(project: Project) -> tuple[int, str]: return threshold, metric_field +def _escape_wildcard(value: str) -> str: + """ + Escapes all characters in the wildcard which are considered as meta characters in the glob + implementation in Relay, which can be found at: https://docs.rs/globset/latest/globset/#syntax. + + The goal of this function is to only preserve the `*` character as it is the only character that Sentry's + product offers to users to perform wildcard matching. 
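+ + For example, `title:*[dispatch:*` is escaped to the glob `*\[dispatch:*`; the `*` wildcard itself is preserved (see the escaping tests added below).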
+ """ + i, n = 0, len(value) + escaped = "" + + while i < n: + c = value[i] + i = i + 1 + + if c in "[]{}?": + escaped += rf"\{c}" + else: + escaped += c + + return escaped + + T = TypeVar("T") @@ -1633,7 +1656,7 @@ def _filter(self, token: SearchFilter) -> RuleCondition: condition: RuleCondition = { "op": "glob", "name": _map_field_name(key), - "value": [value], + "value": [_escape_wildcard(value)], } else: # Special case for the `has` and `!has` operators which are parsed as follows: diff --git a/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@/dispatch@@/backend test /dispatch@something/@/dispatch@@.pysnap b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@/dispatch@@/backend test /dispatch@something/@/dispatch@@.pysnap new file mode 100644 index 00000000000000..396f0aaddd7d70 --- /dev/null +++ b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@/dispatch@@/backend test /dispatch@something/@/dispatch@@.pysnap @@ -0,0 +1,6 @@ +--- +created: '2024-02-06T12:13:13.768289Z' +creator: sentry +source: tests/sentry/snuba/metrics/test_extraction.py +--- +{"category":"transaction","mri":"c:transactions/on_demand@none","field":null,"tags":[{"key":"query_hash","value":"32916cfa"}],"condition":{"op":"glob","name":"event.transaction","value":["*\\[dispatch:*"]}} diff --git a/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@@dispatch@/backend @dispatch/@/@dispatch@.pysnap b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@@dispatch@/backend @dispatch/@/@dispatch@.pysnap new file mode 100644 index 00000000000000..872eef261d0833 --- /dev/null +++ b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@@dispatch@/backend @dispatch/@/@dispatch@.pysnap @@ -0,0 +1,6 @@ +--- +created: '2024-02-06T12:13:12.884044Z' +creator: sentry +source: tests/sentry/snuba/metrics/test_extraction.py +--- +{"category":"transaction","mri":"c:transactions/on_demand@none","field":null,"tags":[{"key":"query_hash","value":"0c6ddfa6"}],"condition":{"op":"glob","name":"event.transaction","value":["*\\?dispatch*"]}} diff --git a/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch/@@/backend dispatch/@/@dispatch/@@.pysnap b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch/@@/backend dispatch/@/@dispatch/@@.pysnap new file mode 100644 index 00000000000000..e86472d94f8976 --- /dev/null +++ b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch/@@/backend dispatch/@/@dispatch/@@.pysnap @@ -0,0 +1,6 @@ +--- +created: '2024-02-06T12:13:13.328092Z' +creator: sentry +source: tests/sentry/snuba/metrics/test_extraction.py +--- +{"category":"transaction","mri":"c:transactions/on_demand@none","field":null,"tags":[{"key":"query_hash","value":"8bb6b7dd"}],"condition":{"op":"glob","name":"event.transaction","value":["*dispatch\\]:*"]}} diff --git a/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch}@@/test /dispatch}@/@dispatch/}@@.pysnap b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch}@@/test /dispatch}@/@dispatch/}@@.pysnap new file mode 100644 index 00000000000000..d184f98b10e92d --- /dev/null +++ 
b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@dispatch}@@/test /dispatch}@/@dispatch/}@@.pysnap @@ -0,0 +1,6 @@ +--- +created: '2024-02-06T12:13:13.108539Z' +creator: sentry +source: tests/sentry/snuba/metrics/test_extraction.py +--- +{"category":"transaction","mri":"c:transactions/on_demand@none","field":null,"tags":[{"key":"query_hash","value":"aa691637"}],"condition":{"op":"glob","name":"event.transaction","value":["*dispatch\\}:*"]}} diff --git a/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@{dispatch@@/test {dispatch@something/@/{dispatch@@.pysnap b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@{dispatch@@/test {dispatch@something/@/{dispatch@@.pysnap new file mode 100644 index 00000000000000..ac27c6e0f55b17 --- /dev/null +++ b/tests/sentry/snuba/metrics/snapshots/test_extraction/test_spec_wildcard_escaping/title@@{dispatch@@/test {dispatch@something/@/{dispatch@@.pysnap @@ -0,0 +1,6 @@ +--- +created: '2024-02-06T12:13:13.545655Z' +creator: sentry +source: tests/sentry/snuba/metrics/test_extraction.py +--- +{"category":"transaction","mri":"c:transactions/on_demand@none","field":null,"tags":[{"key":"query_hash","value":"1685da56"}],"condition":{"op":"glob","name":"event.transaction","value":["*\\{dispatch:*"]}} diff --git a/tests/sentry/snuba/metrics/test_extraction.py b/tests/sentry/snuba/metrics/test_extraction.py index 90e0e8baf22df3..15cdf50f4849c4 100644 --- a/tests/sentry/snuba/metrics/test_extraction.py +++ b/tests/sentry/snuba/metrics/test_extraction.py @@ -16,6 +16,8 @@ to_standard_metrics_query, ) from sentry.testutils.pytest.fixtures import django_db_all +from sentry.utils import json +from sentry.utils.glob import glob_match @django_db_all @@ -460,6 +462,40 @@ def test_spec_wildcard() -> None: } +@django_db_all +@pytest.mark.parametrize( + "query,title,expected_pattern", + [ + ("title:*[dispatch:*", "backend test [dispatch:something]", r"*\[dispatch:*"), + ("title:*{dispatch:*", "test {dispatch:something]", r"*\{dispatch:*"), + ("title:*dispatch]:*", "backend dispatch]:", r"*dispatch\]:*"), + ("title:*dispatch}:*", "test [dispatch}:", r"*dispatch\}:*"), + ("title:*?dispatch*", "backend ?dispatch", r"*\?dispatch*"), + ], +) +def test_spec_wildcard_escaping( + default_project, insta_snapshot, query, title, expected_pattern +) -> None: + spec = OnDemandMetricSpec("count()", query) + + assert spec._metric_type == "c" + assert spec.field_to_extract is None + assert spec.op == "sum" + assert spec.condition == { + "name": "event.transaction", + "op": "glob", + "value": [expected_pattern], + } + + # We also validate using Relay's glob implementation to make sure the escaping + # is interpreted correctly. + assert glob_match(title, expected_pattern, ignorecase=True) + + # We want to validate the json output, to make sure that characters are correctly escaped. + metric_spec = spec.to_metric_spec(default_project) + insta_snapshot(json.dumps(metric_spec)) + + def test_spec_count_if() -> None: spec = OnDemandMetricSpec("count_if(transaction.duration,equals,300)", "") From a9709befa38208c3ffb90f5a1b3e099829fcd3dd Mon Sep 17 00:00:00 2001 From: Iker Barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Tue, 6 Feb 2024 13:44:06 +0100 Subject: [PATCH 039/357] ref(proj-config): Remove span attributes (#64346) Removes the unused `spanAttributes` in project configs. 
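For reference, the dropped field was populated straight from a project option; a minimal sketch of what the computed config contained, assuming the default `sentry:span_attributes` value of `["exclusive-time"]` (that option is itself removed later in this series): config["spanAttributes"] = project.get_option("sentry:span_attributes")  # typically ["exclusive-time"]. Since the field is unused, dropping it simply trims the project config payload.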
--- src/sentry/relay/config/__init__.py | 1 - tests/relay_integration/test_integration.py | 16 ---------------- .../full_config/REGION.pysnap | 4 ---- tests/sentry/relay/test_config.py | 10 ---------- 4 files changed, 31 deletions(-) diff --git a/src/sentry/relay/config/__init__.py b/src/sentry/relay/config/__init__.py index 445da6723de5b9..1953d87ca4ebc7 100644 --- a/src/sentry/relay/config/__init__.py +++ b/src/sentry/relay/config/__init__.py @@ -671,7 +671,6 @@ def _get_project_config( ] } - config["spanAttributes"] = project.get_option("sentry:span_attributes") with Hub.current.start_span(op="get_filter_settings"): if filter_settings := get_filter_settings(project): config["filterSettings"] = filter_settings diff --git a/tests/relay_integration/test_integration.py b/tests/relay_integration/test_integration.py index 6ebe959f495479..21400bf298a401 100644 --- a/tests/relay_integration/test_integration.py +++ b/tests/relay_integration/test_integration.py @@ -5,7 +5,6 @@ import pytest from sentry.models.eventattachment import EventAttachment -from sentry.spans.grouping.utils import hash_values from sentry.tasks.relay import invalidate_project_config from sentry.testutils.cases import TransactionTestCase from sentry.testutils.helpers.datetime import before_now, iso_format, timestamp_format @@ -222,21 +221,6 @@ def test_transaction(self): event = self.post_and_retrieve_event(event_data) raw_event = event.get_raw_data() - exclusive_times = [ - pytest.approx(50, abs=2), - pytest.approx(0, abs=2), - pytest.approx(200, abs=2), - pytest.approx(0, abs=2), - pytest.approx(200, abs=2), - ] - for actual, expected, exclusive_time in zip( - raw_event["spans"], event_data["spans"], exclusive_times - ): - assert actual == dict( - expected, - exclusive_time=exclusive_time, - hash=hash_values([expected["description"]]), - ) assert raw_event["breakdowns"] == { "span_ops": { "ops.browser": {"unit": "millisecond", "value": pytest.approx(200, abs=2)}, diff --git a/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap b/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap index a9eda83dc82cfb..f14a0c93ca9c43 100644 --- a/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap +++ b/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap @@ -1,6 +1,4 @@ --- -created: '2024-01-24T14:07:55.778589Z' -creator: sentry source: tests/sentry/relay/test_config.py --- config: @@ -121,8 +119,6 @@ config: - '@ip' - '@mac' type: multiple - spanAttributes: - - exclusive-time trustedRelays: [] disabled: false slug: bar diff --git a/tests/sentry/relay/test_config.py b/tests/sentry/relay/test_config.py index 00e6fce6b6adb9..57c8e02394e67f 100644 --- a/tests/sentry/relay/test_config.py +++ b/tests/sentry/relay/test_config.py @@ -544,16 +544,6 @@ def test_project_config_satisfaction_thresholds( insta_snapshot(cfg["config"]["metricConditionalTagging"]) -@django_db_all -@region_silo_test -def test_project_config_with_span_attributes(default_project, insta_snapshot): - # The span attributes config is not set with the flag turnd off - project_cfg = get_project_config(default_project, full_config=True) - cfg = project_cfg.to_dict() - _validate_project_config(cfg["config"]) - insta_snapshot(cfg["config"]["spanAttributes"]) - - @django_db_all @region_silo_test @pytest.mark.parametrize("feature_flag", (False, True), ids=("feature_disabled", "feature_enabled")) From 
914572258fcb86edec671ea6a3cdc84185ab7863 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Tue, 6 Feb 2024 08:18:26 -0500 Subject: [PATCH 040/357] ref(similarity-embedding): Modify API return to work with existing FE (#64403) Modify return type of `similar-issues-embeddings` endpoint to match `similar` so that it works with the existing frontend --- .../group_similar_issues_embeddings.py | 57 +++++++- src/sentry/seer/utils.py | 4 +- .../test_group_similar_issues_embeddings.py | 129 ++++++++++++++++-- 3 files changed, 169 insertions(+), 21 deletions(-) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index 9cce7ac6da56b6..ba014eaa06678e 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -1,7 +1,8 @@ import logging -from collections.abc import Mapping -from typing import Any +from collections.abc import Mapping, Sequence +from typing import Any, TypedDict +from django.contrib.auth.models import AnonymousUser from rest_framework.request import Request from rest_framework.response import Response @@ -10,8 +11,15 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.group import GroupEndpoint +from sentry.api.serializers import serialize from sentry.eventstore.models import GroupEvent -from sentry.seer.utils import SimilarIssuesEmbeddingsRequest, get_similar_issues_embeddings +from sentry.models.group import Group +from sentry.models.user import User +from sentry.seer.utils import ( + SimilarIssuesEmbeddingsData, + SimilarIssuesEmbeddingsRequest, + get_similar_issues_embeddings, +) from sentry.web.helpers import render_to_string logger = logging.getLogger(__name__) @@ -50,6 +58,12 @@ def get_stacktrace_string(exception: Mapping[Any, Any], event: GroupEvent) -> st return "\n".join(output) +class FormattedSimilarIssuesEmbeddingsData(TypedDict): + exception: float + message: float + shouldBeGrouped: str + + @region_silo_endpoint class GroupSimilarIssuesEmbeddingsEndpoint(GroupEndpoint): owner = ApiOwner.ISSUES @@ -57,6 +71,36 @@ class GroupSimilarIssuesEmbeddingsEndpoint(GroupEndpoint): "GET": ApiPublishStatus.PRIVATE, } + def get_formatted_results( + self, responses: Sequence[SimilarIssuesEmbeddingsData | None], user: User | AnonymousUser + ) -> Sequence[tuple[Mapping[str, Any], Mapping[str, Any]] | None]: + """Format the responses using to be used by the frontend.""" + group_data = {} + for response in responses: + if response: + formatted_response: FormattedSimilarIssuesEmbeddingsData = { + "message": response["message_similarity"], + "exception": response["stacktrace_similarity"], + "shouldBeGrouped": "Yes" if response["should_group"] else "No", + } + group_data.update({response["parent_group_id"]: formatted_response}) + + serialized_groups = { + int(g["id"]): g + for g in serialize( + list(Group.objects.get_many_from_cache(group_data.keys())), user=user + ) + } + + result = [] + for group_id in group_data: + try: + result.append((serialized_groups[group_id], group_data[group_id])) + except KeyError: + # KeyErrors may occur if seer API returns a deleted/merged group + continue + return result + def get(self, request: Request, group) -> Response: if not features.has("projects:similarity-embeddings", group.project): return Response(status=404) @@ -76,4 +120,9 @@ def get(self, request: Request, group) 
-> Response: similar_issues_params.update({"threshold": float(request.GET["threshold"])}) results = get_similar_issues_embeddings(similar_issues_params) - return Response(results) + + if not results["responses"]: + return Response([]) + formatted_results = self.get_formatted_results(results["responses"], request.user) + + return Response(formatted_results) diff --git a/src/sentry/seer/utils.py b/src/sentry/seer/utils.py index dc6a6fcb2aedf0..66743818b887b7 100644 --- a/src/sentry/seer/utils.py +++ b/src/sentry/seer/utils.py @@ -1,7 +1,7 @@ from typing import TypedDict from django.conf import settings -from urllib3 import HTTPResponse, Retry +from urllib3 import Retry from sentry.net.http import connection_from_url from sentry.utils import json @@ -70,7 +70,7 @@ class SimilarIssuesEmbeddingsResponse(TypedDict): def get_similar_issues_embeddings( similar_issues_request: SimilarIssuesEmbeddingsRequest, -) -> SimilarIssuesEmbeddingsResponse | HTTPResponse: +) -> SimilarIssuesEmbeddingsResponse: """Call /v0/issues/similar-issues endpoint from timeseries-analysis-service.""" response = seer_connection_pool.urlopen( "POST", diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 2d33f36c510cc1..829cd5521c3de2 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -1,8 +1,16 @@ +from collections.abc import Mapping, Sequence +from typing import Any from unittest import mock from urllib3.response import HTTPResponse -from sentry.api.endpoints.group_similar_issues_embeddings import get_stacktrace_string +from sentry.api.endpoints.group_similar_issues_embeddings import ( + GroupSimilarIssuesEmbeddingsEndpoint, + get_stacktrace_string, +) +from sentry.api.serializers.base import serialize +from sentry.models.group import Group +from sentry.seer.utils import SimilarIssuesEmbeddingsData, SimilarIssuesEmbeddingsResponse from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test @@ -46,6 +54,31 @@ def setUp(self): self.group = self.event.group assert self.group self.path = f"/api/0/issues/{self.group.id}/similar-issues-embeddings/" + self.similar_group = self.create_group(project=self.project) + + def get_expected_response( + self, + group_ids: Sequence[int], + message_similarities: Sequence[float], + exception_similarities: Sequence[float], + should_be_grouped: Sequence[str], + ) -> Sequence[tuple[Any, Mapping[str, Any]]]: + serialized_groups = serialize( + list(Group.objects.get_many_from_cache(group_ids)), user=self.user + ) + response = [] + for i, group in enumerate(serialized_groups): + response.append( + ( + group, + { + "message": message_similarities[i], + "exception": exception_similarities[i], + "shouldBeGrouped": should_be_grouped[i], + }, + ) + ) + return response def test_get_stacktrace_string(self): stacktrace_string = get_stacktrace_string(self.base_error_trace["exception"], self.event) # type: ignore @@ -55,6 +88,28 @@ def test_get_stacktrace_string_no_values(self): stacktrace_string = get_stacktrace_string({"values": []}, self.event) assert stacktrace_string == "" + def test_get_formatted_results(self): + new_group = self.create_group(project=self.project) + response_1: SimilarIssuesEmbeddingsData = { + "message_similarity": 0.95, + "parent_group_id": self.similar_group.id, + "should_group": 
True, + "stacktrace_similarity": 0.99, + } + response_2: SimilarIssuesEmbeddingsData = { + "message_similarity": 0.51, + "parent_group_id": new_group.id, + "should_group": False, + "stacktrace_similarity": 0.23, + } + group_similar_endpoint = GroupSimilarIssuesEmbeddingsEndpoint() + formatted_results = group_similar_endpoint.get_formatted_results( + responses=[response_1, response_2], user=self.user + ) + assert formatted_results == self.get_expected_response( + [self.similar_group.id, new_group.id], [0.95, 0.51], [0.99, 0.23], ["Yes", "No"] + ) + def test_no_feature_flag(self): response = self.client.get(self.path) @@ -63,26 +118,27 @@ def test_simple(self, mock_seer_request): - expected_return_value = { + seer_return_value: SimilarIssuesEmbeddingsResponse = { "responses": [ { "message_similarity": 0.95, - "parent_group_id": 6, + "parent_group_id": self.similar_group.id, "should_group": True, "stacktrace_similarity": 0.99, } ] } - mock_seer_request.return_value = HTTPResponse( - json.dumps(expected_return_value).encode("utf-8") - ) + mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8")) response = self.client.get( self.path, data={"k": "1", "threshold": "0.98"}, ) - assert response.data == expected_return_value + assert response.data == self.get_expected_response( + [self.similar_group.id], [0.95], [0.99], ["Yes"] + ) + mock_seer_request.assert_called_with( "POST", "/v0/issues/similar-issues", @@ -98,30 +154,67 @@ def test_simple(self, mock_seer_request): headers={"Content-Type": "application/json;charset=utf-8"}, ) + @with_feature("projects:similarity-embeddings") + @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + def test_invalid_return(self, mock_seer_request): + """ + The seer API can return groups that do not exist if they have been deleted/merged. + Test that these groups are not returned. + """ + seer_return_value: SimilarIssuesEmbeddingsResponse = { + "responses": [ + { + "message_similarity": 0.95, + "parent_group_id": self.similar_group.id, + "should_group": True, + "stacktrace_similarity": 0.99, + }, + { + "message_similarity": 0.95, + "parent_group_id": 10000000, # An arbitrarily large group ID that will not exist + "should_group": True, + "stacktrace_similarity": 0.99, + }, + ] + } + mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8")) + response = self.client.get(self.path) + assert response.data == self.get_expected_response( + [self.similar_group.id], [0.95], [0.99], ["Yes"] + ) + + @with_feature("projects:similarity-embeddings") + @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + def test_empty_return(self, mock_seer_request): + mock_seer_request.return_value = HTTPResponse([]) + response = self.client.get(self.path) + assert response.data == [] + @with_feature("projects:similarity-embeddings") @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") def test_no_optional_params(self, mock_seer_request): """ Test that the optional parameters, k and threshold, do not need to be included. 
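Each of them should also be accepted when supplied on its own.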
""" - expected_return_value = { + seer_return_value: SimilarIssuesEmbeddingsResponse = { "responses": [ { "message_similarity": 0.95, - "parent_group_id": 6, + "parent_group_id": self.similar_group.id, "should_group": True, "stacktrace_similarity": 0.99, } ] } - mock_seer_request.return_value = HTTPResponse( - json.dumps(expected_return_value).encode("utf-8") - ) + + mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8")) # Include no optional parameters response = self.client.get(self.path) + assert response.data == self.get_expected_response( + [self.similar_group.id], [0.95], [0.99], ["Yes"] + ) - assert response.data == expected_return_value mock_seer_request.assert_called_with( "POST", "/v0/issues/similar-issues", @@ -140,7 +233,10 @@ def test_no_optional_params(self, mock_seer_request): self.path, data={"k": 1}, ) - assert response.data == expected_return_value + assert response.data == self.get_expected_response( + [self.similar_group.id], [0.95], [0.99], ["Yes"] + ) + mock_seer_request.assert_called_with( "POST", "/v0/issues/similar-issues", @@ -160,7 +256,10 @@ def test_no_optional_params(self, mock_seer_request): self.path, data={"threshold": "0.98"}, ) - assert response.data == expected_return_value + assert response.data == self.get_expected_response( + [self.similar_group.id], [0.95], [0.99], ["Yes"] + ) + mock_seer_request.assert_called_with( "POST", "/v0/issues/similar-issues", From 5252401011955575477e01b7e14140f7fd013bfc Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Tue, 6 Feb 2024 08:32:47 -0500 Subject: [PATCH 041/357] fix(metrics-extraction): Remove isExtrapolatedData from legend (#64654) This keys shouldn't be part of the data body as it's not data, and for dashboards it automatically shows up in legends, if they are needed elsewhere they can be added to `ResponseMeta`. It looks like this is used in alerts but I don't see instances of this used in dashboards. ![Screenshot 2024-02-06 at 7 47 45 AM](https://github.com/getsentry/sentry/assets/6111995/55f6abc9-51bf-485a-ab5e-060c7a1654b0) Closes https://github.com/orgs/getsentry/projects/156/views/2\?pane\=issue\&itemId\=39293709 --- .../views/dashboards/datasetConfig/errorsAndTransactions.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/views/dashboards/datasetConfig/errorsAndTransactions.tsx b/static/app/views/dashboards/datasetConfig/errorsAndTransactions.tsx index 03fec2bb14d6a4..66f84735742418 100644 --- a/static/app/views/dashboards/datasetConfig/errorsAndTransactions.tsx +++ b/static/app/views/dashboards/datasetConfig/errorsAndTransactions.tsx @@ -636,7 +636,7 @@ async function doOnDemandMetricsRequest( generatePathname: isEditing ? 
fetchEstimatedStats : undefined, }); - response[0] = {...response[0], isMetricsData: true, isExtrapolatedData: isEditing}; + response[0] = {...response[0]}; return [response[0], response[1], response[2]]; } catch (err) { From e8365cb95e0ff9e1bb8965a6dee0bfbe85ad8350 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Tue, 6 Feb 2024 14:32:59 +0100 Subject: [PATCH 042/357] fix(dashboards): metric widget overflow (#64644) --- static/app/views/dashboards/dashboard.tsx | 2 -- .../dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx | 2 ++ 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/views/dashboards/dashboard.tsx b/static/app/views/dashboards/dashboard.tsx index ac4f1fc16af123..a8a0b7a5d5d1c5 100644 --- a/static/app/views/dashboards/dashboard.tsx +++ b/static/app/views/dashboards/dashboard.tsx @@ -630,6 +630,4 @@ const WidgetWidthWrapper = styled('div')` left: 16px !important; right: 16px !important; width: auto !important; - /* minimal working z-index */ - z-index: 6; `; diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx index b6a67a46685771..1903909faefe75 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx @@ -346,6 +346,8 @@ const InlineEditorWrapper = styled('div')` padding: ${space(1)}; gap: ${space(1)}; width: 100%; + /* minimal z-index that allows dropdowns to expand over other dashboard elements */ + z-index: 6; `; const QueryDefinitionWrapper = styled('div')` From eb5765a46ba4e66da8c9e79fbf6f3d8a9e179f50 Mon Sep 17 00:00:00 2001 From: Iker Barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Tue, 6 Feb 2024 14:46:48 +0100 Subject: [PATCH 043/357] ref(proj-config): Remove span attributes option (#64347) --- src/sentry/models/options/project_option.py | 1 - src/sentry/projectoptions/defaults.py | 3 --- 2 files changed, 4 deletions(-) diff --git a/src/sentry/models/options/project_option.py b/src/sentry/models/options/project_option.py index 2b06f14aeb9e37..09b829d8ae82f1 100644 --- a/src/sentry/models/options/project_option.py +++ b/src/sentry/models/options/project_option.py @@ -56,7 +56,6 @@ "sentry:dynamic_sampling", "sentry:dynamic_sampling_biases", "sentry:breakdowns", - "sentry:span_attributes", "sentry:transaction_name_cluster_rules", "sentry:span_description_cluster_rules", "quotas:spike-protection-disabled", diff --git a/src/sentry/projectoptions/defaults.py b/src/sentry/projectoptions/defaults.py index 55ad25a2b2308c..d2a312239c21b8 100644 --- a/src/sentry/projectoptions/defaults.py +++ b/src/sentry/projectoptions/defaults.py @@ -102,9 +102,6 @@ # extracted performance metrics. 
register(key="sentry:transaction_metrics_custom_tags", epoch_defaults={1: []}) -# Default span attributes config -register(key="sentry:span_attributes", epoch_defaults={1: ["exclusive-time"]}) - DEFAULT_PROJECT_PERFORMANCE_DETECTION_SETTINGS = { "uncompressed_assets_detection_enabled": True, "consecutive_http_spans_detection_enabled": True, From c4dc41d101d5e64c0e9f46c8d4f5bb77c3fd20aa Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Tue, 6 Feb 2024 09:23:27 -0500 Subject: [PATCH 044/357] ref: upgrade to django 5.0 (#64360) --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- src/sentry/auth/providers/saml2/forms.py | 6 ++-- .../auth/providers/saml2/onelogin/provider.py | 2 +- src/sentry/new_migrations/monkey/__init__.py | 2 +- src/sentry/signals.py | 34 ++++--------------- src/sentry_plugins/opsgenie/plugin.py | 1 + src/sentry_plugins/redmine/forms.py | 2 +- tests/sentry/plugins/test_config.py | 2 +- 10 files changed, 18 insertions(+), 37 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 336baab96a6534..08f41718c14c18 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -13,7 +13,7 @@ datadog>=0.44 django-crispy-forms>=1.14.0 django-csp>=3.7 django-pg-zero-downtime-migrations>=0.13 -Django>=4.2.8,<5 +Django>=5,<6 djangorestframework>=3.14.0 drf-spectacular>=0.26.3 email-reply-parser>=0.5.12 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 21d886b6ca2468..528f7a94d5b801 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -39,7 +39,7 @@ cssutils==2.9.0 datadog==0.44.0 distlib==0.3.4 distro==1.8.0 -django==4.2.8 +django==5.0.1 django-crispy-forms==1.14.0 django-csp==3.7 django-pg-zero-downtime-migrations==0.13 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 347953ac8109ec..dfcc2187f611ae 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -32,7 +32,7 @@ cssselect==1.0.3 cssutils==2.9.0 datadog==0.44.0 distro==1.8.0 -django==4.2.8 +django==5.0.1 django-crispy-forms==1.14.0 django-csp==3.7 django-pg-zero-downtime-migrations==0.13 diff --git a/src/sentry/auth/providers/saml2/forms.py b/src/sentry/auth/providers/saml2/forms.py index f70f0c3103de83..dce57ea246e092 100644 --- a/src/sentry/auth/providers/saml2/forms.py +++ b/src/sentry/auth/providers/saml2/forms.py @@ -43,7 +43,7 @@ def process_xml(form): class URLMetadataForm(forms.Form): - metadata_url = forms.URLField(label="Metadata URL") + metadata_url = forms.URLField(label="Metadata URL", assume_scheme="https") processor = process_url @@ -54,8 +54,8 @@ class XMLMetadataForm(forms.Form): class SAMLForm(forms.Form): entity_id = forms.CharField(label="Entity ID") - sso_url = forms.URLField(label="Single Sign On URL") - slo_url = forms.URLField(label="Single Log Out URL", required=False) + sso_url = forms.URLField(label="Single Sign On URL", assume_scheme="https") + slo_url = forms.URLField(label="Single Log Out URL", required=False, assume_scheme="https") x509cert = forms.CharField(label="x509 public certificate", widget=forms.Textarea) processor = lambda d: d.cleaned_data diff --git a/src/sentry/auth/providers/saml2/onelogin/provider.py b/src/sentry/auth/providers/saml2/onelogin/provider.py index 52a42f6c4dd97b..3642a8d7f179e1 100644 --- a/src/sentry/auth/providers/saml2/onelogin/provider.py +++ b/src/sentry/auth/providers/saml2/onelogin/provider.py @@ -7,7 +7,7 @@ # Onelogin specifically calls their Metadata 
URL a 'Issuer URL' class OneLoginURLMetadataForm(URLMetadataForm): - metadata_url = forms.URLField(label="Issuer URL") + metadata_url = forms.URLField(label="Issuer URL", assume_scheme="https") SelectIdP = make_simple_setup(OneLoginURLMetadataForm, "sentry_auth_onelogin/select-idp.html") diff --git a/src/sentry/new_migrations/monkey/__init__.py b/src/sentry/new_migrations/monkey/__init__.py index 3eb466006e693a..500d627c521a14 100644 --- a/src/sentry/new_migrations/monkey/__init__.py +++ b/src/sentry/new_migrations/monkey/__init__.py @@ -4,7 +4,7 @@ from sentry.new_migrations.monkey.executor import SentryMigrationExecutor from sentry.new_migrations.monkey.fields import deconstruct -LAST_VERIFIED_DJANGO_VERSION = (4, 2) +LAST_VERIFIED_DJANGO_VERSION = (5, 0) CHECK_MESSAGE = """Looks like you're trying to upgrade Django! Since we monkeypatch Django in several places, please verify that we have the latest code, and that the monkeypatching still works as expected. Currently the main things to check are: diff --git a/src/sentry/signals.py b/src/sentry/signals.py index a7b706b8404d8b..0b0d97fd1995e3 100644 --- a/src/sentry/signals.py +++ b/src/sentry/signals.py @@ -6,7 +6,7 @@ from collections.abc import Callable from typing import Any -from django.dispatch.dispatcher import NO_RECEIVERS, Signal +from django.dispatch.dispatcher import Signal from sentry.utils.env import in_test_environment @@ -79,32 +79,12 @@ def wrapped(func): wrapped.__doc__ = receiver.__doc__ return wrapped(receiver) - def send_robust(self, sender, **named) -> list[tuple[Receiver, Exception | Any]]: - """ - A reimplementation of send_robust which logs failures, thus recovering stacktraces. - """ - responses: list[tuple[Receiver, Exception | Any]] = [] - if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS: - return responses - - # Call each receiver with whatever arguments it can accept. - # Return a list of tuple pairs [(receiver, response), ... ]. - for receiver in self._live_receivers(sender): - try: - response = receiver(signal=self, sender=sender, **named) - except Exception as err: - if in_test_environment(): - if ( - _receivers_that_raise is _AllReceivers.ALL - or receiver in _receivers_that_raise - ): - raise - - logging.exception("signal.failure", extra={"receiver": repr(receiver)}) - responses.append((receiver, err)) - else: - responses.append((receiver, response)) - return responses + def _log_robust_failure(self, receiver: object, err: Exception) -> None: + if in_test_environment(): + if _receivers_that_raise is _AllReceivers.ALL or receiver in _receivers_that_raise: + raise + + logging.error("signal.failure", extra={"receiver": repr(receiver)}, exc_info=err) buffer_incr_complete = BetterSignal() # ["model", "columns", "extra", "result"] diff --git a/src/sentry_plugins/opsgenie/plugin.py b/src/sentry_plugins/opsgenie/plugin.py index 13247f84992ad2..5e57f8ec362aee 100644 --- a/src/sentry_plugins/opsgenie/plugin.py +++ b/src/sentry_plugins/opsgenie/plugin.py @@ -38,6 +38,7 @@ class OpsGenieOptionsForm(notify.NotificationConfigurationForm): attrs={"class": "span6", "placeholder": "e.g. 
https://api.opsgenie.com/v2/alerts"} ), help_text="It must be visible to the Sentry server", + assume_scheme="https", required=True, ) diff --git a/src/sentry_plugins/redmine/forms.py b/src/sentry_plugins/redmine/forms.py index e37d4bb384d604..18159643a46b34 100644 --- a/src/sentry_plugins/redmine/forms.py +++ b/src/sentry_plugins/redmine/forms.py @@ -11,7 +11,7 @@ class RedmineOptionsForm(forms.Form): - host = forms.URLField(help_text=_("e.g. http://bugs.redmine.org")) + host = forms.URLField(help_text=_("e.g. http://bugs.redmine.org"), assume_scheme="https") key = forms.CharField( widget=forms.TextInput(attrs={"class": "span9"}), help_text="Your API key is available on your account page after enabling the Rest API (Administration -> Settings -> Authentication)", diff --git a/tests/sentry/plugins/test_config.py b/tests/sentry/plugins/test_config.py index c4f763808acfef..cee96130117001 100644 --- a/tests/sentry/plugins/test_config.py +++ b/tests/sentry/plugins/test_config.py @@ -11,7 +11,7 @@ class DummyForm(forms.Form): textarea = forms.CharField(widget=forms.Textarea, required=False) password = forms.CharField(label="A Password", widget=forms.PasswordInput) choice = forms.ChoiceField(choices=((1, "one"), (2, "two"))) - url = forms.URLField() + url = forms.URLField(assume_scheme="https") class DummyPlugin(Plugin2): From 3a32f2380643454287658ec4e669679fba97d82e Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Tue, 6 Feb 2024 09:45:43 -0500 Subject: [PATCH 045/357] feat(metrics-extraction): Add condition to update on-demand row (#64659) ### Summary This will cause the row to always be updated, which fixes the trouble with all the widget queries date modified being set to when the migration ran (Feb 5th) despite them likely being older than an on demand row --- src/sentry/options/defaults.py | 6 ++++++ src/sentry/tasks/on_demand_metrics.py | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index df3ae71a9314de..3eccbc67e4b695 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1702,6 +1702,12 @@ default=False, flags=FLAG_AUTOMATOR_MODIFIABLE, ) +# Overrides modified date and always updates the row. Can be removed if not needed later. +register( + "on_demand.update_on_demand_modified", + default=False, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) register( "delightful_metrics.minimetrics_sample_rate", diff --git a/src/sentry/tasks/on_demand_metrics.py b/src/sentry/tasks/on_demand_metrics.py index f0aceeb0a4d7a6..ec652c08fc002b 100644 --- a/src/sentry/tasks/on_demand_metrics.py +++ b/src/sentry/tasks/on_demand_metrics.py @@ -6,6 +6,7 @@ import sentry_sdk from celery.exceptions import SoftTimeLimitExceeded +from django.utils import timezone from sentry import options from sentry.api.utils import get_date_range_from_params @@ -295,6 +296,11 @@ def _set_widget_on_demand_state( if on_demand.can_extraction_be_auto_overridden(): on_demand.extraction_state = extraction_state + if options.get("on_demand.update_on_demand_modified"): + # Only temporarily required to check we've updated data on rows the task has passed + # Or updated to pass the check against widget query date_modified. 
+ on_demand.date_modified = timezone.now() + on_demand.spec_hashes = spec_hashes on_demand.save() From 8f5633b2df808deb5ee35d14fda77121524154e8 Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Tue, 6 Feb 2024 10:11:47 -0500 Subject: [PATCH 046/357] feat(plugins): Add Sentry Babel plugin for React component annotation (#64439) Replaces the Fullstory Babel annotate plugin with our own version. See https://github.com/getsentry/sentry-javascript-bundler-plugins/pull/468 for more details This plugin essentially does the same thing, but will annotate the DOM with `data-sentry-component` annotations instead, which is what our SDK looks for when sending component name data to Sentry. This will allow us to start dogfooding new features like span grouping by component names, breadcrumbs, and component names in Replays! Check out the [NPM Page](https://www.npmjs.com/package/@sentry/component-annotate-plugin) for more details on the plugin --- babel.config.ts | 2 ++ package.json | 1 + static/app/utils/performanceForSentry/index.tsx | 4 +++- yarn.lock | 5 +++++ 4 files changed, 11 insertions(+), 1 deletion(-) diff --git a/babel.config.ts b/babel.config.ts index f7cebd18e6a222..b7ba2975e6f28a 100644 --- a/babel.config.ts +++ b/babel.config.ts @@ -44,12 +44,14 @@ const config: TransformOptions = { }, ], ['babel-plugin-add-react-displayname'], + '@sentry/babel-plugin-component-annotate', ], }, development: { plugins: [ '@emotion/babel-plugin', '@babel/plugin-transform-react-jsx-source', + '@sentry/babel-plugin-component-annotate', ...(process.env.SENTRY_UI_HOT_RELOAD ? ['react-refresh/babel'] : []), ], }, diff --git a/package.json b/package.json index 37a1e7367bfcb6..3e770de06be568 100644 --- a/package.json +++ b/package.json @@ -54,6 +54,7 @@ "@sentry-internal/rrweb": "2.9.0", "@sentry-internal/rrweb-player": "2.9.0", "@sentry-internal/rrweb-snapshot": "2.9.0", + "@sentry/babel-plugin-component-annotate": "^2.14.0", "@sentry/core": "^7.99.0", "@sentry/integrations": "^7.99.0", "@sentry/node": "^7.99.0", diff --git a/static/app/utils/performanceForSentry/index.tsx b/static/app/utils/performanceForSentry/index.tsx index d66a0e794fc846..a4935334af9c86 100644 --- a/static/app/utils/performanceForSentry/index.tsx +++ b/static/app/utils/performanceForSentry/index.tsx @@ -604,7 +604,9 @@ function getNearestElementName(node: HTMLElement | undefined | null): string | u let current: HTMLElement | null = node; while (current && current !== document.body) { const elementName = - current.dataset?.testId ?? current.dataset?.component ?? current.dataset?.element; + current.dataset?.testId ?? + current.dataset?.sentryComponent ?? 
+ current.dataset?.element; if (elementName) { return elementName; diff --git a/yarn.lock b/yarn.lock index ace868ee7d4397..2db61ec20bca7b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2858,6 +2858,11 @@ "@sentry/types" "7.99.0" "@sentry/utils" "7.99.0" +"@sentry/babel-plugin-component-annotate@^2.14.0": + version "2.14.0" + resolved "https://registry.yarnpkg.com/@sentry/babel-plugin-component-annotate/-/babel-plugin-component-annotate-2.14.0.tgz#e62f448dd3c922a6d32e9f1c0a5ae85fa6ec22c2" + integrity sha512-FWU4+Lx6fgxjAkwmc3S9j1Q/6pqKZyZzfi52B+8WMNw7a5QjGXgxc5ucBazZYgrcsJKCFBp4QG3PPxNAieFimQ== + "@sentry/browser@7.99.0": version "7.99.0" resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-7.99.0.tgz#3e78beb490d141c988038ea902689a1e9171c6cf" From 66b56b8c8cb662a8ebfab2bfc3e6026937a585f0 Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Tue, 6 Feb 2024 10:35:03 -0500 Subject: [PATCH 047/357] feat(webvitals): Adds inp support to performance score rings (#64505) Adds inp support to Performance Score rings in the Webvitals landing page, page overview, and Performance All Transactions page. Also refactors parts of `performanceScoreRingWithTooltips`. --- .../components/pageOverviewSidebar.tsx | 13 +- .../performanceScoreRingWithTooltips.spec.tsx | 76 +++++++ .../performanceScoreRingWithTooltips.tsx | 199 ++++++------------ .../webVitals/performanceScoreChart.tsx | 8 +- .../widgets/performanceScoreWidget.tsx | 10 +- 5 files changed, 157 insertions(+), 149 deletions(-) create mode 100644 static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.spec.tsx diff --git a/static/app/views/performance/browser/webVitals/components/pageOverviewSidebar.tsx b/static/app/views/performance/browser/webVitals/components/pageOverviewSidebar.tsx index 55162938d2219c..d655ff83975d43 100644 --- a/static/app/views/performance/browser/webVitals/components/pageOverviewSidebar.tsx +++ b/static/app/views/performance/browser/webVitals/components/pageOverviewSidebar.tsx @@ -19,6 +19,7 @@ import {MiniAggregateWaterfall} from 'sentry/views/performance/browser/webVitals import PerformanceScoreRingWithTooltips from 'sentry/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips'; import {useProjectRawWebVitalsValuesTimeseriesQuery} from 'sentry/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsValuesTimeseriesQuery'; import type {ProjectScore} from 'sentry/views/performance/browser/webVitals/utils/types'; +import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {SidebarSpacer} from 'sentry/views/performance/transactionSummary/utils'; const CHART_HEIGHTS = 100; @@ -53,6 +54,8 @@ export function PageOverviewSidebar({ utc, }; + const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); + const {data, isLoading: isLoading} = useProjectRawWebVitalsValuesTimeseriesQuery({ transaction, datetime: doubledDatetime, @@ -118,12 +121,12 @@ export function PageOverviewSidebar({ // Gets weights to dynamically size the performance score ring segments const weights = projectScore ? { - cls: projectScore.clsWeight, - fcp: projectScore.fcpWeight, - fid: projectScore.fidWeight, lcp: projectScore.lcpWeight, + fcp: projectScore.fcpWeight, + fid: shouldReplaceFidWithInp ? 0 : projectScore.fidWeight, + inp: shouldReplaceFidWithInp ? 
projectScore.inpWeight : 0, + cls: projectScore.clsWeight, ttfb: projectScore.ttfbWeight, - inp: projectScore.inpWeight, } : undefined; @@ -151,7 +154,7 @@ export function PageOverviewSidebar({ projectScore={projectScore} text={projectScore.totalScore} width={220} - height={180} + height={200} ringBackgroundColors={ringBackgroundColors} ringSegmentColors={ringSegmentColors} weights={weights} diff --git a/static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.spec.tsx b/static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.spec.tsx new file mode 100644 index 00000000000000..9fad1a777a9d5f --- /dev/null +++ b/static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.spec.tsx @@ -0,0 +1,76 @@ +import {OrganizationFixture} from 'sentry-fixture/organization'; + +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import useOrganization from 'sentry/utils/useOrganization'; +import PerformanceScoreRingWithTooltips from 'sentry/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips'; + +jest.mock('sentry/utils/useOrganization'); +describe('PerformanceScoreRingWithTooltips', function () { + const organization = OrganizationFixture(); + const projectScore = { + lcpScore: 74, + fcpScore: 92, + clsScore: 71, + ttfbScore: 99, + fidScore: 98, + inpScore: 98, + totalScore: 83, + lcpWeight: 38, + fcpWeight: 23, + clsWeight: 18, + ttfbWeight: 16, + fidWeight: 5, + inpWeight: 5, + }; + + beforeEach(function () { + jest.mocked(useOrganization).mockReturnValue(organization); + }); + + afterEach(function () { + jest.resetAllMocks(); + }); + + it('renders segment labels', async () => { + render( + + ); + await screen.findByText('lcp'); + screen.getByText('fcp'); + screen.getByText('cls'); + screen.getByText('ttfb'); + screen.getByText('fid'); + }); + + it('renders inp', async () => { + const organizationWithInp = OrganizationFixture({ + features: ['starfish-browser-webvitals-replace-fid-with-inp'], + }); + jest.mocked(useOrganization).mockReturnValue(organizationWithInp); + render( + + ); + await screen.findByText('inp'); + screen.getByText('fcp'); + screen.getByText('cls'); + screen.getByText('ttfb'); + screen.getByText('lcp'); + }); +}); diff --git a/static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.tsx b/static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.tsx index 94892181c4b34b..3ab7ed88c3d8eb 100644 --- a/static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.tsx +++ b/static/app/views/performance/browser/webVitals/components/performanceScoreRingWithTooltips.tsx @@ -17,8 +17,9 @@ import type { ProjectScore, WebVitals, } from 'sentry/views/performance/browser/webVitals/utils/types'; +import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; -import {ORDER} from '../performanceScoreChart'; +import {ORDER, ORDER_WITH_INP} from '../performanceScoreChart'; import {getFormattedDuration} from './webVitalMeters'; @@ -137,6 +138,7 @@ function PerformanceScoreRingWithTooltips({ height, text, webVitalLabelCoordinates, + // TODO: This prop isn't really needed anymore since we should always get weights from projectScore weights = { lcp: LCP_WEIGHT, fcp: FCP_WEIGHT, @@ -172,8 +174,11 @@ function PerformanceScoreRingWithTooltips({ const [webVitalTooltip, setWebVitalTooltip] = useState(null); const 
[labelHovered, setLabelHovered] = useState(null); + const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); + const ringSegmentOrder = shouldReplaceFidWithInp ? ORDER_WITH_INP : ORDER; + if (labelHovered && inPerformanceWidget) { - const index = ORDER.indexOf(labelHovered); + const index = ringSegmentOrder.indexOf(labelHovered); ringSegmentColors = ringSegmentColors.map((color, i) => { return i === index ? color : theme.gray200; }); @@ -192,17 +197,16 @@ function PerformanceScoreRingWithTooltips({ onUnHover: () => setLabelHovered(null), }; - const {lcpX, lcpY, fcpX, fcpY, fidX, fidY, clsX, clsY, ttfbX, ttfbY} = - calculateLabelCoordinates( - size, - x, - y, - barWidth, - weights, - labelWidthPadding, - labelHeightPadding, - radiusPadding - ); + const coordinates = calculateLabelCoordinates( + size, + x, + y, + barWidth, + weights, + labelWidthPadding, + labelHeightPadding, + radiusPadding + ); return ( @@ -210,7 +214,9 @@ function PerformanceScoreRingWithTooltips({ - + {webVitalTooltip.toUpperCase()} {t('Opportunity')} @@ -219,7 +225,7 @@ function PerformanceScoreRingWithTooltips({ - + {webVitalTooltip.toUpperCase()} {t('Score')} {projectScore[`${webVitalTooltip}Score`]} @@ -230,56 +236,20 @@ function PerformanceScoreRingWithTooltips({ {!hideWebVitalLabels && ( - {weights.lcp > 0 && ( - - )} - {weights.fcp > 0 && ( - - )} - {weights.fid > 0 && ( - - )} - {weights.cls > 0 && ( - - )} - {weights.ttfb > 0 && ( - - )} + {Object.keys(weights).map((key, index) => { + const webVital = key as WebVitals; + if (weights[key] > 0 && coordinates[webVital] !== undefined) { + return ( + + ); + } + return null; + })} )} setWebVitalTooltip('fcp'), }, { - value: (projectScore.fidScore ?? 0) * weights.fid * 0.01, - maxValue: weights.fid, - key: 'fid', - onHoverActions: () => setWebVitalTooltip('fid'), + value: shouldReplaceFidWithInp + ? (projectScore.inpScore ?? 0) * weights.inp * 0.01 + : (projectScore.fidScore ?? 0) * weights.fid * 0.01, + maxValue: shouldReplaceFidWithInp ? weights.inp : weights.fid, + key: shouldReplaceFidWithInp ? 'inp' : 'fid', + onHoverActions: shouldReplaceFidWithInp + ? () => setWebVitalTooltip('inp') + : () => setWebVitalTooltip('fid'), }, { value: (projectScore.clsScore ?? 
0) * weights.cls * 0.01, @@ -354,74 +328,27 @@ function calculateLabelCoordinates( const sumMaxValues = Object.values(weights).reduce((acc, val) => acc + val, 0); const BASE_ANGLE = -90; const weightToAngle = (weight: number) => (weight / sumMaxValues) * 360; - const [lcpAngle, fcpAngle, fidAngle, clsAngle, ttfbAngle] = [ - weights.lcp, - weights.fcp, - weights.fid, - weights.cls, - weights.ttfb, - ].map(weightToAngle); - const lcpX = - center.x + radius * Math.cos(((BASE_ANGLE + lcpAngle / 2) * Math.PI) / 180); - const lcpY = - center.y + radius * Math.sin(((BASE_ANGLE + lcpAngle / 2) * Math.PI) / 180); - const fcpX = - center.x + - radius * Math.cos(((BASE_ANGLE + lcpAngle + fcpAngle / 2) * Math.PI) / 180); - const fcpY = - center.y + - radius * Math.sin(((BASE_ANGLE + lcpAngle + fcpAngle / 2) * Math.PI) / 180); - const fidX = - center.x + - radius * - Math.cos(((BASE_ANGLE + lcpAngle + fcpAngle + fidAngle / 2) * Math.PI) / 180); - const fidY = - center.y + - radius * - Math.sin(((BASE_ANGLE + lcpAngle + fcpAngle + fidAngle / 2) * Math.PI) / 180); - const clsX = - center.x + - radius * - Math.cos( - ((BASE_ANGLE + lcpAngle + fcpAngle + fidAngle + clsAngle / 2) * Math.PI) / 180 - ); - const clsY = - center.y + - radius * - Math.sin( - ((BASE_ANGLE + lcpAngle + fcpAngle + fidAngle + clsAngle / 2) * Math.PI) / 180 - ); - // Padding hack for now since ttfb label is longer than the others - const ttfbX = - center.x - - 12 + - radius * - Math.cos( - ((BASE_ANGLE + lcpAngle + fcpAngle + fidAngle + clsAngle + ttfbAngle / 2) * - Math.PI) / - 180 - ); - const ttfbY = - center.y + - radius * - Math.sin( - ((BASE_ANGLE + lcpAngle + fcpAngle + fidAngle + clsAngle + ttfbAngle / 2) * - Math.PI) / - 180 - ); + const angles = Object.values(weights).map(weightToAngle); + const coordinates = angles.map((angle, index) => { + const previousAngles = angles.slice(0, index).reduce((acc, value) => acc + value, 0); + const segmentX = + center.x + + radius * Math.cos(((BASE_ANGLE + previousAngles + angle / 2) * Math.PI) / 180); + const segmentY = + center.y + + radius * Math.sin(((BASE_ANGLE + previousAngles + angle / 2) * Math.PI) / 180); + return {x: segmentX, y: segmentY}; + }); - return { - lcpX, - lcpY, - fcpX, - fcpY, - fidX, - fidY, - clsX, - clsY, - ttfbX, - ttfbY, - }; + const results: {[key in WebVitals]?: {x: number; y: number}} = {}; + Object.keys(weights).forEach((key, index) => { + results[key] = { + // Padding hack for now since ttfb label is longer than the others + x: coordinates[index].x + (key === 'ttfb' ? -12 : 0), + y: coordinates[index].y, + }; + }); + return results; } const ProgressRingContainer = styled('div')``; diff --git a/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx b/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx index cd6dea39452b23..1ba08c2fe9a8d3 100644 --- a/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx +++ b/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx @@ -63,12 +63,12 @@ export function PerformanceScoreChart({ // Gets weights to dynamically size the performance score ring segments const weights = projectScore ? { - cls: projectScore.clsWeight, - fcp: projectScore.fcpWeight, - fid: projectScore.fidWeight, lcp: projectScore.lcpWeight, + fcp: projectScore.fcpWeight, + fid: shouldReplaceFidWithInp ? 0 : projectScore.fidWeight, + inp: shouldReplaceFidWithInp ? 
projectScore.inpWeight : 0,
+        cls: projectScore.clsWeight,
         ttfb: projectScore.ttfbWeight,
-        inp: projectScore.inpWeight,
       }
     : undefined;

diff --git a/static/app/views/performance/landing/widgets/widgets/performanceScoreWidget.tsx b/static/app/views/performance/landing/widgets/widgets/performanceScoreWidget.tsx
index 0e8ba6f5cc3bfb..1ad86809bc6f02 100644
--- a/static/app/views/performance/landing/widgets/widgets/performanceScoreWidget.tsx
+++ b/static/app/views/performance/landing/widgets/widgets/performanceScoreWidget.tsx
@@ -12,6 +12,7 @@ import {calculatePerformanceScoreFromTableDataRow} from 'sentry/views/performanc
 import {useProjectRawWebVitalsQuery} from 'sentry/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsQuery';
 import {calculatePerformanceScoreFromStoredTableDataRow} from 'sentry/views/performance/browser/webVitals/utils/queries/storedScoreQueries/calculatePerformanceScoreFromStored';
 import {useProjectWebVitalsScoresQuery} from 'sentry/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresQuery';
+import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting';
 import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting';

 import {GenericPerformanceWidget} from '../components/performanceWidget';
@@ -23,6 +24,7 @@ export function PerformanceScoreWidget(props: PerformanceWidgetProps) {
   const {InteractiveTitle, organization} = props;
   const theme = useTheme();
   const shouldUseStoredScores = useStoredScoresSetting();
+  const shouldReplaceFidWithInp = useReplaceFidWithInpSetting();
   const {data: projectData, isLoading} = useProjectRawWebVitalsQuery();
   const {data: projectScores, isLoading: isProjectScoresLoading} =
     useProjectWebVitalsScoresQuery({enabled: shouldUseStoredScores});
@@ -40,12 +42,12 @@ export function PerformanceScoreWidget(props: PerformanceWidgetProps) {

   const weights = projectScore
     ? {
-        cls: projectScore.clsWeight,
-        fid: projectScore.fidWeight,
-        fcp: projectScore.fcpWeight,
         lcp: projectScore.lcpWeight,
+        fcp: projectScore.fcpWeight,
+        inp: shouldReplaceFidWithInp ? projectScore.inpWeight : 0,
+        fid: shouldReplaceFidWithInp ? 0 : projectScore.fidWeight,
+        cls: projectScore.clsWeight,
         ttfb: projectScore.ttfbWeight,
-        inp: projectScore.inpWeight,
       }
     : undefined;

From 660dfe18524e6239feb9843778e1ad69da7de385 Mon Sep 17 00:00:00 2001
From: Leander Rodrigues
Date: Tue, 6 Feb 2024 10:36:21 -0500
Subject: [PATCH 048/357] chore(hybrid-cloud): More region-by-default endpoint
 fixes (#64511)

More region-by-default endpoint fixes.
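The recurring change is to mark each API test case as region-silo and to move any
control-silo model access behind an explicit silo-mode context. A minimal sketch of
the pattern (the test class here is hypothetical; the decorator, base class, and
helpers are the ones used in the diffs below):

```python
from sentry.silo.base import SiloMode
from sentry.testutils.cases import APITestCase
from sentry.testutils.silo import assume_test_silo_mode, region_silo_test


@region_silo_test
class ExampleEndpointTest(APITestCase):
    def test_simple(self):
        self.login_as(self.user)
        # Control-silo models such as API tokens can no longer be touched
        # directly from a region test; wrap that access in an explicit silo mode.
        with assume_test_silo_mode(SiloMode.CONTROL):
            ...
```

Most endpoints migrate cleanly with just the decorator.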
(currently debugging one problematic test though) --- ...t_project_app_store_connect_credentials.py | 72 +++++++------------ .../api/endpoints/test_project_index.py | 37 ++++++++-- .../endpoints/test_project_release_details.py | 7 +- 3 files changed, 60 insertions(+), 56 deletions(-) diff --git a/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py b/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py index 15ef9aa680bdde..16381d552cdfaa 100644 --- a/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py +++ b/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py @@ -1,15 +1,14 @@ -from unittest import mock +from unittest.mock import patch -import django.urls -import pytest from django.test import override_settings +from django.urls import reverse -import sentry.tasks.app_store_connect from sentry.api.endpoints.project_app_store_connect_credentials import ( AppStoreUpdateCredentialsSerializer, ) from sentry.lang.native.appconnect import AppStoreConnectConfig -from sentry.testutils.pytest.fixtures import django_db_all +from sentry.testutils.cases import TestCase +from sentry.testutils.silo import region_silo_test from sentry.utils import json @@ -82,15 +81,17 @@ def test_validate_secrets_string(self): assert data["appconnectPrivateKey"] == "honk" -class TestAppStoreConnectRefreshEndpoint: - @pytest.fixture - def config_id(self, default_project): - """A valid App Store Connect symbol server config ID.""" - cfg_id = "abc123" +@region_silo_test +class TestAppStoreConnectRefreshEndpoint(TestCase): + def setUp(self): + super().setUp() + self.login_as(self.user) + # A valid App Store Connect symbol server config ID. + self.config_id = "abc123" cfg = AppStoreConnectConfig.from_json( { "type": "appStoreConnect", - "id": cfg_id, + "id": self.config_id, "name": "Apple App Store Connect", "appconnectIssuer": "abc123" * 6, "appconnectKey": "abc123", @@ -100,56 +101,33 @@ def config_id(self, default_project): "bundleId": "com.example.app", } ) - cfg.update_project_symbol_source(default_project, allow_multiple=True) - return cfg_id - - @pytest.fixture - def mocked_dsym_download_task(self, monkeypatch): - dsym_download_task = mock.Mock() - monkeypatch.setattr( - sentry.tasks.app_store_connect, "inner_dsym_download", dsym_download_task - ) - return dsym_download_task - - @pytest.fixture - def refresh_url(self, default_project, default_organization, config_id): - return django.urls.reverse( + cfg.update_project_symbol_source(self.project, allow_multiple=True) + self.refresh_url = reverse( "sentry-api-0-project-appstoreconnect-refresh", kwargs={ - "project_slug": default_project.slug, - "organization_slug": default_organization.slug, - "credentials_id": config_id, + "project_slug": self.project.slug, + "organization_slug": self.organization.slug, + "credentials_id": self.config_id, }, ) - @django_db_all - def test_ok( - self, - client, - default_user, - default_project, - config_id, - mocked_dsym_download_task, - refresh_url, - ): - client.login(username=default_user.username, password="admin") - - response = client.post(refresh_url, format="json") + @patch("sentry.tasks.app_store_connect.inner_dsym_download") + def test_ok(self, mocked_dsym_download_task): + response = self.client.post(self.refresh_url, format="json") assert response.status_code == 200, response.content assert mocked_dsym_download_task.call_assert_called_once_with( - project_id=default_project.id, config_id=config_id + project_id=self.project.id, 
config_id=self.config_id ) - @django_db_all @override_settings(SENTRY_SELF_HOSTED=False) - def test_rate_limited(self, client, default_user, mocked_dsym_download_task, refresh_url): - client.login(username=default_user.username, password="admin") + @patch("sentry.tasks.app_store_connect.inner_dsym_download") + def test_rate_limited(self, mocked_dsym_download_task): for i in range(5): - client.post(refresh_url, format="json") + self.client.post(self.refresh_url, format="json") mocked_dsym_download_task.reset_mock() - response = client.post(refresh_url, format="json") + response = self.client.post(self.refresh_url, format="json") assert response.status_code == 429, response.content assert not mocked_dsym_download_task.called diff --git a/tests/sentry/api/endpoints/test_project_index.py b/tests/sentry/api/endpoints/test_project_index.py index a94e6a5e25a3e7..61d593b365d4a9 100644 --- a/tests/sentry/api/endpoints/test_project_index.py +++ b/tests/sentry/api/endpoints/test_project_index.py @@ -1,3 +1,4 @@ +import responses from django.db import router from django.urls import reverse from rest_framework import status @@ -8,9 +9,17 @@ from sentry.models.project import Project from sentry.models.projectkey import ProjectKey from sentry.silo import unguarded_write +from sentry.silo.base import SiloMode +from sentry.tasks.deletion.hybrid_cloud import ( + schedule_hybrid_cloud_foreign_key_jobs, + schedule_hybrid_cloud_foreign_key_jobs_control, +) from sentry.testutils.cases import APITestCase +from sentry.testutils.outbox import outbox_runner +from sentry.testutils.silo import assume_test_silo_mode, region_silo_test +@region_silo_test class ProjectsListTest(APITestCase): endpoint = "sentry-api-0-projects" @@ -186,7 +195,8 @@ def test_valid_with_internal_integration(self): webhook_url="http://example.com", ) # there should only be one record created so just grab the first one - token = SentryAppInstallationToken.objects.first() + with assume_test_silo_mode(SiloMode.CONTROL): + token = SentryAppInstallationToken.objects.first() path = reverse(self.endpoint) response = self.client.get(path, HTTP_AUTHORIZATION=f"Bearer {token.api_token.token}") assert project.name.encode("utf-8") in response.content @@ -198,12 +208,19 @@ def test_deleted_token_with_internal_integration(self): scopes=("project:read",), webhook_url="http://example.com", ) - # there should only be one record created so just grab the first one - token = SentryAppInstallationToken.objects.first() - token = token.api_token.token - # Delete the token - SentryAppInstallationToken.objects.all().delete() + with self.tasks(), assume_test_silo_mode(SiloMode.CONTROL), outbox_runner(): + # there should only be one record created so just grab the first one + install_token = SentryAppInstallationToken.objects.first() + api_token = install_token.api_token + token = api_token.token + # Delete the token + install_token.delete() + schedule_hybrid_cloud_foreign_key_jobs_control() + + with self.tasks(): + schedule_hybrid_cloud_foreign_key_jobs() + self.get_error_response( extra_headers={"HTTP_AUTHORIZATION": f"Bearer {token}"}, status_code=status.HTTP_401_UNAUTHORIZED, @@ -231,10 +248,16 @@ def test_valid_with_public_integration(self): ) assert self.project.name.encode("utf-8") in response.content + @responses.activate def test_deleted_token_with_public_integration(self): token = self.get_installed_unpublished_sentry_app_access_token() - ApiToken.objects.all().delete() + with assume_test_silo_mode(SiloMode.CONTROL), outbox_runner(): + token = 
ApiToken.objects.get(token=token) + token.delete() + + with self.tasks(): + schedule_hybrid_cloud_foreign_key_jobs() self.get_error_response( extra_headers={"HTTP_AUTHORIZATION": f"Bearer {token}"}, diff --git a/tests/sentry/api/endpoints/test_project_release_details.py b/tests/sentry/api/endpoints/test_project_release_details.py index 3c390b88f85b39..e54bd90538fa48 100644 --- a/tests/sentry/api/endpoints/test_project_release_details.py +++ b/tests/sentry/api/endpoints/test_project_release_details.py @@ -7,11 +7,11 @@ from sentry.constants import MAX_VERSION_LENGTH from sentry.models.activity import Activity from sentry.models.files.file import File -from sentry.models.orgauthtoken import OrgAuthToken from sentry.models.release import Release, ReleaseProject from sentry.models.releasecommit import ReleaseCommit from sentry.models.releasefile import ReleaseFile from sentry.testutils.cases import APITestCase +from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType from sentry.utils.security.orgauthtoken_token import generate_token, hash_token @@ -19,6 +19,7 @@ pytestmark = [requires_snuba] +@region_silo_test class ReleaseDetailsTest(APITestCase): def test_simple(self): self.login_as(user=self.user) @@ -47,6 +48,7 @@ def test_simple(self): assert response.data["newGroups"] == 5 +@region_silo_test class UpdateReleaseDetailsTest(APITestCase): def test_simple(self): self.login_as(user=self.user) @@ -170,7 +172,7 @@ def test_org_auth_token(self): project2 = self.create_project(name="bar", organization=project.organization) good_token_str = generate_token(project.organization.slug, "") - OrgAuthToken.objects.create( + self.create_org_auth_token( organization_id=project.organization.id, name="token 1", token_hashed=hash_token(good_token_str), @@ -204,6 +206,7 @@ def test_org_auth_token(self): assert release.ref == "master" +@region_silo_test class ReleaseDeleteTest(APITestCase): def test_simple(self): self.login_as(user=self.user) From bbd8acc48e345626f2183a3bee8fe5b1840b85ad Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Tue, 6 Feb 2024 10:36:33 -0500 Subject: [PATCH 049/357] chore(hybrid-cloud): Remove deprecated testutil (#64662) All instances have already been replaced with `assert_webhook_outboxes_with_shard_id` cc @GabeVillalobos --- src/sentry/testutils/outbox.py | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/src/sentry/testutils/outbox.py b/src/sentry/testutils/outbox.py index 95fa368a11e6e7..d4e1f8b2afb252 100644 --- a/src/sentry/testutils/outbox.py +++ b/src/sentry/testutils/outbox.py @@ -8,13 +8,7 @@ from django.conf import settings from django.core.handlers.wsgi import WSGIRequest -from sentry.models.outbox import ( - ControlOutbox, - OutboxBase, - OutboxCategory, - OutboxScope, - WebhookProviderIdentifier, -) +from sentry.models.outbox import ControlOutbox, OutboxBase, OutboxCategory, OutboxScope from sentry.silo import SiloMode from sentry.tasks.deliver_from_outbox import enqueue_outbox_jobs, enqueue_outbox_jobs_control from sentry.testutils.silo import assume_test_silo_mode @@ -64,19 +58,6 @@ def assert_no_webhook_outboxes(): assert outboxes == 0, "No outboxes should be created" -# DEPRECATED: use assert_webhook_outboxes_for_integration instead -def assert_webhook_outboxes( - factory_request: WSGIRequest, - webhook_identifier: WebhookProviderIdentifier, - region_names: list[str], -): - assert_webhook_outboxes_with_shard_id( - 
factory_request=factory_request, - expected_shard_id=webhook_identifier.value, - region_names=region_names, - ) - - def assert_webhook_outboxes_with_shard_id( factory_request: WSGIRequest, expected_shard_id: int, From 3424664a3ab973c05f9e223957c6b5fca4c10aed Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:37:05 +0100 Subject: [PATCH 050/357] feat(dashboards): uniform metrics widgets (#64657) --- static/app/utils/metrics/useMetricsMeta.tsx | 10 + static/app/views/dashboards/dashboard.tsx | 6 +- static/app/views/dashboards/detail.spec.tsx | 8 + static/app/views/dashboards/detail.tsx | 419 +++++++++--------- static/app/views/dashboards/layoutUtils.tsx | 2 +- .../app/views/dashboards/widgetCard/index.tsx | 1 + .../widgetCard/metricWidgetCard/index.tsx | 128 ++++-- .../metricWidgetCard/inlineEditor.tsx | 4 +- .../dashboards/widgetCard/metricsContext.tsx | 36 ++ .../widgetCard/widgetCardChartContainer.tsx | 2 +- static/app/views/ddm/widget.tsx | 2 +- 11 files changed, 380 insertions(+), 238 deletions(-) create mode 100644 static/app/views/dashboards/widgetCard/metricsContext.tsx diff --git a/static/app/utils/metrics/useMetricsMeta.tsx b/static/app/utils/metrics/useMetricsMeta.tsx index 444a02a5425379..f63e3f0004ea8e 100644 --- a/static/app/utils/metrics/useMetricsMeta.tsx +++ b/static/app/utils/metrics/useMetricsMeta.tsx @@ -8,6 +8,16 @@ import type {MetricMeta, MRI, UseCase} from '../../types/metrics'; const DEFAULT_USE_CASES = ['sessions', 'transactions', 'custom', 'spans']; +export function getMetricsMetaQueryKeys( + orgSlug: string, + projects: PageFilters['projects'], + useCases?: UseCase[] +): ApiQueryKey[] { + return ( + useCases?.map(useCase => getMetricsMetaQueryKey(orgSlug, projects, useCase)) ?? 
[] + ); +} + export function getMetricsMetaQueryKey( orgSlug: string, projects: PageFilters['projects'], diff --git a/static/app/views/dashboards/dashboard.tsx b/static/app/views/dashboards/dashboard.tsx index a8a0b7a5d5d1c5..2adb93df7b3265 100644 --- a/static/app/views/dashboards/dashboard.tsx +++ b/static/app/views/dashboards/dashboard.tsx @@ -116,7 +116,6 @@ class Dashboard extends Component { }, windowWidth: window.innerWidth, }; - connectDashboardCharts(DASHBOARD_CHART_GROUP); } static getDerivedStateFromProps(props, state) { @@ -160,6 +159,8 @@ class Dashboard extends Component { // Get member list data for issue widgets this.fetchMemberList(); + + connectDashboardCharts(DASHBOARD_CHART_GROUP); } componentDidUpdate(prevProps: Props) { @@ -540,6 +541,9 @@ class Dashboard extends Component { if (widgetType === WidgetType.RELEASE) { return organization.features.includes('dashboards-rh-widget'); } + if (widgetType === WidgetType.METRICS) { + return hasDDMFeature(organization); + } return true; }); diff --git a/static/app/views/dashboards/detail.spec.tsx b/static/app/views/dashboards/detail.spec.tsx index 40f7612b09054d..f2da7b6a318c91 100644 --- a/static/app/views/dashboards/detail.spec.tsx +++ b/static/app/views/dashboards/detail.spec.tsx @@ -93,6 +93,10 @@ describe('Dashboards > Detail', function () { url: '/organizations/org-slug/releases/', body: [], }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/metrics/meta/', + body: [], + }); }); afterEach(function () { @@ -356,6 +360,10 @@ describe('Dashboards > Detail', function () { url: '/organizations/org-slug/prompts-activity/', body: {}, }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/metrics/meta/', + body: [], + }); }); afterEach(function () { diff --git a/static/app/views/dashboards/detail.tsx b/static/app/views/dashboards/detail.tsx index 468686cb86f09e..0dbc3cebb2f767 100644 --- a/static/app/views/dashboards/detail.tsx +++ b/static/app/views/dashboards/detail.tsx @@ -50,6 +50,7 @@ import { resetPageFilters, } from 'sentry/views/dashboards/utils'; import {DataSet} from 'sentry/views/dashboards/widgetBuilder/utils'; +import {MetricsDashboardContextProvider} from 'sentry/views/dashboards/widgetCard/metricsContext'; import {MetricsDataSwitcherAlert} from 'sentry/views/performance/landing/metricsDataSwitcherAlert'; import {defaultMetricWidget} from '../../utils/metrics/dashboard'; @@ -746,74 +747,76 @@ class DashboardDetail extends Component { > - - - - - + + + + + + + - - + + - - - - - - {metricsDataSide => ( - - + {metricsDataSide => ( + - - )} - - - - + forceTransactions={metricsDataSide.forceTransactionsOnly} + > + + + )} + + + + + @@ -876,158 +879,162 @@ class DashboardDetail extends Component { > - - - - - - - + + + + + - - - - - - - - - - + + + + + + + + + + - {metricsDataSide => ( - - {isDashboardUsingTransaction ? ( - - ) : null} - + {metricsDataSide => ( + { - resetPageFilters(dashboard, location); - this.setState({ - modifiedDashboard: { - ...(modifiedDashboard ?? dashboard), - filters: dashboard.filters, - }, - }); - }} - onSave={() => { - const newModifiedDashboard = { - ...cloneDashboard(modifiedDashboard ?? dashboard), - ...getCurrentPageFilters(location), - filters: - getDashboardFiltersFromURL(location) ?? - (modifiedDashboard ?? 
dashboard).filters, - }; - updateDashboard( - api, - organization.slug, - newModifiedDashboard - ).then( - (newDashboard: DashboardDetails) => { - if (onDashboardUpdate) { - onDashboardUpdate(newDashboard); - this.setState({ - modifiedDashboard: null, - }); - } - addSuccessMessage(t('Dashboard filters updated')); - browserHistory.replace( - normalizeUrl({ - pathname: `/organizations/${organization.slug}/dashboard/${newDashboard.id}/`, - query: omit( - location.query, - Object.values(DashboardFilterKeys) - ), - }) - ); - }, - // `updateDashboard` does its own error handling - () => undefined - ); - }} - /> - - - + {isDashboardUsingTransaction ? ( + + ) : null} + { + resetPageFilters(dashboard, location); + this.setState({ + modifiedDashboard: { + ...(modifiedDashboard ?? dashboard), + filters: dashboard.filters, + }, + }); + }} + onSave={() => { + const newModifiedDashboard = { + ...cloneDashboard(modifiedDashboard ?? dashboard), + ...getCurrentPageFilters(location), + filters: + getDashboardFiltersFromURL(location) ?? + (modifiedDashboard ?? dashboard).filters, + }; + updateDashboard( + api, + organization.slug, + newModifiedDashboard + ).then( + (newDashboard: DashboardDetails) => { + if (onDashboardUpdate) { + onDashboardUpdate(newDashboard); + this.setState({ + modifiedDashboard: null, + }); + } + addSuccessMessage(t('Dashboard filters updated')); + browserHistory.replace( + normalizeUrl({ + pathname: `/organizations/${organization.slug}/dashboard/${newDashboard.id}/`, + query: omit( + location.query, + Object.values(DashboardFilterKeys) + ), + }) + ); + }, + // `updateDashboard` does its own error handling + () => undefined + ); + }} /> - - - )} - - - - - - + + + + + + )} + + + + + + + diff --git a/static/app/views/dashboards/layoutUtils.tsx b/static/app/views/dashboards/layoutUtils.tsx index 78a8f2ba0844e7..dc319fbcbea55f 100644 --- a/static/app/views/dashboards/layoutUtils.tsx +++ b/static/app/views/dashboards/layoutUtils.tsx @@ -12,7 +12,7 @@ import type {Widget, WidgetLayout} from './types'; import {DisplayType} from './types'; export const DEFAULT_WIDGET_WIDTH = 2; -export const METRIC_WIDGET_MIN_SIZE = {minH: 2, h: 2, w: 3}; +export const METRIC_WIDGET_MIN_SIZE = {minH: 2, h: 2, w: 2}; const WIDGET_PREFIX = 'grid-item'; diff --git a/static/app/views/dashboards/widgetCard/index.tsx b/static/app/views/dashboards/widgetCard/index.tsx index 7d469ec3c5deb7..2eb41c2b5fc16c 100644 --- a/static/app/views/dashboards/widgetCard/index.tsx +++ b/static/app/views/dashboards/widgetCard/index.tsx @@ -286,6 +286,7 @@ class WidgetCard extends Component { selection={selection} widget={widget} dashboardFilters={dashboardFilters} + renderErrorMessage={renderErrorMessage} /> ); } diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx index 65558b1de108a3..3acacb86871053 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx @@ -1,23 +1,34 @@ -import {useCallback, useMemo, useState} from 'react'; +import {Fragment, useCallback, useMemo, useRef, useState} from 'react'; import type {InjectedRouter} from 'react-router'; import styled from '@emotion/styled'; import type {Location} from 'history'; import ErrorPanel from 'sentry/components/charts/errorPanel'; import {HeaderTitle} from 'sentry/components/charts/styles'; +import TransitionChart from 'sentry/components/charts/transitionChart'; +import EmptyMessage from 
'sentry/components/emptyMessage'; import TextOverflow from 'sentry/components/textOverflow'; -import {IconWarning} from 'sentry/icons'; +import {IconSearch, IconWarning} from 'sentry/icons'; +import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {MRI, Organization, PageFilters} from 'sentry/types'; +import type {ReactEchartsRef} from 'sentry/types/echarts'; import {stringifyMetricWidget} from 'sentry/utils/metrics'; -import type {MetricWidgetQueryParams} from 'sentry/utils/metrics/types'; +import { + MetricDisplayType, + type MetricWidgetQueryParams, +} from 'sentry/utils/metrics/types'; +import {useMetricsDataZoom} from 'sentry/utils/metrics/useMetricsData'; import {WidgetCardPanel, WidgetTitleRow} from 'sentry/views/dashboards/widgetCard'; import type {AugmentedEChartDataZoomHandler} from 'sentry/views/dashboards/widgetCard/chart'; import {DashboardsMEPContext} from 'sentry/views/dashboards/widgetCard/dashboardsMEPContext'; import {InlineEditor} from 'sentry/views/dashboards/widgetCard/metricWidgetCard/inlineEditor'; import {Toolbar} from 'sentry/views/dashboards/widgetCard/toolbar'; import WidgetCardContextMenu from 'sentry/views/dashboards/widgetCard/widgetCardContextMenu'; -import {MetricWidgetBody} from 'sentry/views/ddm/widget'; +import {MetricChart} from 'sentry/views/ddm/chart'; +import {createChartPalette} from 'sentry/views/ddm/metricsChartPalette'; +import {getChartTimeseries} from 'sentry/views/ddm/widget'; +import {LoadingScreen} from 'sentry/views/starfish/components/chart'; import { convertToDashboardWidget, @@ -26,6 +37,7 @@ import { import {parseField} from '../../../../utils/metrics/mri'; import {DASHBOARD_CHART_GROUP} from '../../dashboard'; import type {DashboardFilters, Widget} from '../../types'; +import {useMetricsDashboardContext} from '../metricsContext'; type Props = { isEditingDashboard: boolean; @@ -43,6 +55,7 @@ type Props = { onEdit?: (index: string) => void; onUpdate?: (widget: Widget | null) => void; onZoom?: AugmentedEChartDataZoomHandler; + renderErrorMessage?: (errorMessage?: string) => React.ReactNode; showSlider?: boolean; tableItemLimit?: number; windowWidth?: number; @@ -62,7 +75,10 @@ export function MetricWidgetCard({ router, index, dashboardFilters, + renderErrorMessage, }: Props) { + useMetricsDashboardContext(); + const [metricWidgetQueryParams, setMetricWidgetQueryParams] = useState(convertFromWidget(widget)); @@ -164,14 +180,14 @@ export function MetricWidgetCard({ )} - - - + + {isEditingDashboard && } @@ -183,6 +199,7 @@ type MetricWidgetChartContainerProps = { widget: Widget; dashboardFilters?: DashboardFilters; editorParams?: Partial; + renderErrorMessage?: (errorMessage?: string) => React.ReactNode; }; export function MetricWidgetChartContainer({ @@ -190,25 +207,83 @@ export function MetricWidgetChartContainer({ widget, editorParams = {}, dashboardFilters, + renderErrorMessage, }: MetricWidgetChartContainerProps) { const metricWidgetQueryParams = { ...convertFromWidget(widget), ...editorParams, }; + const {projects, environments, datetime} = selection; + const {mri, op, groupBy, displayType} = metricWidgetQueryParams; + + const { + data: timeseriesData, + isLoading, + isError, + error, + } = useMetricsDataZoom( + { + mri, + op, + query: extendQuery(metricWidgetQueryParams.query, dashboardFilters), + groupBy, + projects, + environments, + datetime, + }, + {fidelity: displayType === MetricDisplayType.BAR ? 
'low' : 'high'} + ); + + const chartRef = useRef(null); + + const chartSeries = useMemo(() => { + return timeseriesData + ? getChartTimeseries(timeseriesData, { + getChartPalette: createChartPalette, + mri, + groupBy, + }) + : []; + }, [timeseriesData, mri, groupBy]); + + if (isError) { + const errorMessage = + error?.responseJSON?.detail?.toString() || t('Error while fetching metrics data'); + return ( + + {renderErrorMessage?.(errorMessage)} + + + + + ); + } + + if (timeseriesData?.groups.length === 0) { + return ( + } + title={t('No results')} + description={t('No results found for the given query')} + /> + ); + } + return ( - + + + + + + ); } @@ -278,5 +353,6 @@ const WidgetTitle = styled(HeaderTitle)` const MetricWidgetChartWrapper = styled('div')` height: 100%; width: 100%; - padding: ${space(2)}; -`; + padding: ${space(3)}; + padding-top: ${space(2)}; + `; diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx index 1903909faefe75..f3ba2c8675d82e 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx @@ -33,11 +33,11 @@ import type { MetricWidgetQueryParams, } from 'sentry/utils/metrics/types'; import {MetricDisplayType} from 'sentry/utils/metrics/types'; -import {useMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta'; import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags'; import {MetricSearchBar} from 'sentry/views/ddm/metricSearchBar'; import {formatMRI} from '../../../../utils/metrics/mri'; +import {useMetricsDashboardContext} from '../metricsContext'; type InlineEditorProps = { displayType: MetricDisplayType; @@ -72,7 +72,7 @@ export const InlineEditor = memo(function InlineEditor({ size = 'sm', }: InlineEditorProps) { const [editingName, setEditingName] = useState(false); - const {data: meta, isLoading: isMetaLoading} = useMetricsMeta(projects); + const {metricsMeta: meta, isLoading: isMetaLoading} = useMetricsDashboardContext(); const {data: tags = []} = useMetricsTags(metricsQuery.mri, projects); diff --git a/static/app/views/dashboards/widgetCard/metricsContext.tsx b/static/app/views/dashboards/widgetCard/metricsContext.tsx new file mode 100644 index 00000000000000..80e5fb211571c3 --- /dev/null +++ b/static/app/views/dashboards/widgetCard/metricsContext.tsx @@ -0,0 +1,36 @@ +import {createContext, useContext, useMemo} from 'react'; + +import {useMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta'; +import usePageFilters from 'sentry/utils/usePageFilters'; + +interface MetricsDashboardContextValue { + isLoading: boolean; + metricsMeta: ReturnType['data']; +} + +export const MetricsDashboardContext = createContext({ + metricsMeta: [], + isLoading: false, +}); + +export function useMetricsDashboardContext() { + return useContext(MetricsDashboardContext); +} + +export function MetricsDashboardContextProvider({children}: {children: React.ReactNode}) { + const pageFilters = usePageFilters().selection; + const metricsMetaQuery = useMetricsMeta(pageFilters.projects); + + const contextValue = useMemo(() => { + return { + metricsMeta: metricsMetaQuery.data, + isLoading: metricsMetaQuery.isLoading, + }; + }, [metricsMetaQuery]); + + return ( + + {children} + + ); +} diff --git a/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx b/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx index d6a36d2a4576fb..96bb80ef6a2bcb 
100644 --- a/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx +++ b/static/app/views/dashboards/widgetCard/widgetCardChartContainer.tsx @@ -258,7 +258,7 @@ const StyledTransparentLoadingMask = styled(props => ( align-items: center; `; -function LoadingScreen({loading}: {loading: boolean}) { +export function LoadingScreen({loading}: {loading: boolean}) { if (!loading) { return null; } diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 26a39a41ceb05b..8702388e9a652a 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -225,7 +225,7 @@ export interface SamplesProps { onClick?: (sample: Sample) => void; } -export const MetricWidgetBody = memo( +const MetricWidgetBody = memo( ({ onChange, displayType, From 86489efcb6422a717c55e35b06c3e090dc6539b0 Mon Sep 17 00:00:00 2001 From: Julia Hoge Date: Tue, 6 Feb 2024 08:01:30 -0800 Subject: [PATCH 051/357] chore(api): make certain group endpoints private (#64535) --- src/sentry/api/endpoints/group_hashes.py | 4 ++-- src/sentry/api/endpoints/group_hashes_split.py | 6 +++--- src/sentry/api/endpoints/group_notes_details.py | 4 ++-- src/sentry/api/endpoints/group_reprocessing.py | 2 +- src/sentry/api/endpoints/group_similar_issues.py | 2 +- src/sentry/api/endpoints/group_tombstone.py | 2 +- src/sentry/api/endpoints/group_tombstone_details.py | 2 +- src/sentry/plugins/bases/issue2.py | 4 ++-- 8 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/sentry/api/endpoints/group_hashes.py b/src/sentry/api/endpoints/group_hashes.py index 94947718445121..89d80fa5af290e 100644 --- a/src/sentry/api/endpoints/group_hashes.py +++ b/src/sentry/api/endpoints/group_hashes.py @@ -18,8 +18,8 @@ @region_silo_endpoint class GroupHashesEndpoint(GroupEndpoint): publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "GET": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PRIVATE, + "GET": ApiPublishStatus.PRIVATE, } def get(self, request: Request, group) -> Response: diff --git a/src/sentry/api/endpoints/group_hashes_split.py b/src/sentry/api/endpoints/group_hashes_split.py index d487e14f69fea4..f902e1d24090e5 100644 --- a/src/sentry/api/endpoints/group_hashes_split.py +++ b/src/sentry/api/endpoints/group_hashes_split.py @@ -25,9 +25,9 @@ @region_silo_endpoint class GroupHashesSplitEndpoint(GroupEndpoint): publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "GET": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PRIVATE, + "GET": ApiPublishStatus.PRIVATE, + "PUT": ApiPublishStatus.PRIVATE, } def get(self, request: Request, group) -> Response: diff --git a/src/sentry/api/endpoints/group_notes_details.py b/src/sentry/api/endpoints/group_notes_details.py index b81e6ea5943201..65fb6012f2eedf 100644 --- a/src/sentry/api/endpoints/group_notes_details.py +++ b/src/sentry/api/endpoints/group_notes_details.py @@ -19,8 +19,8 @@ @region_silo_endpoint class GroupNotesDetailsEndpoint(GroupEndpoint): publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PRIVATE, + "PUT": ApiPublishStatus.PRIVATE, } # We explicitly don't allow a request with an ApiKey diff --git a/src/sentry/api/endpoints/group_reprocessing.py b/src/sentry/api/endpoints/group_reprocessing.py index 1015bd7314d556..d69b196617f2ca 100644 --- a/src/sentry/api/endpoints/group_reprocessing.py +++ b/src/sentry/api/endpoints/group_reprocessing.py @@ -11,7 +11,7 @@ @region_silo_endpoint class 
GroupReprocessingEndpoint(GroupEndpoint):
     publish_status = {
-        "POST": ApiPublishStatus.UNKNOWN,
+        "POST": ApiPublishStatus.PRIVATE,
     }

     def post(self, request: Request, group) -> Response:
diff --git a/src/sentry/api/endpoints/group_similar_issues.py b/src/sentry/api/endpoints/group_similar_issues.py
index 90511bd196dcfd..cc7d8cedf49367 100644
--- a/src/sentry/api/endpoints/group_similar_issues.py
+++ b/src/sentry/api/endpoints/group_similar_issues.py
@@ -22,7 +22,7 @@ def _fix_label(label):
 @region_silo_endpoint
 class GroupSimilarIssuesEndpoint(GroupEndpoint):
     publish_status = {
-        "GET": ApiPublishStatus.UNKNOWN,
+        "GET": ApiPublishStatus.PRIVATE,
     }

     def get(self, request: Request, group) -> Response:
diff --git a/src/sentry/api/endpoints/group_tombstone.py b/src/sentry/api/endpoints/group_tombstone.py
index fa53d2348b9437..ac5c2faee71b1d 100644
--- a/src/sentry/api/endpoints/group_tombstone.py
+++ b/src/sentry/api/endpoints/group_tombstone.py
@@ -14,7 +14,7 @@ class GroupTombstoneEndpoint(ProjectEndpoint):
     owner = ApiOwner.ISSUES
     publish_status = {
-        "GET": ApiPublishStatus.UNKNOWN,
+        "GET": ApiPublishStatus.PRIVATE,
     }

     def get(self, request: Request, project) -> Response:
diff --git a/src/sentry/api/endpoints/group_tombstone_details.py b/src/sentry/api/endpoints/group_tombstone_details.py
index c30862e3be689d..ce9f52740802af 100644
--- a/src/sentry/api/endpoints/group_tombstone_details.py
+++ b/src/sentry/api/endpoints/group_tombstone_details.py
@@ -14,7 +14,7 @@ class GroupTombstoneDetailsEndpoint(ProjectEndpoint):
     owner = ApiOwner.ISSUES
     publish_status = {
-        "DELETE": ApiPublishStatus.UNKNOWN,
+        "DELETE": ApiPublishStatus.PRIVATE,
     }

     def delete(self, request: Request, project, tombstone_id) -> Response:
diff --git a/src/sentry/plugins/bases/issue2.py b/src/sentry/plugins/bases/issue2.py
index cf788277bc9c8e..13ca262478517e 100644
--- a/src/sentry/plugins/bases/issue2.py
+++ b/src/sentry/plugins/bases/issue2.py
@@ -30,8 +30,8 @@
 @region_silo_endpoint
 class IssueGroupActionEndpoint(PluginGroupEndpoint):
     publish_status = {
-        "GET": ApiPublishStatus.UNKNOWN,
-        "POST": ApiPublishStatus.UNKNOWN,
+        "GET": ApiPublishStatus.PRIVATE,
+        "POST": ApiPublishStatus.PRIVATE,
     }
     view_method_name = None
     plugin = None

From f0ee34a650fe90733819f0294fccf5b1088b96c0 Mon Sep 17 00:00:00 2001
From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com>
Date: Tue, 6 Feb 2024 11:07:22 -0500
Subject: [PATCH 052/357] ref: fix one reference to removed timezone.utc
 (#64666)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This slipped through because it is untested 😬 -- django 5.x removed this.

Resolves https://sentry.sentry.io/issues/4950278046

---
 src/sentry/tasks/auto_resolve_issues.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/sentry/tasks/auto_resolve_issues.py b/src/sentry/tasks/auto_resolve_issues.py
index 07e8cd0edcc32d..384f97c2147c4f 100644
--- a/src/sentry/tasks/auto_resolve_issues.py
+++ b/src/sentry/tasks/auto_resolve_issues.py
@@ -2,10 +2,10 @@

 from collections import defaultdict
 from collections.abc import Mapping
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from time import time

-from django.utils import timezone
+from django.utils import timezone as django_timezone

 from sentry import analytics
 from sentry.issues import grouptype
@@ -74,7 +74,7 @@ def auto_resolve_project_issues(project_id, cutoff=None, chunk_size=1000, **kwar
     if cutoff:
         cutoff = datetime.utcfromtimestamp(cutoff).replace(tzinfo=timezone.utc)
     else:
-        cutoff = timezone.now() - timedelta(hours=int(age))
+        cutoff = django_timezone.now() - timedelta(hours=int(age))

     filter_conditions = {
         "project": project,
@@ -96,7 +96,7 @@ def auto_resolve_project_issues(project_id, cutoff=None, chunk_size=1000, **kwar
     for group in queryset:
         happened = Group.objects.filter(id=group.id, status=GroupStatus.UNRESOLVED).update(
             status=GroupStatus.RESOLVED,
-            resolved_at=timezone.now(),
+            resolved_at=django_timezone.now(),
             substatus=None,
         )
         remove_group_from_inbox(group, action=GroupInboxRemoveAction.RESOLVED)

From 9835676ee1c8a1115c1bcf4c84374372804ec6da Mon Sep 17 00:00:00 2001
From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com>
Date: Tue, 6 Feb 2024 12:40:20 -0500
Subject: [PATCH 053/357] feat(webvitals): Updates webvitals page overview
 samples table to support inp, and adds an interactions data switcher (#64515)

Updates page overview to support inp samples. Adds a data switcher to switch
between page loads and interactions.

---
 .../browser/webVitals/pageOverview.spec.tsx   |  42 +++++-
 .../browser/webVitals/pageOverview.tsx        |  21 +--
 .../pageOverviewWebVitalsDetailPanel.tsx      |   2 +-
 .../webVitals/pagePerformanceTable.tsx        |  19 ++-
 .../webVitals/pageSamplePerformanceTable.tsx  | 138 ++++++++++++++----
 ...TransactionSamplesWebVitalsScoresQuery.tsx |   3 +-
 6 files changed, 168 insertions(+), 57 deletions(-)

diff --git a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx
index e9eb69965ebd10..6de45e350e9356 100644
--- a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx
+++ b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx
@@ -1,6 +1,6 @@
 import {OrganizationFixture} from 'sentry-fixture/organization';

-import {render, screen} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';

 import {useLocation} from 'sentry/utils/useLocation';
 import useOrganization from 'sentry/utils/useOrganization';
@@ -16,6 +16,8 @@ describe('PageOverview', function () {
     features: ['starfish-browser-webvitals', 'performance-database-view'],
   });

+  let eventsMock;
+
   beforeEach(function () {
     jest.mocked(useLocation).mockReturnValue({
       pathname: '',
@@ -44,7 +46,7 @@
     });
     jest.mocked(useOrganization).mockReturnValue(organization);

-    MockApiClient.addMockResponse({
+    eventsMock = MockApiClient.addMockResponse({
       url: `/organizations/${organization.slug}/events/`,
       body: {
         data: [],
@@ -61,7 +63,7 @@
   });

   afterEach(function () {
-    jest.resetAllMocks();
+    jest.clearAllMocks();
   });

   it('renders performance score migration alert', async () => {
@@ -79,4 +81,38 @@
       /We made improvements to how Performance Scores are calculated for your projects/
     );
   });
+
+  it('renders pageload and interaction switcher', async () => {
+    const organizationWithInp = OrganizationFixture({
+      features: [
+        'starfish-browser-webvitals',
+        'performance-database-view',
+        'starfish-browser-webvitals-replace-fid-with-inp',
+      ],
+    });
+    jest.mocked(useOrganization).mockReturnValue(organizationWithInp);
+    jest.mocked(useLocation).mockReturnValue({
+      pathname: '',
+      search: '',
+      query: {useStoredScores: 'true', transaction: '/'},
+      hash: '',
+      state: undefined,
+      action: 'PUSH',
+      key: '',
+    });
+    render(<PageOverview />);
+    await
screen.findAllByText('Interactions'); + userEvent.click(screen.getAllByText('Interactions')[0]); + await waitFor(() => + expect(eventsMock).toHaveBeenCalledWith( + '/organizations/org-slug/events/', + expect.objectContaining({ + query: expect.objectContaining({ + query: + 'transaction.op:pageload transaction:"/" has:measurements.score.total has:measurements.fid ', + }), + }) + ) + ); + }); }); diff --git a/static/app/views/performance/browser/webVitals/pageOverview.tsx b/static/app/views/performance/browser/webVitals/pageOverview.tsx index 03bfee6af9be1c..e14c2559732246 100644 --- a/static/app/views/performance/browser/webVitals/pageOverview.tsx +++ b/static/app/views/performance/browser/webVitals/pageOverview.tsx @@ -9,8 +9,6 @@ import {Breadcrumbs} from 'sentry/components/breadcrumbs'; import {LinkButton} from 'sentry/components/button'; import {AggregateSpans} from 'sentry/components/events/interfaces/spans/aggregateSpans'; import FloatingFeedbackWidget from 'sentry/components/feedback/widget/floatingFeedbackWidget'; -import type {GridColumnOrder} from 'sentry/components/gridEditable'; -import {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; import * as Layout from 'sentry/components/layouts/thirds'; import ExternalLink from 'sentry/components/links/externalLink'; import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; @@ -42,10 +40,7 @@ import {calculatePerformanceScoreFromTableDataRow} from 'sentry/views/performanc import {useProjectRawWebVitalsQuery} from 'sentry/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsQuery'; import {calculatePerformanceScoreFromStoredTableDataRow} from 'sentry/views/performance/browser/webVitals/utils/queries/storedScoreQueries/calculatePerformanceScoreFromStored'; import {useProjectWebVitalsScoresQuery} from 'sentry/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresQuery'; -import type { - TransactionSampleRowWithScore, - WebVitals, -} from 'sentry/views/performance/browser/webVitals/utils/types'; +import type {WebVitals} from 'sentry/views/performance/browser/webVitals/utils/types'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; import { AlertContent, @@ -72,19 +67,6 @@ const LANDING_DISPLAYS = [ }, ]; -const SAMPLES_COLUMN_ORDER: GridColumnOrder[] = [ - {key: 'id', width: COL_WIDTH_UNDEFINED, name: 'Event ID'}, - {key: 'user.display', width: COL_WIDTH_UNDEFINED, name: 'User'}, - {key: 'measurements.lcp', width: COL_WIDTH_UNDEFINED, name: 'LCP'}, - {key: 'measurements.fcp', width: COL_WIDTH_UNDEFINED, name: 'FCP'}, - {key: 'measurements.fid', width: COL_WIDTH_UNDEFINED, name: 'FID'}, - {key: 'measurements.cls', width: COL_WIDTH_UNDEFINED, name: 'CLS'}, - {key: 'measurements.ttfb', width: COL_WIDTH_UNDEFINED, name: 'TTFB'}, - {key: 'profile.id', width: COL_WIDTH_UNDEFINED, name: 'Profile'}, - {key: 'replayId', width: COL_WIDTH_UNDEFINED, name: 'Replay'}, - {key: 'totalScore', width: COL_WIDTH_UNDEFINED, name: 'Score'}, -]; - function getCurrentTabSelection(selectedTab) { const tab = decodeScalar(selectedTab); if (tab && Object.values(LandingDisplayField).includes(tab as LandingDisplayField)) { @@ -284,7 +266,6 @@ export default function PageOverview() { diff --git a/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx b/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx index c6ba38f6122a71..3d0a57d0ed070a 
100644 --- a/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx +++ b/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx @@ -275,7 +275,7 @@ export function PageOverviewWebVitalsDetailPanel({ }; const webVitalScore = projectScore[`${webVital}Score`]; - const webVitalValue = projectData?.data[0][`p75(measurements.${webVital})`] as + const webVitalValue = projectData?.data[0]?.[`p75(measurements.${webVital})`] as | number | undefined; diff --git a/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx b/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx index ed71b13b2e393b..5d3718ee931108 100644 --- a/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx +++ b/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx @@ -34,12 +34,19 @@ import { SORTABLE_FIELDS, SORTABLE_SCORE_FIELDS, } from 'sentry/views/performance/browser/webVitals/utils/types'; +import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; import {useWebVitalsSort} from 'sentry/views/performance/browser/webVitals/utils/useWebVitalsSort'; type Column = GridColumnHeader; -const columnOrder: GridColumnOrder[] = [ +const INP_COLUMN: GridColumnOrder = { + key: 'p75(measurements.inp)', + width: COL_WIDTH_UNDEFINED, + name: 'INP', +}; + +const COLUMN_ORDER: GridColumnOrder[] = [ {key: 'transaction', width: COL_WIDTH_UNDEFINED, name: 'Pages'}, {key: 'count()', width: COL_WIDTH_UNDEFINED, name: 'Pageloads'}, {key: 'p75(measurements.lcp)', width: COL_WIDTH_UNDEFINED, name: 'LCP'}, @@ -58,6 +65,15 @@ export function PagePerformanceTable() { const location = useLocation(); const {projects} = useProjects(); const shouldUseStoredScores = useStoredScoresSetting(); + const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); + + const columnOrder = useMemo(() => { + const columns = [...COLUMN_ORDER]; + if (shouldReplaceFidWithInp) { + columns.splice(4, 1, INP_COLUMN); + } + return columns; + }, [shouldReplaceFidWithInp]); const query = decodeScalar(location.query.query, ''); @@ -252,6 +268,7 @@ export function PagePerformanceTable() { 'p75(measurements.lcp)', 'p75(measurements.ttfb)', 'p75(measurements.fid)', + 'p75(measurements.inp)', ].includes(key) ) { const measurement = parseFunction(key)?.arguments?.[0]; diff --git a/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx b/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx index 8add79d9dbb8e3..8f54215162f7f9 100644 --- a/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx +++ b/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx @@ -1,4 +1,4 @@ -import {useMemo} from 'react'; +import {useMemo, useState} from 'react'; import {Link} from 'react-router'; import styled from '@emotion/styled'; @@ -11,6 +11,7 @@ import GridEditable, {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable' import SortLink from 'sentry/components/gridEditable/sortLink'; import ExternalLink from 'sentry/components/links/externalLink'; import Pagination from 'sentry/components/pagination'; +import {SegmentedControl} from 'sentry/components/segmentedControl'; import {Tooltip} from 'sentry/components/tooltip'; import {IconChevron, IconPlay, IconProfiling} from 'sentry/icons'; import {t} from 
'sentry/locale'; @@ -30,48 +31,76 @@ import useRouter from 'sentry/utils/useRouter'; import {useRoutes} from 'sentry/utils/useRoutes'; import {PerformanceBadge} from 'sentry/views/performance/browser/webVitals/components/performanceBadge'; import {useTransactionSamplesWebVitalsQuery} from 'sentry/views/performance/browser/webVitals/utils/queries/useTransactionSamplesWebVitalsQuery'; -import type {TransactionSampleRowWithScore} from 'sentry/views/performance/browser/webVitals/utils/types'; +import type { + InteractionSpanSampleRowWithScore, + TransactionSampleRowWithScore, +} from 'sentry/views/performance/browser/webVitals/utils/types'; import { DEFAULT_INDEXED_SORT, SORTABLE_INDEXED_FIELDS, SORTABLE_INDEXED_SCORE_FIELDS, } from 'sentry/views/performance/browser/webVitals/utils/types'; +import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; import {useWebVitalsSort} from 'sentry/views/performance/browser/webVitals/utils/useWebVitalsSort'; import {generateReplayLink} from 'sentry/views/performance/transactionSummary/utils'; type Column = GridColumnHeader; +type InteractionsColumn = GridColumnHeader; -export const COLUMN_ORDER: GridColumnOrder[] = [ +const PAGELOADS_COLUMN_ORDER: GridColumnOrder[] = [ + {key: 'id', width: COL_WIDTH_UNDEFINED, name: 'Event ID'}, {key: 'user.display', width: COL_WIDTH_UNDEFINED, name: 'User'}, - {key: 'transaction.duration', width: COL_WIDTH_UNDEFINED, name: 'Duration'}, {key: 'measurements.lcp', width: COL_WIDTH_UNDEFINED, name: 'LCP'}, {key: 'measurements.fcp', width: COL_WIDTH_UNDEFINED, name: 'FCP'}, {key: 'measurements.fid', width: COL_WIDTH_UNDEFINED, name: 'FID'}, {key: 'measurements.cls', width: COL_WIDTH_UNDEFINED, name: 'CLS'}, {key: 'measurements.ttfb', width: COL_WIDTH_UNDEFINED, name: 'TTFB'}, + {key: 'profile.id', width: COL_WIDTH_UNDEFINED, name: 'Profile'}, + {key: 'replayId', width: COL_WIDTH_UNDEFINED, name: 'Replay'}, {key: 'totalScore', width: COL_WIDTH_UNDEFINED, name: 'Score'}, ]; +const INTERACTION_SAMPLES_COLUMN_ORDER: GridColumnOrder< + keyof InteractionSpanSampleRowWithScore +>[] = [ + {key: 'user.display', width: COL_WIDTH_UNDEFINED, name: 'User'}, + {key: 'measurements.inp', width: COL_WIDTH_UNDEFINED, name: 'INP'}, + {key: 'profile.id', width: COL_WIDTH_UNDEFINED, name: 'Profile'}, + {key: 'replayId', width: COL_WIDTH_UNDEFINED, name: 'Replay'}, + {key: 'totalScore', width: COL_WIDTH_UNDEFINED, name: 'Score'}, +]; + +const INP_SEARCH_FILTER = 'has:measurements.fid'; + +enum Dataset { + PAGELOADS = 'pageloads', + INTERACTIONS = 'interactions', +} + type Props = { transaction: string; - columnOrder?: GridColumnOrder[]; limit?: number; search?: string; }; -export function PageSamplePerformanceTable({ - transaction, - columnOrder, - search, - limit = 9, -}: Props) { +export function PageSamplePerformanceTable({transaction, search, limit = 9}: Props) { const location = useLocation(); const {projects} = useProjects(); const organization = useOrganization(); const routes = useRoutes(); const router = useRouter(); const shouldUseStoredScores = useStoredScoresSetting(); + const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); + + const [dataset, setDataset] = useState(Dataset.PAGELOADS); + + const samplesColumnOrder = useMemo(() => { + if (shouldReplaceFidWithInp) { + return PAGELOADS_COLUMN_ORDER.filter(col => col.key !== 'measurements.fid'); + } + return 
PAGELOADS_COLUMN_ORDER; + }, [shouldReplaceFidWithInp]); const sortableFields = shouldUseStoredScores ? SORTABLE_INDEXED_FIELDS @@ -92,8 +121,6 @@ export function PageSamplePerformanceTable({ const query = decodeScalar(location.query.query); - // Do 3 queries filtering on LCP to get a spread of good, meh, and poor events - // We can't query by performance score yet, so we're using LCP as a best estimate const { data: tableData, isLoading, @@ -103,13 +130,26 @@ export function PageSamplePerformanceTable({ transaction, query: search, withProfiles: true, + enabled: dataset === Dataset.PAGELOADS, + }); + + const { + data: interactionsTableData, + isLoading: isInteractionsLoading, + pageLinks: interactionsPageLinks, + } = useTransactionSamplesWebVitalsQuery({ + limit, + transaction, + query: `${INP_SEARCH_FILTER} ${search ?? ''}`, + withProfiles: true, + enabled: dataset === Dataset.INTERACTIONS, }); const getFormattedDuration = (value: number) => { return getDuration(value, value < 1 ? 0 : 2, true); }; - function renderHeadCell(col: Column) { + function renderHeadCell(col: Column | InteractionsColumn) { function generateSortLink() { const key = col.key === 'totalScore' ? 'measurements.score.total' : col.key; let newSortDirection: Sort['kind'] = 'desc'; @@ -136,6 +176,7 @@ export function PageSamplePerformanceTable({ 'measurements.ttfb', 'measurements.fid', 'measurements.cls', + 'measurements.inp', 'transaction.duration', ].includes(col.key) ) { @@ -194,7 +235,10 @@ export function PageSamplePerformanceTable({ return {col.name}; } - function renderBodyCell(col: Column, row: TransactionSampleRowWithScore) { + function renderBodyCell( + col: Column | InteractionsColumn, + row: TransactionSampleRowWithScore | InteractionSpanSampleRowWithScore + ) { const {key} = col; if (key === 'totalScore') { return ( @@ -203,7 +247,7 @@ export function PageSamplePerformanceTable({ ); } - if (key === 'transaction') { + if (key === 'transaction' && 'transaction' in row) { return ( {project && ( @@ -229,6 +273,7 @@ export function PageSamplePerformanceTable({ 'measurements.lcp', 'measurements.ttfb', 'measurements.fid', + 'measurements.inp', 'transaction.duration', ].includes(key) ) { @@ -277,7 +322,7 @@ export function PageSamplePerformanceTable({ ); } - if (key === 'replayId') { + if (key === 'replayId' && 'id' in row) { const replayTarget = row['transaction.duration'] !== undefined && replayLinkGenerator( @@ -305,7 +350,7 @@ export function PageSamplePerformanceTable({ ); } - if (key === 'id') { + if (key === 'id' && 'id' in row) { const eventSlug = generateEventSlug({...row, project: row.projectSlug}); const eventTarget = getTransactionDetailsUrl(organization.slug, eventSlug); return ( @@ -322,6 +367,17 @@ export function PageSamplePerformanceTable({ return ( + {shouldReplaceFidWithInp && ( + + + {t('Pageloads')} + + + {t('Interactions')} + + + )} + - + {/* The Pagination component disappears if pageLinks is not defined, which happens any time the table data is loading. 
So we render a disabled button bar if pageLinks is not defined to minimize ui shifting */} @@ -354,18 +414,34 @@ export function PageSamplePerformanceTable({ )} - + {dataset === Dataset.PAGELOADS && ( + + )} + {dataset === Dataset.INTERACTIONS && ( + + )} ); diff --git a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx index 80470c5ec82c4a..edf4e6831eefa5 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx @@ -117,6 +117,7 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ 'measurements.cls': toNumber(row['measurements.cls']), 'measurements.ttfb': toNumber(row['measurements.ttfb']), 'measurements.fid': toNumber(row['measurements.fid']), + 'measurements.inp': toNumber(row['measurements.fid']), 'transaction.duration': toNumber(row['transaction.duration']), replayId: row.replayId?.toString(), 'profile.id': row['profile.id']?.toString(), @@ -140,7 +141,7 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ }) // TODO: Discover doesn't let us query more than 20 fields and we're hitting that limit. // Clean up the types to account for this so we don't need to do this casting. - ) as TransactionSampleRowWithScore[]) + ) as unknown as TransactionSampleRowWithScore[]) : []; return { From b71214e1b983aa87e6cb2bb95eef5a57e0258c00 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 6 Feb 2024 09:46:58 -0800 Subject: [PATCH 054/357] feat(issue-priority): Add feature flag for issue priority UI (#64600) --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 86f4186a3ad48d..c64a2fd0bdbbbf 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1612,6 +1612,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:issue-details-tag-improvements": False, # Enable issue platform "organizations:issue-platform": False, + # Enable issue priority in the UI + "organizations:issue-priority-ui": False, # Whether to allow issue only search on the issue list "organizations:issue-search-allow-postgres-only-search": False, # Whether to make a side/parallel query against events -> group_attributes when searching issues diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 8706441b8ba00d..35ee40080b9a46 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -127,6 +127,7 @@ default_manager.add("organizations:issue-details-stacktrace-link-in-frame", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-details-tag-improvements", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-platform", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:issue-priority-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-allow-postgres-only-search", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-group-attributes-side-query", OrganizationFeature, 
FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-use-cdc-primary", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From 5e96e89d16c7c1e22b995eb0e1e3584629b8ce22 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 6 Feb 2024 09:48:07 -0800 Subject: [PATCH 055/357] ref(replays): Move clip logic from ReplayContext to ReplayReader (#64307) Removes `startTimeOffsetMs` and `durationMs` from the replay context. Consumers now get these from `ReplayReader.getDurationMs()` and `ReplayReader.getStartOffsetMs()`. When constructing ReplayReader, we trim the end of the RRWeb frames but keep the start the same. We save the start time offset, which is used in the replay context to begin playback at the correct time. Breadcrumbs and spans are trimmed from the front _and_ end in the public accessor (used for rendering the breadcrumbs), but we still keep the historical values for things like showing the current URL. --- .../eventReplay/replayClipPreview.spec.tsx | 21 +-- .../events/eventReplay/replayClipPreview.tsx | 41 ++---- .../replays/breadcrumbs/breadcrumbItem.tsx | 6 +- .../replays/breadcrumbs/replayTimeline.tsx | 14 +- .../breadcrumbs/replayTimelineEvents.tsx | 9 +- .../components/replays/player/scrubber.tsx | 21 ++- .../player/useScrubberMouseTracking.tsx | 6 +- .../app/components/replays/replayContext.tsx | 133 ++++------------- .../replays/timeAndScrubberGrid.tsx | 10 +- static/app/components/replays/utils.tsx | 29 ++-- .../utils/replays/hooks/useReplayReader.tsx | 10 +- .../app/utils/replays/replayReader.spec.tsx | 128 +++++++++++++++++ static/app/utils/replays/replayReader.tsx | 134 ++++++++++++++++-- .../replays/detail/breadcrumbs/index.tsx | 17 +-- 14 files changed, 345 insertions(+), 234 deletions(-) diff --git a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx index b4e587e971cb03..8cd32dcea3cb34 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx @@ -1,3 +1,4 @@ +import {duration} from 'moment'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {RRWebInitFrameEventsFixture} from 'sentry-fixture/replay/rrweb'; @@ -34,11 +35,18 @@ const mockReplay = ReplayReader.factory({ name: 'Chrome', version: '110.0.0', }, + started_at: new Date('Sep 22, 2022 4:58:39 PM UTC'), + finished_at: new Date(mockEventTimestampMs + 5_000), + duration: duration(10, 'seconds'), }), errors: [], attachments: RRWebInitFrameEventsFixture({ timestamp: new Date('Sep 22, 2022 4:58:39 PM UTC'), }), + clipWindow: { + startTimestampMs: mockEventTimestampMs - 5_000, + endTimestampMs: mockEventTimestampMs + 5_000, + }, }); mockUseReplayReader.mockImplementation(() => { @@ -165,12 +173,10 @@ describe('ReplayClipPreview', () => { // Should be two sliders, one for the scrubber and one for timeline const sliders = screen.getAllByRole('slider', {name: 'Seek slider'}); - // Replay should start at 57000ms because event happened at 62000ms - expect(sliders[0]).toHaveValue('57000'); - expect(sliders[0]).toHaveAttribute('min', '57000'); - - // End of range should be 5 seconds after at 67000ms - expect(sliders[0]).toHaveAttribute('max', '67000'); + // Replay should be 10 seconds long and start at the beginning + expect(sliders[0]).toHaveValue('0'); + expect(sliders[0]).toHaveAttribute('min', '0'); +
expect(sliders[0]).toHaveAttribute('max', '10000'); }); it('Should link to the full replay correctly', () => { @@ -195,9 +201,6 @@ describe('ReplayClipPreview', () => { // Breadcrumbs sidebar should be open expect(screen.getByTestId('replay-details-breadcrumbs-tab')).toBeInTheDocument(); - // Should filter out breadcrumbs that aren't part of the clip - expect(screen.getByText('No breadcrumbs recorded')).toBeInTheDocument(); - // Can close the breadcrumbs sidebar await userEvent.click(screen.getByRole('button', {name: 'Collapse Sidebar'})); expect( diff --git a/static/app/components/events/eventReplay/replayClipPreview.tsx b/static/app/components/events/eventReplay/replayClipPreview.tsx index 79bddedff4ebb7..c402c033b866f4 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.tsx @@ -83,7 +83,7 @@ function ReplayPreviewPlayer({ const routes = useRoutes(); const organization = useOrganization(); const [isSidebarOpen, setIsSidebarOpen] = useState(true); - const {currentTime} = useReplayContext(); + const {replay, currentTime} = useReplayContext(); const fullscreenRef = useRef(null); const {toggle: toggleFullscreen} = useFullscreen({ @@ -91,12 +91,13 @@ function ReplayPreviewPlayer({ }); const isFullscreen = useIsFullscreen(); + const startOffsetMs = replay?.getStartOffsetMs() ?? 0; const fullReplayUrl = { pathname: normalizeUrl(`/organizations/${organization.slug}/replays/${replayId}/`), query: { referrer: getRouteStringFromRoutes(routes), t_main: TabKey.ERRORS, - t: currentTime / 1000, + t: (currentTime + startOffsetMs) / 1000, }, }; @@ -147,40 +148,24 @@ function ReplayClipPreview({ replaySlug, fullReplayButtonProps, }: Props) { + const clipWindow = useMemo( + () => ({ + startTimestampMs: eventTimestampMs - CLIP_DURATION_BEFORE_EVENT, + endTimestampMs: eventTimestampMs + CLIP_DURATION_AFTER_EVENT, + }), + [eventTimestampMs] + ); + const {fetching, replay, replayRecord, fetchError, replayId} = useReplayReader({ orgSlug, replaySlug, + clipWindow, }); - const startTimestampMs = replayRecord?.started_at?.getTime() ?? 0; - const endTimestampMs = replayRecord?.finished_at?.getTime() ?? 
0; - const eventTimeOffsetMs = Math.abs(eventTimestampMs - startTimestampMs); - const endTimeOffsetMs = Math.abs(endTimestampMs - startTimestampMs); - useRouteAnalyticsParams({ event_replay_status: getReplayAnalyticsStatus({fetchError, replayRecord}), }); - const clipStartTimeOffsetMs = Math.max( - eventTimeOffsetMs - CLIP_DURATION_BEFORE_EVENT, - 0 - ); - const clipDurationMs = - Math.min(eventTimeOffsetMs + CLIP_DURATION_AFTER_EVENT, endTimeOffsetMs) - - clipStartTimeOffsetMs; - - const clipWindow = useMemo( - () => ({ - startTimeOffsetMs: clipStartTimeOffsetMs, - durationMs: clipDurationMs, - }), - [clipDurationMs, clipStartTimeOffsetMs] - ); - const offset = useMemo( - () => ({offsetMs: clipWindow.startTimeOffsetMs}), - [clipWindow.startTimeOffsetMs] - ); - if (replayRecord?.is_archived) { return ( @@ -210,8 +195,6 @@ function ReplayClipPreview({ diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx index 8facd74eb9daa8..81ed89d8b72527 100644 --- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx +++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx @@ -70,7 +70,7 @@ function BreadcrumbItem({ traces, }: Props) { const {color, description, title, icon, timestampMs} = getCrumbOrFrameData(frame); - const {replay, startTimeOffsetMs} = useReplayContext(); + const {replay} = useReplayContext(); const forceSpan = 'category' in frame && FRAMES_WITH_BUTTONS.includes(frame.category); @@ -96,7 +96,7 @@ function BreadcrumbItem({ {onClick ? ( ) : null} @@ -123,7 +123,7 @@ function BreadcrumbItem({
    (null); const mouseTrackingProps = useTimelineScrubberMouseTracking( @@ -35,12 +34,13 @@ function ReplayTimeline({}: Props) { return ; } - const startTimestampMs = replay.getReplay().started_at.getTime() + startTimeOffsetMs; + const durationMs = replay.getDurationMs(); + const startTimestampMs = replay.getStartTimestampMs(); const chapterFrames = replay.getChapterFrames(); // timeline is in the middle const initialTranslate = 0.5 / timelineScale; - const percentComplete = divide(currentTime - startTimeOffsetMs, durationMs); + const percentComplete = divide(currentTime, durationMs); const starting = percentComplete < initialTranslate; const ending = percentComplete + initialTranslate > 1; @@ -52,10 +52,7 @@ function ReplayTimeline({}: Props) { if (ending) { return initialTranslate - (1 - initialTranslate); } - return ( - initialTranslate - - (currentTime - startTimeOffsetMs > durationMs ? 1 : percentComplete) - ); + return initialTranslate - (currentTime > durationMs ? 1 : percentComplete); }; return ( @@ -76,7 +73,6 @@ function ReplayTimeline({}: Props) { frames={chapterFrames} startTimestampMs={startTimestampMs} width={width} - startTimeOffsetMs={startTimeOffsetMs} /> diff --git a/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx b/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx index 129262219fc63e..39ac256584d2be 100644 --- a/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx +++ b/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx @@ -19,7 +19,6 @@ const NODE_SIZES = [8, 12, 16]; interface Props { durationMs: number; frames: ReplayFrame[]; - startTimeOffsetMs: number; startTimestampMs: number; width: number; className?: string; @@ -30,18 +29,12 @@ function ReplayTimelineEvents({ durationMs, frames, startTimestampMs, - startTimeOffsetMs, width, }: Props) { const markerWidth = frames.length < 200 ? 4 : frames.length < 500 ? 6 : 10; const totalColumns = Math.floor(width / markerWidth); - const framesByCol = getFramesByColumn( - durationMs, - frames, - totalColumns, - startTimeOffsetMs - ); + const framesByCol = getFramesByColumn(durationMs, frames, totalColumns); return ( diff --git a/static/app/components/replays/player/scrubber.tsx b/static/app/components/replays/player/scrubber.tsx index c8969c8c31b6e9..18b9719f4338f2 100644 --- a/static/app/components/replays/player/scrubber.tsx +++ b/static/app/components/replays/player/scrubber.tsx @@ -17,17 +17,12 @@ type Props = { }; function Scrubber({className, showZoomIndicators = false}: Props) { - const { - currentHoverTime, - currentTime, - setCurrentTime, - timelineScale, - startTimeOffsetMs, - durationMs, - } = useReplayContext(); - - const percentComplete = divide(currentTime - startTimeOffsetMs, durationMs); - const hoverPlace = divide(currentHoverTime || 0 - startTimeOffsetMs, durationMs); + const {replay, currentHoverTime, currentTime, setCurrentTime, timelineScale} = + useReplayContext(); + + const durationMs = replay?.getDurationMs() ?? 
0; + const percentComplete = divide(currentTime, durationMs); + const hoverPlace = divide(currentHoverTime || 0, durationMs); const initialTranslate = 0.5 / timelineScale; @@ -80,8 +75,8 @@ function Scrubber({className, showZoomIndicators = false}: Props) { setCurrentTime(value || 0)} showLabel={false} diff --git a/static/app/components/replays/player/useScrubberMouseTracking.tsx b/static/app/components/replays/player/useScrubberMouseTracking.tsx index b0428ec8f8a760..9d66072b59d482 100644 --- a/static/app/components/replays/player/useScrubberMouseTracking.tsx +++ b/static/app/components/replays/player/useScrubberMouseTracking.tsx @@ -10,7 +10,8 @@ type Opts = { }; export function useScrubberMouseTracking({elem}: Opts) { - const {setCurrentHoverTime, durationMs} = useReplayContext(); + const {replay, setCurrentHoverTime} = useReplayContext(); + const durationMs = replay?.getDurationMs(); const handlePositionChange = useCallback( params => { @@ -42,7 +43,8 @@ export function useTimelineScrubberMouseTracking( {elem}: Opts, scale: number ) { - const {currentTime, setCurrentHoverTime, durationMs} = useReplayContext(); + const {replay, currentTime, setCurrentHoverTime} = useReplayContext(); + const durationMs = replay?.getDurationMs(); const handlePositionChange = useCallback( params => { diff --git a/static/app/components/replays/replayContext.tsx b/static/app/components/replays/replayContext.tsx index 74d7525d54d812..e327c1fe9761d1 100644 --- a/static/app/components/replays/replayContext.tsx +++ b/static/app/components/replays/replayContext.tsx @@ -55,11 +55,6 @@ interface ReplayPlayerContextProps extends HighlightCallbacks { */ dimensions: Dimensions; - /** - * Total duration of the replay, in milliseconds - */ - durationMs: number; - /** * The calculated speed of the player when fast-forwarding through idle moments in the video * The value is set to `0` when the video is not fast-forwarding @@ -136,11 +131,6 @@ interface ReplayPlayerContextProps extends HighlightCallbacks { */ speed: number; - /** - * The time, in milliseconds, where the video should start - */ - startTimeOffsetMs: number; - /** * Scale of the timeline width, starts from 1x and increases by 1x */ @@ -166,7 +156,6 @@ const ReplayPlayerContext = createContext({ currentHoverTime: undefined, currentTime: 0, dimensions: {height: 0, width: 0}, - durationMs: 0, fastForwardSpeed: 0, addHighlight: () => {}, initRoot: () => {}, @@ -183,7 +172,6 @@ const ReplayPlayerContext = createContext({ setSpeed: () => {}, setTimelineScale: () => {}, speed: 1, - startTimeOffsetMs: 0, timelineScale: 1, togglePlayPause: () => {}, toggleSkipInactive: () => {}, @@ -205,14 +193,6 @@ type Props = { replay: ReplayReader | null; - /** - * If provided, the replay will be clipped to this window. - */ - clipWindow?: { - durationMs: number; - startTimeOffsetMs: number; - }; - /** * Time, in seconds, when the video should start */ @@ -234,54 +214,9 @@ function updateSavedReplayConfig(config: ReplayConfig) { localStorage.setItem(ReplayLocalstorageKeys.REPLAY_CONFIG, JSON.stringify(config)); } -/** - * When a clip window is provided, this hook will automatically pause and end - * the replay when the provided window time has passed. 
- */ -function useClipWindow({ - clipWindow, - replayer, - onFinished, - isPlaying, -}: { - clipWindow: Props['clipWindow']; - isPlaying: boolean; - onFinished: () => void; - replayer: Replayer | null; -}) { - useEffect(() => { - if (!replayer || !clipWindow || !isPlaying) { - return () => {}; - } - - let timer: number | undefined; - - const checkForEndOfClip = () => { - const currentTime = replayer.getCurrentTime(); - const endTimeOffsetMs = clipWindow.startTimeOffsetMs + clipWindow.durationMs; - - if (currentTime >= endTimeOffsetMs) { - replayer.pause(endTimeOffsetMs); - onFinished(); - } - - timer = requestAnimationFrame(checkForEndOfClip); - }; - - timer = requestAnimationFrame(checkForEndOfClip); - - return () => { - if (timer) { - cancelAnimationFrame(timer); - } - }; - }, [clipWindow, isPlaying, onFinished, replayer]); -} - export function Provider({ analyticsContext, children, - clipWindow, initialTimeOffsetMs, isFetching, replay, @@ -313,15 +248,8 @@ export function Provider({ const didApplyInitialOffset = useRef(false); const [timelineScale, setTimelineScale] = useState(1); - const fullReplayDurationMs = replay?.getDurationMs() ?? 0; - const startTimeOffsetMs = clipWindow?.startTimeOffsetMs - ? clamp(clipWindow.startTimeOffsetMs, 0, fullReplayDurationMs) - : 0; - const durationMs = clipWindow?.durationMs - ? Math.min(clipWindow.durationMs, fullReplayDurationMs - startTimeOffsetMs) - : fullReplayDurationMs; - - const isFinished = replayerRef.current?.getCurrentTime() === finishedAtMS; + const durationMs = replay?.getDurationMs() ?? 0; + const startTimeOffsetMs = replay?.getStartOffsetMs() ?? 0; const forceDimensions = (dimension: Dimensions) => { setDimensions(dimension); @@ -337,22 +265,16 @@ export function Provider({ replayerRef, }); - const setReplayFinished = useCallback(() => { - setFinishedAtMS(replayerRef.current?.getCurrentTime() ?? -1); - setIsPlaying(false); - }, []); - - const getCurrentTime = useCallback( + const getCurrentPlayerTime = useCallback( () => (replayerRef.current ? Math.max(replayerRef.current.getCurrentTime(), 0) : 0), [] ); - useClipWindow({ - clipWindow, - replayer: replayerRef.current, - isPlaying, - onFinished: setReplayFinished, - }); + const isFinished = getCurrentPlayerTime() === finishedAtMS; + const setReplayFinished = useCallback(() => { + setFinishedAtMS(getCurrentPlayerTime()); + setIsPlaying(false); + }, [getCurrentPlayerTime]); const privateSetCurrentTime = useCallback( (requestedTimeMs: number) => { @@ -368,15 +290,11 @@ export function Provider({ replayer.setConfig({skipInactive: false}); } - const time = clamp( - requestedTimeMs, - startTimeOffsetMs, - startTimeOffsetMs + durationMs - ); + const time = clamp(requestedTimeMs, 0, startTimeOffsetMs + durationMs); // Sometimes rrweb doesn't get to the exact target time, as long as it has // changed away from the previous time then we can hide then buffering message. - setBufferTime({target: time, previous: getCurrentTime()}); + setBufferTime({target: time, previous: getCurrentPlayerTime()}); // Clear previous timers. Without this (but with the setTimeout) multiple // requests to set the currentTime could finish out of order and cause jumping. 
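Because ReplayReader now trims the rrweb frames to the clip window itself, the requestAnimationFrame-based useClipWindow watcher above is no longer needed: with the frames pre-trimmed, playback ends at the last kept frame. What remains in the context is a translation between the player clock and the clip-relative clock. A minimal sketch of that translation (assuming `startOffsetMs` comes from `ReplayReader.getStartOffsetMs()`; these helper names are illustrative, not part of the commit):

// The rrweb player measures time from the start of the full replay;
// consumers of the context see time measured from the start of the clip.
const toPlayerTimeMs = (clipTimeMs: number, startOffsetMs: number) =>
  clipTimeMs + startOffsetMs;
const toClipTimeMs = (playerTimeMs: number, startOffsetMs: number) =>
  playerTimeMs - startOffsetMs;

// Example: for a 60s replay clipped to [20s, 30s], startOffsetMs is 20_000,
// so setCurrentTime(0) seeks the player to 20_000ms, and a player position
// of 25_000ms is exposed as currentTime === 5_000ms.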
@@ -394,25 +312,27 @@ export function Provider({ setIsPlaying(false); } }, - [startTimeOffsetMs, durationMs, getCurrentTime, isPlaying] + [startTimeOffsetMs, durationMs, getCurrentPlayerTime, isPlaying] ); const setCurrentTime = useCallback( (requestedTimeMs: number) => { - privateSetCurrentTime(requestedTimeMs); + privateSetCurrentTime(requestedTimeMs + startTimeOffsetMs); clearAllHighlights(); }, - [privateSetCurrentTime, clearAllHighlights] + [privateSetCurrentTime, startTimeOffsetMs, clearAllHighlights] ); const applyInitialOffset = useCallback(() => { + const offsetMs = (initialTimeOffsetMs?.offsetMs ?? 0) + startTimeOffsetMs; + if ( !didApplyInitialOffset.current && - initialTimeOffsetMs && + (initialTimeOffsetMs || offsetMs) && events && replayerRef.current ) { - const {highlight: highlightArgs, offsetMs} = initialTimeOffsetMs; + const highlightArgs = initialTimeOffsetMs?.highlight; if (offsetMs > 0) { privateSetCurrentTime(offsetMs); } @@ -431,6 +351,7 @@ export function Provider({ addHighlight, initialTimeOffsetMs, privateSetCurrentTime, + startTimeOffsetMs, ]); useEffect(clearAllHighlights, [clearAllHighlights, isPlaying]); @@ -517,14 +438,14 @@ export function Provider({ if (isPlaying) { replayer.pause(); replayer.setConfig({speed: newSpeed}); - replayer.play(getCurrentTime()); + replayer.play(getCurrentPlayerTime()); } else { replayer.setConfig({speed: newSpeed}); } setSpeedState(newSpeed); }, - [getCurrentTime, isPlaying] + [getCurrentPlayerTime, isPlaying] ); const togglePlayPause = useCallback( @@ -535,9 +456,9 @@ export function Provider({ } if (play) { - replayer.play(getCurrentTime()); + replayer.play(getCurrentPlayerTime()); } else { - replayer.pause(getCurrentTime()); + replayer.pause(getCurrentPlayerTime()); } setIsPlaying(play); @@ -548,7 +469,7 @@ export function Provider({ context: analyticsContext, }); }, - [organization, user.email, analyticsContext, getCurrentTime] + [organization, user.email, analyticsContext, getCurrentPlayerTime] ); useEffect(() => { @@ -594,15 +515,17 @@ export function Provider({ setIsSkippingInactive(skip); }, []); - const currentPlayerTime = useCurrentTime(getCurrentTime); + const currentPlayerTime = useCurrentTime(getCurrentPlayerTime); - const [isBuffering, currentTime] = + const [isBuffering, currentBufferedPlayerTime] = buffer.target !== -1 && buffer.previous === currentPlayerTime && buffer.target !== buffer.previous ? [true, buffer.target] : [false, currentPlayerTime]; + const currentTime = currentBufferedPlayerTime - startTimeOffsetMs; + useEffect(() => { if (!isBuffering && events && events.length >= 2 && replayerRef.current) { applyInitialOffset(); @@ -623,7 +546,6 @@ export function Provider({ currentHoverTime, currentTime, dimensions, - durationMs, fastForwardSpeed, addHighlight, initRoot, @@ -640,7 +562,6 @@ export function Provider({ setSpeed, setTimelineScale, speed, - startTimeOffsetMs, timelineScale, togglePlayPause, toggleSkipInactive, diff --git a/static/app/components/replays/timeAndScrubberGrid.tsx b/static/app/components/replays/timeAndScrubberGrid.tsx index 67802e6590e7c0..68bc63c4bcf572 100644 --- a/static/app/components/replays/timeAndScrubberGrid.tsx +++ b/static/app/components/replays/timeAndScrubberGrid.tsx @@ -18,7 +18,8 @@ type TimeAndScrubberGridProps = { }; function TimelineSizeBar() { - const {timelineScale, setTimelineScale, durationMs} = useReplayContext(); + const {replay, timelineScale, setTimelineScale} = useReplayContext(); + const durationMs = replay?.getDurationMs(); const maxScale = durationMs ? 
Math.ceil(durationMs / 60000) : 10; return ( @@ -53,15 +54,14 @@ function TimeAndScrubberGrid({ isCompact = false, showZoom = false, }: TimeAndScrubberGridProps) { - const {currentTime, startTimeOffsetMs, durationMs} = useReplayContext(); + const {currentTime, replay} = useReplayContext(); + const durationMs = replay?.getDurationMs(); const elem = useRef(null); const mouseTrackingProps = useScrubberMouseTracking({elem}); return ( - +
    diff --git a/static/app/components/replays/utils.tsx b/static/app/components/replays/utils.tsx index 8cebede97c503d..bd2c2a0d2db52e 100644 --- a/static/app/components/replays/utils.tsx +++ b/static/app/components/replays/utils.tsx @@ -86,28 +86,19 @@ export function countColumns(durationMs: number, width: number, minWidth: number export function getFramesByColumn( durationMs: number, frames: ReplayFrame[], - totalColumns: number, - startTimeOffsetMs: number = 0 + totalColumns: number ) { const safeDurationMs = isNaN(durationMs) ? 1 : durationMs; - const columnFramePairs = frames - .filter( - frame => - frame.offsetMs >= startTimeOffsetMs && - frame.offsetMs <= startTimeOffsetMs + safeDurationMs - ) - .map(frame => { - const columnPositionCalc = - Math.floor( - ((frame.offsetMs - startTimeOffsetMs) / safeDurationMs) * (totalColumns - 1) - ) + 1; - - // Should start at minimum in the first column - const column = Math.max(1, columnPositionCalc); - - return [column, frame] as [number, ReplayFrame]; - }); + const columnFramePairs = frames.map(frame => { + const columnPositionCalc = + Math.floor((frame.offsetMs / safeDurationMs) * (totalColumns - 1)) + 1; + + // Should start at minimum in the first column + const column = Math.max(1, columnPositionCalc); + + return [column, frame] as [number, ReplayFrame]; + }); const framesByColumn = columnFramePairs.reduce>( (map, [column, frame]) => { diff --git a/static/app/utils/replays/hooks/useReplayReader.tsx b/static/app/utils/replays/hooks/useReplayReader.tsx index e2f0db845b80c9..5ab557f429f11e 100644 --- a/static/app/utils/replays/hooks/useReplayReader.tsx +++ b/static/app/utils/replays/hooks/useReplayReader.tsx @@ -6,9 +6,13 @@ import ReplayReader from 'sentry/utils/replays/replayReader'; type Props = { orgSlug: string; replaySlug: string; + clipWindow?: { + endTimestampMs: number; + startTimestampMs: number; + }; }; -export default function useReplayReader({orgSlug, replaySlug}: Props) { +export default function useReplayReader({orgSlug, replaySlug, clipWindow}: Props) { const replayId = parseReplayId(replaySlug); const {attachments, errors, replayRecord, ...replayData} = useReplayData({ @@ -17,8 +21,8 @@ export default function useReplayReader({orgSlug, replaySlug}: Props) { }); const replay = useMemo( - () => ReplayReader.factory({attachments, errors, replayRecord}), - [attachments, errors, replayRecord] + () => ReplayReader.factory({attachments, errors, replayRecord, clipWindow}), + [attachments, clipWindow, errors, replayRecord] ); return { diff --git a/static/app/utils/replays/replayReader.spec.tsx b/static/app/utils/replays/replayReader.spec.tsx index 18b1a10964e49b..46d4efd0669b30 100644 --- a/static/app/utils/replays/replayReader.spec.tsx +++ b/static/app/utils/replays/replayReader.spec.tsx @@ -18,6 +18,7 @@ import { RRWebDOMFrameFixture, RRWebFullSnapshotFrameEventFixture, } from 'sentry-fixture/replay/rrweb'; +import {ReplayErrorFixture} from 'sentry-fixture/replayError'; import {ReplayRecordFixture} from 'sentry-fixture/replayRecord'; import {BreadcrumbType} from 'sentry/types/breadcrumbs'; @@ -361,4 +362,131 @@ describe('ReplayReader', () => { expect(replay?.hasCanvasElementInReplay()).toBe(true); }); + + describe('clip window', () => { + const replayStartedAt = new Date('2024-01-01T00:02:00'); + const replayFinishedAt = new Date('2024-01-01T00:04:00'); + + const clipStartTimestamp = new Date('2024-01-01T00:03:00'); + const clipEndTimestamp = new Date('2024-01-01T00:03:10'); + + const rrwebFrame1 = 
RRWebFullSnapshotFrameEventFixture({ + timestamp: new Date('2024-01-01T00:02:30'), + }); + const rrwebFrame2 = RRWebFullSnapshotFrameEventFixture({ + timestamp: new Date('2024-01-01T00:03:09'), + }); + const rrwebFrame3 = RRWebFullSnapshotFrameEventFixture({ + timestamp: new Date('2024-01-01T00:03:30'), + }); + + const breadcrumbAttachment1 = ReplayBreadcrumbFrameEventFixture({ + timestamp: new Date('2024-01-01T00:02:30'), + data: { + payload: ReplayNavFrameFixture({ + timestamp: new Date('2024-01-01T00:02:30'), + }), + }, + }); + const breadcrumbAttachment2 = ReplayBreadcrumbFrameEventFixture({ + timestamp: new Date('2024-01-01T00:03:05'), + data: { + payload: ReplayNavFrameFixture({ + timestamp: new Date('2024-01-01T00:03:05'), + }), + }, + }); + const breadcrumbAttachment3 = ReplayBreadcrumbFrameEventFixture({ + timestamp: new Date('2024-01-01T00:03:30'), + data: { + payload: ReplayNavFrameFixture({ + timestamp: new Date('2024-01-01T00:03:30'), + }), + }, + }); + + const error1 = ReplayErrorFixture({ + id: '1', + issue: '100', + timestamp: '2024-01-01T00:02:30', + }); + const error2 = ReplayErrorFixture({ + id: '2', + issue: '200', + timestamp: '2024-01-01T00:03:06', + }); + const error3 = ReplayErrorFixture({ + id: '1', + issue: '100', + timestamp: '2024-01-01T00:03:30', + }); + + const replay = ReplayReader.factory({ + attachments: [ + rrwebFrame1, + rrwebFrame2, + rrwebFrame3, + breadcrumbAttachment1, + breadcrumbAttachment2, + breadcrumbAttachment3, + ], + errors: [error1, error2, error3], + replayRecord: ReplayRecordFixture({ + started_at: replayStartedAt, + finished_at: replayFinishedAt, + }), + clipWindow: { + startTimestampMs: clipStartTimestamp.getTime(), + endTimestampMs: clipEndTimestamp.getTime(), + }, + }); + + it('should adjust the end time and duration for the clip window', () => { + // Duration should be between the clip start time and the last rrweb frame + // within the clip window + expect(replay?.getDurationMs()).toEqual( + rrwebFrame2.timestamp - clipStartTimestamp.getTime() + ); + // Start offset should be set + expect(replay?.getStartOffsetMs()).toEqual( + clipStartTimestamp.getTime() - replayStartedAt.getTime() + ); + expect(replay?.getStartTimestampMs()).toEqual(clipStartTimestamp.getTime()); + }); + + it('should trim rrweb frames from the end but not the beginning', () => { + expect(replay?.getRRWebFrames()).toEqual([ + expect.objectContaining({ + type: EventType.Custom, + data: {tag: 'replay.start', payload: {}}, + }), + expect.objectContaining({ + type: EventType.FullSnapshot, + timestamp: rrwebFrame1.timestamp, + }), + expect.objectContaining({ + type: EventType.FullSnapshot, + timestamp: rrwebFrame2.timestamp, + }), + // rrwebFrame3 should not be returned + ]); + }); + + it('should only return chapter frames within window and shift their clipOffsets', () => { + expect(replay?.getChapterFrames()).toEqual([ + // Only breadcrumb2 and error2 should be included + expect.objectContaining({ + category: 'navigation', + timestampMs: breadcrumbAttachment2.timestamp, + // offset is relative to the start of the clip window + offsetMs: 5_000, + }), + expect.objectContaining({ + category: 'issue', + timestampMs: new Date(error2.timestamp).getTime(), + offsetMs: 6_000, + }), + ]); + }); + }); }); diff --git a/static/app/utils/replays/replayReader.tsx b/static/app/utils/replays/replayReader.tsx index 7f141ad4f05372..766bf320764a7d 100644 --- a/static/app/utils/replays/replayReader.tsx +++ b/static/app/utils/replays/replayReader.tsx @@ -2,11 +2,12 @@ import * as Sentry from 
'@sentry/react'; import type {incrementalSnapshotEvent} from '@sentry-internal/rrweb'; import {IncrementalSource} from '@sentry-internal/rrweb'; import memoize from 'lodash/memoize'; -import {duration} from 'moment'; +import {type Duration, duration} from 'moment'; import {defined} from 'sentry/utils'; import domId from 'sentry/utils/domId'; import localStorageWrapper from 'sentry/utils/localStorage'; +import clamp from 'sentry/utils/number/clamp'; import hydrateBreadcrumbs, { replayInitBreadcrumb, } from 'sentry/utils/replays/hydrateBreadcrumbs'; @@ -39,6 +40,11 @@ import { } from 'sentry/utils/replays/types'; import type {ReplayError, ReplayRecord} from 'sentry/views/replays/types'; +interface ClipWindow { + endTimestampMs: number; + startTimestampMs: number; +} + interface ReplayReaderParams { /** * Loaded segment data @@ -60,6 +66,11 @@ interface ReplayReaderParams { * The root Replay event, created at the start of the browser session. */ replayRecord: ReplayRecord | undefined; + + /** + * If provided, the replay will be clipped to this window. + */ + clipWindow?: ClipWindow; } type RequiredNotNull = { @@ -95,13 +106,13 @@ function removeDuplicateClicks(frames: BreadcrumbFrame[]) { } export default class ReplayReader { - static factory({attachments, errors, replayRecord}: ReplayReaderParams) { + static factory({attachments, errors, replayRecord, clipWindow}: ReplayReaderParams) { if (!attachments || !replayRecord || !errors) { return null; } try { - return new ReplayReader({attachments, errors, replayRecord}); + return new ReplayReader({attachments, errors, replayRecord, clipWindow}); } catch (err) { Sentry.captureException(err); @@ -113,6 +124,7 @@ export default class ReplayReader { attachments: [], errors: [], replayRecord, + clipWindow, }); } } @@ -121,6 +133,7 @@ export default class ReplayReader { attachments, errors, replayRecord, + clipWindow, }: RequiredNotNull) { this._cacheKey = domId('replayReader-'); @@ -186,17 +199,102 @@ export default class ReplayReader { this._sortedBreadcrumbFrames.push(replayInitBreadcrumb(replayRecord)); this._sortedRRWebEvents.unshift(recordingStartFrame(replayRecord)); this._sortedRRWebEvents.push(recordingEndFrame(replayRecord)); + + this._duration = replayRecord.duration; + + if (clipWindow) { + this._applyClipWindow(clipWindow); + } } public timestampDeltas = {startedAtDelta: 0, finishedAtDelta: 0}; private _cacheKey: string; + private _duration: Duration = duration(0); private _errors: ErrorFrame[] = []; private _optionFrame: undefined | OptionFrame; private _replayRecord: ReplayRecord; private _sortedBreadcrumbFrames: BreadcrumbFrame[] = []; private _sortedRRWebEvents: RecordingFrame[] = []; private _sortedSpanFrames: SpanFrame[] = []; + private _startOffsetMs = 0; + + private _applyClipWindow = (clipWindow: ClipWindow) => { + const clipStartTimestampMs = clamp( + clipWindow.startTimestampMs, + this._replayRecord.started_at.getTime(), + this._replayRecord.finished_at.getTime() + ); + const clipEndTimestampMs = clamp( + clipWindow.endTimestampMs, + clipStartTimestampMs, + this._replayRecord.finished_at.getTime() + ); + + // For RRWeb frames we only trim from the end because playback will + // not work otherwise. The start offset is used to begin playback at + // the correct time. + this._sortedRRWebEvents = this._sortedRRWebEvents.filter( + frame => frame.timestamp <= clipEndTimestampMs + ); + + // We only want playback to occur while events are still being recorded. + // Without doing this, the replay will appear to stop prematurely. 
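To make the arithmetic below concrete, take the numbers from the spec above: the replay runs 00:02:00 to 00:04:00, the clip window is 00:03:00 to 00:03:10, and the last rrweb frame inside that window lands at 00:03:09. A worked example (illustrative values, not code from the commit):

// startOffsetMs: clip start minus replay start
const startOffsetMs = 60_000; // 00:03:00 - 00:02:00
// durationMs: last in-window rrweb frame minus clip start
const durationMs = 9_000; // 00:03:09 - 00:03:00
// A breadcrumb at 00:03:05 keeps its timestampMs but is re-based relative
// to the clip start:
const breadcrumbOffsetMs = 65_000 - startOffsetMs; // 5_000

These are exactly the values the clip-window test asserts for getDurationMs(), getStartOffsetMs(), and the chapter frames' offsetMs.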
+ const lastRecordingFrameTimestampMs = + this._sortedRRWebEvents.at(-1)?.timestamp ?? clipEndTimestampMs; + + this._startOffsetMs = clipStartTimestampMs - this._replayRecord.started_at.getTime(); + this._duration = duration(lastRecordingFrameTimestampMs - clipStartTimestampMs); + + // We also only trim from the back for breadcrumbs/spans to keep + // historical information about the replay, such as the current URL. + this._sortedBreadcrumbFrames = this._updateFrameOffsets( + this._trimFramesToClipWindow( + this._sortedBreadcrumbFrames, + this._replayRecord.started_at.getTime(), + lastRecordingFrameTimestampMs + ) + ); + this._sortedSpanFrames = this._updateFrameOffsets( + this._trimFramesToClipWindow( + this._sortedSpanFrames, + this._replayRecord.started_at.getTime(), + lastRecordingFrameTimestampMs + ) + ); + + this._errors = this._updateFrameOffsets( + this._trimFramesToClipWindow( + this._errors, + clipStartTimestampMs, + lastRecordingFrameTimestampMs + ) + ); + }; + + /** + * Filters out frames that are outside of the supplied window + */ + _trimFramesToClipWindow = ( + frames: Array, + startTimestampMs: number, + endTimestampMs: number + ) => { + return frames.filter( + frame => + frame.timestampMs >= startTimestampMs && frame.timestampMs <= endTimestampMs + ); + }; + + /** + * Updates the offsetMs of all frames to be relative to the start of the clip window + */ + _updateFrameOffsets = (frames: Array) => { + return frames.map(frame => ({ + ...frame, + offsetMs: frame.offsetMs - this.getStartOffsetMs(), + })); + }; toJSON = () => this._cacheKey; @@ -210,7 +308,6 @@ export default class ReplayReader { : null, ].filter(defined); }); - hasProcessingErrors = () => { return this.processingErrors().length; }; @@ -219,9 +316,14 @@ export default class ReplayReader { * @returns Duration of Replay (milliseonds) */ getDurationMs = () => { - return this._replayRecord.duration.asMilliseconds(); + return this._duration.asMilliseconds(); }; + getStartOffsetMs = () => this._startOffsetMs; + + getStartTimestampMs = () => + this._replayRecord.started_at.getTime() + this._startOffsetMs; + getReplay = () => { return this._replayRecord; }; @@ -282,15 +384,19 @@ export default class ReplayReader { ); getChapterFrames = memoize(() => - [ - ...this.getPerfFrames(), - ...this._sortedBreadcrumbFrames.filter(frame => - ['replay.init', 'replay.mutations', 'replay.hydrate-error'].includes( - frame.category - ) - ), - ...this._errors, - ].sort(sortFrames) + this._trimFramesToClipWindow( + [ + ...this.getPerfFrames(), + ...this._sortedBreadcrumbFrames.filter(frame => + ['replay.init', 'replay.mutations', 'replay.hydrate-error'].includes( + frame.category + ) + ), + ...this._errors, + ].sort(sortFrames), + this.getStartTimestampMs(), + this.getStartTimestampMs() + this.getDurationMs() + ) ); getPerfFrames = memoize(() => diff --git a/static/app/views/replays/detail/breadcrumbs/index.tsx b/static/app/views/replays/detail/breadcrumbs/index.tsx index c06cd34b3a0f32..74301e625c8a28 100644 --- a/static/app/views/replays/detail/breadcrumbs/index.tsx +++ b/static/app/views/replays/detail/breadcrumbs/index.tsx @@ -31,7 +31,7 @@ const cellMeasurer = { }; function Breadcrumbs() { - const {currentTime, replay, startTimeOffsetMs, durationMs} = useReplayContext(); + const {currentTime, replay} = useReplayContext(); const organization = useOrganization(); const hasPerfTab = organization.features.includes('session-replay-trace-table'); @@ -40,19 +40,8 @@ function Breadcrumbs() { useExtractedDomNodes({replay}); const {data: 
frameToTrace, isFetching: isFetchingTraces} = useReplayPerfData({replay}); - const startTimestampMs = - replay?.getReplay()?.started_at?.getTime() ?? 0 + startTimeOffsetMs; - const allFrames = replay?.getChapterFrames(); - - const frames = useMemo( - () => - allFrames?.filter( - frame => - frame.offsetMs >= startTimeOffsetMs && - frame.offsetMs <= startTimeOffsetMs + durationMs - ), - [allFrames, durationMs, startTimeOffsetMs] - ); + const startTimestampMs = replay?.getStartTimestampMs() ?? 0; + const frames = replay?.getChapterFrames(); const [scrollToRow, setScrollToRow] = useState(undefined); From 3c536ffd4735555c0685fa73c367dff04f1276aa Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Tue, 6 Feb 2024 10:49:46 -0800 Subject: [PATCH 056/357] fix(group-attributes): Fix redis cluster usage (#64677) I need to use a setting to define the cluster in use. Since we're using just one key, I'm just piggybacking off of an existing system's redis cluster. --- src/sentry/migrations/0641_backfill_group_attributes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sentry/migrations/0641_backfill_group_attributes.py b/src/sentry/migrations/0641_backfill_group_attributes.py index 6bdb8f336828f0..92b125d7abf677 100644 --- a/src/sentry/migrations/0641_backfill_group_attributes.py +++ b/src/sentry/migrations/0641_backfill_group_attributes.py @@ -3,6 +3,7 @@ from datetime import datetime from enum import Enum +from django.conf import settings from django.db import migrations from django.db.models import F, Window from django.db.models.functions import Rank @@ -124,7 +125,7 @@ def backfill_group_attributes_to_snuba(apps, schema_editor): GroupOwner = apps.get_model("sentry", "GroupOwner") backfill_key = "backfill_group_attributes_to_snuba_progress" - redis_client = redis.redis_clusters.get("default") + redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) progress_id = int(redis_client.get(backfill_key) or 0) From 7c2d70ac7d61bd992cf517a21aef1a6cae1241c7 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Tue, 6 Feb 2024 13:58:11 -0500 Subject: [PATCH 057/357] ref: move the warnings carveout for migrations to pytest init (#64678) Since Django 4.2 raises warnings on legacy usage of `index_together` -- until we squash we'll need this to make it easier to run locally. --- .github/workflows/backend.yml | 2 +- src/sentry/testutils/pytest/sentry.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 51a155a32c51b9..7ce7f3f1517d48 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -140,7 +140,7 @@ jobs: - name: run tests run: | # historic migrations trigger some warnings - PYTEST_ADDOPTS="$PYTEST_ADDOPTS -m migrations --migrations -W ignore" make test-python-ci + PYTEST_ADDOPTS="$PYTEST_ADDOPTS -m migrations --migrations" make test-python-ci # Upload coverage data even if running the tests step fails since # it reduces large coverage fluctuations diff --git a/src/sentry/testutils/pytest/sentry.py b/src/sentry/testutils/pytest/sentry.py index 91b54758050da6..e95eef0c40d6d5 100644 --- a/src/sentry/testutils/pytest/sentry.py +++ b/src/sentry/testutils/pytest/sentry.py @@ -91,6 +91,10 @@ def pytest_configure(config: pytest.Config) -> None: config.addinivalue_line("markers", "migrations: requires --migrations") + if not config.getvalue("nomigrations"): + # XXX: ignore warnings in historic migrations +
config.addinivalue_line("filterwarnings", "ignore:.*index_together.*") + if sys.platform == "darwin" and shutil.which("colima"): # This is the only way other than pytest --basetemp to change # the temproot. We'd like to keep invocations to just "pytest". From 5dd15fa33f7c052d51d0183ec203e4f45bbc7b31 Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Tue, 6 Feb 2024 14:02:14 -0500 Subject: [PATCH 058/357] feat(webvitals): Force inp sample query to results to contain profile or replay (#64628) Force inp sample query to results to contain profile or replay. Also update all INP sorts to sort on FID (because we are using FID to fake INP data) --- .../performance/browser/webVitals/pageOverview.spec.tsx | 4 ++-- .../browser/webVitals/pageSamplePerformanceTable.tsx | 2 +- .../browser/webVitals/utils/useWebVitalsSort.tsx | 9 +++++++++ 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx index 6de45e350e9356..4f5ee23439d647 100644 --- a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx +++ b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx @@ -104,12 +104,12 @@ describe('PageOverview', function () { await screen.findAllByText('Interactions'); userEvent.click(screen.getAllByText('Interactions')[0]); await waitFor(() => - expect(eventsMock).toHaveBeenCalledWith( + expect(eventsMock).toHaveBeenLastCalledWith( '/organizations/org-slug/events/', expect.objectContaining({ query: expect.objectContaining({ query: - 'transaction.op:pageload transaction:"/" has:measurements.score.total has:measurements.fid ', + 'transaction.op:pageload transaction:"/" has:measurements.score.total has:measurements.fid (has:profile.id OR has:replayId) ', }), }) ) diff --git a/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx b/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx index 8f54215162f7f9..e87eb58e290547 100644 --- a/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx +++ b/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx @@ -71,7 +71,7 @@ const INTERACTION_SAMPLES_COLUMN_ORDER: GridColumnOrder< {key: 'totalScore', width: COL_WIDTH_UNDEFINED, name: 'Score'}, ]; -const INP_SEARCH_FILTER = 'has:measurements.fid'; +const INP_SEARCH_FILTER = 'has:measurements.fid (has:profile.id OR has:replayId)'; enum Dataset { PAGELOADS = 'pageloads', diff --git a/static/app/views/performance/browser/webVitals/utils/useWebVitalsSort.tsx b/static/app/views/performance/browser/webVitals/utils/useWebVitalsSort.tsx index b68afe972249d1..fe18385ee8b049 100644 --- a/static/app/views/performance/browser/webVitals/utils/useWebVitalsSort.tsx +++ b/static/app/views/performance/browser/webVitals/utils/useWebVitalsSort.tsx @@ -7,8 +7,11 @@ import { SORTABLE_FIELDS, SORTABLE_SCORE_FIELDS, } from 'sentry/views/performance/browser/webVitals/utils/types'; +import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; +const INP_FIELDS = ['measurements.inp', 'p75(measurements.inp)']; + export function useWebVitalsSort({ sortName = 'sort', defaultSort = DEFAULT_SORT, @@ -20,6 +23,7 @@ export function useWebVitalsSort({ } = {}) { const 
location = useLocation(); const shouldUseStoredScores = useStoredScoresSetting(); + const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); const filteredSortableFields = shouldUseStoredScores ? sortableFields : sortableFields.filter(field => !SORTABLE_SCORE_FIELDS.includes(field)); @@ -29,5 +33,10 @@ export function useWebVitalsSort({ (filteredSortableFields as unknown as string[]).includes(s.field) )[0] ?? defaultSort; + // TODO: Remove this once we can query for INP. + if (shouldReplaceFidWithInp && INP_FIELDS.includes(sort.field)) { + sort.field = 'measurements.fid'; + } + return sort; } From adbd88a300110f803c36fb8f29fc6bf84b8ce141 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:13:12 -0800 Subject: [PATCH 059/357] chore(roles): remove UI to set team org role (#64626) --- static/app/data/forms/teamSettingsFields.tsx | 27 ------- .../teamSettings/index.spec.tsx | 78 ------------------- .../organizationTeams/teamSettings/index.tsx | 10 --- 3 files changed, 115 deletions(-) diff --git a/static/app/data/forms/teamSettingsFields.tsx b/static/app/data/forms/teamSettingsFields.tsx index ceb466e534c9fa..2502ef6a9d11eb 100644 --- a/static/app/data/forms/teamSettingsFields.tsx +++ b/static/app/data/forms/teamSettingsFields.tsx @@ -1,6 +1,5 @@ import type {JsonFormObject} from 'sentry/components/forms/types'; import {t} from 'sentry/locale'; -import type {MemberRole} from 'sentry/types'; import slugify from 'sentry/utils/slugify'; // Export route to make these forms searchable by label/help @@ -26,32 +25,6 @@ const formGroups: JsonFormObject[] = [ }, ], }, - { - title: 'Team Organization Role', - fields: [ - { - name: 'orgRole', - type: 'select', - choices: ({orgRoleList}) => { - const choices = orgRoleList.map((r: MemberRole) => [r.id, r.name]) ?? 
[]; - choices.unshift(['', 'None']); - return choices; - }, - required: false, - label: t('Organization Role'), - help: t( - 'Organization owners can bulk assign an org-role for all the members in this team' - ), - disabled: ({hasOrgAdmin, idpProvisioned}) => !hasOrgAdmin || idpProvisioned, - visible: ({hasOrgRoleFlag}) => hasOrgRoleFlag, - saveOnBlur: false, - saveMessageAlertType: 'info', - saveMessage: t( - 'You are giving all team members the permissions of this organization role' - ), - }, - ], - }, ]; export default formGroups; diff --git a/static/app/views/settings/organizationTeams/teamSettings/index.spec.tsx b/static/app/views/settings/organizationTeams/teamSettings/index.spec.tsx index d83b364547ade4..960454ef513ec9 100644 --- a/static/app/views/settings/organizationTeams/teamSettings/index.spec.tsx +++ b/static/app/views/settings/organizationTeams/teamSettings/index.spec.tsx @@ -1,5 +1,4 @@ import {browserHistory} from 'react-router'; -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {TeamFixture} from 'sentry-fixture/team'; @@ -63,55 +62,6 @@ describe('TeamSettings', function () { ); }); - it('can set team org-role', async function () { - const team = TeamFixture({orgRole: ''}); - const putMock = MockApiClient.addMockResponse({ - url: `/teams/org-slug/${team.slug}/`, - method: 'PUT', - body: { - slug: 'new-slug', - orgRole: 'owner', - }, - }); - const organization = OrganizationFixture({ - access: ['org:admin'], - features: ['org-roles-for-teams'], - }); - - render(, { - organization, - }); - - // set org role - const unsetDropdown = await screen.findByText('None'); - await selectEvent.select(unsetDropdown, 'Owner'); - - await userEvent.click(screen.getByRole('button', {name: 'Save'})); - expect(putMock).toHaveBeenCalledWith( - `/teams/org-slug/${team.slug}/`, - expect.objectContaining({ - data: { - orgRole: 'owner', - }, - }) - ); - - // unset org role - const setDropdown = await screen.findByText('Owner'); - await selectEvent.select(setDropdown, 'None'); - - await userEvent.click(screen.getByRole('button', {name: 'Save'})); - - expect(putMock).toHaveBeenCalledWith( - `/teams/org-slug/${team.slug}/`, - expect.objectContaining({ - data: { - orgRole: '', - }, - }) - ); - }); - it('needs team:admin in order to see an enabled Remove Team button', function () { const team = TeamFixture(); const organization = OrganizationFixture({access: []}); @@ -123,34 +73,6 @@ describe('TeamSettings', function () { expect(screen.getByTestId('button-remove-team')).toBeDisabled(); }); - it('needs org:admin in order to set team org-role', function () { - const team = TeamFixture(); - const organization = OrganizationFixture({ - access: [], - features: ['org-roles-for-teams'], - }); - - render(, { - organization, - }); - - expect(screen.getByRole('textbox', {name: 'Organization Role'})).toBeDisabled(); - }); - - it('cannot set team org-role for idp:provisioned team', function () { - const team = TeamFixture({flags: {'idp:provisioned': true}}); - const organization = OrganizationFixture({ - access: ['org:admin'], - features: ['org-roles-for-teams'], - }); - - render(, { - organization, - }); - - expect(screen.getByRole('textbox', {name: 'Organization Role'})).toBeDisabled(); - }); - it('can remove team', async function () { const team = TeamFixture({hasAccess: true}); const deleteMock = MockApiClient.addMockResponse({ diff --git a/static/app/views/settings/organizationTeams/teamSettings/index.tsx 
b/static/app/views/settings/organizationTeams/teamSettings/index.tsx index 81d9714d8f47c1..d1978ee47fdd30 100644 --- a/static/app/views/settings/organizationTeams/teamSettings/index.tsx +++ b/static/app/views/settings/organizationTeams/teamSettings/index.tsx @@ -52,13 +52,8 @@ function TeamSettings({team, params}: TeamSettingsProps) { } }; - const idpProvisioned = team.flags['idp:provisioned']; - const orgRoleList = organization.orgRoleList; - const hasOrgRoleFlag = organization.features.includes('org-roles-for-teams'); - const hasTeamWrite = hasEveryAccess(['team:write'], {organization, team}); const hasTeamAdmin = hasEveryAccess(['team:admin'], {organization, team}); - const hasOrgAdmin = hasEveryAccess(['org:admin'], {organization}); return ( @@ -76,16 +71,11 @@ function TeamSettings({team, params}: TeamSettingsProps) { initialData={{ name: team.name, slug: team.slug, - orgRole: team.orgRole, }} > From b11bf5d1cf9a8f44788a15888097b982557bb4d3 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:18:26 -0800 Subject: [PATCH 060/357] nit(superuser): add comments for not replacing is_active_superuser with superuser_has_permission (#64614) --- .../endpoints/organization_member/details.py | 2 ++ src/sentry/api/endpoints/user_details.py | 2 ++ .../test_organization_member_details.py | 32 +++++++++++++++++ .../sentry/api/endpoints/test_user_details.py | 36 +++++++++++++++++++ 4 files changed, 72 insertions(+) diff --git a/src/sentry/api/endpoints/organization_member/details.py b/src/sentry/api/endpoints/organization_member/details.py index cfe838f5bfe02a..dd666a1a1e5357 100644 --- a/src/sentry/api/endpoints/organization_member/details.py +++ b/src/sentry/api/endpoints/organization_member/details.py @@ -383,6 +383,8 @@ def delete( Remove an organization member. 
""" + # with superuser read write separation, superuser read cannot hit this endpoint + # so we can keep this as is_active_superuser if request.user.is_authenticated and not is_active_superuser(request): try: acting_member = OrganizationMember.objects.get( diff --git a/src/sentry/api/endpoints/user_details.py b/src/sentry/api/endpoints/user_details.py index 608beb250f05bf..0c76c4c2e91837 100644 --- a/src/sentry/api/endpoints/user_details.py +++ b/src/sentry/api/endpoints/user_details.py @@ -196,6 +196,8 @@ def put(self, request: Request, user) -> Response: if request.access.has_permission("users.admin"): serializer_cls = PrivilegedUserSerializer + # with superuser read write separation, superuser read cannot hit this endpoint + # so we can keep this as is_active_superuser elif is_active_superuser(request): serializer_cls = SuperuserUserSerializer else: diff --git a/tests/sentry/api/endpoints/test_organization_member_details.py b/tests/sentry/api/endpoints/test_organization_member_details.py index a27c49e1905aaa..d94a71bbf9bbac 100644 --- a/tests/sentry/api/endpoints/test_organization_member_details.py +++ b/tests/sentry/api/endpoints/test_organization_member_details.py @@ -2,6 +2,7 @@ from django.core import mail from django.db.models import F +from django.test import override_settings from django.urls import reverse from sentry.auth.authenticators.recovery_code import RecoveryCodeInterface @@ -748,6 +749,37 @@ def test_cannot_delete_partnership_member(self): self.get_error_response(self.organization.slug, member_om.id, status_code=403) + @override_settings(SENTRY_SELF_HOSTED=False) + @with_feature("auth:enterprise-superuser-read-write") + def test_cannot_delete_as_superuser_read(self): + superuser = self.create_user(is_superuser=True) + self.login_as(superuser, superuser=True) + + member = self.create_user("bar@example.com") + member_om = self.create_member( + organization=self.organization, + user=member, + role="member", + ) + + self.get_error_response(self.organization.slug, member_om.id, status_code=400) + + @override_settings(SENTRY_SELF_HOSTED=False) + @with_feature("auth:enterprise-superuser-read-write") + def test_can_delete_as_superuser_write(self): + superuser = self.create_user(is_superuser=True) + self.add_user_permission(superuser, "superuser.write") + self.login_as(superuser, superuser=True) + + member = self.create_user("bar@example.com") + member_om = self.create_member( + organization=self.organization, + user=member, + role="member", + ) + + self.get_success_response(self.organization.slug, member_om.id) + @region_silo_test class ResetOrganizationMember2faTest(APITestCase): diff --git a/tests/sentry/api/endpoints/test_user_details.py b/tests/sentry/api/endpoints/test_user_details.py index f691f1237af776..13aaa17c08d925 100644 --- a/tests/sentry/api/endpoints/test_user_details.py +++ b/tests/sentry/api/endpoints/test_user_details.py @@ -1,3 +1,5 @@ +from django.test import override_settings + from sentry.models.deletedorganization import DeletedOrganization from sentry.models.options.user_option import UserOption from sentry.models.organization import Organization, OrganizationStatus @@ -8,6 +10,7 @@ from sentry.silo.base import SiloMode from sentry.tasks.deletion.hybrid_cloud import schedule_hybrid_cloud_foreign_key_jobs from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers.features import with_feature from sentry.testutils.hybrid_cloud import HybridCloudTestMixin from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import 
assume_test_silo_mode, control_silo_test
@@ -189,6 +192,39 @@ def test_superuser_with_permission_can_change_is_active(self):
         user = User.objects.get(id=self.user.id)
         assert not user.is_active
 
+    @override_settings(SENTRY_SELF_HOSTED=False)
+    @with_feature("auth:enterprise-superuser-read-write")
+    def test_superuser_read_cannot_change_is_active(self):
+        self.user.update(is_active=True)
+        superuser = self.create_user(email="b@example.com", is_superuser=True)
+        self.login_as(user=superuser, superuser=True)
+
+        self.get_error_response(
+            self.user.id,
+            isActive="false",
+            status_code=403,
+        )
+
+        self.user.refresh_from_db()
+        assert self.user.is_active
+
+    @override_settings(SENTRY_SELF_HOSTED=False)
+    @with_feature("auth:enterprise-superuser-read-write")
+    def test_superuser_write_can_change_is_active(self):
+        self.user.update(is_active=True)
+        superuser = self.create_user(email="b@example.com", is_superuser=True)
+        self.add_user_permission(superuser, "superuser.write")
+        self.login_as(user=superuser, superuser=True)
+
+        resp = self.get_success_response(
+            self.user.id,
+            isActive="false",
+        )
+        assert resp.data["id"] == str(self.user.id)
+
+        self.user.refresh_from_db()
+        assert not self.user.is_active
+
     def test_superuser_cannot_add_superuser(self):
         self.user.update(is_superuser=False)
         superuser = self.create_user(email="b@example.com", is_superuser=True)

From fb51794d1c36e16a68238d0049763c7203ba850c Mon Sep 17 00:00:00 2001
From: Bartek Ogryczak
Date: Tue, 6 Feb 2024 11:23:11 -0800
Subject: [PATCH 061/357] feat(grouping): Broaden scope of Chunkload and Hydration built-in fingerprinting from NextJS to all JS (#64494)

This broadens the scope of the ChunkLoadError and HydrationError built-in fingerprint rules: instead of being restricted to the NextJS SDK, they now apply to all of JavaScript.
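Concretely, the rules swap the `sdk` matcher for the broader `family` matcher. As a rough sketch of the behavioral difference (the `event` dict and its field names here are hypothetical stand-ins, not Sentry's actual rule engine or event model):

    def old_rule_matches(event: dict) -> bool:
        # Before: only events reported by the NextJS SDK were matched.
        return (
            event.get("sdk") == "sentry.javascript.nextjs"
            and event.get("error_type") == "ChunkLoadError"
        )

    def new_rule_matches(event: dict) -> bool:
        # After: any event in the JavaScript platform family matches,
        # regardless of which JS SDK (React, Vue, NextJS, plain browser) sent it.
        return (
            event.get("family") == "javascript"
            and event.get("error_type") == "ChunkLoadError"
        )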
--- .../001_chunkload_errors.txt | 2 - .../002_hydration_errors.txt | 5 -- .../001_chunkload_errors.txt | 2 + .../002_hydration_errors.txt | 5 ++ .../grouping/strategies/configurations.py | 2 +- .../grouping/test_builtin_fingerprinting.py | 86 ++++++++++--------- 6 files changed, 54 insertions(+), 48 deletions(-) delete mode 100644 src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/001_chunkload_errors.txt delete mode 100644 src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/002_hydration_errors.txt create mode 100644 src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/001_chunkload_errors.txt create mode 100644 src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/002_hydration_errors.txt diff --git a/src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/001_chunkload_errors.txt b/src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/001_chunkload_errors.txt deleted file mode 100644 index fae24f7eb79a69..00000000000000 --- a/src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/001_chunkload_errors.txt +++ /dev/null @@ -1,2 +0,0 @@ -sdk:sentry.javascript.nextjs error.type:"ChunkLoadError" -> chunkloaderror -sdk:sentry.javascript.nextjs error.value:"ChunkLoadError*" -> chunkloaderror diff --git a/src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/002_hydration_errors.txt b/src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/002_hydration_errors.txt deleted file mode 100644 index 86d29a2d3d4349..00000000000000 --- a/src/sentry/grouping/fingerprinting/configs/sentry.javascript.nextjs@2023-12-22/002_hydration_errors.txt +++ /dev/null @@ -1,5 +0,0 @@ -sdk:sentry.javascript.nextjs tags.transaction:"*" message:"Hydration failed because the initial UI does not match what was rendered on the server." -> hydrationerror, {{tags.transaction}} -sdk:sentry.javascript.nextjs tags.transaction:"*" message:"The server could not finish this Suspense boundary, likely due to an error during server rendering. Switched to client rendering." -> hydrationerror, {{tags.transaction}} -sdk:sentry.javascript.nextjs tags.transaction:"*" message:"There was an error while hydrating this Suspense boundary. Switched to client rendering." -> hydrationerror, {{tags.transaction}} -sdk:sentry.javascript.nextjs tags.transaction:"*" message:"There was an error while hydrating. Because the error happened outside of a Suspense boundary, the entire root will switch to client rendering." -> hydrationerror, {{tags.transaction}} -sdk:sentry.javascript.nextjs tags.transaction:"*" message:"Text content does not match server-rendered HTML." 
-> hydrationerror, {{tags.transaction}} diff --git a/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/001_chunkload_errors.txt b/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/001_chunkload_errors.txt new file mode 100644 index 00000000000000..5f0831ff55360d --- /dev/null +++ b/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/001_chunkload_errors.txt @@ -0,0 +1,2 @@ +family:javascript error.type:"ChunkLoadError" -> chunkloaderror +family:javascript error.value:"ChunkLoadError*" -> chunkloaderror diff --git a/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/002_hydration_errors.txt b/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/002_hydration_errors.txt new file mode 100644 index 00000000000000..e487904eba9a7d --- /dev/null +++ b/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/002_hydration_errors.txt @@ -0,0 +1,5 @@ +family:javascript tags.transaction:"*" message:"Hydration failed because the initial UI does not match what was rendered on the server." -> hydrationerror, {{tags.transaction}} +family:javascript tags.transaction:"*" message:"The server could not finish this Suspense boundary, likely due to an error during server rendering. Switched to client rendering." -> hydrationerror, {{tags.transaction}} +family:javascript tags.transaction:"*" message:"There was an error while hydrating this Suspense boundary. Switched to client rendering." -> hydrationerror, {{tags.transaction}} +family:javascript tags.transaction:"*" message:"There was an error while hydrating. Because the error happened outside of a Suspense boundary, the entire root will switch to client rendering." -> hydrationerror, {{tags.transaction}} +family:javascript tags.transaction:"*" message:"Text content does not match server-rendered HTML." 
-> hydrationerror, {{tags.transaction}} diff --git a/src/sentry/grouping/strategies/configurations.py b/src/sentry/grouping/strategies/configurations.py index f88dba2fcf6a3c..51a0b1bf34868d 100644 --- a/src/sentry/grouping/strategies/configurations.py +++ b/src/sentry/grouping/strategies/configurations.py @@ -201,7 +201,7 @@ def register_strategy_config(id: str, **kwargs) -> type[StrategyConfiguration]: "java_cglib_hibernate_logic": True, }, enhancements_base="newstyle:2023-01-11", - fingerprinting_bases=["sentry.javascript.nextjs@2023-12-22"], + fingerprinting_bases=["sentry.javascript@2024-02-02"], ) diff --git a/tests/sentry/grouping/test_builtin_fingerprinting.py b/tests/sentry/grouping/test_builtin_fingerprinting.py index 6853eae6557e40..ba87419f53f651 100644 --- a/tests/sentry/grouping/test_builtin_fingerprinting.py +++ b/tests/sentry/grouping/test_builtin_fingerprinting.py @@ -24,7 +24,7 @@ @pytest.fixture def default_bases(): - return ["sentry.javascript.nextjs@2023-12-22"] + return ["sentry.javascript@2024-02-02"] def test_default_bases(default_bases): @@ -33,22 +33,22 @@ def test_default_bases(default_bases): assert { k: [r._to_config_structure() for r in rs] for k, rs in FINGERPRINTING_BASES.items() } == { - "sentry.javascript.nextjs@2023-12-22": [ + "sentry.javascript@2024-02-02": [ { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["type", "ChunkLoadError"]], + "matchers": [["family", "javascript"], ["type", "ChunkLoadError"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["value", "ChunkLoadError*"]], + "matchers": [["family", "javascript"], ["value", "ChunkLoadError*"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -61,7 +61,7 @@ def test_default_bases(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -74,7 +74,7 @@ def test_default_bases(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -87,7 +87,7 @@ def test_default_bases(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -100,7 +100,7 @@ def test_default_bases(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], ["message", "Text content does not match server-rendered HTML."], ], @@ -119,20 +119,20 @@ def test_built_in_nextjs_rules_base(default_bases): assert rules._to_config_structure(include_builtin=True) == { "rules": [ { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["type", "ChunkLoadError"]], + "matchers": [["family", "javascript"], ["type", "ChunkLoadError"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["value", "ChunkLoadError*"]], + "matchers": [["family", "javascript"], ["value", "ChunkLoadError*"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -145,7 +145,7 @@ def test_built_in_nextjs_rules_base(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + 
["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -158,7 +158,7 @@ def test_built_in_nextjs_rules_base(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -171,7 +171,7 @@ def test_built_in_nextjs_rules_base(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -184,7 +184,7 @@ def test_built_in_nextjs_rules_base(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], ["message", "Text content does not match server-rendered HTML."], ], @@ -204,20 +204,20 @@ def test_built_in_nextjs_rules_from_empty_config_string(default_bases): assert rules._to_config_structure(include_builtin=True) == { "rules": [ { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["type", "ChunkLoadError"]], + "matchers": [["family", "javascript"], ["type", "ChunkLoadError"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["value", "ChunkLoadError*"]], + "matchers": [["family", "javascript"], ["value", "ChunkLoadError*"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -230,7 +230,7 @@ def test_built_in_nextjs_rules_from_empty_config_string(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -243,7 +243,7 @@ def test_built_in_nextjs_rules_from_empty_config_string(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -256,7 +256,7 @@ def test_built_in_nextjs_rules_from_empty_config_string(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -269,7 +269,7 @@ def test_built_in_nextjs_rules_from_empty_config_string(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], ["message", "Text content does not match server-rendered HTML."], ], @@ -305,20 +305,20 @@ def test_built_in_nextjs_rules_from_config_string_with_custom(default_bases): "attributes": {}, }, { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["type", "ChunkLoadError"]], + "matchers": [["family", "javascript"], ["type", "ChunkLoadError"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { - "matchers": [["sdk", "sentry.javascript.nextjs"], ["value", "ChunkLoadError*"]], + "matchers": [["family", "javascript"], ["value", "ChunkLoadError*"]], "fingerprint": ["chunkloaderror"], "attributes": {}, "is_builtin": True, }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -331,7 +331,7 @@ def test_built_in_nextjs_rules_from_config_string_with_custom(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ -344,7 +344,7 @@ def test_built_in_nextjs_rules_from_config_string_with_custom(default_bases): }, { "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], [ "message", @@ 
-357,7 +357,7 @@ def test_built_in_nextjs_rules_from_config_string_with_custom(default_bases):
             },
             {
                 "matchers": [
-                    ["sdk", "sentry.javascript.nextjs"],
+                    ["family", "javascript"],
                     ["tags.transaction", "*"],
                     [
                         "message",
@@ -370,7 +370,7 @@ def test_built_in_nextjs_rules_from_config_string_with_custom(default_bases):
             },
             {
                 "matchers": [
-                    ["sdk", "sentry.javascript.nextjs"],
+                    ["family", "javascript"],
                     ["tags.transaction", "*"],
                     ["message", "Text content does not match server-rendered HTML."],
                 ],
@@ -522,7 +522,7 @@ def test_built_in_chunkload_rules(self):
         assert event.data["_fingerprint_info"]["matched_rule"] == {
             "attributes": {},
             "fingerprint": ["chunkloaderror"],
-            "matchers": [["sdk", "sentry.javascript.nextjs"], ["type", "ChunkLoadError"]],
+            "matchers": [["family", "javascript"], ["type", "ChunkLoadError"]],
             "is_builtin": True,
         }
 
@@ -541,7 +541,7 @@ def test_built_in_chunkload_rules_variants(self):
             "description": "Sentry defined fingerprint",
             "values": ["chunkloaderror"],
             "client_values": ["my-route", "{{ default }}"],
-            "matched_rule": 'sdk:"sentry.javascript.nextjs" type:"ChunkLoadError" -> "chunkloaderror"',
+            "matched_rule": 'family:"javascript" type:"ChunkLoadError" -> "chunkloaderror"',
         }
 
     def test_built_in_chunkload_rules_disabled(self):
@@ -564,20 +564,26 @@ def test_built_in_chunkload_rules_value_only(self):
         assert event.data["_fingerprint_info"]["matched_rule"] == {
             "attributes": {},
             "fingerprint": ["chunkloaderror"],
-            "matchers": [["sdk", "sentry.javascript.nextjs"], ["value", "ChunkLoadError*"]],
+            "matchers": [["family", "javascript"], ["value", "ChunkLoadError*"]],
             "is_builtin": True,
         }
 
     @with_feature("organizations:grouping-built-in-fingerprint-rules")
     def test_built_in_chunkload_rules_wrong_sdk(self):
         """
-        Built-in ChunkLoadError rule should not apply if SDK is not sentry.javascript.nextjs.
+        Built-in ChunkLoadError rule should also apply even if SDK is not sentry.javascript.nextjs.
""" self.chunkload_error_trace["sdk"]["name"] = "sentry.javascript.react" # type: ignore[index] event = self._get_event_for_trace(stacktrace=self.chunkload_error_trace) - assert event.data["fingerprint"] == ["my-route", "{{ default }}"] - assert event.data.get("_fingerprint_info") is None + + assert event.data["fingerprint"] == ["chunkloaderror"] + assert event.data["_fingerprint_info"]["matched_rule"] == { + "attributes": {}, + "fingerprint": ["chunkloaderror"], + "matchers": [["family", "javascript"], ["type", "ChunkLoadError"]], + "is_builtin": True, + } @with_feature("organizations:grouping-built-in-fingerprint-rules") def test_built_in_hydration_rules_same_transactions(self): @@ -598,7 +604,7 @@ def test_built_in_hydration_rules_same_transactions(self): "attributes": {}, "fingerprint": ["hydrationerror", "{{tags.transaction}}"], "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], ["message", self.hydration_error_trace["message"]], ], @@ -609,7 +615,7 @@ def test_built_in_hydration_rules_same_transactions(self): "attributes": {}, "fingerprint": ["hydrationerror", "{{tags.transaction}}"], "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], ["message", data_message2["message"]], ], @@ -642,7 +648,7 @@ def test_built_in_hydration_rules_different_transactions(self): "attributes": {}, "fingerprint": ["hydrationerror", "{{tags.transaction}}"], "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], ["message", self.hydration_error_trace["message"]], ], @@ -656,7 +662,7 @@ def test_built_in_hydration_rules_different_transactions(self): "attributes": {}, "fingerprint": ["hydrationerror", "{{tags.transaction}}"], "matchers": [ - ["sdk", "sentry.javascript.nextjs"], + ["family", "javascript"], ["tags.transaction", "*"], ["message", self.hydration_error_trace["message"]], ], From 71b77a86fd67d7e95b522cc423e8fabe8d26f667 Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Tue, 6 Feb 2024 11:37:09 -0800 Subject: [PATCH 062/357] feat(crons): Allow max_workers to be configured for consumer (#64512) Try 3 of this. Reverts the revert of https://github.com/getsentry/sentry/pull/64432. This time we have a test in place that uses the parallel code, so we shouldn't have any further breakages. Using a threadpool means that the thread will use a different db connection, and so to see the results of the threadpool we have to use `TransactionTestCase`. I wanted to apply this to all tests, but they started taking 50s to run instead of 10, so I settled for just the one test. 
--- src/sentry/consumers/__init__.py | 6 +++ .../monitors/consumers/monitor_consumer.py | 30 +++++++----- .../sentry/monitors/test_monitor_consumer.py | 48 +++++++++++++++++-- 3 files changed, 69 insertions(+), 15 deletions(-) diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index dbe5f47f3ef7f2..70025a73e3d803 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -102,6 +102,12 @@ def ingest_monitors_options() -> list[click.Option]: default=10, help="Maximum time spent batching check-ins to batch before processing in parallel.", ), + click.Option( + ["--max-workers", "max_workers"], + type=int, + default=None, + help="The maximum number of threads to spawn in parallel mode.", + ), ] return options diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index 3c6e0ff7a0c7e3..656150b8c2313e 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -4,8 +4,9 @@ import uuid from collections import defaultdict from collections.abc import Mapping -from concurrent.futures import ThreadPoolExecutor, wait +from concurrent.futures import Executor, ThreadPoolExecutor, wait from datetime import datetime, timedelta +from functools import partial from typing import Literal import msgpack @@ -54,6 +55,7 @@ CHECKIN_QUOTA_LIMIT = 6 CHECKIN_QUOTA_WINDOW = 60 +StrategyMode = Literal["parallel", "serial"] def _ensure_monitor_with_config( @@ -746,9 +748,6 @@ def _process_checkin(item: CheckinItem, txn: Transaction | Span): logger.exception("Failed to process check-in") -_checkin_worker = ThreadPoolExecutor() - - def process_checkin(item: CheckinItem): """ Process an individual check-in @@ -772,7 +771,7 @@ def process_checkin_group(items: list[CheckinItem]): process_checkin(item) -def process_batch(message: Message[ValuesBatch[KafkaPayload]]): +def process_batch(message: Message[ValuesBatch[KafkaPayload]], worker: Executor): """ Receives batches of check-in messages. This function will take the batch and group them together by monitor ID (ensuring order is preserved) and @@ -820,8 +819,7 @@ def process_batch(message: Message[ValuesBatch[KafkaPayload]]): # Submit check-in groups for processing with sentry_sdk.start_transaction(op="process_batch", name="monitors.monitor_consumer"): futures = [ - _checkin_worker.submit(process_checkin_group, group) - for group in checkin_mapping.values() + worker.submit(process_checkin_group, group) for group in checkin_mapping.values() ] wait(futures) @@ -866,6 +864,11 @@ class StoreMonitorCheckInStrategyFactory(ProcessingStrategyFactory[KafkaPayload] Does the consumer process unrelated check-ins in parallel? """ + max_workers: int | None = None + """ + Number of Executor workers to use when running in parallel + """ + max_batch_size = 500 """ How many messages will be batched at once when in parallel mode. 
@@ -878,9 +881,10 @@ class StoreMonitorCheckInStrategyFactory(ProcessingStrategyFactory[KafkaPayload] def __init__( self, - mode: Literal["parallel", "serial"] | None = None, + mode: StrategyMode | None = None, max_batch_size: int | None = None, max_batch_time: int | None = None, + max_workers: int | None = None, ) -> None: if mode == "parallel": self.parallel = True @@ -889,10 +893,14 @@ def __init__( self.max_batch_size = max_batch_size if max_batch_time is not None: self.max_batch_time = max_batch_time + if max_workers is not None: + self.max_workers = max_workers + + def create_parallel_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]: + worker = ThreadPoolExecutor(max_workers=self.max_workers) - def create_paralell_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]: batch_processor = RunTask( - function=process_batch, + function=partial(process_batch, worker=worker), next_step=CommitOffsets(commit), ) return BatchStep( @@ -913,6 +921,6 @@ def create_with_partitions( partitions: Mapping[Partition, int], ) -> ProcessingStrategy[KafkaPayload]: if self.parallel: - return self.create_paralell_worker(commit) + return self.create_parallel_worker(commit) else: return self.create_synchronous_worker(commit) diff --git a/tests/sentry/monitors/test_monitor_consumer.py b/tests/sentry/monitors/test_monitor_consumer.py index 3839c27bc551ce..d1021b57a6a1ba 100644 --- a/tests/sentry/monitors/test_monitor_consumer.py +++ b/tests/sentry/monitors/test_monitor_consumer.py @@ -13,7 +13,10 @@ from sentry.constants import ObjectStatus from sentry.db.models import BoundedPositiveIntegerField from sentry.monitors.constants import TIMEOUT, PermitCheckInStatus -from sentry.monitors.consumers.monitor_consumer import StoreMonitorCheckInStrategyFactory +from sentry.monitors.consumers.monitor_consumer import ( + StoreMonitorCheckInStrategyFactory, + StrategyMode, +) from sentry.monitors.models import ( CheckInStatus, Monitor, @@ -23,7 +26,7 @@ MonitorType, ScheduleType, ) -from sentry.testutils.cases import TestCase +from sentry.testutils.cases import BaseTestCase, TestCase, TransactionTestCase from sentry.utils import json from sentry.utils.locking.manager import LockManager from sentry.utils.outcomes import Outcome @@ -32,7 +35,9 @@ locks = LockManager(build_instance_from_options(settings.SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS)) -class MonitorConsumerTest(TestCase): +class MonitorConsumerTest(BaseTestCase): + mode: StrategyMode = "serial" + def _create_monitor(self, **kwargs): return Monitor.objects.create( organization_id=self.organization.id, @@ -80,7 +85,10 @@ def send_checkin( commit = mock.Mock() partition = Partition(Topic("test"), 0) - StoreMonitorCheckInStrategyFactory().create_with_partitions(commit, {partition: 0}).submit( + factory = StoreMonitorCheckInStrategyFactory( + mode=self.mode, max_workers=1 + ).create_with_partitions(commit, {partition: 0}) + factory.submit( Message( BrokerValue( KafkaPayload(b"fake-key", msgpack.packb(wrapper), []), @@ -90,7 +98,39 @@ def send_checkin( ) ) ) + factory.join() + + +class ParallelMonitorConsumerTest(TransactionTestCase, MonitorConsumerTest): + mode: StrategyMode = "parallel" + + def test(self) -> None: + monitor = self._create_monitor(slug="my-monitor") + + self.send_checkin(monitor.slug) + + checkin = MonitorCheckIn.objects.get(guid=self.guid) + assert checkin.status == CheckInStatus.OK + assert checkin.monitor_config == monitor.config + + monitor_environment = MonitorEnvironment.objects.get(id=checkin.monitor_environment.id) + 
assert monitor_environment.status == MonitorStatus.OK + assert monitor_environment.last_checkin == checkin.date_added + assert monitor_environment.next_checkin == monitor.get_next_expected_checkin( + checkin.date_added + ) + assert monitor_environment.next_checkin_latest == monitor.get_next_expected_checkin_latest( + checkin.date_added + ) + + # Process another check-in to verify we set an expected time for the next check-in + self.send_checkin(monitor.slug) + checkin = MonitorCheckIn.objects.get(guid=self.guid) + assert checkin.expected_time == monitor_environment.next_checkin + assert checkin.trace_id.hex == self.trace_id + +class SynchronousMonitorConsumerTest(MonitorConsumerTest, TestCase): def send_clock_pulse( self, ts: datetime | None = None, From 4ff02060c13d4e4c984e649afc3841913146055e Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:55:10 -0800 Subject: [PATCH 063/357] fix(superuser): allow superusers to invite members in other orgs (#64686) --- .../modals/inviteMembersModal/index.spec.tsx | 25 +++++++++++++++++++ .../modals/inviteMembersModal/index.tsx | 3 ++- .../inviteMembersModalview.tsx | 2 +- 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/static/app/components/modals/inviteMembersModal/index.spec.tsx b/static/app/components/modals/inviteMembersModal/index.spec.tsx index 8bf27fb8cde7f5..c2e7220e8c19da 100644 --- a/static/app/components/modals/inviteMembersModal/index.spec.tsx +++ b/static/app/components/modals/inviteMembersModal/index.spec.tsx @@ -9,6 +9,7 @@ import {textWithMarkupMatcher} from 'sentry-test/utils'; import {makeCloseButton} from 'sentry/components/globalModal/components'; import InviteMembersModal from 'sentry/components/modals/inviteMembersModal'; +import {ORG_ROLES} from 'sentry/constants'; import TeamStore from 'sentry/stores/teamStore'; import useOrganization from 'sentry/utils/useOrganization'; @@ -72,6 +73,30 @@ describe('InviteMembersModal', function () { expect(screen.getByRole('menuitemradio', {name: 'Member'})).toBeChecked(); }); + it('renders for superuser', async function () { + jest.mock('sentry/utils/isActiveSuperuser', () => ({ + isActiveSuperuser: jest.fn(), + })); + + MockApiClient.addMockResponse({ + url: `/organizations/${org.slug}/members/me/`, + method: 'GET', + status: 404, + }); + + jest.mocked(useOrganization).mockReturnValue(org); + render(); + + await waitFor(() => { + // Starts with one invite row + expect(screen.getByRole('listitem')).toBeInTheDocument(); + }); + + await userEvent.click(screen.getByRole('textbox', {name: 'Role'})); + expect(screen.getAllByRole('menuitemradio')).toHaveLength(ORG_ROLES.length); + expect(screen.getByRole('menuitemradio', {name: 'Member'})).toBeChecked(); + }); + it('renders without organization.access', async function () { const organization = OrganizationFixture({access: undefined}); jest.mocked(useOrganization).mockReturnValue(organization); diff --git a/static/app/components/modals/inviteMembersModal/index.tsx b/static/app/components/modals/inviteMembersModal/index.tsx index 7d46602b0c9ac0..1053edfead21a3 100644 --- a/static/app/components/modals/inviteMembersModal/index.tsx +++ b/static/app/components/modals/inviteMembersModal/index.tsx @@ -10,6 +10,7 @@ import useInviteModal from 'sentry/components/modals/inviteMembersModal/useInvit import {InviteModalHook} from 'sentry/components/modals/memberInviteModalCustomization'; import {t} from 'sentry/locale'; import {trackAnalytics} from 'sentry/utils/analytics'; +import 
{isActiveSuperuser} from 'sentry/utils/isActiveSuperuser';
 import useOrganization from 'sentry/utils/useOrganization';
 
 interface InviteMembersModalProps extends ModalRenderProps {
@@ -51,7 +52,7 @@ function InviteMembersModal({
     return ;
   }
 
-  if (memberResult.isError) {
+  if (memberResult.isError && !isActiveSuperuser()) {
     return (
       void;
   reset: () => void;

From ea99fa3668e12ef72ebc3f0f1d0db3541bc28b6b Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Tue, 6 Feb 2024 11:56:12 -0800
Subject: [PATCH 064/357] fix(group-attributes): Avoid timeout when backfilling group table (#64688)

We hit a query timeout when trying to get the accurate count of rows in the group table. Use this approximate progress bar instead.

---
 src/sentry/migrations/0641_backfill_group_attributes.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sentry/migrations/0641_backfill_group_attributes.py b/src/sentry/migrations/0641_backfill_group_attributes.py
index 92b125d7abf677..2e0dad0d1441c4 100644
--- a/src/sentry/migrations/0641_backfill_group_attributes.py
+++ b/src/sentry/migrations/0641_backfill_group_attributes.py
@@ -12,7 +12,7 @@
 from sentry.new_migrations.migrations import CheckedMigration
 from sentry.utils import redis
 from sentry.utils.iterators import chunked
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
+from sentry.utils.query import RangeQuerySetWrapperWithProgressBarApprox
 
 CHUNK_SIZE = 10000
 
@@ -130,7 +130,7 @@ def backfill_group_attributes_to_snuba(apps, schema_editor):
     progress_id = int(redis_client.get(backfill_key) or 0)
 
     for group_ids in chunked(
-        RangeQuerySetWrapperWithProgressBar(
+        RangeQuerySetWrapperWithProgressBarApprox(
            Group.objects.filter(id__gt=progress_id).values_list("id", flat=True),
             step=CHUNK_SIZE,
             result_value_getter=lambda item: item,

From 6887c9c9b4980357e475a18cb28da668399cba7f Mon Sep 17 00:00:00 2001
From: Yagiz Nizipli
Date: Tue, 6 Feb 2024 14:59:28 -0500
Subject: [PATCH 065/357] feat: move `noNodejsModules` to biome (#64682)

Since Biome supports `no-nodejs-modules`, we can move it to Biome, saving 1.5 seconds in the process.

---
 .eslintrc.js                            |  8 +++++++-
 api-docs/index.ts                       |  5 ++---
 api-docs/openapi-diff.ts                |  5 ++---
 api-docs/watch.ts                       |  8 ++++----
 biome.json                              | 25 ++++++++++++++++++++++++-
 build-utils/last-built-plugin.ts        |  5 ++---
 build-utils/sentry-instrumentation.ts   |  8 +++-----
 config/webpack.chartcuterie.config.ts   |  5 ++---
 jest.config.ts                          |  1 -
 scripts/build-js-loader.ts              |  2 --
 scripts/extract-android-device-names.js |  2 +-
 scripts/extract-ios-device-names.ts     |  5 ++---
 scripts/test.js                         |  1 -
 tests/js/jest-pegjs-transform.js        |  3 +--
 tests/js/sentry-test/loadFixtures.ts    |  6 ++----
 tests/js/setup.ts                       |  4 +---
 tests/js/test-balancer/index.js         |  5 ++---
 webpack.config.ts                       |  1 -
 18 files changed, 55 insertions(+), 44 deletions(-)

diff --git a/.eslintrc.js b/.eslintrc.js
index d3d02b798cc6f6..f80c3cdcf4126e 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -1,5 +1,4 @@
 /* eslint-env node */
-/* eslint import/no-nodejs-modules:0 */
 
 const isRelaxed = !!process.env.SENTRY_ESLINT_RELAXED;
 const isCi = !!process.env.CI;
@@ -32,6 +31,9 @@ module.exports = {
       {additionalHooks: ADDITIONAL_HOOKS_TO_CHECK_DEPS_FOR},
     ],
     ...(!isRelaxed && !isCi ? strictRulesNotCi : {}),
+
+    // TODO(@anonrig): Remove these rules from eslint-sentry-config.
+    'import/no-nodejs-modules': 'off',
   },
   // JSON file formatting is handled by Biome. ESLint should not be linting
   // and formatting these files.
@@ -40,6 +42,10 @@ module.exports = { { files: ['tests/js/**/*.{ts,js}'], extends: ['plugin:testing-library/react', 'sentry-app/strict'], + rules: { + // TODO(@anonrig): Remove these rules from eslint-sentry-config. + 'import/no-nodejs-modules': 'off', + }, }, { files: ['*.ts', '*.tsx'], diff --git a/api-docs/index.ts b/api-docs/index.ts index 7e026ba7afc402..cd54965f639942 100644 --- a/api-docs/index.ts +++ b/api-docs/index.ts @@ -1,9 +1,8 @@ /* global process */ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ /* eslint import/no-unresolved:0 */ -import fs from 'fs'; -import path from 'path'; +import fs from 'node:fs'; +import path from 'node:path'; import yaml from 'js-yaml'; import JsonRefs from 'json-refs'; diff --git a/api-docs/openapi-diff.ts b/api-docs/openapi-diff.ts index 0dc8c9eb16148a..fbbe89e9b2fa81 100644 --- a/api-docs/openapi-diff.ts +++ b/api-docs/openapi-diff.ts @@ -1,9 +1,8 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ /* eslint import/no-unresolved:0 */ -import fs from 'fs'; -import https from 'https'; +import fs from 'node:fs'; +import https from 'node:https'; import yaml from 'js-yaml'; import jsonDiff from 'json-diff'; diff --git a/api-docs/watch.ts b/api-docs/watch.ts index 5b602a20766701..7820c049b0132c 100644 --- a/api-docs/watch.ts +++ b/api-docs/watch.ts @@ -1,8 +1,8 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0, import/no-unresolved:0, no-console:0 */ -import {spawn} from 'child_process'; -import {join} from 'path'; -import {stderr, stdout} from 'process'; +/* eslint import/no-unresolved:0, no-console:0 */ +import {spawn} from 'node:child_process'; +import {join} from 'node:path'; +import {stderr, stdout} from 'node:process'; import sane from 'sane'; diff --git a/biome.json b/biome.json index d64287b829f90c..ba3a9562556eff 100644 --- a/biome.json +++ b/biome.json @@ -15,6 +15,7 @@ "recommended": false, "nursery": { "noDuplicateJsonKeys": "error", + "noNodejsModules": "error", "useExportType": "error", "useImportType": "error" } @@ -60,5 +61,27 @@ "allowComments": true, "allowTrailingCommas": true } - } + }, + "overrides": [ + { + "include": [ + "api-docs/*.ts", + "build-utils/*.ts", + "config/webpack.chartcuterie.config.ts", + "scripts", + "tests/js/sentry-test/loadFixtures.ts", + "tests/js/jest-pegjs-transform.js", + "tests/js/setup.ts", + "tests/js/test-balancer/index.js", + "*.config.ts" + ], + "linter": { + "rules": { + "nursery": { + "noNodejsModules": "off" + } + } + } + } + ] } diff --git a/build-utils/last-built-plugin.ts b/build-utils/last-built-plugin.ts index 0b7c6ba7f464d4..5dacbeed9e94f3 100644 --- a/build-utils/last-built-plugin.ts +++ b/build-utils/last-built-plugin.ts @@ -1,8 +1,7 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ -import fs from 'fs'; -import path from 'path'; +import fs from 'node:fs'; +import path from 'node:path'; import type webpack from 'webpack'; diff --git a/build-utils/sentry-instrumentation.ts b/build-utils/sentry-instrumentation.ts index cfc3dcba4794d6..c72afb3ae58d8a 100644 --- a/build-utils/sentry-instrumentation.ts +++ b/build-utils/sentry-instrumentation.ts @@ -1,9 +1,7 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ - -import crypto from 'crypto'; -import https from 'https'; -import os from 'os'; +import crypto from 'node:crypto'; +import https from 'node:https'; +import os from 'node:os'; import type Sentry from '@sentry/node'; import type {Transaction} from '@sentry/types'; diff --git a/config/webpack.chartcuterie.config.ts 
b/config/webpack.chartcuterie.config.ts index 16ddd5cf233aac..f6a7bfd0a11f37 100644 --- a/config/webpack.chartcuterie.config.ts +++ b/config/webpack.chartcuterie.config.ts @@ -1,8 +1,7 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ -import childProcess from 'child_process'; -import path from 'path'; +import childProcess from 'node:child_process'; +import path from 'node:path'; import webpack from 'webpack'; diff --git a/jest.config.ts b/jest.config.ts index d2c91c886ba09f..91fa1b1bd7559d 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -1,5 +1,4 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ import type {Config} from '@jest/types'; import path from 'node:path'; import process from 'node:process'; diff --git a/scripts/build-js-loader.ts b/scripts/build-js-loader.ts index e078768c6312b7..728327f83e5ec9 100644 --- a/scripts/build-js-loader.ts +++ b/scripts/build-js-loader.ts @@ -1,6 +1,4 @@ /* eslint-disable no-console */ -/* eslint import/no-nodejs-modules:0 */ - import fs from 'node:fs'; import {minify} from 'terser'; import * as ts from 'typescript'; diff --git a/scripts/extract-android-device-names.js b/scripts/extract-android-device-names.js index 3324992c650705..6740dad7cbc931 100644 --- a/scripts/extract-android-device-names.js +++ b/scripts/extract-android-device-names.js @@ -1,5 +1,5 @@ const csv = require('csv-parser'); -const fs = require('fs'); +const fs = require('node:fs'); const transformResults = res => { const deviceMapping = {}; diff --git a/scripts/extract-ios-device-names.ts b/scripts/extract-ios-device-names.ts index eb782cae9749f6..c37765be547f33 100644 --- a/scripts/extract-ios-device-names.ts +++ b/scripts/extract-ios-device-names.ts @@ -1,7 +1,6 @@ -/* eslint-disable import/no-nodejs-modules */ /* eslint-env node */ -import fs from 'fs'; -import path from 'path'; +import fs from 'node:fs'; +import path from 'node:path'; import prettier from 'prettier'; diff --git a/scripts/test.js b/scripts/test.js index 8be14ce820e59d..38a2e54a72be49 100644 --- a/scripts/test.js +++ b/scripts/test.js @@ -1,5 +1,4 @@ /* global process */ -/* eslint import/no-nodejs-modules:0 */ // Do this as the first thing so that any code reading it knows the right env. 
// process.env.BABEL_ENV = 'test'; diff --git a/tests/js/jest-pegjs-transform.js b/tests/js/jest-pegjs-transform.js index 420d48e969c956..e6e35f431c668c 100644 --- a/tests/js/jest-pegjs-transform.js +++ b/tests/js/jest-pegjs-transform.js @@ -1,7 +1,6 @@ /* eslint-env node */ -// eslint-disable-next-line import/no-nodejs-modules -const crypto = require('crypto'); +const crypto = require('node:crypto'); const peg = require('pegjs'); function getCacheKey(fileData, _filePath, config, _options) { diff --git a/tests/js/sentry-test/loadFixtures.ts b/tests/js/sentry-test/loadFixtures.ts index b4cebfb8de1a8c..d7d51ebec4fee8 100644 --- a/tests/js/sentry-test/loadFixtures.ts +++ b/tests/js/sentry-test/loadFixtures.ts @@ -1,8 +1,6 @@ /* global __dirname */ -/* eslint import/no-nodejs-modules:0 */ - -import fs from 'fs'; -import path from 'path'; +import fs from 'node:fs'; +import path from 'node:path'; const FIXTURES_ROOT = path.join(__dirname, '../../../fixtures'); diff --git a/tests/js/setup.ts b/tests/js/setup.ts index 1d975f97482863..78c599dd406287 100644 --- a/tests/js/setup.ts +++ b/tests/js/setup.ts @@ -1,10 +1,8 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ -import {TextDecoder, TextEncoder} from 'util'; - import type {ReactElement} from 'react'; import {configure as configureRtl} from '@testing-library/react'; // eslint-disable-line no-restricted-imports import MockDate from 'mockdate'; +import {TextDecoder, TextEncoder} from 'node:util'; import {ConfigFixture} from 'sentry-fixture/config'; // eslint-disable-next-line jest/no-mocks-import diff --git a/tests/js/test-balancer/index.js b/tests/js/test-balancer/index.js index 982710495b26eb..2d31daf423a0fa 100644 --- a/tests/js/test-balancer/index.js +++ b/tests/js/test-balancer/index.js @@ -1,7 +1,6 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ -const fs = require('fs'); -const path = require('path'); +const fs = require('node:fs'); +const path = require('node:path'); module.exports = results => { if (!results.success) { diff --git a/webpack.config.ts b/webpack.config.ts index 606a91b597cff2..2d30f38cc89a0f 100644 --- a/webpack.config.ts +++ b/webpack.config.ts @@ -1,5 +1,4 @@ /* eslint-env node */ -/* eslint import/no-nodejs-modules:0 */ import {WebpackReactSourcemapsPlugin} from '@acemarke/react-prod-sourcemaps'; import {RsdoctorWebpackPlugin} from '@rsdoctor/webpack-plugin'; From 250b7cb34e51afb15132f5a08ab6cc310b3196ab Mon Sep 17 00:00:00 2001 From: Hubert Deng Date: Tue, 6 Feb 2024 12:08:28 -0800 Subject: [PATCH 066/357] feat(beacon): Add type of events accepted in beacon last 24h (#64508) This adds functionality to start sending self hosted beacon statistics for types of events that are accepted per sentry instance --- src/sentry/tasks/beacon.py | 36 ++++++++++++++++++++++++++ tests/sentry/tasks/test_beacon.py | 43 +++++++++++++++++++++++++++++++ 2 files changed, 79 insertions(+) diff --git a/src/sentry/tasks/beacon.py b/src/sentry/tasks/beacon.py index 683d75f23a91df..4c896ca43c802d 100644 --- a/src/sentry/tasks/beacon.py +++ b/src/sentry/tasks/beacon.py @@ -13,6 +13,12 @@ from sentry.http import safe_urlopen, safe_urlread from sentry.locks import locks from sentry.silo.base import SiloMode +from sentry.snuba.outcomes import ( + QueryDefinition, + massage_outcomes_result, + run_outcomes_query_timeseries, + run_outcomes_query_totals, +) from sentry.tasks.base import instrumented_task from sentry.tsdb.base import TSDBModel from sentry.utils import json @@ -68,6 +74,30 @@ def get_events_24h() -> int: 
return sum_events +def get_category_event_count_24h() -> dict[str, int]: + from sentry.models.organization import Organization + + organization_ids = list(Organization.objects.all().values_list("id", flat=True)) + event_categories_count = {"error": 0, "replay": 0, "transaction": 0, "profile": 0, "monitor": 0} + for organization_id in organization_ids: + # Utilize the outcomes dataset to send snql queries for event stats + query = QueryDefinition( + fields=["sum(quantity)"], + organization_id=organization_id, + stats_period="24h", + group_by=["category"], + outcome=["accepted"], + ) + tenant_ids = {"organization_id": organization_id} + result_totals = run_outcomes_query_totals(query, tenant_ids=tenant_ids) + result_timeseries = run_outcomes_query_timeseries(query, tenant_ids=tenant_ids) + result = massage_outcomes_result(query, result_totals, result_timeseries) + for group in result["groups"]: + if group["by"]["category"] in event_categories_count.keys(): + event_categories_count[group["by"]["category"]] += group["totals"]["sum(quantity)"] + return event_categories_count + + @instrumented_task(name="sentry.tasks.send_beacon", queue="update") def send_beacon(): """ @@ -90,6 +120,7 @@ def send_beacon(): # we need this to be explicitly configured and it defaults to None, # which is the same as False anonymous = options.get("beacon.anonymous") is not False + event_categories_count = get_category_event_count_24h() payload = { "install_id": install_id, @@ -102,6 +133,11 @@ def send_beacon(): "teams": Team.objects.count(), "organizations": Organization.objects.count(), "events.24h": get_events_24h(), + "errors.24h": event_categories_count["error"], + "transactions.24h": event_categories_count["transaction"], + "replays.24h": event_categories_count["replay"], + "profiles.24h": event_categories_count["profile"], + "monitors.24h": event_categories_count["monitor"], }, "packages": get_all_package_versions(), "anonymous": anonymous, diff --git a/tests/sentry/tasks/test_beacon.py b/tests/sentry/tasks/test_beacon.py index a679edd74cccc4..1b8352a5abdf7a 100644 --- a/tests/sentry/tasks/test_beacon.py +++ b/tests/sentry/tasks/test_beacon.py @@ -33,6 +33,39 @@ def setUp(self): }, 5, # Num of outcomes to be stored ) + self.store_outcomes( + { + "org_id": self.organization.id, + "timestamp": timezone.now() - timedelta(hours=1), + "project_id": self.project.id, + "outcome": Outcome.ACCEPTED, + "reason": "none", + "category": DataCategory.REPLAY, + "quantity": 1, + }, + ) + self.store_outcomes( + { + "org_id": self.organization.id, + "timestamp": timezone.now() - timedelta(hours=1), + "project_id": self.project.id, + "outcome": Outcome.ACCEPTED, + "reason": "none", + "category": DataCategory.TRANSACTION, + "quantity": 2, + }, + ) + self.store_outcomes( + { + "org_id": self.organization.id, + "timestamp": timezone.now() - timedelta(hours=1), + "project_id": self.project.id, + "outcome": Outcome.ACCEPTED, + "reason": "none", + "category": DataCategory.PROFILE, + "quantity": 3, + }, + ) self.org2 = self.create_organization() self.project2 = self.create_project( name="bar", teams=[self.create_team(organization=self.org2, members=[self.user])] @@ -81,6 +114,11 @@ def test_simple(self, safe_urlread, safe_urlopen, mock_get_all_package_versions) "projects": 2, "teams": 2, "events.24h": 8, # We expect the number of events to be the sum of events from two orgs. First org has 5 events while the second org has 3 events. 
+                "errors.24h": 8,
+                "transactions.24h": 2,
+                "replays.24h": 1,
+                "profiles.24h": 3,
+                "monitors.24h": 0,
             },
             "anonymous": True,
             "packages": mock_get_all_package_versions.return_value,

From 862a4fdd29c38af6a5d9f2baf2bd2fc12d996ea1 Mon Sep 17 00:00:00 2001
From: Dalitso Banda
Date: Tue, 6 Feb 2024 12:14:56 -0800
Subject: [PATCH 067/357] Revert "[Hack] Add hashing indexer and experimental indexer pipeline … (#64627)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

…(#63641)"

This reverts commit 96e1ef540bdacc3538e473a0b7a0d68c41cb63ba.

We are done with the experiment. These changes are not needed anymore.
---
 src/sentry/consumers/__init__.py           | 11 -----
 src/sentry/sentry_metrics/configuration.py | 21 ---------
 .../consumers/indexer/processing.py        |  2 -
 src/sentry/sentry_metrics/indexer/hash.py  | 43 -------------------
 .../indexer/limiters/cardinality.py        |  1 -
 5 files changed, 78 deletions(-)
 delete mode 100644 src/sentry/sentry_metrics/indexer/hash.py

diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py
index 70025a73e3d803..543af02ff2f458 100644
--- a/src/sentry/consumers/__init__.py
+++ b/src/sentry/consumers/__init__.py
@@ -292,17 +292,6 @@ def ingest_monitors_options() -> list[click.Option]:
         "dlq_max_invalid_ratio": 0.01,
         "dlq_max_consecutive_count": 1000,
     },
-    "ingest-generic-metrics-experimental": {
-        "topic": settings.KAFKA_INGEST_PERFORMANCE_METRICS,
-        "strategy_factory": "sentry.sentry_metrics.consumers.indexer.parallel.MetricsConsumerStrategyFactory",
-        "click_options": [
-            *_METRICS_INDEXER_OPTIONS,
-            click.Option(["--indexer-db"], default="experimental"),
-        ],
-        "static_args": {
-            "ingest_profile": "experimental",
-        },
-    },
     "generic-metrics-last-seen-updater": {
         "topic": settings.KAFKA_SNUBA_GENERIC_METRICS,
         "strategy_factory": "sentry.sentry_metrics.consumers.last_seen_updater.LastSeenUpdaterStrategyFactory",
diff --git a/src/sentry/sentry_metrics/configuration.py b/src/sentry/sentry_metrics/configuration.py
index c5d270378847ac..eddebed13a3220 100644
--- a/src/sentry/sentry_metrics/configuration.py
+++ b/src/sentry/sentry_metrics/configuration.py
@@ -18,7 +18,6 @@ class UseCaseKey(Enum):
 
     RELEASE_HEALTH = "release-health"
     PERFORMANCE = "performance"
-    EXPERIMENTAL = "experimental"
 
 
 # Rate limiter namespaces, the postgres (PG)
@@ -28,7 +27,6 @@ class UseCaseKey(Enum):
 PERFORMANCE_PG_NAMESPACE = "performance"
 RELEASE_HEALTH_CS_NAMESPACE = "releasehealth.cs"
 PERFORMANCE_CS_NAMESPACE = "performance.cs"
-REBALANCING_EXP_NAMESPACE = "rebalancing.experiment"
 
 RELEASE_HEALTH_SCHEMA_VALIDATION_RULES_OPTION_NAME = (
     "sentry-metrics.indexer.release-health.schema-validation-rules"
@@ -41,7 +39,6 @@ class UseCaseKey(Enum):
 class IndexerStorage(Enum):
     POSTGRES = "postgres"
     MOCK = "mock"
-    EXPERIMENRAL = "experimental"
 
 
 @dataclass(frozen=True)
@@ -112,24 +109,6 @@ def get_ingest_config(
         )
     )
 
-    _register_ingest_config(
-        MetricsIngestConfiguration(
-            db_backend=IndexerStorage.EXPERIMENRAL,
-            db_backend_options={},
input_topic=settings.KAFKA_INGEST_PERFORMANCE_METRICS, - output_topic="snuba-generic-metrics-experimental", - use_case_id=UseCaseKey.EXPERIMENTAL, - internal_metrics_tag="rebalance-exp", - writes_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_WRITES_LIMITER_OPTIONS_PERFORMANCE, - writes_limiter_namespace=REBALANCING_EXP_NAMESPACE, - cardinality_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_CARDINALITY_LIMITER_OPTIONS_PERFORMANCE, - cardinality_limiter_namespace=REBALANCING_EXP_NAMESPACE, - is_output_sliced=settings.SENTRY_METRICS_INDEXER_ENABLE_SLICED_PRODUCER, - should_index_tag_values=False, - schema_validation_rule_option_name=GENERIC_METRICS_SCHEMA_VALIDATION_RULES_OPTION_NAME, - ) - ) - if (use_case_key, db_backend) == (UseCaseKey.RELEASE_HEALTH, IndexerStorage.MOCK): _register_ingest_config( MetricsIngestConfiguration( diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py index 30c29ea400e51e..da9c4ccea6494d 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/processing.py +++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py @@ -22,7 +22,6 @@ ReleaseHealthTagsValidator, ) from sentry.sentry_metrics.indexer.base import StringIndexer -from sentry.sentry_metrics.indexer.hash import StaticSha1Indexer from sentry.sentry_metrics.indexer.limiters.cardinality import cardinality_limiter_factory from sentry.sentry_metrics.indexer.mock import MockIndexer from sentry.sentry_metrics.indexer.postgres.postgres_v2 import PostgresIndexer @@ -33,7 +32,6 @@ STORAGE_TO_INDEXER: Mapping[IndexerStorage, Callable[[], StringIndexer]] = { IndexerStorage.POSTGRES: PostgresIndexer, IndexerStorage.MOCK: MockIndexer, - IndexerStorage.EXPERIMENRAL: StaticSha1Indexer, } INGEST_CODEC: sentry_kafka_schemas.codecs.Codec[Any] = sentry_kafka_schemas.get_codec( diff --git a/src/sentry/sentry_metrics/indexer/hash.py b/src/sentry/sentry_metrics/indexer/hash.py deleted file mode 100644 index 991a24ef6ac86f..00000000000000 --- a/src/sentry/sentry_metrics/indexer/hash.py +++ /dev/null @@ -1,43 +0,0 @@ -import hashlib -from collections.abc import Mapping - -from django.conf import settings - -from sentry.sentry_metrics.indexer.base import ( - FetchType, - OrgId, - StringIndexer, - UseCaseKeyCollection, - UseCaseKeyResult, - UseCaseKeyResults, -) -from sentry.sentry_metrics.indexer.cache import CachingIndexer, StringIndexerCache -from sentry.sentry_metrics.indexer.strings import StaticStringIndexer -from sentry.sentry_metrics.use_case_id_registry import UseCaseID - - -class Sha1Indexer(StringIndexer): - def __init__(self) -> None: - super().__init__() - - def bulk_record( - self, strings: Mapping[UseCaseID, Mapping[OrgId, set[str]]] - ) -> UseCaseKeyResults: - keys = UseCaseKeyCollection(strings) - res = UseCaseKeyResults() - for use_case_id, org_id, string in keys.as_tuples(): - id = int(hashlib.sha1(string.encode("utf-8")).hexdigest(), 16) % 10**64 - res.add_use_case_key_result( - UseCaseKeyResult(use_case_id, org_id, string, id), FetchType.DB_READ - ) - return res - - -indexer_cache = StringIndexerCache( - **settings.SENTRY_STRING_INDEXER_CACHE_OPTIONS, partition_key="exp" -) - - -class StaticSha1Indexer(StaticStringIndexer): - def __init__(self) -> None: - super().__init__(CachingIndexer(indexer_cache, Sha1Indexer())) diff --git a/src/sentry/sentry_metrics/indexer/limiters/cardinality.py b/src/sentry/sentry_metrics/indexer/limiters/cardinality.py index d6347e56148f68..6aedcd91f37200 100644 --- 
a/src/sentry/sentry_metrics/indexer/limiters/cardinality.py +++ b/src/sentry/sentry_metrics/indexer/limiters/cardinality.py @@ -91,7 +91,6 @@ def check_cardinality_limits( # for each metric path. ultimately, this can be moved into the # loop below to make rollout options occur on a per use case-basis rollout_option = { - UseCaseKey.EXPERIMENTAL: "sentry-metrics.cardinality-limiter.orgs-rollout-rate", UseCaseKey.PERFORMANCE: "sentry-metrics.cardinality-limiter.orgs-rollout-rate", UseCaseKey.RELEASE_HEALTH: "sentry-metrics.cardinality-limiter-rh.orgs-rollout-rate", }[metric_path_key] From 96a147c3c93737bd9be071e57d4023d9e50cc626 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Tue, 6 Feb 2024 15:22:11 -0500 Subject: [PATCH 068/357] feat(similarity-embedding): Make similarity embeddings API call (#64482) Call similarity embeddings API and show Would Group column Co-authored-by: Scott Cooper --- static/app/stores/groupingStore.tsx | 18 ++- .../groupSimilarIssues/index.spec.tsx | 130 +++++++++++++++++- .../similarStackTrace/index.tsx | 31 +++-- .../similarStackTrace/item.tsx | 35 +++-- .../similarStackTrace/list.tsx | 9 +- .../similarStackTrace/toolbar.tsx | 14 +- 6 files changed, 197 insertions(+), 40 deletions(-) diff --git a/static/app/stores/groupingStore.tsx b/static/app/stores/groupingStore.tsx index f38b143f4a8c45..738a332027a58a 100644 --- a/static/app/stores/groupingStore.tsx +++ b/static/app/stores/groupingStore.tsx @@ -47,7 +47,7 @@ type State = { unmergeState: Map; }; -type ScoreMap = Record; +type ScoreMap = Record; type ApiFingerprint = { id: string; @@ -88,18 +88,20 @@ export type SimilarItem = { aggregate?: { exception: number; message: number; + shouldBeGrouped?: string; }; score?: Record; scoresByInterface?: { exception: Array<[string, number | null]>; message: Array<[string, any | null]>; + shouldBeGrouped?: Array<[string, string | null]>; }; }; type ResponseProcessors = { merged: (item: ApiFingerprint[]) => Fingerprint[]; similar: (data: [Group, ScoreMap]) => { - aggregate: Record; + aggregate: Record; isBelowThreshold: boolean; issue: Group; score: ScoreMap; @@ -328,8 +330,15 @@ const storeConfig: GroupingStoreDefinition = { return newItems; }, similar: ([issue, scoreMap]) => { + // Check which similarity endpoint is being used + const hasSimilarityEmbeddingsFeature = requests[0]?.endpoint.includes( + 'similar-issues-embeddings' + ); + // Hide items with a low scores - const isBelowThreshold = checkBelowThreshold(scoreMap); + const isBelowThreshold = hasSimilarityEmbeddingsFeature + ? false + : checkBelowThreshold(scoreMap); // List of scores indexed by interface (i.e., exception and message) // Note: for v2, the interface is always "similarity". When v2 is @@ -357,8 +366,7 @@ const storeConfig: GroupingStoreDefinition = { const scores = allScores.filter(([, score]) => score !== null); const avg = scores.reduce((sum, [, score]) => sum + score, 0) / scores.length; - - acc[interfaceName] = avg; + acc[interfaceName] = hasSimilarityEmbeddingsFeature ? 
scores[0][1] : avg; return acc; }, {}); diff --git a/static/app/views/issueDetails/groupSimilarIssues/index.spec.tsx b/static/app/views/issueDetails/groupSimilarIssues/index.spec.tsx index a7f3d152b98727..b26c3ebf49a0d9 100644 --- a/static/app/views/issueDetails/groupSimilarIssues/index.spec.tsx +++ b/static/app/views/issueDetails/groupSimilarIssues/index.spec.tsx @@ -1,5 +1,4 @@ import {GroupsFixture} from 'sentry-fixture/groups'; -import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {RouterContextFixture} from 'sentry-fixture/routerContextFixture'; import {RouterFixture} from 'sentry-fixture/routerFixture'; @@ -147,16 +146,70 @@ describe('Issues Similar View', function () { ); renderGlobalModal(); - await userEvent.click(await screen.findByTestId('similar-item-row')); + await selectNthSimilarItem(0); expect(screen.getByText('Merge (1)')).toBeInTheDocument(); // Correctly show "Merge (0)" when the item is un-clicked - await userEvent.click(await screen.findByTestId('similar-item-row')); + await selectNthSimilarItem(0); expect(screen.getByText('Merge (0)')).toBeInTheDocument(); }); +}); + +describe('Issues Similar Embeddings View', function () { + let mock; + + const project = ProjectFixture({ + features: ['similarity-view', 'similarity-embeddings'], + }); + + const routerContext = RouterContextFixture([ + { + router: { + ...RouterFixture(), + params: {orgId: 'org-slug', projectId: 'project-slug', groupId: 'group-id'}, + }, + }, + ]); + + const similarEmbeddingsScores = [ + {exception: 0.9987, message: 0.3748, shouldBeGrouped: 'Yes'}, + {exception: 0.9985, message: 0.3738, shouldBeGrouped: 'Yes'}, + {exception: 0.7384, message: 0.3743, shouldBeGrouped: 'No'}, + {exception: 0.3849, message: 0.4738, shouldBeGrouped: 'No'}, + ]; + + const mockData = { + simlarEmbeddings: GroupsFixture().map((issue, i) => [ + issue, + similarEmbeddingsScores[i], + ]), + }; + + const router = RouterFixture(); - it('renders all filtered issues with issues-similarity-embeddings flag', async function () { - const features = ['issues-similarity-embeddings']; + beforeEach(function () { + mock = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/issues/group-id/similar-issues-embeddings/?k=5&threshold=0.99', + body: mockData.simlarEmbeddings, + }); + }); + + afterEach(() => { + MockApiClient.clearMockResponses(); + jest.clearAllMocks(); + }); + + const selectNthSimilarItem = async (index: number) => { + const items = await screen.findAllByTestId('similar-item-row'); + + const item = items.at(index); + + expect(item).toBeDefined(); + + await userEvent.click(item!); + }; + + it('renders with mocked data', async function () { render( , - {context: routerContext, organization: OrganizationFixture({features})} + {context: routerContext} ); expect(screen.getByTestId('loading-indicator')).toBeInTheDocument(); @@ -175,5 +228,70 @@ describe('Issues Similar View', function () { await waitFor(() => expect(mock).toHaveBeenCalled()); expect(screen.queryByText('Show 3 issues below threshold')).not.toBeInTheDocument(); + expect(screen.queryByText('Would Group')).toBeInTheDocument(); + }); + + it('can merge and redirect to new parent', async function () { + const merge = MockApiClient.addMockResponse({ + method: 'PUT', + url: '/projects/org-slug/project-slug/issues/', + body: { + merge: {children: ['123'], parent: '321'}, + }, + }); + + render( + , + {context: routerContext} + ); + renderGlobalModal(); + + await selectNthSimilarItem(0); + await 
userEvent.click(await screen.findByRole('button', {name: 'Merge (1)'})); + await userEvent.click(screen.getByRole('button', {name: 'Confirm'})); + + await waitFor(() => { + expect(merge).toHaveBeenCalledWith( + '/projects/org-slug/project-slug/issues/', + expect.objectContaining({ + data: {merge: 1}, + }) + ); + }); + + expect(MockNavigate).toHaveBeenCalledWith( + '/organizations/org-slug/issues/321/similar/' + ); + }); + + it('correctly shows merge count', async function () { + render( + , + {context: routerContext} + ); + renderGlobalModal(); + + await selectNthSimilarItem(0); + expect(screen.getByText('Merge (1)')).toBeInTheDocument(); + + // Correctly show "Merge (0)" when the item is un-clicked + await selectNthSimilarItem(0); + expect(screen.getByText('Merge (0)')).toBeInTheDocument(); }); }); diff --git a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/index.tsx b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/index.tsx index d29c56bae99ad0..7e608623dcb593 100644 --- a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/index.tsx +++ b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/index.tsx @@ -15,7 +15,6 @@ import GroupingStore from 'sentry/stores/groupingStore'; import {space} from 'sentry/styles/space'; import type {Project} from 'sentry/types'; import {useNavigate} from 'sentry/utils/useNavigate'; -import useOrganization from 'sentry/utils/useOrganization'; import usePrevious from 'sentry/utils/usePrevious'; import List from './list'; @@ -49,9 +48,8 @@ function SimilarStackTrace({params, location, project}: Props) { const navigate = useNavigate(); const prevLocationSearch = usePrevious(location.search); const hasSimilarityFeature = project.features.includes('similarity-view'); - const organization = useOrganization(); - const hasSimilarityEmbeddingsFeature = organization?.features?.includes( - 'issues-similarity-embeddings' + const hasSimilarityEmbeddingsFeature = project.features.includes( + 'similarity-embeddings' ); const fetchData = useCallback(() => { @@ -59,7 +57,17 @@ function SimilarStackTrace({params, location, project}: Props) { const reqs: Parameters[0] = []; - if (hasSimilarityFeature) { + if (hasSimilarityEmbeddingsFeature) { + reqs.push({ + endpoint: `/organizations/${orgId}/issues/${groupId}/similar-issues-embeddings/?${qs.stringify( + { + k: 5, + threshold: 0.99, + } + )}`, + dataKey: 'similar', + }); + } else if (hasSimilarityFeature) { reqs.push({ endpoint: `/organizations/${orgId}/issues/${groupId}/similar/?${qs.stringify({ ...location.query, @@ -70,7 +78,13 @@ function SimilarStackTrace({params, location, project}: Props) { } GroupingStore.onFetch(reqs); - }, [location.query, groupId, orgId, hasSimilarityFeature]); + }, [ + location.query, + groupId, + orgId, + hasSimilarityFeature, + hasSimilarityEmbeddingsFeature, + ]); const onGroupingChange = useCallback( ({ @@ -137,7 +151,8 @@ function SimilarStackTrace({params, location, project}: Props) { }, [params, location.query, items]); const hasSimilarItems = - hasSimilarityFeature && (items.similar.length > 0 || items.filtered.length > 0); + (hasSimilarityFeature || hasSimilarityEmbeddingsFeature) && + (items.similar.length > 0 || items.filtered.length > 0); return ( @@ -171,7 +186,6 @@ function SimilarStackTrace({params, location, project}: Props) { onMerge={handleMerge} orgId={orgId} project={project} - organization={organization} groupId={groupId} pageLinks={items.pageLinks} /> @@ -183,7 +197,6 @@ function SimilarStackTrace({params, 
location, project}: Props) { onMerge={handleMerge} orgId={orgId} project={project} - organization={organization} groupId={groupId} pageLinks={items.pageLinks} /> diff --git a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/item.tsx b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/item.tsx index fd8d02c2212dc2..373503a903c0fe 100644 --- a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/item.tsx +++ b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/item.tsx @@ -22,11 +22,11 @@ type Props = { groupId: Group['id']; issue: Group; orgId: Organization['id']; - organization: Organization; project: Project; aggregate?: { exception: number; message: number; + shouldBeGrouped?: string; }; score?: Record; scoresByInterface?: { @@ -95,12 +95,14 @@ class Item extends Component { }; render() { - const {aggregate, scoresByInterface, issue, organization} = this.props; + const {aggregate, scoresByInterface, issue, project} = this.props; const {visible, busy} = this.state; - const similarInterfaces = ['exception', 'message']; - const hasSimilarityEmbeddingsFeature = organization?.features?.includes( - 'issues-similarity-embeddings' + const hasSimilarityEmbeddingsFeature = project.features.includes( + 'similarity-embeddings' ); + const similarInterfaces = hasSimilarityEmbeddingsFeature + ? ['exception', 'message', 'shouldBeGrouped'] + : ['exception', 'message']; if (!visible) { return null; @@ -141,10 +143,17 @@ class Item extends Component { {similarInterfaces.map(interfaceName => { const avgScore = aggregate?.[interfaceName]; const scoreList = scoresByInterface?.[interfaceName] || []; - // Check for valid number (and not NaN) - const scoreValue = - typeof avgScore === 'number' && !Number.isNaN(avgScore) ? avgScore : 0; + // If hasSimilarityEmbeddingsFeature is on, avgScore can be a string + let scoreValue = avgScore; + if ( + (typeof avgScore !== 'string' && hasSimilarityEmbeddingsFeature) || + !hasSimilarityEmbeddingsFeature + ) { + // Check for valid number (and not NaN) + scoreValue = + typeof avgScore === 'number' && !Number.isNaN(avgScore) ? avgScore : 0; + } return ( {!hasSimilarityEmbeddingsFeature && ( @@ -154,7 +163,11 @@ class Item extends Component { )} - {hasSimilarityEmbeddingsFeature &&
    {scoreValue.toFixed(4)}
    } + {hasSimilarityEmbeddingsFeature && ( +
    + {typeof scoreValue === 'number' ? scoreValue.toFixed(4) : scoreValue} +
    + )}
    ); })} @@ -181,8 +194,8 @@ const Columns = styled('div')` display: flex; align-items: center; flex-shrink: 0; - min-width: 300px; - width: 300px; + min-width: 350px; + width: 350px; `; const columnStyle = css` diff --git a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/list.tsx b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/list.tsx index 6a548ec25851d1..015cd8053dc871 100644 --- a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/list.tsx +++ b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/list.tsx @@ -24,7 +24,6 @@ type Props = { items: Array; onMerge: () => void; orgId: Organization['id']; - organization: Organization; pageLinks: string | null; project: Project; } & DefaultProps; @@ -45,7 +44,6 @@ function List({ orgId, groupId, project, - organization, items, filteredItems = [], pageLinks, @@ -56,8 +54,8 @@ function List({ const hasHiddenItems = !!filteredItems.length; const hasResults = items.length > 0 || hasHiddenItems; const itemsWithFiltered = items.concat(showAllItems ? filteredItems : []); - const hasSimilarityEmbeddingsFeature = organization?.features?.includes( - 'issues-similarity-embeddings' + const hasSimilarityEmbeddingsFeature = project.features.includes( + 'similarity-embeddings' ); if (!hasResults) { @@ -72,7 +70,7 @@ function List({ )} - + {itemsWithFiltered.map(item => ( @@ -81,7 +79,6 @@ function List({ orgId={orgId} groupId={groupId} project={project} - organization={organization} {...item} /> ))} diff --git a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx index 3330a82ceda32d..6968b65d12a95b 100644 --- a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx +++ b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx @@ -8,9 +8,11 @@ import ToolbarHeader from 'sentry/components/toolbarHeader'; import {t} from 'sentry/locale'; import GroupingStore from 'sentry/stores/groupingStore'; import {space} from 'sentry/styles/space'; +import type {Project} from 'sentry/types'; type Props = { onMerge: () => void; + project?: Project; }; const initialState = { @@ -40,8 +42,11 @@ class SimilarToolbar extends Component { listener = GroupingStore.listen(this.onGroupChange, undefined); render() { - const {onMerge} = this.props; + const {onMerge, project} = this.props; const {mergeCount} = this.state; + const hasSimilarityEmbeddingsFeature = project?.features.includes( + 'similarity-embeddings' + ); return ( @@ -59,6 +64,9 @@ class SimilarToolbar extends Component { {t('Events')} {t('Exception')} {t('Message')} + {hasSimilarityEmbeddingsFeature && ( + {t('Would Group')} + )} ); @@ -70,8 +78,8 @@ const Columns = styled('div')` display: flex; align-items: center; flex-shrink: 0; - min-width: 300px; - width: 300px; + min-width: 350px; + width: 350px; `; const StyledToolbarHeader = styled(ToolbarHeader)` From b4f236a28bdd128d717573672405198602043835 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 6 Feb 2024 12:46:35 -0800 Subject: [PATCH 069/357] feat(issues): Cleanup timeline loading state (#64691) --- .../traceTimeline/traceTimeline.tsx | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index 4f8068f0dc4f5e..040adafa1a39c0 100644 --- 
a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -12,8 +12,6 @@ import {hasTraceTimelineFeature} from 'sentry/views/issueDetails/traceTimeline/u import {TraceTimelineEvents} from './traceTimelineEvents'; import {useTraceTimelineEvents} from './useTraceTimelineEvents'; -const PLACEHOLDER_SIZE = '45px'; - interface TraceTimelineProps { event: Event; } @@ -35,14 +33,17 @@ export function TraceTimeline({event}: TraceTimelineProps) { !isLoading && data.length > 0 && data.every(item => item.id === event.id); if (isError || noEvents || onlySelfEvent) { // display empty placeholder to reduce layout shift - return
    ; + return
    ; } return ( {isLoading ? ( - + + + + ) : ( @@ -84,11 +85,19 @@ const Stacked = styled('div')` > * { grid-area: 1 / 1; } - margin-top: ${space(1)}; + margin-top: ${space(0.5)}; `; const TimelineEventsContainer = styled('div')` position: relative; - height: 45px; + height: 34px; padding-top: 10px; `; + +const LoadingSkeleton = styled('div')` + display: flex; + flex-direction: column; + gap: ${space(0.25)}; + padding: ${space(0.75)} 0 ${space(1)}; + height: 34px; +`; From ac45f7f9bbe0d5bc5e1d0e31b336ecce7ea3ed37 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Tue, 6 Feb 2024 15:53:33 -0500 Subject: [PATCH 070/357] build: enable `useFlatMap` and `useIsArray` rules (#64693) Enables `useFlatMap` for reducing array iterations where `.map().flat()` exist --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- biome.json | 6 +++++ static/app/components/compactSelect/utils.tsx | 3 +-- .../exception/actionableItems.tsx | 5 ++-- .../crashContent/exception/sourceMapDebug.tsx | 13 +++------- .../events/interfaces/spans/traceView.tsx | 9 ++++--- .../app/components/smartSearchBar/utils.tsx | 2 +- static/app/utils/events.tsx | 26 +++++++------------ .../utils/metrics/useMetricsCorrelations.tsx | 3 +-- static/app/utils/middleEllipsis.tsx | 14 +++++----- static/app/utils/prism.tsx | 24 ++++++++--------- .../app/views/alerts/list/incidents/index.tsx | 2 +- static/app/views/alerts/rules/utils.tsx | 2 +- .../views/discover/table/arithmeticInput.tsx | 2 +- .../breadcrumbs/useBreadcrumbFilters.tsx | 2 +- 14 files changed, 51 insertions(+), 62 deletions(-) diff --git a/biome.json b/biome.json index ba3a9562556eff..a50109a6152ccf 100644 --- a/biome.json +++ b/biome.json @@ -13,11 +13,17 @@ "enabled": true, "rules": { "recommended": false, + "complexity": { + "useFlatMap": "error" + }, "nursery": { "noDuplicateJsonKeys": "error", "noNodejsModules": "error", "useExportType": "error", "useImportType": "error" + }, + "suspicious": { + "useIsArray": "error" } } }, diff --git a/static/app/components/compactSelect/utils.tsx b/static/app/components/compactSelect/utils.tsx index fe5b02731f23d2..3fe1c97fe4d575 100644 --- a/static/app/components/compactSelect/utils.tsx +++ b/static/app/components/compactSelect/utils.tsx @@ -112,7 +112,7 @@ export function getHiddenOptions( const hiddenOptionsSet = new Set(); const remainingItems = items - .map | null>(item => { + .flatMap | null>(item => { if ('options' in item) { const filteredOptions = item.options .map(opt => { @@ -135,7 +135,6 @@ export function getHiddenOptions( hiddenOptionsSet.add(item.value); return null; }) - .flat() .filter((item): item is SelectOptionOrSection => !!item); // diff --git a/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx b/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx index 5da3e5b744602d..57bae4a5e3708d 100644 --- a/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx +++ b/static/app/components/events/interfaces/crashContent/exception/actionableItems.tsx @@ -326,13 +326,12 @@ function groupedErrors( const {_meta} = event; const errors = [...data.errors, ...progaurdErrors] .filter(error => shouldErrorBeShown(error, event)) - .map((error, errorIdx) => + .flatMap((error, errorIdx) => getErrorMessage(error, _meta?.errors?.[errorIdx]).map(message => ({ ...message, type: error.type, })) - ) - .flat(); + ); const grouped = errors.reduce((rv, error) => { rv[error.type] = rv[error.type] || []; diff --git 
a/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.tsx b/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.tsx index 7b27246ea69b57..77e0b359836c5f 100644 --- a/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.tsx +++ b/static/app/components/events/interfaces/crashContent/exception/sourceMapDebug.tsx @@ -217,17 +217,12 @@ function combineErrors( sdkName?: string ) { const combinedErrors = uniqBy( - response - .map(res => res?.errors) - .flat() - .filter(defined), + response.flatMap(res => res?.errors).filter(defined), error => error?.type ); - const errors = combinedErrors - .map(error => - getErrorMessage(error, sdkName).map(message => ({...message, type: error.type})) - ) - .flat(); + const errors = combinedErrors.flatMap(error => + getErrorMessage(error, sdkName).map(message => ({...message, type: error.type})) + ); return errors; } diff --git a/static/app/components/events/interfaces/spans/traceView.tsx b/static/app/components/events/interfaces/spans/traceView.tsx index 26b4d140f86191..fd98cf7189fa62 100644 --- a/static/app/components/events/interfaces/spans/traceView.tsx +++ b/static/app/components/events/interfaces/spans/traceView.tsx @@ -86,11 +86,12 @@ function TraceView(props: Props) { (!waterfallModel.affectedSpanIds || !waterfallModel.affectedSpanIds.length) && performanceIssues ) { - const suspectSpans = performanceIssues.map(issue => issue.suspect_spans).flat(); + const suspectSpans = performanceIssues.flatMap(issue => issue.suspect_spans); if (suspectSpans.length) { - waterfallModel.affectedSpanIds = performanceIssues - .map(issue => [...issue.suspect_spans, ...issue.span]) - .flat(); + waterfallModel.affectedSpanIds = performanceIssues.flatMap(issue => [ + ...issue.suspect_spans, + ...issue.span, + ]); } } diff --git a/static/app/components/smartSearchBar/utils.tsx b/static/app/components/smartSearchBar/utils.tsx index 436804ec8c2f13..d78d6b5a003ac6 100644 --- a/static/app/components/smartSearchBar/utils.tsx +++ b/static/app/components/smartSearchBar/utils.tsx @@ -333,7 +333,7 @@ export function getValidOps( // Find all valid operations const validOps = new Set( - allValidTypes.map(type => filterTypeConfig[type].validOps).flat() + allValidTypes.flatMap(type => filterTypeConfig[type].validOps) ); return [...validOps]; diff --git a/static/app/utils/events.tsx b/static/app/utils/events.tsx index 6b102278a206ab..c55fb1b11c255a 100644 --- a/static/app/utils/events.tsx +++ b/static/app/utils/events.tsx @@ -17,7 +17,7 @@ import { IssueCategory, IssueType, } from 'sentry/types'; -import type {Event, ExceptionValue, Thread} from 'sentry/types/event'; +import type {Event, ExceptionValue, Frame, Thread} from 'sentry/types/event'; import {EntryType} from 'sentry/types/event'; import {defined} from 'sentry/utils'; import type {BaseEventAnalyticsParams} from 'sentry/utils/analytics/workflowAnalyticsEvents'; @@ -309,10 +309,8 @@ export function getFrameBreakdownOfSourcemaps(event?: Event | null) { function getExceptionFrames(event: Event, inAppOnly: boolean) { const exceptions = getExceptionEntries(event); const frames = exceptions - .map(exception => exception.data.values || []) - .flat() - .map(exceptionValue => exceptionValue?.stacktrace?.frames || []) - .flat(); + .flatMap(exception => exception.data.values || []) + .flatMap(exceptionValue => exceptionValue?.stacktrace?.frames || []); return inAppOnly ? 
frames.filter(frame => frame.inApp) : frames; } @@ -327,18 +325,14 @@ function getExceptionEntries(event: Event) { /** * Returns all stack frames of type 'exception' or 'threads' of this event */ -function getAllFrames(event: Event, inAppOnly: boolean) { - const exceptions = getEntriesWithFrames(event); - const frames = exceptions - .map( - (withStacktrace: EntryException | EntryThreads) => withStacktrace.data.values || [] - ) - .flat() - .map( +function getAllFrames(event: Event, inAppOnly: boolean): Frame[] { + const exceptions: EntryException[] | EntryThreads[] = getEntriesWithFrames(event); + const frames: Frame[] = exceptions + .flatMap(withStacktrace => withStacktrace.data.values ?? []) + .flatMap( (withStacktrace: ExceptionValue | Thread) => - withStacktrace?.stacktrace?.frames || [] - ) - .flat(); + withStacktrace?.stacktrace?.frames ?? [] + ); return inAppOnly ? frames.filter(frame => frame.inApp) : frames; } diff --git a/static/app/utils/metrics/useMetricsCorrelations.tsx b/static/app/utils/metrics/useMetricsCorrelations.tsx index 23aca33fd907ae..99b6ca54f813d4 100644 --- a/static/app/utils/metrics/useMetricsCorrelations.tsx +++ b/static/app/utils/metrics/useMetricsCorrelations.tsx @@ -152,8 +152,7 @@ export function useMetricSamples( } const data = queryInfo.data.metrics - .map(m => m.metricSpans) - .flat() + .flatMap(m => m.metricSpans) .filter(correlation => !!correlation) .slice(0, 10) as MetricCorrelation[]; diff --git a/static/app/utils/middleEllipsis.tsx b/static/app/utils/middleEllipsis.tsx index fc6a7ed056ddf5..a5c23fed95c419 100644 --- a/static/app/utils/middleEllipsis.tsx +++ b/static/app/utils/middleEllipsis.tsx @@ -54,16 +54,14 @@ export function middleEllipsis( if (getLength(words) <= maxLength) { const divider = Math.floor(words.length / 2); const firstHalf = words.slice(0, divider); - const firstHalfWithDelimiters = firstHalf - .map((word, i) => (i === divider - 1 ? [word] : [word, delimiters[i]])) - .flat(); + const firstHalfWithDelimiters = firstHalf.flatMap((word, i) => + i === divider - 1 ? [word] : [word, delimiters[i]] + ); const secondHalf = words.slice(divider); - const secondHalfWithDelimiters = secondHalf - .map((word, i) => - i === 0 ? [word] : [delimiters[delimiters.length - secondHalf.length + i], word] - ) - .flat(); + const secondHalfWithDelimiters = secondHalf.flatMap((word, i) => + i === 0 ? [word] : [delimiters[delimiters.length - secondHalf.length + i], word] + ); return `${firstHalfWithDelimiters.join('')}\u2026${secondHalfWithDelimiters.join( '' diff --git a/static/app/utils/prism.tsx b/static/app/utils/prism.tsx index 13964ffa369f27..63e722a3077912 100644 --- a/static/app/utils/prism.tsx +++ b/static/app/utils/prism.tsx @@ -16,20 +16,18 @@ Prism.manual = true; * (`javascript`). */ const PRISM_LANGUAGE_MAP: Record = Object.fromEntries( - Object.entries(prismComponents.languages) - .map(([lang, value]) => { - if (!value.alias) { - return [[lang, lang]]; // map the full language name to itself - } + Object.entries(prismComponents.languages).flatMap(([lang, value]) => { + if (!value.alias) { + return [[lang, lang]]; // map the full language name to itself + } - return [ - [lang, lang], // map the full language name to itself - ...(Array.isArray(value.alias) // map aliases to full language name - ? value.alias.map(alias => [alias, lang]) - : [[value.alias, lang]]), - ]; - }) - .flat(1) + return [ + [lang, lang], // map the full language name to itself + ...(Array.isArray(value.alias) // map aliases to full language name + ? 
value.alias.map(alias => [alias, lang]) + : [[value.alias, lang]]), + ]; + }) ); // Aliases that don't already exist in Prism.js diff --git a/static/app/views/alerts/list/incidents/index.tsx b/static/app/views/alerts/list/incidents/index.tsx index 9965db40db4bc6..8b52058449f1e1 100644 --- a/static/app/views/alerts/list/incidents/index.tsx +++ b/static/app/views/alerts/list/incidents/index.tsx @@ -132,7 +132,7 @@ class IncidentsList extends DeprecatedAsyncComponent< get projectsFromIncidents() { const {incidentList} = this.state; - return [...new Set(incidentList?.map(({projects}) => projects).flat())]; + return [...new Set(incidentList?.flatMap(({projects}) => projects))]; } handleChangeSearch = (title: string) => { diff --git a/static/app/views/alerts/rules/utils.tsx b/static/app/views/alerts/rules/utils.tsx index 7633869dc7bcf3..1b9b1e67eb1e91 100644 --- a/static/app/views/alerts/rules/utils.tsx +++ b/static/app/views/alerts/rules/utils.tsx @@ -85,7 +85,7 @@ export function getRuleActionCategory(rule: IssueAlertRule) { } export function getAlertRuleActionCategory(rule: MetricRule) { - const actions = rule.triggers.map(trigger => trigger.actions).flat(); + const actions = rule.triggers.flatMap(trigger => trigger.actions); const numDefaultActions = actions.filter(action => action.type === 'email').length; switch (numDefaultActions) { diff --git a/static/app/views/discover/table/arithmeticInput.tsx b/static/app/views/discover/table/arithmeticInput.tsx index d89ec9e46a3ef8..9b31158c5837c9 100644 --- a/static/app/views/discover/table/arithmeticInput.tsx +++ b/static/app/views/discover/table/arithmeticInput.tsx @@ -164,7 +164,7 @@ export default class ArithmeticInput extends PureComponent { event.preventDefault(); const newOptionGroups = makeOptions(options, partialTerm, hideFieldOptions); - const flattenedOptions = newOptionGroups.map(group => group.options).flat(); + const flattenedOptions = newOptionGroups.flatMap(group => group.options); if (flattenedOptions.length === 0) { return; } diff --git a/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx b/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx index 9d82841da530dc..04d865367d7af6 100644 --- a/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx +++ b/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx @@ -103,7 +103,7 @@ function useBreadcrumbFilters({frames}: Options): Return { dict[value] ? 
{...dict, [value]: [dict[value], key]} : {...dict, [value]: key}, {} ); - const OpOrCategory = type.map(theType => TYPE_TO_OPORCATEGORY[theType]).flat(); + const OpOrCategory = type.flatMap(theType => TYPE_TO_OPORCATEGORY[theType]); return filterItems({ items: frames, filterFns: FILTERS, From 24dd87cc46b75a064457904548c3e21a992a6d91 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Tue, 6 Feb 2024 15:53:52 -0500 Subject: [PATCH 071/357] build: update corejs to v3.35.1 (#64695) Updates core.js to latest version --- babel.config.ts | 2 +- package.json | 2 +- yarn.lock | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/babel.config.ts b/babel.config.ts index b7ba2975e6f28a..0e29d6603ee058 100644 --- a/babel.config.ts +++ b/babel.config.ts @@ -15,7 +15,7 @@ const config: TransformOptions = { '@babel/preset-env', { useBuiltIns: 'usage', - corejs: '3.27', + corejs: '3.35.1', }, ], '@babel/preset-typescript', diff --git a/package.json b/package.json index 3e770de06be568..043ee7b977e0d5 100644 --- a/package.json +++ b/package.json @@ -101,7 +101,7 @@ "color": "^4.2.3", "compression-webpack-plugin": "10.0.0", "copy-webpack-plugin": "^11.0.0", - "core-js": "^3.33.0", + "core-js": "^3.35.1", "cronstrue": "^2.26.0", "crypto-browserify": "^3.12.0", "crypto-js": "4.2.0", diff --git a/yarn.lock b/yarn.lock index 2db61ec20bca7b..95ba2aa1d1d45f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5131,10 +5131,10 @@ core-js@^1.0.0: resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636" integrity sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY= -core-js@^3.33.0: - version "3.33.0" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.33.0.tgz#70366dbf737134761edb017990cf5ce6c6369c40" - integrity sha512-HoZr92+ZjFEKar5HS6MC776gYslNOKHt75mEBKWKnPeFDpZ6nH5OeF3S6HFT1mUAUZKrzkez05VboaX8myjSuw== +core-js@^3.35.1: + version "3.35.1" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.35.1.tgz#9c28f8b7ccee482796f8590cc8d15739eaaf980c" + integrity sha512-IgdsbxNyMskrTFxa9lWHyMwAJU5gXOPP+1yO+K59d50VLVAIDAbs7gIv705KzALModfK3ZrSZTPNpC0PQgIZuw== core-util-is@~1.0.0: version "1.0.2" From b1768ab46fdcf6a56cb8502f10d6bff499ec26d6 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Tue, 6 Feb 2024 12:59:42 -0800 Subject: [PATCH 072/357] feat(issue-priority): Set priority when issue platform groups change status (#64419) * Updates status_change_consumer to use `manage_issue_states` when handling issue escalation * Uses AUTO_SET_ONGOING to appropriately update issue statuses as they go from escalating -> ongoing * Refactors tests to check all the appropriate status info is set Fixes https://github.com/getsentry/sentry/issues/64185 --- src/sentry/issues/escalating.py | 2 +- src/sentry/issues/status_change_consumer.py | 15 ++- tests/sentry/issues/test_producer.py | 7 +- .../issues/test_status_change_consumer.py | 93 +++++++++++++++++-- 4 files changed, 105 insertions(+), 12 deletions(-) diff --git a/src/sentry/issues/escalating.py b/src/sentry/issues/escalating.py index 12079c0e54c799..2306935ebbb824 100644 --- a/src/sentry/issues/escalating.py +++ b/src/sentry/issues/escalating.py @@ -511,7 +511,6 @@ def manage_issue_states( ) add_group_to_inbox(group, GroupInboxReason.ESCALATING, snooze_details) record_group_history(group, GroupHistoryStatus.ESCALATING) - auto_update_priority(group, PriorityChangeReason.ESCALATING) has_forecast = ( True if data and activity_data and "forecast" in activity_data.keys() else False @@ -539,6 +538,7 @@ def manage_issue_states( 
Activity.objects.create_group_activity( group=group, type=ActivityType.SET_ESCALATING, data=data ) + auto_update_priority(group, PriorityChangeReason.ESCALATING) elif group_inbox_reason == GroupInboxReason.ONGOING: updated = Group.objects.filter( diff --git a/src/sentry/issues/status_change_consumer.py b/src/sentry/issues/status_change_consumer.py index 6bca97541b1b60..f5d622933c0e67 100644 --- a/src/sentry/issues/status_change_consumer.py +++ b/src/sentry/issues/status_change_consumer.py @@ -7,9 +7,11 @@ from sentry_sdk.tracing import NoOpSpan, Transaction +from sentry.issues.escalating import manage_issue_states from sentry.issues.status_change_message import StatusChangeMessageData from sentry.models.group import Group, GroupStatus from sentry.models.grouphash import GroupHash +from sentry.models.groupinbox import GroupInboxReason from sentry.models.organization import Organization from sentry.models.project import Project from sentry.types.activity import ActivityType @@ -73,14 +75,18 @@ def update_status(group: Group, status_change: StatusChangeMessageData) -> None: substatus=new_substatus, activity_type=ActivityType.SET_IGNORED, ) + elif new_status == GroupStatus.UNRESOLVED and new_substatus == GroupSubStatus.ESCALATING: + manage_issue_states(group=group, group_inbox_reason=GroupInboxReason.ESCALATING) elif new_status == GroupStatus.UNRESOLVED: activity_type = None - if new_substatus == GroupSubStatus.ESCALATING: - activity_type = ActivityType.SET_ESCALATING - elif new_substatus == GroupSubStatus.REGRESSED: + if new_substatus == GroupSubStatus.REGRESSED: activity_type = ActivityType.SET_REGRESSION elif new_substatus == GroupSubStatus.ONGOING: - activity_type = ActivityType.SET_UNRESOLVED + if group.substatus == GroupSubStatus.ESCALATING: + # If the group was previously escalating, we need to update the priority via AUTO_SET_ONGOING + activity_type = ActivityType.AUTO_SET_ONGOING + else: + activity_type = ActivityType.SET_UNRESOLVED # We don't support setting the UNRESOLVED status with substatus NEW as it # is automatically set on creation. All other issues should be set to ONGOING. @@ -96,6 +102,7 @@ def update_status(group: Group, status_change: StatusChangeMessageData) -> None: status=new_status, substatus=new_substatus, activity_type=activity_type, + from_substatus=group.substatus, ) else: logger.error( diff --git a/tests/sentry/issues/test_producer.py b/tests/sentry/issues/test_producer.py index 3709eba6209f5a..54b11b39d19a4f 100644 --- a/tests/sentry/issues/test_producer.py +++ b/tests/sentry/issues/test_producer.py @@ -190,11 +190,16 @@ def test_with_status_change_archived(self) -> None: ).exists() def test_with_status_change_unresolved(self): + # We modify a single group through different substatuses that are supported in the UI + # to ensure the status change is processed correctly. 
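        # A condensed sketch (not part of the diff) of the consumer dispatch these
        # cases exercise, using the names from the status_change_consumer hunk above;
        # logging and the NEW-substatus guard are omitted:
        #
        #     if new_status == GroupStatus.UNRESOLVED and new_substatus == GroupSubStatus.ESCALATING:
        #         manage_issue_states(group=group, group_inbox_reason=GroupInboxReason.ESCALATING)
        #     elif new_status == GroupStatus.UNRESOLVED:
        #         if new_substatus == GroupSubStatus.REGRESSED:
        #             activity_type = ActivityType.SET_REGRESSION
        #         elif new_substatus == GroupSubStatus.ONGOING:
        #             # Leaving ESCALATING resets priority via AUTO_SET_ONGOING;
        #             # any other transition to ONGOING is a manual unresolve.
        #             activity_type = (
        #                 ActivityType.AUTO_SET_ONGOING
        #                 if group.substatus == GroupSubStatus.ESCALATING
        #                 else ActivityType.SET_UNRESOLVED
        #             )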
+ self.group.update(status=GroupStatus.IGNORED, substatus=GroupSubStatus.UNTIL_ESCALATING) for substatus, activity_type in [ (GroupSubStatus.ESCALATING, ActivityType.SET_ESCALATING), - (GroupSubStatus.ONGOING, ActivityType.SET_UNRESOLVED), + (GroupSubStatus.ONGOING, ActivityType.AUTO_SET_ONGOING), (GroupSubStatus.REGRESSED, ActivityType.SET_REGRESSION), + (GroupSubStatus.ONGOING, ActivityType.SET_UNRESOLVED), ]: + # Produce the status change message and process it status_change = StatusChangeMessage( fingerprint=self.fingerprint, project_id=self.group.project_id, diff --git a/tests/sentry/issues/test_status_change_consumer.py b/tests/sentry/issues/test_status_change_consumer.py index 82f0f90002e281..c16cc93fc34b0d 100644 --- a/tests/sentry/issues/test_status_change_consumer.py +++ b/tests/sentry/issues/test_status_change_consumer.py @@ -4,9 +4,14 @@ from unittest.mock import MagicMock, patch from sentry.issues.occurrence_consumer import _process_message +from sentry.issues.priority import PriorityLevel from sentry.issues.status_change_consumer import bulk_get_groups_from_fingerprints +from sentry.models.activity import Activity from sentry.models.group import Group, GroupStatus +from sentry.models.grouphistory import GroupHistory, GroupHistoryStatus +from sentry.testutils.helpers.features import with_feature from sentry.testutils.pytest.fixtures import django_db_all +from sentry.types.activity import ActivityType from sentry.types.group import GroupSubStatus from tests.sentry.issues.test_occurrence_consumer import IssueOccurrenceTestBase, get_test_message @@ -41,6 +46,22 @@ def setUp(self): self.group = Group.objects.get(grouphash__hash=self.occurrence.fingerprint[0]) self.fingerprint = ["touch-id"] + def _assert_statuses_set( + self, status, substatus, group_history_status, activity_type, priority=None + ): + self.group.refresh_from_db() + assert self.group.status == status + assert self.group.substatus == substatus + assert GroupHistory.objects.filter( + group_id=self.group.id, status=group_history_status + ).exists() + assert Activity.objects.filter(group_id=self.group.id, type=activity_type.value).exists() + if priority: + assert self.group.priority == priority + assert Activity.objects.filter( + group_id=self.group.id, type=ActivityType.SET_PRIORITY.value + ).exists() + @django_db_all def test_valid_payload_resolved(self) -> None: message = get_test_message_status_change(self.project.id, fingerprint=["touch-id"]) @@ -51,8 +72,9 @@ def test_valid_payload_resolved(self) -> None: group = group_info.group group.refresh_from_db() - assert group.status == GroupStatus.RESOLVED - assert group.substatus is None + self._assert_statuses_set( + GroupStatus.RESOLVED, None, GroupHistoryStatus.RESOLVED, ActivityType.SET_RESOLVED + ) def test_valid_payload_archived_forever(self) -> None: message = get_test_message_status_change( @@ -68,10 +90,16 @@ def test_valid_payload_archived_forever(self) -> None: group = group_info.group group.refresh_from_db() - assert group.status == GroupStatus.IGNORED - assert group.substatus == GroupSubStatus.FOREVER + self._assert_statuses_set( + GroupStatus.IGNORED, + GroupSubStatus.FOREVER, + GroupHistoryStatus.ARCHIVED_FOREVER, + ActivityType.SET_IGNORED, + ) + @with_feature("projects:issue-priority") def test_valid_payload_unresolved_escalating(self) -> None: + self.group.update(status=GroupStatus.IGNORED, substatus=GroupSubStatus.UNTIL_ESCALATING) message = get_test_message_status_change( self.project.id, fingerprint=self.fingerprint, @@ -85,8 +113,61 @@ def 
test_valid_payload_unresolved_escalating(self) -> None: group = group_info.group group.refresh_from_db() - assert group.status == GroupStatus.UNRESOLVED - assert group.substatus == GroupSubStatus.ESCALATING + self._assert_statuses_set( + GroupStatus.UNRESOLVED, + GroupSubStatus.ESCALATING, + GroupHistoryStatus.ESCALATING, + ActivityType.SET_ESCALATING, + PriorityLevel.HIGH, + ) + + @with_feature("projects:issue-priority") + def test_valid_payload_auto_ongoing(self) -> None: + self.group.update( + status=GroupStatus.UNRESOLVED, + substatus=GroupSubStatus.ESCALATING, + priority=PriorityLevel.HIGH, + ) + GroupHistory.objects.create( + group=self.group, + project=self.group.project, + organization=self.organization, + status=GroupHistoryStatus.PRIORITY_MEDIUM, + ) + message = get_test_message_status_change( + self.project.id, + fingerprint=self.fingerprint, + new_status=GroupStatus.UNRESOLVED, + new_substatus=GroupSubStatus.ONGOING, + ) + result = _process_message(message) + assert result is not None + group_info = result[1] + assert group_info is not None + group = group_info.group + group.refresh_from_db() + + self._assert_statuses_set( + GroupStatus.UNRESOLVED, + GroupSubStatus.ONGOING, + GroupHistoryStatus.ONGOING, + ActivityType.AUTO_SET_ONGOING, + PriorityLevel.MEDIUM, + ) + + +class StatusChangeBulkGetGroupsFromFingerprintsTest(IssueOccurrenceTestBase): + @django_db_all + def setUp(self): + super().setUp() + message = get_test_message(self.project.id) + with self.feature("organizations:profile-file-io-main-thread-ingest"): + result = _process_message(message) + assert result is not None + + self.occurrence = result[0] + self.group = Group.objects.get(grouphash__hash=self.occurrence.fingerprint[0]) + self.fingerprint = ["touch-id"] def test_bulk_get_single_project(self) -> None: groups_by_fingerprint = bulk_get_groups_from_fingerprints( From 331c100ecae0e0c1bcd9d046bca523e68e9592f0 Mon Sep 17 00:00:00 2001 From: Alberto Leal Date: Tue, 6 Feb 2024 16:00:41 -0500 Subject: [PATCH 073/357] fix(hybrid-cloud): Fix client config for superuser (#64661) I noticed the client config is deleting the `activeorg` from the request session object when the current user is in superuser mode. A side effect of this that we're not properly propagating the accessed org in `links`. This pull request patches this. I've also included tests for staff privileges as part of the upcoming superuser/staff split work. 
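The crux of the fix is the access check in `_resolve_last_org`: organization
membership is consulted first, and an active superuser session is accepted as a
fallback, so `activeorg` (and therefore the org `links`) survives for superusers
browsing organizations they are not members of. Roughly, as a condensed sketch of
the logic in the diff below (the session lookup and the organization RPC call are
elided):

    def _resolve_last_org(request, session, user, org_context=None):
        # org_context is resolved from session["activeorg"] when not supplied (elided).
        has_org_access = bool(org_context and org_context.member)
        if not has_org_access and user is not None and user.is_authenticated:
            # Superusers may act on orgs they are not members of, so keep the
            # last active org for them instead of dropping it from the session.
            has_org_access = superuser.is_active_superuser(request)
        return org_context.organization if (org_context and has_org_access) else None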
--- src/sentry/web/client_config.py | 41 +++++++-- tests/sentry/web/frontend/test_react_page.py | 65 +++++++++++--- tests/sentry/web/test_api.py | 92 +++++++++++++++----- tests/sentry/web/test_client_config.py | 1 - 4 files changed, 158 insertions(+), 41 deletions(-) diff --git a/src/sentry/web/client_config.py b/src/sentry/web/client_config.py index b6b4334e76fd3a..88d72b2bf79e87 100644 --- a/src/sentry/web/client_config.py +++ b/src/sentry/web/client_config.py @@ -8,7 +8,9 @@ from django.conf import settings from django.contrib.auth.models import AnonymousUser from django.contrib.messages import get_messages +from django.contrib.sessions.backends.base import SessionBase from django.core.cache import cache +from django.http import HttpRequest from packaging.version import parse as parse_version import sentry @@ -17,10 +19,15 @@ from sentry.auth import superuser from sentry.auth.superuser import is_active_superuser from sentry.models.organizationmapping import OrganizationMapping +from sentry.models.user import User from sentry.services.hybrid_cloud.auth import AuthenticatedToken, AuthenticationContext -from sentry.services.hybrid_cloud.organization import organization_service +from sentry.services.hybrid_cloud.organization import ( + RpcUserOrganizationContext, + organization_service, +) from sentry.services.hybrid_cloud.project_key import ProjectKeyRole, project_key_service from sentry.services.hybrid_cloud.user import UserSerializeType +from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.services.hybrid_cloud.user.serial import serialize_generic_user from sentry.services.hybrid_cloud.user.service import user_service from sentry.silo.base import SiloMode @@ -115,13 +122,22 @@ def _delete_activeorg(session): del session["activeorg"] -def _resolve_last_org(session, user, org_context=None): +def _resolve_last_org( + request: HttpRequest | None, + session: SessionBase | None, + user: RpcUser | User | None, + org_context: RpcUserOrganizationContext | None = None, +): + user_is_authenticated = ( + user is not None and not isinstance(user, AnonymousUser) and user.is_authenticated + ) + if org_context is None: last_org_slug = session["activeorg"] if session and "activeorg" in session else None if not last_org_slug: return None - if user is not None and not isinstance(user, AnonymousUser): + if user_is_authenticated: org_context = organization_service.get_organization_by_slug( slug=last_org_slug, only_visible=False, @@ -130,14 +146,23 @@ def _resolve_last_org(session, user, org_context=None): include_teams=False, ) - if org_context and org_context.member: + has_org_access = bool(org_context and org_context.member) + + if not has_org_access and user_is_authenticated: + has_org_access = superuser.is_active_superuser(request) + + if org_context and has_org_access: return org_context.organization return None class _ClientConfig: - def __init__(self, request=None, org_context=None) -> None: + def __init__( + self, + request: HttpRequest | None = None, + org_context: RpcUserOrganizationContext | None = None, + ) -> None: self.request = request if request is not None: self.user = getattr(request, "user", None) or AnonymousUser() @@ -146,7 +171,7 @@ def __init__(self, request=None, org_context=None) -> None: self.user = None self.session = None - self.last_org = _resolve_last_org(self.session, self.user, org_context) + self.last_org = _resolve_last_org(request, self.session, self.user, org_context) @property def last_org_slug(self) -> str | None: @@ -374,7 +399,9 @@ def 
get_context(self) -> Mapping[str, Any]: } -def get_client_config(request=None, org_context=None) -> MutableMapping[str, Any]: +def get_client_config( + request=None, org_context: RpcUserOrganizationContext | None = None +) -> MutableMapping[str, Any]: """ Provides initial bootstrap data needed to boot the frontend application. """ diff --git a/tests/sentry/web/frontend/test_react_page.py b/tests/sentry/web/frontend/test_react_page.py index cd18158b6cfcdc..c85d8dafffef8b 100644 --- a/tests/sentry/web/frontend/test_react_page.py +++ b/tests/sentry/web/frontend/test_react_page.py @@ -1,9 +1,11 @@ from fnmatch import fnmatch +from django.test import override_settings from django.urls import URLResolver, get_resolver, reverse from sentry.models.organization import OrganizationStatus from sentry.testutils.cases import TestCase +from sentry.testutils.helpers import with_feature from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test from sentry.types.region import Region, RegionCategory @@ -114,7 +116,6 @@ def test_redirect_to_customer_domain(self): assert self.client.session["activeorg"] with self.feature({"organizations:customer-domains": True}): - # Redirect to customer domain response = self.client.get( reverse("sentry-organization-issue-list", args=[org.slug]), follow=True @@ -182,7 +183,6 @@ def test_does_not_redirect_to_customer_domain_for_unsupported_paths(self): self.login_as(user) with self.feature({"organizations:customer-domains": True}): - url_name = "sentry-organization-create" url_name_is_non_customer_domain = any( fnmatch(url_name, p) for p in NON_CUSTOMER_DOMAIN_URL_NAMES @@ -261,31 +261,72 @@ def test_handles_unknown_url_name(self): assert response.status_code == 200 self.assertTemplateUsed(response, "sentry/base-react.html") - def test_customer_domain_non_member_org_superuser(self): - org = self.create_organization(owner=self.user) + def test_customer_domain_non_member(self): + self.create_organization(owner=self.user) other_org = self.create_organization() - self.login_as(self.user, superuser=True) + self.login_as(self.user) + with override_settings(SENTRY_USE_CUSTOMER_DOMAINS=True), self.feature( + {"organizations:customer-domains": [other_org.slug]} + ): + # Should not be able to induce activeorg + assert "activeorg" not in self.client.session + response = self.client.get( + "/", + HTTP_HOST=f"{other_org.slug}.testserver", + follow=True, + ) + assert response.status_code == 200 + assert response.redirect_chain == [(f"http://{other_org.slug}.testserver/issues/", 302)] + assert "activeorg" not in self.client.session - with self.feature({"organizations:customer-domains": [org.slug]}): + def _run_customer_domain_elevated_privileges(self, is_superuser: bool, is_staff: bool): + user = self.create_user("foo@example.com", is_superuser=is_superuser, is_staff=is_staff) + org = self.create_organization(owner=user) + other_org = self.create_organization() + + self.login_as(user, superuser=is_superuser, staff=is_staff) + with override_settings(SENTRY_USE_CUSTOMER_DOMAINS=True), self.feature( + {"organizations:customer-domains": [other_org.slug]} + ): # Induce activeorg + assert "activeorg" not in self.client.session response = self.client.get( "/", - HTTP_HOST=f"{org.slug}.testserver", + HTTP_HOST=f"{other_org.slug}.testserver", follow=True, ) assert response.status_code == 200 - assert response.redirect_chain == [(f"http://{org.slug}.testserver/issues/", 302)] - assert self.client.session["activeorg"] == org.slug - - # Access another org 
as superuser on non-customer domain + if is_superuser: + assert response.redirect_chain == [ + (f"http://{other_org.slug}.testserver/issues/", 302) + ] + assert self.client.session["activeorg"] == other_org.slug + else: + assert response.redirect_chain == [ + (f"http://{other_org.slug}.testserver/auth/login/{other_org.slug}/", 302) + ] + assert "activeorg" not in self.client.session + + # Accessing org on non-customer domain with superuser and/or staff. response = self.client.get( - reverse("sentry-organization-issue-list", args=[other_org.slug]), + reverse("sentry-organization-issue-list", args=[org.slug]), follow=True, ) assert response.status_code == 200 assert response.redirect_chain == [] + def test_customer_domain_non_member_org_superuser(self): + self._run_customer_domain_elevated_privileges(is_superuser=True, is_staff=False) + + @with_feature("auth:enterprise-staff-cookie") + def test_customer_domain_non_member_org_staff(self): + self._run_customer_domain_elevated_privileges(is_superuser=False, is_staff=True) + + @with_feature("auth:enterprise-staff-cookie") + def test_customer_domain_non_member_org_superuser_and_staff(self): + self._run_customer_domain_elevated_privileges(is_superuser=True, is_staff=True) + def test_customer_domain_superuser(self): org = self.create_organization(owner=self.user) other_org = self.create_organization(slug="albertos-apples") diff --git a/tests/sentry/web/test_api.py b/tests/sentry/web/test_api.py index 4c6afa9420b168..a23f412785b809 100644 --- a/tests/sentry/web/test_api.py +++ b/tests/sentry/web/test_api.py @@ -15,6 +15,7 @@ from sentry.silo.base import SiloMode from sentry.tasks.deletion.scheduled import run_deletion from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import assume_test_silo_mode, create_test_regions, region_silo_test from sentry.utils import json @@ -188,9 +189,12 @@ def test_authenticated(self): assert data["features"] == ["organizations:create"] assert data["customerDomain"] is None - def test_superuser(self): - user = self.create_user("foo@example.com", is_superuser=True) - self.login_as(user, superuser=True) + def _run_test_with_privileges(self, is_superuser: bool, is_staff: bool): + user = self.create_user("foo@example.com", is_superuser=is_superuser, is_staff=is_staff) + self.create_organization(owner=user) + self.login_as(user, superuser=is_superuser, staff=is_staff) + + other_org = self.create_organization() with mock.patch("sentry.auth.superuser.ORG_ID", self.organization.id): resp = self.client.get(self.path) @@ -202,32 +206,78 @@ def test_superuser(self): assert data["isAuthenticated"] assert data["user"] assert data["user"]["email"] == user.email - assert data["user"]["isSuperuser"] is True + assert data["user"]["isSuperuser"] is is_superuser assert data["lastOrganization"] is None - assert data["links"] == { - "organizationUrl": None, - "regionUrl": None, - "sentryUrl": "http://testserver", - "superuserUrl": f"http://{self.organization.slug}.testserver", - } + if is_superuser: + assert data["links"] == { + "organizationUrl": None, + "regionUrl": None, + "sentryUrl": "http://testserver", + "superuserUrl": f"http://{self.organization.slug}.testserver", + } + else: + assert data["links"] == { + "organizationUrl": None, + "regionUrl": None, + "sentryUrl": "http://testserver", + } assert "activeorg" not in self.client.session # Induce last active organization - resp = self.client.get( - reverse("sentry-api-0-organization-projects", 
args=[self.organization.slug]) - ) - assert resp.status_code == 200 - assert resp["Content-Type"] == "application/json" - assert "activeorg" not in self.client.session - - # lastOrganization is not set - resp = self.client.get(self.path) + with override_settings(SENTRY_USE_CUSTOMER_DOMAINS=True), self.feature( + {"organizations:customer-domains": [other_org.slug]} + ), assume_test_silo_mode(SiloMode.MONOLITH): + response = self.client.get( + "/", + HTTP_HOST=f"{other_org.slug}.testserver", + follow=True, + ) + assert response.status_code == 200 + if is_superuser: + assert response.redirect_chain == [ + (f"http://{other_org.slug}.testserver/issues/", 302) + ] + assert self.client.session["activeorg"] == other_org.slug + else: + assert response.redirect_chain == [ + (f"http://{other_org.slug}.testserver/auth/login/{other_org.slug}/", 302) + ] + assert "activeorg" not in self.client.session + + # lastOrganization is set + with mock.patch("sentry.auth.superuser.ORG_ID", self.organization.id): + resp = self.client.get(self.path) assert resp.status_code == 200 assert resp["Content-Type"] == "application/json" data = json.loads(resp.content) - assert data["lastOrganization"] is None - assert "activeorg" not in self.client.session + + if is_superuser: + assert data["lastOrganization"] == other_org.slug + assert data["links"] == { + "organizationUrl": f"http://{other_org.slug}.testserver", + "regionUrl": generate_region_url(), + "sentryUrl": "http://testserver", + "superuserUrl": f"http://{self.organization.slug}.testserver", + } + else: + assert data["lastOrganization"] is None + assert data["links"] == { + "organizationUrl": None, + "regionUrl": None, + "sentryUrl": "http://testserver", + } + + def test_superuser(self): + self._run_test_with_privileges(is_superuser=True, is_staff=False) + + @with_feature("auth:enterprise-staff-cookie") + def test_staff(self): + self._run_test_with_privileges(is_superuser=False, is_staff=True) + + @with_feature("auth:enterprise-staff-cookie") + def test_superuser_and_staff(self): + self._run_test_with_privileges(is_superuser=True, is_staff=True) def test_superuser_cookie_domain(self): # Cannot set the superuser cookie domain using override_settings(). diff --git a/tests/sentry/web/test_client_config.py b/tests/sentry/web/test_client_config.py index 344af11c8d45dd..98181441f68d40 100644 --- a/tests/sentry/web/test_client_config.py +++ b/tests/sentry/web/test_client_config.py @@ -206,7 +206,6 @@ def test_client_config_links_regionurl(): @multiregion_client_config_test @django_db_all def test_client_config_links_with_priority_org(): - # request, user = make_user_request_from_non_existant_org() request, user = make_user_request_from_org() request.user = user From 5c82bb1ecfd399053afcfec25fe39c73bc089cfd Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 6 Feb 2024 13:13:41 -0800 Subject: [PATCH 074/357] fix(issues): Adjust timeline animation, container size (#64696) --- .../views/issueDetails/traceTimeline/traceTimeline.tsx | 6 +++--- .../issueDetails/traceTimeline/traceTimelineEvents.tsx | 8 ++++++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index 040adafa1a39c0..9f8b594557942c 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -41,7 +41,7 @@ export function TraceTimeline({event}: TraceTimelineProps) { {isLoading ? 
( - + ) : ( @@ -62,9 +62,9 @@ export function TraceTimeline({event}: TraceTimelineProps) { const TimelineOutline = styled('div')` position: absolute; left: 0; - top: 5px; + top: 3px; width: 100%; - height: 8px; + height: 10px; border: 1px solid ${p => p.theme.innerBorder}; border-radius: ${p => p.theme.borderRadius}; background-color: ${p => p.theme.backgroundSecondary}; diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index 014bf8b8c2fdd2..c2594ccb4164f5 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -254,14 +254,18 @@ const CurrentNodeRing = styled('div')` position: absolute; top: -4px; left: -12px; - animation: pulse 1s ease-out infinite; + animation: pulse 4s ease-out infinite; @keyframes pulse { 0% { transform: scale(0.1, 0.1); opacity: 0.0; } - 50% { + 80% { + transform: scale(0.1, 0.1); + opacity: 0.0; + } + 90% { opacity: 1.0; } 100% { From 18172c7b376b1cfab18e2d384d767bdb2a9a4cfd Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 6 Feb 2024 13:17:30 -0800 Subject: [PATCH 075/357] feat(issues): Add no trace available to trace timeline (#64683) --- .../traceTimeline/traceLink.spec.tsx | 5 +++ .../issueDetails/traceTimeline/traceLink.tsx | 31 ++++++++++++++++++- 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx b/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx index a7309c4c9bc1d4..26f8abf1947de9 100644 --- a/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx @@ -72,4 +72,9 @@ describe('TraceLink', () => { await screen.findByRole('link', {name: 'View Full Trace (2 issues)'}) ).toBeInTheDocument(); }); + + it('renders no trace available', async () => { + render(, {organization}); + expect(await screen.findByText('No Trace Available')).toBeInTheDocument(); + }); }); diff --git a/static/app/views/issueDetails/traceTimeline/traceLink.tsx b/static/app/views/issueDetails/traceTimeline/traceLink.tsx index a41cb216e1c25a..57a8e623e54bec 100644 --- a/static/app/views/issueDetails/traceTimeline/traceLink.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceLink.tsx @@ -1,6 +1,7 @@ import styled from '@emotion/styled'; import Link from 'sentry/components/links/link'; +import QuestionTooltip from 'sentry/components/questionTooltip'; import {generateTraceTarget} from 'sentry/components/quickTrace/utils'; import {IconChevron} from 'sentry/icons'; import {t, tn} from 'sentry/locale'; @@ -21,7 +22,18 @@ export function TraceLink({event}: TraceLinkProps) { const traceTarget = generateTraceTarget(event, organization); if (!event.contexts?.trace?.trace_id) { - return null; + return ( + + {t('No Trace Available')} + + + ); } return ( @@ -49,4 +61,21 @@ const StyledLink = styled(Link)` gap: ${space(0.25)}; line-height: 1.2; font-size: ${p => p.theme.fontSizeMedium}; + + svg { + margin-top: 1px; + } +`; + +const NoTraceAvailable = styled('span')` + display: flex; + align-items: center; + gap: ${space(0.25)}; + line-height: 1.2; + color: ${p => p.theme.subText}; + font-size: ${p => p.theme.fontSizeMedium}; + + svg { + margin-top: 1px; + } `; From 0e843aa3162aa6c449e918b4a2419f840f985c3a Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 6 Feb 2024 14:36:59 -0800 Subject: [PATCH 076/357] fix(issues): Hide codecov in 
frame stacktrace prompt (#64714) --- .../app/components/events/interfaces/frame/stacktraceLink.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/components/events/interfaces/frame/stacktraceLink.tsx b/static/app/components/events/interfaces/frame/stacktraceLink.tsx index 32f8b7d1953761..49548978f42285 100644 --- a/static/app/components/events/interfaces/frame/stacktraceLink.tsx +++ b/static/app/components/events/interfaces/frame/stacktraceLink.tsx @@ -423,7 +423,7 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) { event={event} hasInFrameFeature={hasInFrameFeature} /> - ) : shouldShowCodecovPrompt(organization, match) ? ( + ) : shouldShowCodecovPrompt(organization, match) && !hasInFrameFeature ? ( ) : null} @@ -469,7 +469,7 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) { event={event} hasInFrameFeature={hasInFrameFeature} /> - ) : shouldShowCodecovPrompt(organization, match) ? ( + ) : shouldShowCodecovPrompt(organization, match) && !hasInFrameFeature ? ( ) : null} From 85f27c4b4917af3605f2bb131a1743f7304c7f26 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Tue, 6 Feb 2024 15:12:00 -0800 Subject: [PATCH 077/357] feat(integrations): Add in feature flag check to Vercel Webhooks (#64437) this pr adds in a check so that we can turn on the modern Vercel Webhooks and slowly roll it out. The flow now will be both the modern/legacy requests are sent, but only one of them will run, depending on if you have the feature flag or not. So only one of the requests is actually processed. --- src/sentry/integrations/vercel/webhook.py | 72 +++++++++++-------- .../integrations/vercel/test_webhook.py | 2 + 2 files changed, 43 insertions(+), 31 deletions(-) diff --git a/src/sentry/integrations/vercel/webhook.py b/src/sentry/integrations/vercel/webhook.py index 83e1881c6dd74f..9b00916f4693ef 100644 --- a/src/sentry/integrations/vercel/webhook.py +++ b/src/sentry/integrations/vercel/webhook.py @@ -13,7 +13,7 @@ from rest_framework.response import Response from sentry_sdk import configure_scope -from sentry import VERSION, audit_log, http, options +from sentry import VERSION, audit_log, features, http, options from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import Endpoint, control_silo_endpoint @@ -186,46 +186,45 @@ def post(self, request: Request) -> Response | None: except KeyError: return self.respond({"detail": "Missing event type."}, status=400) - # Try the new Vercel request. 
If it fails, try the old Vercel request try: - payload = request.data["payload"] external_id = self.parse_new_external_id(request) - scope.set_tag("vercel_webhook.type", "new") - - if event_type == "integration-configuration.removed": - configuration_id = payload["configuration"]["id"] - return self._delete(external_id, configuration_id, request) - if event_type == "deployment.created": - return self._deployment_created(external_id, request) - except Exception: + new_webhook = True + tag_type_string = "new" + except KeyError: external_id = self.parse_old_external_id(request) - scope.set_tag("vercel_webhook.type", "old") - - if event_type == "integration-configuration-removed": - configuration_id = request.data["payload"]["configuration"]["id"] - return self._delete(external_id, configuration_id, request) - - if event_type == "deployment": - return self._deployment_created(external_id, request) - + new_webhook = False + tag_type_string = "old" + + scope.set_tag("vercel_webhook.type", tag_type_string) + + if event_type in ( + "integration-configuration.removed", + "integration-configuration-removed", + ): + configuration_id = request.data["payload"]["configuration"]["id"] + return self._delete(external_id, configuration_id, request, new_webhook) + if event_type in ("deployment.created", "deployment"): + return self._deployment_created(external_id, request, new_webhook) return None def delete(self, request: Request): with configure_scope() as scope: # Try the new Vercel request. If it fails, try the old Vercel request try: - payload = request.data["payload"] external_id = self.parse_new_external_id(request) - scope.set_tag("vercel_webhook.type", "new") - configuration_id = payload["configuration"]["id"] - except Exception: + configuration_id = request.data["payload"]["configuration"]["id"] + new_webhook = True + tag_type_string = "new" + except KeyError: external_id = self.parse_old_external_id(request) - scope.set_tag("vercel_webhook.type", "old") configuration_id = request.data.get("configurationId") + new_webhook = False + tag_type_string = "old" - return self._delete(external_id, configuration_id, request) + scope.set_tag("vercel_webhook.type", tag_type_string) + return self._delete(external_id, configuration_id, request, new_webhook) - def _delete(self, external_id, configuration_id, request): + def _delete(self, external_id, configuration_id, request, new_webhook): try: integration = Integration.objects.get(provider="vercel", external_id=external_id) except Integration.DoesNotExist: @@ -240,6 +239,7 @@ def _delete(self, external_id, configuration_id, request): if len(orgs) == 0: # we already deleted the organization integration and # there was only one to begin with + integration.delete() return self.respond(status=204) @@ -255,7 +255,10 @@ def _delete(self, external_id, configuration_id, request): # organization integration AND the integration (since there is only one) # 2.) we already deleted the organization integration tied to this configuration # and the remaining one is for a different org (and configuration) - if len(orgs) == 1: + if ( + len(orgs) == 1 + and features.has("organizations:vercel-integration-webhooks", orgs[0]) == new_webhook + ): try: # Case no. 
1: do the deleting and return OrganizationIntegration.objects.get( @@ -283,7 +286,10 @@ def _delete(self, external_id, configuration_id, request): }, ) - if configuration_id == integration.metadata["installation_id"]: + if ( + configuration_id == integration.metadata["installation_id"] + and features.has("organizations:vercel-integration-webhooks", orgs[0]) == new_webhook + ): # if we are uninstalling a primary configuration, and there are # multiple orgs connected to this integration we must update # the credentials (access_token, webhook_id etc). @@ -323,7 +329,7 @@ def _delete(self, external_id, configuration_id, request): return self.respond(status=204) - def _deployment_created(self, external_id, request): + def _deployment_created(self, external_id, request, new_webhook): payload = request.data["payload"] vercel_project_id = ( payload["projectId"] if payload.get("projectId") else payload["project"]["id"] @@ -372,7 +378,11 @@ def _deployment_created(self, external_id, request): matched_mappings = list(filter(lambda x: x[1] == vercel_project_id, project_mappings)) if matched_mappings: organization = orgs.get(org_integration.organization_id) - if organization is None: + if ( + organization is None + or features.has("organizations:vercel-integration-webhooks", organization) + != new_webhook + ): continue sentry_project_id = matched_mappings[0][0] diff --git a/tests/sentry/integrations/vercel/test_webhook.py b/tests/sentry/integrations/vercel/test_webhook.py index c032403e1c13a7..40bf5a4261a7ad 100644 --- a/tests/sentry/integrations/vercel/test_webhook.py +++ b/tests/sentry/integrations/vercel/test_webhook.py @@ -18,6 +18,7 @@ from sentry.silo import SiloMode from sentry.testutils.cases import APITestCase from sentry.testutils.helpers import override_options +from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import assume_test_silo_mode, control_silo_test from sentry.utils import json from sentry.utils.http import absolute_uri @@ -131,6 +132,7 @@ def test_create_release(self): assert release_request.headers["User-Agent"] == f"sentry_vercel/{VERSION}" @responses.activate + @with_feature("organizations:vercel-integration-webhooks") def test_create_release_new(self): responses.add( responses.POST, From fec2250937dca3bbd78e1aa496da8d3ccd357b8c Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 6 Feb 2024 15:17:00 -0800 Subject: [PATCH 078/357] feat(replay): Show "Feedback Opened" breadcrumb (#63883) New breadkcrumb type renders in the replay details list: ![SCR-20240206-lqhl](https://github.com/getsentry/sentry/assets/187460/a04e28e1-7519-487b-bbf3-1a53c87dd066) The button will go to the feedback page (with a redirect because we need the groupid). 
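For reference, a hydrated `sentry.feedback` breadcrumb frame looks roughly like this (a sketch based on the handling in this PR; only `category`, the `timestamp` hydration, and `data.feedbackId` are confirmed by the code, the values are made up):

```ts
// Hypothetical example frame; the feedbackId in `data` is what the
// new "Open Feedback" button uses to build its link target.
const frame = {
  category: 'sentry.feedback',
  timestampMs: 1707266400000, // hydrated from the attachment timestamp
  data: {
    feedbackId: 'a1b2c3d4e5f6',
  },
};
```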
filters for the breadcrumbs work well, look like this: ![SCR-20240206-lrjs](https://github.com/getsentry/sentry/assets/187460/73ceda59-1a96-4d25-bf8d-6bf6d93f7fc9) Here is the example replay that I was experimenting with: https://sentry.dev.getsentry.net:7999/replays/402c0be4e829465f8dc5d5c001f84816/?f_b_type=feedback&project=11276&query=&referrer=%2Freplays%2F&statsPeriod=90d&t_main=breadcrumbs&yAxis=count%28%29 I 'created' it by: - logging into prod - go to the replays list, notice that a replay started for my session (open a new window to try again if sampling got you down) - go to crons or somewhere, and submit a feedback Fixes https://github.com/getsentry/sentry/issues/63791 --- .../feedback/useCurrentFeedbackId.tsx | 32 +++++++++++++++++- .../replays/breadcrumbs/breadcrumbItem.tsx | 28 +++++++++------- .../breadcrumbs/openFeedbackButton.tsx | 33 +++++++++++++++++++ .../replays/breadcrumbs/replayTimeline.tsx | 11 ++++--- .../breadcrumbs/replayTimelineEvents.tsx | 10 ++++-- static/app/utils/replays/getFrameDetails.tsx | 8 +++++ static/app/utils/replays/hydrateFrames.tsx | 4 +++ static/app/utils/replays/replayReader.tsx | 9 +++-- static/app/utils/replays/types.tsx | 2 ++ static/app/utils/useProjectFromId.tsx | 13 ++++++++ .../detail/breadcrumbs/breadcrumbRow.tsx | 9 ++--- .../replays/detail/breadcrumbs/index.tsx | 6 ++++ .../breadcrumbs/useBreadcrumbFilters.tsx | 2 ++ 13 files changed, 140 insertions(+), 27 deletions(-) create mode 100644 static/app/components/replays/breadcrumbs/openFeedbackButton.tsx create mode 100644 static/app/utils/useProjectFromId.tsx diff --git a/static/app/components/feedback/useCurrentFeedbackId.tsx b/static/app/components/feedback/useCurrentFeedbackId.tsx index 3f788f31e1cbc6..11ea4829decece 100644 --- a/static/app/components/feedback/useCurrentFeedbackId.tsx +++ b/static/app/components/feedback/useCurrentFeedbackId.tsx @@ -1,11 +1,41 @@ +import {useEffect} from 'react'; + import decodeFeedbackSlug from 'sentry/components/feedback/decodeFeedbackSlug'; +import type {Event} from 'sentry/types'; +import {useApiQuery} from 'sentry/utils/queryClient'; +import {decodeScalar} from 'sentry/utils/queryString'; import useLocationQuery from 'sentry/utils/url/useLocationQuery'; +import {useNavigate} from 'sentry/utils/useNavigate'; +import useOrganization from 'sentry/utils/useOrganization'; export default function useCurrentFeedbackId() { - const {feedbackSlug} = useLocationQuery({ + const organization = useOrganization(); + const navigate = useNavigate(); + + const {eventId, feedbackSlug, projectSlug} = useLocationQuery({ fields: { + eventId: decodeScalar, feedbackSlug: val => decodeFeedbackSlug(val).feedbackId ?? 
'', + projectSlug: decodeScalar, }, }); + + const {data: event} = useApiQuery( + [`/projects/${organization.slug}/${projectSlug}/events/${eventId}/`], + { + staleTime: Infinity, + enabled: Boolean(eventId) && Boolean(projectSlug), + } + ); + + useEffect(() => { + if (projectSlug && event?.groupID) { + navigate( + `/organizations/${organization.slug}/feedback/?feedbackSlug=${projectSlug}:${event.groupID}`, + {replace: true} + ); + } + }, [navigate, organization.slug, projectSlug, event]); + return feedbackSlug; } diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx index 81ed89d8b72527..e507e65be95ce2 100644 --- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx +++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx @@ -8,6 +8,7 @@ import ProjectBadge from 'sentry/components/idBadge/projectBadge'; import Link from 'sentry/components/links/link'; import ObjectInspector from 'sentry/components/objectInspector'; import PanelItem from 'sentry/components/panels/panelItem'; +import OpenFeedbackButton from 'sentry/components/replays/breadcrumbs/openFeedbackButton'; import {OpenReplayComparisonButton} from 'sentry/components/replays/breadcrumbs/openReplayComparisonButton'; import {useReplayContext} from 'sentry/components/replays/replayContext'; import {useReplayGroupContext} from 'sentry/components/replays/replayGroupContext'; @@ -27,7 +28,7 @@ import TimestampButton from 'sentry/views/replays/detail/timestampButton'; type MouseCallback = (frame: ReplayFrame, e: React.MouseEvent) => void; -const FRAMES_WITH_BUTTONS = ['replay.hydrate-error']; +const FRAMES_WITH_BUTTONS = ['replay.hydrate-error', 'sentry.feedback']; interface Props { extraction: Extraction | undefined; @@ -41,6 +42,7 @@ interface Props { ) => void; onMouseEnter: MouseCallback; onMouseLeave: MouseCallback; + projectSlug: string | undefined; startTimestampMs: number; traces: ReplayTraceRow | undefined; className?: string; @@ -48,13 +50,6 @@ interface Props { style?: CSSProperties; } -function getCrumbOrFrameData(frame: ReplayFrame) { - return { - ...getFrameDetails(frame), - timestampMs: frame.timestampMs, - }; -} - function BreadcrumbItem({ className, extraction, @@ -65,11 +60,12 @@ function BreadcrumbItem({ onInspectorExpanded, onMouseEnter, onMouseLeave, + projectSlug, startTimestampMs, style, traces, }: Props) { - const {color, description, title, icon, timestampMs} = getCrumbOrFrameData(frame); + const {color, description, title, icon} = getFrameDetails(frame); const {replay} = useReplayContext(); const forceSpan = 'category' in frame && FRAMES_WITH_BUTTONS.includes(frame.category); @@ -96,12 +92,13 @@ function BreadcrumbItem({ {onClick ? ( ) : null} - {typeof description === 'string' || isValidElement(description) ? ( + {typeof description === 'string' || + (description !== undefined && isValidElement(description)) ? ( {description} @@ -132,6 +129,15 @@ function BreadcrumbItem({
    ) : null} + {projectSlug && 'data' in frame && frame.data && 'feedbackId' in frame.data ? ( +
+            <OpenFeedbackButton eventId={frame.data.feedbackId} projectSlug={projectSlug} />
    + ) : null} + {extraction?.html ? ( diff --git a/static/app/components/replays/breadcrumbs/openFeedbackButton.tsx b/static/app/components/replays/breadcrumbs/openFeedbackButton.tsx new file mode 100644 index 00000000000000..71f1b765c73276 --- /dev/null +++ b/static/app/components/replays/breadcrumbs/openFeedbackButton.tsx @@ -0,0 +1,33 @@ +import {LinkButton} from 'sentry/components/button'; +import {t} from 'sentry/locale'; +import useOrganization from 'sentry/utils/useOrganization'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; + +interface Props { + eventId: string; + projectSlug: string; + className?: string; +} + +export default function OpenFeedbackButton({className, eventId, projectSlug}: Props) { + const organization = useOrganization(); + + return ( + + {t('Open Feedback')} + + ); +} diff --git a/static/app/components/replays/breadcrumbs/replayTimeline.tsx b/static/app/components/replays/breadcrumbs/replayTimeline.tsx index 87b9672c18a6b0..a56cc56d072fd4 100644 --- a/static/app/components/replays/breadcrumbs/replayTimeline.tsx +++ b/static/app/components/replays/breadcrumbs/replayTimeline.tsx @@ -15,11 +15,13 @@ import {useReplayContext} from 'sentry/components/replays/replayContext'; import {divide} from 'sentry/components/replays/utils'; import toPercent from 'sentry/utils/number/toPercent'; import {useDimensions} from 'sentry/utils/useDimensions'; +import useProjectFromId from 'sentry/utils/useProjectFromId'; -type Props = {}; - -function ReplayTimeline({}: Props) { +export default function ReplayTimeline() { const {replay, currentTime, timelineScale} = useReplayContext(); + const projectSlug = useProjectFromId({ + project_id: replay?.getReplay().project_id, + })?.slug; const panelRef = useRef(null); const mouseTrackingProps = useTimelineScrubberMouseTracking( @@ -71,6 +73,7 @@ function ReplayTimeline({}: Props) { @@ -91,5 +94,3 @@ const TimelineEventsContainer = styled('div')` padding-top: 10px; padding-bottom: 10px; `; - -export default ReplayTimeline; diff --git a/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx b/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx index 39ac256584d2be..be4034fe151c96 100644 --- a/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx +++ b/static/app/components/replays/breadcrumbs/replayTimelineEvents.tsx @@ -19,15 +19,17 @@ const NODE_SIZES = [8, 12, 16]; interface Props { durationMs: number; frames: ReplayFrame[]; + projectSlug: string | undefined; startTimestampMs: number; width: number; className?: string; } -function ReplayTimelineEvents({ +export default function ReplayTimelineEvents({ className, durationMs, frames, + projectSlug, startTimestampMs, width, }: Props) { @@ -43,6 +45,7 @@ function ReplayTimelineEvents({ @@ -66,10 +69,12 @@ const EventColumn = styled(Timeline.Col)<{column: number}>` function Event({ frames, markerWidth, + projectSlug, startTimestampMs, }: { frames: ReplayFrame[]; markerWidth: number; + projectSlug: string | undefined; startTimestampMs: number; }) { const theme = useTheme(); @@ -87,6 +92,7 @@ function Event({ }} onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave} + projectSlug={projectSlug} startTimestampMs={startTimestampMs} traces={undefined} onDimensionChange={() => {}} @@ -211,5 +217,3 @@ const TooltipWrapper = styled('div')` max-height: 80vh; overflow: auto; `; - -export default ReplayTimelineEvents; diff --git a/static/app/utils/replays/getFrameDetails.tsx b/static/app/utils/replays/getFrameDetails.tsx index 
f3262eed4d7836..b980b0ca2a07fa 100644 --- a/static/app/utils/replays/getFrameDetails.tsx +++ b/static/app/utils/replays/getFrameDetails.tsx @@ -5,6 +5,7 @@ import FeatureBadge from 'sentry/components/featureBadge'; import ExternalLink from 'sentry/components/links/externalLink'; import {Tooltip} from 'sentry/components/tooltip'; import { + IconChat, IconCursorArrow, IconFire, IconFix, @@ -268,6 +269,13 @@ const MAPPER_FOR_FRAME: Record Details> = { title: 'Paint', icon: , }), + 'sentry.feedback': () => ({ + color: 'blue300', + description: '', + tabKey: TabKey.BREADCRUMBS, + title: 'User Feedback Submitted', + icon: , + }), 'resource.css': frame => ({ color: 'gray300', description: undefined, diff --git a/static/app/utils/replays/hydrateFrames.tsx b/static/app/utils/replays/hydrateFrames.tsx index b14347208891f1..9ce5806f57fbe9 100644 --- a/static/app/utils/replays/hydrateFrames.tsx +++ b/static/app/utils/replays/hydrateFrames.tsx @@ -22,6 +22,10 @@ export default function hydrateFrames(attachments: unknown[]) { return; } if (isBreadcrumbFrameEvent(attachment)) { + if (attachment.data.payload.category === 'sentry.feedback') { + // @ts-expect-error In SDK <= 7.100.0 we were incorrectly setting the timestamp + attachment.data.payload.timestamp = attachment.data.timestamp; + } breadcrumbFrames.push(attachment.data.payload); } else if (isSpanFrameEvent(attachment)) { spanFrames.push(attachment.data.payload); diff --git a/static/app/utils/replays/replayReader.tsx b/static/app/utils/replays/replayReader.tsx index 766bf320764a7d..b65a0f73b1da73 100644 --- a/static/app/utils/replays/replayReader.tsx +++ b/static/app/utils/replays/replayReader.tsx @@ -388,9 +388,12 @@ export default class ReplayReader { [ ...this.getPerfFrames(), ...this._sortedBreadcrumbFrames.filter(frame => - ['replay.init', 'replay.mutations', 'replay.hydrate-error'].includes( - frame.category - ) + [ + 'replay.hydrate-error', + 'replay.init', + 'replay.mutations', + 'sentry.feedback', + ].includes(frame.category) ), ...this._errors, ].sort(sortFrames), diff --git a/static/app/utils/replays/types.tsx b/static/app/utils/replays/types.tsx index f280b1de6ed1f4..e80fe9624bba62 100644 --- a/static/app/utils/replays/types.tsx +++ b/static/app/utils/replays/types.tsx @@ -189,6 +189,7 @@ export type MultiClickFrame = HydratedBreadcrumb<'ui.multiClick'>; export type MutationFrame = HydratedBreadcrumb<'replay.mutations'>; export type NavFrame = HydratedBreadcrumb<'navigation'>; export type SlowClickFrame = HydratedBreadcrumb<'ui.slowClickDetected'>; +export type FeedbackOpenedFrame = HydratedBreadcrumb<'sentry.feedback'>; // This list must match each of the categories used in `HydratedBreadcrumb` above // and any app-specific types that we hydrate (ie: replay.init). @@ -197,6 +198,7 @@ export const BreadcrumbCategories = [ 'navigation', 'replay.init', 'replay.mutations', + 'sentry.feedback', 'ui.blur', 'ui.click', 'ui.focus', diff --git a/static/app/utils/useProjectFromId.tsx b/static/app/utils/useProjectFromId.tsx new file mode 100644 index 00000000000000..0099579385dd15 --- /dev/null +++ b/static/app/utils/useProjectFromId.tsx @@ -0,0 +1,13 @@ +import useProjects from 'sentry/utils/useProjects'; + +interface Props { + project_id: string | undefined; +} + +export default function useProjectFromId({project_id}: Props) { + const {projects} = useProjects(); + if (project_id) { + return projects.find(p => p.id === project_id) ?? 
undefined; + } + return undefined; +} diff --git a/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx b/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx index 2f19dded7a6705..13f5054ee81b81 100644 --- a/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx +++ b/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx @@ -22,6 +22,7 @@ interface Props { expandedState: Record, event: MouseEvent ) => void; + projectSlug: string | undefined; startTimestampMs: number; style: CSSProperties; traces: ReplayTraceRow | undefined; @@ -29,14 +30,15 @@ interface Props { expandPaths?: string[]; } -function BreadcrumbRow({ +export default function BreadcrumbRow({ expandPaths, - frame, extraction, + frame, index, onClick, onDimensionChange, onInspectorExpanded, + projectSlug, startTimestampMs, style, traces, @@ -75,6 +77,7 @@ function BreadcrumbRow({ onClick={onClick} onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave} + projectSlug={projectSlug} startTimestampMs={startTimestampMs} expandPaths={expandPaths} onDimensionChange={handleDimensionChange} @@ -89,5 +92,3 @@ const StyledTimeBorder = styled('div')` border-top: 1px solid transparent; border-bottom: 1px solid transparent; `; - -export default BreadcrumbRow; diff --git a/static/app/views/replays/detail/breadcrumbs/index.tsx b/static/app/views/replays/detail/breadcrumbs/index.tsx index 74301e625c8a28..cee53187ab625e 100644 --- a/static/app/views/replays/detail/breadcrumbs/index.tsx +++ b/static/app/views/replays/detail/breadcrumbs/index.tsx @@ -10,6 +10,7 @@ import {t} from 'sentry/locale'; import useCrumbHandlers from 'sentry/utils/replays/hooks/useCrumbHandlers'; import useExtractedDomNodes from 'sentry/utils/replays/hooks/useExtractedDomNodes'; import useOrganization from 'sentry/utils/useOrganization'; +import useProjectFromId from 'sentry/utils/useProjectFromId'; import useVirtualizedInspector from 'sentry/views/replays/detail//useVirtualizedInspector'; import BreadcrumbFilters from 'sentry/views/replays/detail/breadcrumbs/breadcrumbFilters'; import BreadcrumbRow from 'sentry/views/replays/detail/breadcrumbs/breadcrumbRow'; @@ -35,6 +36,10 @@ function Breadcrumbs() { const organization = useOrganization(); const hasPerfTab = organization.features.includes('session-replay-trace-table'); + const projectSlug = useProjectFromId({ + project_id: replay?.getReplay().project_id, + })?.slug; + const {onClickTimestamp} = useCrumbHandlers(); const {data: frameToExtraction, isFetching: isFetchingExtractions} = useExtractedDomNodes({replay}); @@ -104,6 +109,7 @@ function Breadcrumbs() { frame={item} extraction={frameToExtraction?.get(item)} traces={hasPerfTab ? 
frameToTrace?.get(item) : undefined} + projectSlug={projectSlug} startTimestampMs={startTimestampMs} style={style} expandPaths={Array.from(expandPathsRef.current?.get(index) || [])} diff --git a/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx b/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx index 04d865367d7af6..081557ae3a9200 100644 --- a/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx +++ b/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx @@ -29,6 +29,7 @@ type Return = { const TYPE_TO_LABEL: Record = { start: 'Replay Start', + feedback: 'User Feedback', replay: 'Replay', issue: 'Issue', console: 'Console', @@ -51,6 +52,7 @@ const TYPE_TO_LABEL: Record = { const OPORCATEGORY_TO_TYPE: Record = { 'replay.init': 'start', 'replay.mutations': 'replay', + 'sentry.feedback': 'feedback', issue: 'issue', console: 'console', navigation: 'nav', From 28a7ef2a3ecdab9c4a2abb11ba2df0db15ae808b Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 6 Feb 2024 15:29:51 -0800 Subject: [PATCH 079/357] bug(feedback): Fix bottom padding on feedback item (#64717) **Before** ![before](https://github.com/getsentry/sentry/assets/187460/8955767c-2369-4700-aa21-aa8548e90c92) **After** ![after](https://github.com/getsentry/sentry/assets/187460/1dcf3185-89d0-4185-8fd2-3ce07f5e1bd0) Follow up from https://github.com/getsentry/sentry/pull/64064 --- static/app/components/feedback/feedbackItem/feedbackItem.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/app/components/feedback/feedbackItem/feedbackItem.tsx b/static/app/components/feedback/feedbackItem/feedbackItem.tsx index 52f86ec0e4e2c2..85a0ca0a8040cc 100644 --- a/static/app/components/feedback/feedbackItem/feedbackItem.tsx +++ b/static/app/components/feedback/feedbackItem/feedbackItem.tsx @@ -107,13 +107,14 @@ export default function FeedbackItem({feedbackItem, eventData, tags}: Props) { ); } +// 0 padding-bottom because has space(2) built-in. const OverflowPanelItem = styled(PanelItem)` overflow: scroll; flex-direction: column; flex-grow: 1; gap: ${space(4)}; - padding: ${space(2)} ${space(3)} 50px ${space(3)}; + padding: ${space(2)} ${space(3)} 0 ${space(3)}; `; const SmallTitle = styled('span')` From 58d0d906aac3d62cbe4582f70eb82ef44c94e467 Mon Sep 17 00:00:00 2001 From: Ryan Skonnord Date: Tue, 6 Feb 2024 15:41:32 -0800 Subject: [PATCH 080/357] test(hc): Disable silo test case inheritance validation (#64618) Remove call to `_validate_that_no_ancestor_is_silo_decorated` and add a deprecation comment. As far as we can tell, the check was originally motivated by trouble with swapped environment variables when changing into and out of the region silo mode. Those swaps were simplified in #61331 and related PRs, and hopefully having multiple silo decorators in the inheritance tree is no longer a problem. Decorating the base class will in fact be necessary when tests are run in region mode by default, as some base classes should properly be tagged as `@control_silo_test` and would raise spurious errors otherwise. 
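As a minimal sketch (mirroring the test classes touched below), the inheritance pattern this unblocks looks like:

```python
from sentry.testutils.cases import APITestCase
from sentry.testutils.silo import control_silo_test


# The base class carries a silo decorator...
@control_silo_test
class UserRolesTest(APITestCase):
    ...


# ...and so does the subclass, which the removed ancestor validation
# used to reject.
@control_silo_test
class UserRolesGetTest(UserRolesTest):
    def test_simple(self):
        ...
```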
--- src/sentry/testutils/silo.py | 16 +++++++++++----- .../endpoints/test_user_permission_details.py | 3 +++ .../api/endpoints/test_user_role_details.py | 3 +++ tests/sentry/api/endpoints/test_user_roles.py | 1 + .../sentry/api/endpoints/test_userroles_index.py | 2 ++ tests/sentry/utils/test_query.py | 2 ++ 6 files changed, 22 insertions(+), 5 deletions(-) diff --git a/src/sentry/testutils/silo.py b/src/sentry/testutils/silo.py index 302f6af4b30bdb..167cd96f0c7390 100644 --- a/src/sentry/testutils/silo.py +++ b/src/sentry/testutils/silo.py @@ -264,11 +264,6 @@ def apply(self, decorated_obj: Any) -> Any: if not (is_test_case_class or is_function): raise ValueError("@SiloModeTest must decorate a function or TestCase class") - if is_test_case_class: - self._validate_that_no_ancestor_is_silo_decorated(decorated_obj) - # _silo_modes is used to mark the class as silo decorated in the above validation - decorated_obj._silo_modes = self.silo_modes - if SENTRY_USE_MONOLITH_DBS: # In this case, skip modifying the object and let it run in the default # silo mode (monolith) @@ -280,6 +275,15 @@ def apply(self, decorated_obj: Any) -> Any: return self._mark_parameterized_by_silo_mode(decorated_obj) def _validate_that_no_ancestor_is_silo_decorated(self, object_to_validate: Any): + # Deprecated? Silo decorators at multiple places in the inheritance tree may + # be necessary if a base class needs to be run in a non-default mode, + # especially when the default is no longer region mode. The previous + # rationale may have been limited to problems around swapping the local + # region, which may now be resolved. + # + # TODO(RyanSkonnord): Delete this after ascertaining that it's safe to have + # silo decorators on test case class ancestors + class_queue = [object_to_validate] # Do a breadth-first traversal of all base classes to ensure that the @@ -294,6 +298,8 @@ def _validate_that_no_ancestor_is_silo_decorated(self, object_to_validate: Any): ) class_queue.extend(current_class.__bases__) + object_to_validate._silo_modes = self.silo_modes + all_silo_test = SiloModeTestDecorator(*SiloMode) """ diff --git a/tests/sentry/api/endpoints/test_user_permission_details.py b/tests/sentry/api/endpoints/test_user_permission_details.py index 26449ccab845cf..ad4a9c1fb2d1ce 100644 --- a/tests/sentry/api/endpoints/test_user_permission_details.py +++ b/tests/sentry/api/endpoints/test_user_permission_details.py @@ -37,6 +37,7 @@ def test_fails_without_users_admin_permission(self): assert response.status_code == 403 +@control_silo_test class UserPermissionDetailsGetTest(UserDetailsTest): method = "GET" @@ -69,6 +70,7 @@ def test_staff_without_permission(self, mock_has_permission): assert mock_has_permission.call_count == 1 +@control_silo_test class UserPermissionDetailsPostTest(UserDetailsTest): method = "POST" @@ -115,6 +117,7 @@ def test_staff_duplicate_permission(self, mock_has_permission): assert mock_has_permission.call_count == 1 +@control_silo_test class UserPermissionDetailsDeleteTest(UserDetailsTest): method = "DELETE" diff --git a/tests/sentry/api/endpoints/test_user_role_details.py b/tests/sentry/api/endpoints/test_user_role_details.py index 3ba6d40b68bcaa..609620a160e064 100644 --- a/tests/sentry/api/endpoints/test_user_role_details.py +++ b/tests/sentry/api/endpoints/test_user_role_details.py @@ -32,6 +32,7 @@ def test_fails_without_users_admin_permission(self): assert resp.status_code == 403 +@control_silo_test class UserUserRolesDetailsTest(UserUserRolesTest): def test_lookup_self(self): role = 
UserRole.objects.create(name="support", permissions=["broadcasts.admin"]) @@ -43,6 +44,7 @@ def test_lookup_self(self): assert resp.data["name"] == "support" +@control_silo_test class UserUserRolesCreateTest(UserUserRolesTest): method = "POST" @@ -66,6 +68,7 @@ def test_existing_role(self): assert resp.status_code == 410 +@control_silo_test class UserUserRolesDeleteTest(UserUserRolesTest): method = "DELETE" diff --git a/tests/sentry/api/endpoints/test_user_roles.py b/tests/sentry/api/endpoints/test_user_roles.py index 3f796b9c9be21d..59cf049a499912 100644 --- a/tests/sentry/api/endpoints/test_user_roles.py +++ b/tests/sentry/api/endpoints/test_user_roles.py @@ -32,6 +32,7 @@ def test_fails_without_users_admin_permission(self): assert resp.status_code == 403 +@control_silo_test class UserUserRolesGetTest(UserUserRolesTest): def test_lookup_self(self): role = UserRole.objects.create(name="support", permissions=["broadcasts.admin"]) diff --git a/tests/sentry/api/endpoints/test_userroles_index.py b/tests/sentry/api/endpoints/test_userroles_index.py index 5a0297599330c9..093fad26acd87c 100644 --- a/tests/sentry/api/endpoints/test_userroles_index.py +++ b/tests/sentry/api/endpoints/test_userroles_index.py @@ -32,6 +32,7 @@ def test_fails_without_users_admin_permission(self): assert resp.status_code == 403 +@control_silo_test class UserRolesGetTest(UserRolesTest): def test_simple(self): UserRole.objects.create(name="test-role") @@ -41,6 +42,7 @@ def test_simple(self): assert "test-role" in [r["name"] for r in resp.data] +@control_silo_test class UserRolesPostTest(UserRolesTest): method = "POST" diff --git a/tests/sentry/utils/test_query.py b/tests/sentry/utils/test_query.py index 55de1bc3f79f7b..0c2a887331885a 100644 --- a/tests/sentry/utils/test_query.py +++ b/tests/sentry/utils/test_query.py @@ -55,10 +55,12 @@ def test_empty(self): assert len(list(self.range_wrapper(qs, step=2))) == 0 +@no_silo_test class RangeQuerySetWrapperWithProgressBarTest(RangeQuerySetWrapperTest): range_wrapper = RangeQuerySetWrapperWithProgressBar +@no_silo_test class RangeQuerySetWrapperWithProgressBarApproxTest(RangeQuerySetWrapperTest): range_wrapper = RangeQuerySetWrapperWithProgressBarApprox From 926616373a6ab6731ba83b1e94ee82723e7089f6 Mon Sep 17 00:00:00 2001 From: pevensentry <101372341+pevensentry@users.noreply.github.com> Date: Tue, 6 Feb 2024 15:57:20 -0800 Subject: [PATCH 081/357] chore(login): update the login banners (#64719) Update login banners to replace the mobile performance workshop with the API rate limit blog. --- src/sentry/templates/sentry/partial/alerts.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/templates/sentry/partial/alerts.html b/src/sentry/templates/sentry/partial/alerts.html index 98df9057a3cf64..efef058a5aca4b 100644 --- a/src/sentry/templates/sentry/partial/alerts.html +++ b/src/sentry/templates/sentry/partial/alerts.html @@ -78,7 +78,7 @@
    {% if banner_choice == 0 %} - Join us Feb. 6th at 11AM PT to learn how to identify, resolve and prevent crashes in your mobile app.  Learn more. + Hitting API rate limits in your project? Learn why API rate limits happen and how to address them.  Learn more. {% elif banner_choice == 1 %} Managing a Python monolith? Join us Feb. 27th at 2AM PT to learn how Kraken Technologies debugs their's with Sentry.  Learn more. {% endif %} From 5de8e3c7c2b6e1e6800653d66fbf14e032ed9575 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:09:50 -0800 Subject: [PATCH 082/357] chore(staff): Let staff access user index endpoint (#64605) I believe this is only used in the _admin portal, so once the feature flag is removed we'll only let staff access this and not superuser After this PR, the users tab in _admin should be complete! --- src/sentry/api/endpoints/user_index.py | 6 +++--- tests/sentry/api/endpoints/test_user_index.py | 13 +++++++++++-- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/sentry/api/endpoints/user_index.py b/src/sentry/api/endpoints/user_index.py index c083eec20bdc58..e184071339e5cb 100644 --- a/src/sentry/api/endpoints/user_index.py +++ b/src/sentry/api/endpoints/user_index.py @@ -5,7 +5,7 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import Endpoint, control_silo_endpoint from sentry.api.paginator import DateTimePaginator -from sentry.api.permissions import SuperuserPermission +from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission from sentry.api.serializers import serialize from sentry.db.models.query import in_iexact from sentry.models.user import User @@ -15,9 +15,9 @@ @control_silo_endpoint class UserIndexEndpoint(Endpoint): publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } - permission_classes = (SuperuserPermission,) + permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,) def get(self, request: Request) -> Response: queryset = User.objects.distinct() diff --git a/tests/sentry/api/endpoints/test_user_index.py b/tests/sentry/api/endpoints/test_user_index.py index f88dbfc23ae199..0816ff8f9feded 100644 --- a/tests/sentry/api/endpoints/test_user_index.py +++ b/tests/sentry/api/endpoints/test_user_index.py @@ -1,5 +1,6 @@ from sentry.models.userpermission import UserPermission from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import control_silo_test @@ -14,11 +15,19 @@ def setUp(self): self.login_as(user=self.superuser, superuser=True) - def test_superuser_only(self): + def test_normal_user_fails(self): self.login_as(self.normal_user) self.get_error_response(status_code=403) - def test_simple(self): + @with_feature("auth:enterprise-staff-cookie") + def test_staff_simple(self): + self.staff_user = self.create_user(is_staff=True) + self.login_as(self.staff_user, staff=True) + + response = self.get_success_response() + assert len(response.data) == 3 + + def test_superuser_simple(self): response = self.get_success_response() assert len(response.data) == 2 From 50db028d67ef94cfd6fd7c5af9f0b3a00e7493d0 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 6 Feb 2024 16:21:52 -0800 Subject: [PATCH 083/357] bug(replay): Prevent a flash of "replay not found" during loading (#64727) Fixes https://github.com/getsentry/team-replay/issues/344 --- .../utils/api/useFetchParallelPages.spec.tsx | 38 +++++++++++++++++++ 
.../app/utils/api/useFetchParallelPages.tsx | 2 +- 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/static/app/utils/api/useFetchParallelPages.spec.tsx b/static/app/utils/api/useFetchParallelPages.spec.tsx index 2f57b5f411d101..7ecbb6ab8db1f9 100644 --- a/static/app/utils/api/useFetchParallelPages.spec.tsx +++ b/static/app/utils/api/useFetchParallelPages.spec.tsx @@ -224,4 +224,42 @@ describe('useFetchParallelPages', () => { expect(result.current.getLastResponseHeader).toStrictEqual(expect.any(Function)); expect(result.current.getLastResponseHeader?.('Link')).toBe('next: 0:20:0'); }); + + it('should have isFetching=true as long as something is outstanding', async () => { + MockApiClient.addMockResponse({ + url: MOCK_API_ENDPOINT, + body: 'results starting at 0', + match: [MockApiClient.matchQuery({cursor: '0:0:0', per_page: 10})], + asyncDelay: 200, + }); + MockApiClient.addMockResponse({ + url: MOCK_API_ENDPOINT, + body: 'results starting at 10', + match: [MockApiClient.matchQuery({cursor: '0:10:0', per_page: 10})], + asyncDelay: 500, + }); + + const getQueryKey = queryKeyFactory(); + + const {result, waitForNextUpdate} = reactHooks.renderHook(useFetchParallelPages, { + wrapper: makeWrapper(makeTestQueryClient()), + initialProps: { + enabled: true, + getQueryKey, + hits: 13, + perPage: 10, + }, + }); + + // No responses have resolved + expect(result.current.isFetching).toBeTruthy(); + await waitForNextUpdate(); + + // Only 1 response has resolved + expect(result.current.isFetching).toBeTruthy(); + await waitForNextUpdate(); + + // Both responses have resolved + expect(result.current.isFetching).toBeFalsy(); + }); }); diff --git a/static/app/utils/api/useFetchParallelPages.tsx b/static/app/utils/api/useFetchParallelPages.tsx index 65fb05ce42e160..d02694cc6b43fa 100644 --- a/static/app/utils/api/useFetchParallelPages.tsx +++ b/static/app/utils/api/useFetchParallelPages.tsx @@ -153,7 +153,7 @@ export default function useFetchParallelPages({ error: values.map(value => value.error), getLastResponseHeader: values.slice(-1)[0]?.getResponseHeader, isError: values.map(value => value.isError).some(Boolean), - isFetching: values.map(value => value.isFetching).every(Boolean), + isFetching: values.map(value => value.isFetching).some(Boolean), }); } }) From a70419f25a115dd292ee375c6af9568edb3e3179 Mon Sep 17 00:00:00 2001 From: Julia Hoge Date: Tue, 6 Feb 2024 16:31:21 -0800 Subject: [PATCH 084/357] chore(settings): Add feature flag for updated legal page (#64726) Closes https://github.com/getsentry/sentry/issues/64725 Adds feature flag for the UI changes planned for the terms of service legal settings page. 
--- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index c64a2fd0bdbbbf..59bc207504ad1e 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1869,6 +1869,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:session-replay-weekly-email": False, # Lets organizations manage grouping configs "organizations:set-grouping-config": False, + # Enable the UI for updated terms of service + "organizations:settings-legal-tos-ui": False, # Enable the UI for the overage alert settings "organizations:slack-overage-notifications": False, # Enable source maps debugger diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 35ee40080b9a46..7cfaf32d70d9a5 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -262,6 +262,7 @@ default_manager.add("organizations:session-replay-ui", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:session-replay-weekly-email", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:set-grouping-config", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("organizations:settings-legal-tos-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:slack-block-kit", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:slack-overage-notifications", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:source-maps-debugger-blue-thunder-edition", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From a8bbd68b4434632a4869764e1735f82cef99ab6f Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 7 Feb 2024 00:37:17 +0000 Subject: [PATCH 085/357] Revert "feat(crons): Allow max_workers to be configured for consumer (#64512)" This reverts commit 71b77a86fd67d7e95b522cc423e8fabe8d26f667. 
Co-authored-by: wedamija <6288560+wedamija@users.noreply.github.com> --- src/sentry/consumers/__init__.py | 6 --- .../monitors/consumers/monitor_consumer.py | 30 +++++------- .../sentry/monitors/test_monitor_consumer.py | 48 ++----------------- 3 files changed, 15 insertions(+), 69 deletions(-) diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index 543af02ff2f458..a7ddbabaf59bb8 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -102,12 +102,6 @@ def ingest_monitors_options() -> list[click.Option]: default=10, help="Maximum time spent batching check-ins to batch before processing in parallel.", ), - click.Option( - ["--max-workers", "max_workers"], - type=int, - default=None, - help="The maximum number of threads to spawn in parallel mode.", - ), ] return options diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index 656150b8c2313e..3c6e0ff7a0c7e3 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -4,9 +4,8 @@ import uuid from collections import defaultdict from collections.abc import Mapping -from concurrent.futures import Executor, ThreadPoolExecutor, wait +from concurrent.futures import ThreadPoolExecutor, wait from datetime import datetime, timedelta -from functools import partial from typing import Literal import msgpack @@ -55,7 +54,6 @@ CHECKIN_QUOTA_LIMIT = 6 CHECKIN_QUOTA_WINDOW = 60 -StrategyMode = Literal["parallel", "serial"] def _ensure_monitor_with_config( @@ -748,6 +746,9 @@ def _process_checkin(item: CheckinItem, txn: Transaction | Span): logger.exception("Failed to process check-in") +_checkin_worker = ThreadPoolExecutor() + + def process_checkin(item: CheckinItem): """ Process an individual check-in @@ -771,7 +772,7 @@ def process_checkin_group(items: list[CheckinItem]): process_checkin(item) -def process_batch(message: Message[ValuesBatch[KafkaPayload]], worker: Executor): +def process_batch(message: Message[ValuesBatch[KafkaPayload]]): """ Receives batches of check-in messages. This function will take the batch and group them together by monitor ID (ensuring order is preserved) and @@ -819,7 +820,8 @@ def process_batch(message: Message[ValuesBatch[KafkaPayload]], worker: Executor) # Submit check-in groups for processing with sentry_sdk.start_transaction(op="process_batch", name="monitors.monitor_consumer"): futures = [ - worker.submit(process_checkin_group, group) for group in checkin_mapping.values() + _checkin_worker.submit(process_checkin_group, group) + for group in checkin_mapping.values() ] wait(futures) @@ -864,11 +866,6 @@ class StoreMonitorCheckInStrategyFactory(ProcessingStrategyFactory[KafkaPayload] Does the consumer process unrelated check-ins in parallel? """ - max_workers: int | None = None - """ - Number of Executor workers to use when running in parallel - """ - max_batch_size = 500 """ How many messages will be batched at once when in parallel mode. 
@@ -881,10 +878,9 @@ class StoreMonitorCheckInStrategyFactory(ProcessingStrategyFactory[KafkaPayload] def __init__( self, - mode: StrategyMode | None = None, + mode: Literal["parallel", "serial"] | None = None, max_batch_size: int | None = None, max_batch_time: int | None = None, - max_workers: int | None = None, ) -> None: if mode == "parallel": self.parallel = True @@ -893,14 +889,10 @@ def __init__( self.max_batch_size = max_batch_size if max_batch_time is not None: self.max_batch_time = max_batch_time - if max_workers is not None: - self.max_workers = max_workers - - def create_parallel_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]: - worker = ThreadPoolExecutor(max_workers=self.max_workers) + def create_paralell_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]: batch_processor = RunTask( - function=partial(process_batch, worker=worker), + function=process_batch, next_step=CommitOffsets(commit), ) return BatchStep( @@ -921,6 +913,6 @@ def create_with_partitions( partitions: Mapping[Partition, int], ) -> ProcessingStrategy[KafkaPayload]: if self.parallel: - return self.create_parallel_worker(commit) + return self.create_paralell_worker(commit) else: return self.create_synchronous_worker(commit) diff --git a/tests/sentry/monitors/test_monitor_consumer.py b/tests/sentry/monitors/test_monitor_consumer.py index d1021b57a6a1ba..3839c27bc551ce 100644 --- a/tests/sentry/monitors/test_monitor_consumer.py +++ b/tests/sentry/monitors/test_monitor_consumer.py @@ -13,10 +13,7 @@ from sentry.constants import ObjectStatus from sentry.db.models import BoundedPositiveIntegerField from sentry.monitors.constants import TIMEOUT, PermitCheckInStatus -from sentry.monitors.consumers.monitor_consumer import ( - StoreMonitorCheckInStrategyFactory, - StrategyMode, -) +from sentry.monitors.consumers.monitor_consumer import StoreMonitorCheckInStrategyFactory from sentry.monitors.models import ( CheckInStatus, Monitor, @@ -26,7 +23,7 @@ MonitorType, ScheduleType, ) -from sentry.testutils.cases import BaseTestCase, TestCase, TransactionTestCase +from sentry.testutils.cases import TestCase from sentry.utils import json from sentry.utils.locking.manager import LockManager from sentry.utils.outcomes import Outcome @@ -35,9 +32,7 @@ locks = LockManager(build_instance_from_options(settings.SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS)) -class MonitorConsumerTest(BaseTestCase): - mode: StrategyMode = "serial" - +class MonitorConsumerTest(TestCase): def _create_monitor(self, **kwargs): return Monitor.objects.create( organization_id=self.organization.id, @@ -85,10 +80,7 @@ def send_checkin( commit = mock.Mock() partition = Partition(Topic("test"), 0) - factory = StoreMonitorCheckInStrategyFactory( - mode=self.mode, max_workers=1 - ).create_with_partitions(commit, {partition: 0}) - factory.submit( + StoreMonitorCheckInStrategyFactory().create_with_partitions(commit, {partition: 0}).submit( Message( BrokerValue( KafkaPayload(b"fake-key", msgpack.packb(wrapper), []), @@ -98,39 +90,7 @@ def send_checkin( ) ) ) - factory.join() - - -class ParallelMonitorConsumerTest(TransactionTestCase, MonitorConsumerTest): - mode: StrategyMode = "parallel" - - def test(self) -> None: - monitor = self._create_monitor(slug="my-monitor") - - self.send_checkin(monitor.slug) - - checkin = MonitorCheckIn.objects.get(guid=self.guid) - assert checkin.status == CheckInStatus.OK - assert checkin.monitor_config == monitor.config - - monitor_environment = MonitorEnvironment.objects.get(id=checkin.monitor_environment.id) - 
assert monitor_environment.status == MonitorStatus.OK - assert monitor_environment.last_checkin == checkin.date_added - assert monitor_environment.next_checkin == monitor.get_next_expected_checkin( - checkin.date_added - ) - assert monitor_environment.next_checkin_latest == monitor.get_next_expected_checkin_latest( - checkin.date_added - ) - - # Process another check-in to verify we set an expected time for the next check-in - self.send_checkin(monitor.slug) - checkin = MonitorCheckIn.objects.get(guid=self.guid) - assert checkin.expected_time == monitor_environment.next_checkin - assert checkin.trace_id.hex == self.trace_id - -class SynchronousMonitorConsumerTest(MonitorConsumerTest, TestCase): def send_clock_pulse( self, ts: datetime | None = None, From 1a02f69b756ad3591207062d1dd58850949b8dcd Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:40:23 -0800 Subject: [PATCH 086/357] chore(roles): remove OrgRoleInfo (#64630) --- static/app/components/orgRole.tsx | 147 ------------------ .../organizationMemberRow.spec.tsx | 57 +------ .../organizationMemberRow.tsx | 6 +- .../organizationMembersList.spec.tsx | 14 -- 4 files changed, 4 insertions(+), 220 deletions(-) delete mode 100644 static/app/components/orgRole.tsx diff --git a/static/app/components/orgRole.tsx b/static/app/components/orgRole.tsx deleted file mode 100644 index 214f4b8f558b69..00000000000000 --- a/static/app/components/orgRole.tsx +++ /dev/null @@ -1,147 +0,0 @@ -import {Fragment, useEffect, useMemo} from 'react'; -import styled from '@emotion/styled'; -import * as Sentry from '@sentry/react'; - -import ExternalLink from 'sentry/components/links/externalLink'; -import Link from 'sentry/components/links/link'; -import QuestionTooltip from 'sentry/components/questionTooltip'; -import {t, tct} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import type {Member, Organization} from 'sentry/types'; -import {getEffectiveOrgRole} from 'sentry/utils/orgRole'; - -export function OrgRoleInfo({ - organization, - member, -}: { - member: Member; - organization: Organization; -}) { - const {orgRoleList} = organization; - const {orgRole, groupOrgRoles} = member; - - const orgRoleFromMember = useMemo(() => { - const role = orgRoleList.find(r => r.id === orgRole); - return role; - }, [orgRole, orgRoleList]); - - const effectiveOrgRole = useMemo(() => { - if (!groupOrgRoles) { - return orgRoleFromMember; - } - const memberOrgRoles = groupOrgRoles.map(r => r.role.id).concat([orgRole]); - return getEffectiveOrgRole(memberOrgRoles, orgRoleList); - }, [orgRole, groupOrgRoles, orgRoleList, orgRoleFromMember]); - - useEffect(() => { - if (!orgRoleFromMember) { - Sentry.withScope(scope => { - scope.setExtra('context', { - memberId: member.id, - orgRole: member.orgRole, - }); - Sentry.captureException(new Error('OrgMember has an invalid orgRole.')); - }); - } - }, [orgRoleFromMember, member]); - - useEffect(() => { - if (!effectiveOrgRole) { - Sentry.withScope(scope => { - scope.setExtra('context', { - memberId: member.id, - orgRoleFromMember, - groupOrgRoles, - orgRoleList, - effectiveOrgRole, - }); - Sentry.captureException(new Error('OrgMember has no effectiveOrgRole.')); - }); - } - }, [effectiveOrgRole, member, orgRoleFromMember, groupOrgRoles, orgRoleList]); - - // This code path should not happen, so this weird UI is fine. 
- if (!orgRoleFromMember) { - return {t('Error Role')}; - } - - if (groupOrgRoles?.length === 0 || !effectiveOrgRole || !groupOrgRoles) { - return {orgRoleFromMember.name}; - } - - const urlPrefix = `/settings/${organization.slug}/`; - - const tooltipBody = ( - -
    {t('This user recieved org-level roles from several sources.')}
    - -
    - - - {t('User-specific')} - -
    : {orgRoleFromMember.name}
    -
    -
    - -
    -
    {t('Teams')}:
    - {groupOrgRoles && - groupOrgRoles - .sort((a, b) => a.teamSlug.localeCompare(b.teamSlug)) - .map(r => ( - - #{r.teamSlug} -
    : {r.role.name}
    -
    - ))} -
    - -
    - {tct( - 'Sentry will grant them permissions equivalent to the union-set of all their role. [docsLink:See docs here].', - { - docsLink: ( - - ), - } - )} -
    -
    - ); - - return ( - - {effectiveOrgRole.name} - - - ); -} - -const Wrapper = styled('span')` - display: inline-flex; - gap: ${space(0.5)}; -`; - -const TooltipWrapper = styled('div')` - width: 200px; - display: grid; - row-gap: ${space(1.5)}; - text-align: left; - overflow: hidden; -`; - -const TeamRow = styled('div')` - display: grid; - grid-template-columns: auto 1fr; - - > * { - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - } -`; -const TeamLink = styled(Link)` - max-width: 130px; - font-weight: 700; -`; diff --git a/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx b/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx index ef09e6477e2b09..d3ecf25c507634 100644 --- a/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx +++ b/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx @@ -1,11 +1,9 @@ import {MemberFixture} from 'sentry-fixture/member'; import {OrganizationFixture} from 'sentry-fixture/organization'; -import {TeamFixture} from 'sentry-fixture/team'; import {UserFixture} from 'sentry-fixture/user'; -import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import {render, screen} from 'sentry-test/reactTestingLibrary'; -import {OrgRoleFixture} from 'sentry/types/role'; import OrganizationMemberRow from 'sentry/views/settings/organizationMembers/organizationMemberRow'; describe('OrganizationMemberRow', function () { @@ -32,22 +30,6 @@ describe('OrganizationMemberRow', function () { groupOrgRoles: [], }); - const managerTeam = TeamFixture({ - orgRole: 'manager', - }); - - const memberOnManagerTeam = MemberFixture({ - id: '2', - orgRole: 'member', - teams: [managerTeam.slug], - groupOrgRoles: [ - { - teamSlug: managerTeam.slug, - role: OrgRoleFixture({name: 'Manager'}), - }, - ], - }); - const currentUser = UserFixture({ id: '2', email: 'currentUser@email.com', @@ -308,41 +290,4 @@ describe('OrganizationMemberRow', function () { expect(removeButton()).toBeEnabled(); }); }); - - describe('render org role', function () { - it('renders org role without tooltip if no org roles from team membership', function () { - render( - - ); - - expect(screen.getByText('Member')).toBeInTheDocument(); - - const questionTooltip = screen.queryByTestId('more-information'); - expect(questionTooltip).not.toBeInTheDocument(); - }); - }); - - it('renders org role info tooltip if member has org roles from team membership', async function () { - render( - - ); - - const questionTooltip = screen.getByTestId('more-information'); - expect(questionTooltip).toBeInTheDocument(); - - await userEvent.hover(questionTooltip); - await waitFor(() => { - expect(screen.getByText(`#${managerTeam.slug}`)).toBeInTheDocument(); - expect(screen.getByText(': Manager')).toBeInTheDocument(); - }); - }); }); diff --git a/static/app/views/settings/organizationMembers/organizationMemberRow.tsx b/static/app/views/settings/organizationMembers/organizationMemberRow.tsx index f2e88b3d69d53e..e8cd90c0909bfb 100644 --- a/static/app/views/settings/organizationMembers/organizationMemberRow.tsx +++ b/static/app/views/settings/organizationMembers/organizationMemberRow.tsx @@ -7,13 +7,13 @@ import Confirm from 'sentry/components/confirm'; import HookOrDefault from 'sentry/components/hookOrDefault'; import Link from 'sentry/components/links/link'; import LoadingIndicator from 'sentry/components/loadingIndicator'; -import {OrgRoleInfo} from 'sentry/components/orgRole'; import PanelItem from 
'sentry/components/panels/panelItem'; import {IconCheckmark, IconClose, IconFlag, IconMail, IconSubtract} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {AvatarUser, Member, Organization} from 'sentry/types'; import isMemberDisabledFromLimit from 'sentry/utils/isMemberDisabledFromLimit'; +import {capitalize} from 'sentry/utils/string/capitalize'; type Props = { canAddMembers: boolean; @@ -75,7 +75,7 @@ export default class OrganizationMemberRow extends PureComponent { }; renderMemberRole() { - const {member, organization} = this.props; + const {member} = this.props; const {roleName, pending, expired} = member; if (isMemberDisabledFromLimit(member)) { return {t('Deactivated')}; @@ -88,7 +88,7 @@ export default class OrganizationMemberRow extends PureComponent { ); } - return ; + return {capitalize(member.orgRole)}; } render() { diff --git a/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx b/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx index b0caa04afc2a96..4979d0babf0564 100644 --- a/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx +++ b/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx @@ -437,20 +437,6 @@ describe('OrganizationMembersList', function () { } }); - it('can filter members with org roles from team membership', async function () { - const routerContext = RouterContextFixture(); - render(, { - context: routerContext, - }); - - await userEvent.click(screen.getByRole('button', {name: 'Filter'})); - await userEvent.click(screen.getByRole('option', {name: 'Owner'})); - await userEvent.click(screen.getByRole('button', {name: 'Filter'})); - - const owners = screen.queryAllByText('Owner'); - expect(owners).toHaveLength(3); - }); - describe('OrganizationInviteRequests', function () { const inviteRequest = MemberFixture({ id: '123', From 7708c3decc77922bfeccbb6f84ddc368031eabe9 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 6 Feb 2024 17:02:27 -0800 Subject: [PATCH 087/357] fix(issues): Update trace timeline dot styles (#64731) --- .../traceTimeline/traceTimeline.tsx | 2 +- .../traceTimeline/traceTimelineEvents.tsx | 41 ++++++++++--------- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index 9f8b594557942c..fcbe333deaa1cf 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -62,7 +62,7 @@ export function TraceTimeline({event}: TraceTimelineProps) { const TimelineOutline = styled('div')` position: absolute; left: 0; - top: 3px; + top: 3.5px; width: 100%; height: 10px; border: 1px solid ${p => p.theme.innerBorder}; diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index c2594ccb4164f5..70fcf209a9c5d3 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -174,13 +174,15 @@ function NodeGroup({ )} {!isCurrentNode && - groupEvents.map(groupEvent => - 'event.type' in groupEvent ? ( - - ) : ( - - ) - )} + groupEvents + .slice(0, 4) + .map(groupEvent => + 'event.type' in groupEvent ? 
( + + ) : ( + + ) + )} ); })} @@ -230,42 +232,42 @@ const IconNode = styled('div')` box-shadow: ${p => p.theme.dropShadowLight}; user-select: none; background-color: ${p => color(p.theme.red200).alpha(0.3).string()}; + border: 1px solid ${p => p.theme.red300}; margin-left: -8px; `; const PerformanceIconNode = styled(IconNode)` background-color: unset; - border: 1px solid ${p => color(p.theme.red300).alpha(0.3).string()}; `; const CurrentNodeContainer = styled('div')` position: absolute; grid-column: 1; grid-row: 1; - width: 8px; - height: 8px; + width: 12px; + height: 12px; `; const CurrentNodeRing = styled('div')` border: 1px solid ${p => p.theme.red300}; - height: 16px; - width: 16px; + height: 24px; + width: 24px; border-radius: 100%; position: absolute; - top: -4px; - left: -12px; - animation: pulse 4s ease-out infinite; + top: -6px; + left: -16px; + animation: pulse 2s ease-out infinite; @keyframes pulse { 0% { transform: scale(0.1, 0.1); opacity: 0.0; } - 80% { + 50% { transform: scale(0.1, 0.1); opacity: 0.0; } - 90% { + 70% { opacity: 1.0; } 100% { @@ -277,8 +279,9 @@ const CurrentNodeRing = styled('div')` const CurrentIconNode = styled(IconNode)` background-color: ${p => p.theme.red300}; - border-radius: 50%; - position: absolute; + width: 12px; + height: 12px; + margin-left: -10px; `; const TooltipHelper = styled('span')` From 0c3dbe360f5cce6cf05b614d52a23e67174d0b90 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 6 Feb 2024 17:03:49 -0800 Subject: [PATCH 088/357] feat(replay): Split /replay-count/ data source by issue category (#64730) Fixes https://github.com/getsentry/team-replay/issues/380 --- .../components/eventOrGroupExtraDetails.tsx | 2 +- .../group/issueReplayCount.spec.tsx | 15 +++++---- .../app/components/group/issueReplayCount.tsx | 13 ++++---- .../replayCount/useReplayCountForIssues.tsx | 32 ++++++++++++++++--- static/app/views/issueDetails/header.tsx | 2 +- 5 files changed, 45 insertions(+), 19 deletions(-) diff --git a/static/app/components/eventOrGroupExtraDetails.tsx b/static/app/components/eventOrGroupExtraDetails.tsx index 2d4d799c342018..6aa12f2fed27ab 100644 --- a/static/app/components/eventOrGroupExtraDetails.tsx +++ b/static/app/components/eventOrGroupExtraDetails.tsx @@ -81,7 +81,7 @@ function EventOrGroupExtraDetails({data, showAssignee, organization}: Props) { {numComments} )} - {showReplayCount && } + {showReplayCount && } {logger && ( ); + const {container} = render(); await waitFor(() => { expect(container).toBeEmptyDOMElement(); }); - expect(mockGetReplayCountForIssue).toHaveBeenCalledWith(groupId); + expect(mockGetReplayCountForIssue).toHaveBeenCalledWith(groupId, 'error'); }); it('does not render when a group has a count of zero', async function () { const mockGetReplayCountForIssue = mockCount(0); - const {container} = render(); + const {container} = render(); await waitFor(() => { expect(container).toBeEmptyDOMElement(); }); - expect(mockGetReplayCountForIssue).toHaveBeenCalledWith(groupId); + expect(mockGetReplayCountForIssue).toHaveBeenCalledWith(groupId, 'error'); }); it('renders the correct replay count', async function () { const mockGetReplayCountForIssue = mockCount(2); - const {container} = render(, { + const {container} = render(, { context: routerContext, }); @@ -62,6 +65,6 @@ describe('IssueReplayCount', function () { 'href', `/organizations/${organization.slug}/issues/${groupId}/replays/` ); - expect(mockGetReplayCountForIssue).toHaveBeenCalledWith(groupId); + expect(mockGetReplayCountForIssue).toHaveBeenCalledWith(groupId, 
'error'); }); }); diff --git a/static/app/components/group/issueReplayCount.tsx b/static/app/components/group/issueReplayCount.tsx index acaea319d7c69d..fbba1cde035b86 100644 --- a/static/app/components/group/issueReplayCount.tsx +++ b/static/app/components/group/issueReplayCount.tsx @@ -5,21 +5,22 @@ import {Tooltip} from 'sentry/components/tooltip'; import {IconPlay} from 'sentry/icons'; import {t, tn} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import type {Group} from 'sentry/types'; import useReplayCountForIssues from 'sentry/utils/replayCount/useReplayCountForIssues'; import useOrganization from 'sentry/utils/useOrganization'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; -type Props = { - groupId: string; -}; +interface Props { + group: Group; +} /** * Show the count of how many replays are associated to an issue. */ -function IssueReplayCount({groupId}: Props) { +function IssueReplayCount({group}: Props) { const organization = useOrganization(); const {getReplayCountForIssue} = useReplayCountForIssues(); - const count = getReplayCountForIssue(groupId); + const count = getReplayCountForIssue(group.id, group.issueCategory); if (count === undefined || count === 0) { return null; @@ -37,7 +38,7 @@ function IssueReplayCount({groupId}: Props) { 50 ? titleOver50 : title50OrLess}> diff --git a/static/app/utils/replayCount/useReplayCountForIssues.tsx b/static/app/utils/replayCount/useReplayCountForIssues.tsx index 4c44659c096203..15a310e8833027 100644 --- a/static/app/utils/replayCount/useReplayCountForIssues.tsx +++ b/static/app/utils/replayCount/useReplayCountForIssues.tsx @@ -1,3 +1,4 @@ +import {IssueCategory} from 'sentry/types'; import useReplayCount from 'sentry/utils/replayCount/useReplayCount'; import useOrganization from 'sentry/utils/useOrganization'; @@ -6,18 +7,39 @@ import useOrganization from 'sentry/utils/useOrganization'; */ export default function useReplayCountForIssues() { const organization = useOrganization(); - const {getOne, getMany, hasOne, hasMany} = useReplayCount({ + const { + getOne: getOneError, + getMany: getManyError, + hasOne: hasOneError, + hasMany: hasManyError, + } = useReplayCount({ bufferLimit: 25, dataSource: 'discover', fieldName: 'issue.id', organization, statsPeriod: '14d', }); + const { + getOne: getOneIssue, + getMany: getManyIssue, + hasOne: hasOneIssue, + hasMany: hasManyIssue, + } = useReplayCount({ + bufferLimit: 25, + dataSource: 'search_issues', + fieldName: 'issue.id', + organization, + statsPeriod: '14d', + }); return { - getReplayCountForIssue: getOne, - getReplayCountForIssues: getMany, - issueHasReplay: hasOne, - issuesHaveReplay: hasMany, + getReplayCountForIssue: (id: string, category: IssueCategory) => + category === IssueCategory.ERROR ? getOneError(id) : getOneIssue(id), + getReplayCountForIssues: (id: readonly string[], category: IssueCategory) => + category === IssueCategory.ERROR ? getManyError(id) : getManyIssue(id), + issueHasReplay: (id: string, category: IssueCategory) => + category === IssueCategory.ERROR ? hasOneError(id) : hasOneIssue(id), + issuesHaveReplay: (id: readonly string[], category: IssueCategory) => + category === IssueCategory.ERROR ? 
hasManyError(id) : hasManyIssue(id), }; } diff --git a/static/app/views/issueDetails/header.tsx b/static/app/views/issueDetails/header.tsx index 12b095bb5842a7..72be0e6107abe6 100644 --- a/static/app/views/issueDetails/header.tsx +++ b/static/app/views/issueDetails/header.tsx @@ -59,7 +59,7 @@ function GroupHeaderTabs({ const organization = useOrganization(); const {getReplayCountForIssue} = useReplayCountForIssues(); - const replaysCount = getReplayCountForIssue(group.id); + const replaysCount = getReplayCountForIssue(group.id, group.issueCategory); const projectFeatures = new Set(project ? project.features : []); const organizationFeatures = new Set(organization ? organization.features : []); From af067d47cc93097b1bb565502e1b5fdf4c2bd07e Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Tue, 6 Feb 2024 21:01:16 -0500 Subject: [PATCH 089/357] fix(projects): Fix session metrics not rendering when period is 45+ days (#64670) Session metrics don't render on 45+ days because the previous period query `isLoading` state is always true. Fix this by updating `isLoading` to check to see if the query is enabled. --- .../projectStabilityScoreCard.tsx | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx b/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx index d5626559b566f5..1bef733f435775 100644 --- a/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx +++ b/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx @@ -72,6 +72,12 @@ const useCrashFreeRate = (props: Props) => { {staleTime: 0, enabled: isEnabled} ); + const isPreviousPeriodEnabled = shouldFetchPreviousPeriod({ + start: datetime.start, + end: datetime.end, + period: datetime.period, + }); + const previousQuery = useApiQuery( [ `/organizations/${organization.slug}/sessions/`, @@ -85,20 +91,15 @@ const useCrashFreeRate = (props: Props) => { ], { staleTime: 0, - enabled: - isEnabled && - shouldFetchPreviousPeriod({ - start: datetime.start, - end: datetime.end, - period: datetime.period, - }), + enabled: isEnabled && isPreviousPeriodEnabled, } ); return { crashFreeRate: currentQuery.data, previousCrashFreeRate: previousQuery.data, - isLoading: currentQuery.isLoading || previousQuery.isLoading, + isLoading: + currentQuery.isLoading || (previousQuery.isLoading && isPreviousPeriodEnabled), error: currentQuery.error || previousQuery.error, refetch: () => { currentQuery.refetch(); From 505d8fb455ad6b018edb10c746dec08d5092602b Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 6 Feb 2024 18:05:53 -0800 Subject: [PATCH 090/357] feat(replay): Move Share and Delete into dropdown to save space (#64737) ![SCR-20240206-ouek](https://github.com/getsentry/sentry/assets/187460/76de4c5b-4e2b-4833-a854-3b40d39b1501) Fixes https://github.com/getsentry/team-replay/issues/370 --- .../replays/hooks/useDeleteReplay.tsx} | 45 ++++++------- .../hooks/useShareReplayAtTimestamp.tsx} | 28 +++------ static/app/views/replays/detail/page.tsx | 63 ++++++++++++++++--- 3 files changed, 87 insertions(+), 49 deletions(-) rename static/app/{components/replays/header/deleteButton.tsx => utils/replays/hooks/useDeleteReplay.tsx} (53%) rename static/app/{components/replays/shareButton.tsx => utils/replays/hooks/useShareReplayAtTimestamp.tsx} (82%) diff --git a/static/app/components/replays/header/deleteButton.tsx 
b/static/app/utils/replays/hooks/useDeleteReplay.tsx similarity index 53% rename from static/app/components/replays/header/deleteButton.tsx rename to static/app/utils/replays/hooks/useDeleteReplay.tsx index 59ebe203c1b9fe..ddea0ab0f9f5ef 100644 --- a/static/app/components/replays/header/deleteButton.tsx +++ b/static/app/utils/replays/hooks/useDeleteReplay.tsx @@ -1,25 +1,28 @@ +import {useCallback} from 'react'; import * as Sentry from '@sentry/react'; import {addErrorMessage} from 'sentry/actionCreators/indicator'; -import {Button} from 'sentry/components/button'; -import Confirm from 'sentry/components/confirm'; -import {IconDelete} from 'sentry/icons'; +import {openConfirmModal} from 'sentry/components/confirm'; import {t} from 'sentry/locale'; import useApi from 'sentry/utils/useApi'; import {useNavigate} from 'sentry/utils/useNavigate'; import useOrganization from 'sentry/utils/useOrganization'; interface DeleteButtonProps { - projectSlug: string; - replayId: string; + projectSlug: string | null; + replayId: string | undefined; } -function DeleteButton({projectSlug, replayId}: DeleteButtonProps) { +export default function useDeleteReplay({projectSlug, replayId}: DeleteButtonProps) { const api = useApi(); const navigate = useNavigate(); const organization = useOrganization(); - const handleDelete = async () => { + const handleDelete = useCallback(async () => { + if (!projectSlug || !replayId) { + return; + } + try { await api.requestPromise( `/projects/${organization.slug}/${projectSlug}/replays/${replayId}/`, @@ -32,18 +35,18 @@ function DeleteButton({projectSlug, replayId}: DeleteButtonProps) { addErrorMessage(t('Failed to delete replay')); Sentry.captureException(err); } - }; - - return ( - - - - ); -} + }, [api, navigate, organization, projectSlug, replayId]); -export default DeleteButton; + const confirmDelte = useCallback(() => { + if (!projectSlug || !replayId) { + return; + } + + openConfirmModal({ + message: t('Are you sure you want to delete this replay?'), + onConfirm: handleDelete, + }); + }, [handleDelete, projectSlug, replayId]); + + return confirmDelte; +} diff --git a/static/app/components/replays/shareButton.tsx b/static/app/utils/replays/hooks/useShareReplayAtTimestamp.tsx similarity index 82% rename from static/app/components/replays/shareButton.tsx rename to static/app/utils/replays/hooks/useShareReplayAtTimestamp.tsx index 8c82953d05da0f..8dd5d71540af81 100644 --- a/static/app/components/replays/shareButton.tsx +++ b/static/app/utils/replays/hooks/useShareReplayAtTimestamp.tsx @@ -1,13 +1,11 @@ -import {useState} from 'react'; +import {useCallback, useState} from 'react'; import styled from '@emotion/styled'; import {openModal} from 'sentry/actionCreators/modal'; -import {Button} from 'sentry/components/button'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import Input from 'sentry/components/input'; import {useReplayContext} from 'sentry/components/replays/replayContext'; import TextCopyInput from 'sentry/components/textCopyInput'; -import {IconUpload} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {formatSecondsToClock, parseClockToSeconds} from 'sentry/utils/formatters'; @@ -73,24 +71,16 @@ function ShareModal({currentTimeSec, Header, Body}) { ); } -function ShareButton() { - // Cannot use this hook inside the modal because context will not be wired up +export default function useShareReplayAtTimestamp() { const {currentTime} = useReplayContext(); - // floor() to remove ms level 
precision. It's a cleaner url by default this way. - const currentTimeSec = Math.floor(currentTime / 1000); + const handleShare = useCallback(() => { + // floor() to remove ms level precision. It's a cleaner url by default this way. + const currentTimeSec = Math.floor(currentTime / 1000); - return ( - - ); + openModal(deps => ); + }, [currentTime]); + return handleShare; } const StyledTextCopyInput = styled(TextCopyInput)` @@ -119,5 +109,3 @@ const ShareAtRadioGroup = styled('div')` flex-direction: column; max-width: fit-content; `; - -export default ShareButton; diff --git a/static/app/views/replays/detail/page.tsx b/static/app/views/replays/detail/page.tsx index 3a257364b9a04c..58cea23b76790c 100644 --- a/static/app/views/replays/detail/page.tsx +++ b/static/app/views/replays/detail/page.tsx @@ -1,19 +1,23 @@ import type {ReactNode} from 'react'; import styled from '@emotion/styled'; +import type {MenuItemProps} from 'sentry/components/dropdownMenu'; +import {DropdownMenu} from 'sentry/components/dropdownMenu'; import UserBadge from 'sentry/components/idBadge/userBadge'; import FullViewport from 'sentry/components/layouts/fullViewport'; import * as Layout from 'sentry/components/layouts/thirds'; import ConfigureReplayCard from 'sentry/components/replays/configureReplayCard'; -import DeleteButton from 'sentry/components/replays/header/deleteButton'; import DetailsPageBreadcrumbs from 'sentry/components/replays/header/detailsPageBreadcrumbs'; import FeedbackButton from 'sentry/components/replays/header/feedbackButton'; import HeaderPlaceholder from 'sentry/components/replays/header/headerPlaceholder'; import ReplayMetaData from 'sentry/components/replays/header/replayMetaData'; -import ShareButton from 'sentry/components/replays/shareButton'; import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle'; +import {IconDelete, IconEllipsis, IconUpload} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import {defined} from 'sentry/utils'; +import useDeleteReplay from 'sentry/utils/replays/hooks/useDeleteReplay'; +import useShareReplayAtTimestamp from 'sentry/utils/replays/hooks/useShareReplayAtTimestamp'; import type {ReplayError, ReplayRecord} from 'sentry/views/replays/types'; type Props = { @@ -24,11 +28,45 @@ type Props = { replayRecord: undefined | ReplayRecord; }; -function Page({children, orgSlug, replayRecord, projectSlug, replayErrors}: Props) { +export default function Page({ + children, + orgSlug, + replayRecord, + projectSlug, + replayErrors, +}: Props) { const title = replayRecord ? `${replayRecord.id} — Session Replay — ${orgSlug}` : `Session Replay — ${orgSlug}`; + const onShareReplay = useShareReplayAtTimestamp(); + const onDeleteReplay = useDeleteReplay({replayId: replayRecord?.id, projectSlug}); + + const dropdownItems: MenuItemProps[] = [ + { + key: 'share', + label: ( + + + {t('Share')} + + ), + onAction: onShareReplay, + }, + replayRecord?.id && projectSlug + ? { + key: 'delete', + label: ( + + + {t('Delete')} + + ), + onAction: onDeleteReplay, + } + : null, + ].filter(defined); + const header = replayRecord?.is_archived ? (
    @@ -38,12 +76,17 @@ function Page({children, orgSlug, replayRecord, projectSlug, replayErrors}: Prop - - {replayRecord?.id && projectSlug && ( - - )} + , + }} + size="sm" + items={dropdownItems} + /> {replayRecord ? ( @@ -100,4 +143,8 @@ const ButtonActionsWrapper = styled(Layout.HeaderActions)` } `; -export default Page; +const ItemSpacer = styled('div')` + display: flex; + gap: ${space(1)}; + align-items: center; +`; From aa2e6f24a9bedca6ec018b19be4e86b90ff700cb Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Wed, 7 Feb 2024 09:25:33 +0100 Subject: [PATCH 091/357] feat(ddm): Add date range support to metrics meta endpoints (#64560) --- .../api/endpoints/organization_metrics.py | 58 +++++---- src/sentry/api/urls.py | 6 - src/sentry/snuba/metrics/datasource.py | 44 +++++-- .../test_organization_metric_details.py | 8 +- .../endpoints/test_organization_metrics.py | 91 +------------- ...a.py => test_organization_metrics_data.py} | 0 .../test_organization_metrics_details.py | 119 ++++++++++++++++++ ... test_organization_metrics_tag_details.py} | 38 +++++- ...s.py => test_organization_metrics_tags.py} | 38 +++++- 9 files changed, 267 insertions(+), 135 deletions(-) rename tests/sentry/api/endpoints/{test_organization_metric_data.py => test_organization_metrics_data.py} (100%) create mode 100644 tests/sentry/api/endpoints/test_organization_metrics_details.py rename tests/sentry/api/endpoints/{test_organization_metric_tag_details.py => test_organization_metrics_tag_details.py} (84%) rename tests/sentry/api/endpoints/{test_organization_metric_tags.py => test_organization_metrics_tags.py} (84%) diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py index 63225d69a7104c..b07b927a557cc8 100644 --- a/src/sentry/api/endpoints/organization_metrics.py +++ b/src/sentry/api/endpoints/organization_metrics.py @@ -49,21 +49,6 @@ def get_use_case_id(request: Request) -> UseCaseID: ) -@region_silo_endpoint -class OrganizationMetricsEndpoint(OrganizationEndpoint): - publish_status = {"GET": ApiPublishStatus.EXPERIMENTAL} - owner = ApiOwner.TELEMETRY_EXPERIENCE - - def get(self, request: Request, organization) -> Response: - projects = self.get_projects(request, organization) - if not projects: - raise InvalidParams("You must supply at least one projects to see its metrics") - - metrics = get_metrics_meta(projects=projects, use_case_id=get_use_case_id(request)) - - return Response(metrics, status=200) - - @region_silo_endpoint class OrganizationMetricsDetailsEndpoint(OrganizationEndpoint): publish_status = { @@ -74,10 +59,15 @@ class OrganizationMetricsDetailsEndpoint(OrganizationEndpoint): """Get the metadata of all the stored metrics including metric name, available operations and metric unit""" def get(self, request: Request, organization) -> Response: - # TODO: fade out endpoint since the new metrics endpoint will be used. 
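        # [editor's note] A sketch of the date-range plumbing this patch adds,
        # assuming the usual semantics of `get_date_range_from_params`: it accepts
        # either a relative `statsPeriod` (e.g. "1d", "2w") or absolute `start`/`end`
        # query parameters and returns a (start, end) datetime pair, which the meta
        # queries below pass down so only metrics observed in that window are listed.
        # Roughly:
        #
        #     start, end = get_date_range_from_params({"statsPeriod": "2d"})
        #     # start ~= timezone.now() - timedelta(days=2), end ~= timezone.now()
        #     get_metrics_meta(projects=projects, use_case_id=..., start=start, end=end)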
projects = self.get_projects(request, organization) + if not projects: + raise InvalidParams("You must supply at least one project to see its metrics") + + start, end = get_date_range_from_params(request.GET) - metrics = get_metrics_meta(projects=projects, use_case_id=get_use_case_id(request)) + metrics = get_metrics_meta( + projects=projects, use_case_id=get_use_case_id(request), start=start, end=end + ) return Response(metrics, status=200) @@ -92,12 +82,18 @@ class OrganizationMetricDetailsEndpoint(OrganizationEndpoint): """Get metric name, available operations, metric unit and available tags""" def get(self, request: Request, organization, metric_name) -> Response: + # Right now this endpoint is not used, however we are planning an entire refactor of + # the metrics endpoints. projects = self.get_projects(request, organization) + if not projects: + raise InvalidParams( + "You must supply at least one project to see the details of a metric" + ) try: metric = get_single_metric_info( - projects, - metric_name, + projects=projects, + metric_name=metric_name, use_case_id=get_use_case_id(request), ) except InvalidParams as exc: @@ -127,12 +123,18 @@ class OrganizationMetricsTagsEndpoint(OrganizationEndpoint): def get(self, request: Request, organization) -> Response: metric_names = request.GET.getlist("metric") or [] projects = self.get_projects(request, organization) + if not projects: + raise InvalidParams("You must supply at least one project to see the tag names") + + start, end = get_date_range_from_params(request.GET) try: tags = get_all_tags( - projects, - metric_names, + projects=projects, + metric_names=metric_names, use_case_id=get_use_case_id(request), + start=start, + end=end, ) except (InvalidParams, DerivedMetricParseException) as exc: raise (ParseError(detail=str(exc))) @@ -150,15 +152,21 @@ class OrganizationMetricsTagDetailsEndpoint(OrganizationEndpoint): """Get all existing tag values for a metric""" def get(self, request: Request, organization, tag_name) -> Response: - metric_names = request.GET.getlist("metric") or None + metric_names = request.GET.getlist("metric") or [] projects = self.get_projects(request, organization) + if not projects: + raise InvalidParams("You must supply at least one project to see the tag values") + + start, end = get_date_range_from_params(request.GET) try: tag_values = get_tag_values( - projects, - tag_name, - metric_names, + projects=projects, + tag_name=tag_name, + metric_names=metric_names, use_case_id=get_use_case_id(request), + start=start, + end=end, ) except (InvalidParams, DerivedMetricParseException) as exc: raise ParseError(str(exc)) diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index abb2f1c7fdd3df..904cbc5bfeaac7 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -385,7 +385,6 @@ OrganizationMetricDetailsEndpoint, OrganizationMetricsDataEndpoint, OrganizationMetricsDetailsEndpoint, - OrganizationMetricsEndpoint, OrganizationMetricsTagDetailsEndpoint, OrganizationMetricsTagsEndpoint, ) @@ -1967,11 +1966,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationDDMMetaEndpoint.as_view(), name="sentry-api-0-organization-ddm-meta", ), - re_path( - r"^(?P[^/]+)/metrics/$", - OrganizationMetricsEndpoint.as_view(), - name="sentry-api-0-organization-metrics-index", - ), re_path( r"^(?P[^/]+)/metrics/meta/$", OrganizationMetricsDetailsEndpoint.as_view(), diff --git a/src/sentry/snuba/metrics/datasource.py b/src/sentry/snuba/metrics/datasource.py index 
6fc9f87dfc9669..4646d2447d6b7e 100644 --- a/src/sentry/snuba/metrics/datasource.py +++ b/src/sentry/snuba/metrics/datasource.py @@ -228,11 +228,16 @@ def _build_metric_meta( ) -def get_metrics_meta(projects: Sequence[Project], use_case_id: UseCaseID) -> Sequence[MetricMeta]: +def get_metrics_meta( + projects: Sequence[Project], + use_case_id: UseCaseID, + start: datetime | None = None, + end: datetime | None = None, +) -> Sequence[MetricMeta]: if not projects: return [] - stored_metrics = get_stored_metrics_of_projects(projects, use_case_id) + stored_metrics = get_stored_metrics_of_projects(projects, use_case_id, start, end) metrics_blocking_state = get_metrics_blocking_state_of_projects(projects, use_case_id) metrics_metas = [] @@ -276,7 +281,10 @@ def get_metrics_meta(projects: Sequence[Project], use_case_id: UseCaseID) -> Seq def get_stored_metrics_of_projects( - projects: Sequence[Project], use_case_id: UseCaseID + projects: Sequence[Project], + use_case_id: UseCaseID, + start: datetime | None = None, + end: datetime | None = None, ) -> Mapping[str, Sequence[int]]: org_id = projects[0].organization_id project_ids = [project.id for project in projects] @@ -310,6 +318,8 @@ def get_stored_metrics_of_projects( project_ids=project_ids, org_id=org_id, use_case_id=use_case_id, + start=start, + end=end, ) grouped_stored_metrics = {} @@ -440,6 +450,8 @@ def _fetch_tags_or_values_for_metrics( referrer: str, column: str, use_case_id: UseCaseID, + start: datetime | None = None, + end: datetime | None = None, ) -> tuple[Sequence[Tag] | Sequence[TagValue], str | None]: metric_mris = [] @@ -452,7 +464,9 @@ def _fetch_tags_or_values_for_metrics( else: metric_mris.append(get_mri(metric_name)) - return _fetch_tags_or_values_for_mri(projects, metric_mris, referrer, column, use_case_id) + return _fetch_tags_or_values_for_mri( + projects, metric_mris, referrer, column, use_case_id, start, end + ) def _fetch_tags_or_values_for_mri( @@ -461,6 +475,8 @@ def _fetch_tags_or_values_for_mri( referrer: str, column: str, use_case_id: UseCaseID, + start: datetime | None = None, + end: datetime | None = None, ) -> tuple[Sequence[Tag] | Sequence[TagValue], str | None]: """ Function that takes as input projects, metric_mris, and a column, and based on the column @@ -507,6 +523,8 @@ def _fetch_tags_or_values_for_mri( project_ids=[p.id for p in projects], org_id=org_id, use_case_id=use_case_id, + start=start, + end=end, ) for row in rows: @@ -587,10 +605,10 @@ def _fetch_tags_or_values_for_mri( def get_single_metric_info( - projects: Sequence[Project], metric_name: str, use_case_id: UseCaseID + projects: Sequence[Project], + metric_name: str, + use_case_id: UseCaseID, ) -> MetricMetaWithTagKeys: - assert projects - tags, metric_type = _fetch_tags_or_values_for_metrics( projects=projects, metric_names=[metric_name], @@ -623,7 +641,11 @@ def get_single_metric_info( def get_all_tags( - projects: Sequence[Project], metric_names: Sequence[str] | None, use_case_id: UseCaseID + projects: Sequence[Project], + metric_names: Sequence[str] | None, + use_case_id: UseCaseID, + start: datetime | None = None, + end: datetime | None = None, ) -> Sequence[Tag]: """Get all metric tags for the given projects and metric_names.""" assert projects @@ -635,6 +657,8 @@ def get_all_tags( column="tags.key", referrer="snuba.metrics.meta.get_tags", use_case_id=use_case_id, + start=start, + end=end, ) except InvalidParams: return [] @@ -647,6 +671,8 @@ def get_tag_values( tag_name: str, metric_names: Sequence[str] | None, use_case_id: UseCaseID, + 
start: datetime | None = None, + end: datetime | None = None, ) -> Sequence[TagValue]: """Get all known values for a specific tag for the given projects and metric_names.""" assert projects @@ -667,6 +693,8 @@ def get_tag_values( metric_names=metric_names, referrer="snuba.metrics.meta.get_tag_values", use_case_id=use_case_id, + start=start, + end=end, ) except InvalidParams: return [] diff --git a/tests/sentry/api/endpoints/test_organization_metric_details.py b/tests/sentry/api/endpoints/test_organization_metric_details.py index 9563c2f705e2c5..0c2671d9245901 100644 --- a/tests/sentry/api/endpoints/test_organization_metric_details.py +++ b/tests/sentry/api/endpoints/test_organization_metric_details.py @@ -11,7 +11,7 @@ from sentry.snuba.metrics.naming_layer.mapping import get_mri, get_public_name_from_mri from sentry.snuba.metrics.naming_layer.mri import SessionMRI from sentry.snuba.metrics.naming_layer.public import SessionMetricKey -from sentry.testutils.cases import OrganizationMetricsIntegrationTestCase +from sentry.testutils.cases import MetricsAPIBaseTestCase, OrganizationMetricsIntegrationTestCase from sentry.testutils.silo import region_silo_test from tests.sentry.api.endpoints.test_organization_metrics import ( MOCKED_DERIVED_METRICS, @@ -40,10 +40,14 @@ def _indexer_record(org_id: int, string: str) -> None: @region_silo_test -class OrganizationMetricDetailsIntegrationTest(OrganizationMetricsIntegrationTestCase): +class OrganizationMetricDetailsTest(OrganizationMetricsIntegrationTestCase): endpoint = "sentry-api-0-organization-metric-details" + @property + def now(self): + return MetricsAPIBaseTestCase.MOCK_DATETIME + @patch( "sentry.snuba.metrics.datasource.get_mri", mocked_mri_resolver(["metric1", "metric2", "metric3"], get_mri), diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py index e34f162e9c45e3..dda6c2a4507895 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics.py +++ b/tests/sentry/api/endpoints/test_organization_metrics.py @@ -7,7 +7,6 @@ from sentry.models.apitoken import ApiToken from sentry.sentry_metrics import indexer from sentry.sentry_metrics.use_case_id_registry import UseCaseID -from sentry.sentry_metrics.visibility import block_metric, block_tags_of_metric from sentry.silo import SiloMode from sentry.snuba.metrics import ( DERIVED_METRICS, @@ -16,11 +15,7 @@ complement, division_float, ) -from sentry.testutils.cases import ( - APITestCase, - MetricsAPIBaseTestCase, - OrganizationMetricsIntegrationTestCase, -) +from sentry.testutils.cases import APITestCase from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba @@ -62,7 +57,6 @@ def indexer_record(use_case_id: UseCaseID, org_id: int, string: str) -> int: class OrganizationMetricsPermissionTest(APITestCase): endpoints = ( - ("sentry-api-0-organization-metrics-index",), ("sentry-api-0-organization-metrics-details",), ("sentry-api-0-organization-metric-details", "foo"), ("sentry-api-0-organization-metrics-tags",), @@ -88,86 +82,3 @@ def test_permissions(self): for endpoint in self.endpoints: response = self.send_get_request(token, *endpoint) assert response.status_code in (200, 400, 404) - - -@region_silo_test -class OrganizationMetricsTest(OrganizationMetricsIntegrationTestCase): - - endpoint = "sentry-api-0-organization-metrics-index" - - @property - def now(self): - return MetricsAPIBaseTestCase.MOCK_DATETIME - - def test_metrics_meta_sessions(self): - 
response = self.get_success_response( - self.organization.slug, project=[self.project.id], useCase=["sessions"] - ) - - assert isinstance(response.data, list) - - def test_metrics_meta_transactions(self): - response = self.get_success_response( - self.organization.slug, project=[self.project.id], useCase=["transactions"] - ) - - assert isinstance(response.data, list) - - def test_metrics_meta_invalid_use_case(self): - response = self.get_error_response( - self.organization.slug, project=[self.project.id], useCase=["not-a-use-case"] - ) - - assert response.status_code == 400 - - def test_metrics_meta_no_projects(self): - response = self.get_success_response( - self.organization.slug, project=[], useCase=["transactions"] - ) - - assert isinstance(response.data, list) - - def test_metrics_meta_for_custom_metrics(self): - project_1 = self.create_project() - project_2 = self.create_project() - - block_metric("s:custom/user@none", [project_1]) - block_tags_of_metric("d:custom/page_load@millisecond", {"release"}, [project_2]) - - metrics = ( - ("s:custom/user@none", "set", project_1), - ("s:custom/user@none", "set", project_2), - ("c:custom/clicks@none", "counter", project_1), - ("d:custom/page_load@millisecond", "distribution", project_2), - ) - for mri, entity, project in metrics: - self.store_metric( - project.organization.id, - project.id, - entity, # type:ignore - mri, - {"transaction": "/hello"}, - int(self.now.timestamp()), - 10, - UseCaseID.CUSTOM, - ) - - response = self.get_success_response( - self.organization.slug, project=[project_1.id, project_2.id], useCase=["custom"] - ) - assert len(response.data) == 3 - - data = sorted(response.data, key=lambda d: d["mri"]) - assert data[0]["mri"] == "c:custom/clicks@none" - assert data[0]["projectIds"] == [project_1.id] - assert data[0]["blockingStatus"] == [] - assert data[1]["mri"] == "d:custom/page_load@millisecond" - assert data[1]["projectIds"] == [project_2.id] - assert data[1]["blockingStatus"] == [ - {"isBlocked": False, "blockedTags": ["release"], "projectId": project_2.id} - ] - assert data[2]["mri"] == "s:custom/user@none" - assert sorted(data[2]["projectIds"]) == sorted([project_1.id, project_2.id]) - assert data[2]["blockingStatus"] == [ - {"isBlocked": True, "blockedTags": [], "projectId": project_1.id} - ] diff --git a/tests/sentry/api/endpoints/test_organization_metric_data.py b/tests/sentry/api/endpoints/test_organization_metrics_data.py similarity index 100% rename from tests/sentry/api/endpoints/test_organization_metric_data.py rename to tests/sentry/api/endpoints/test_organization_metrics_data.py diff --git a/tests/sentry/api/endpoints/test_organization_metrics_details.py b/tests/sentry/api/endpoints/test_organization_metrics_details.py new file mode 100644 index 00000000000000..a97533743116d3 --- /dev/null +++ b/tests/sentry/api/endpoints/test_organization_metrics_details.py @@ -0,0 +1,119 @@ +from datetime import timedelta + +import pytest + +from sentry.sentry_metrics.use_case_id_registry import UseCaseID +from sentry.sentry_metrics.visibility import block_metric, block_tags_of_metric +from sentry.testutils.cases import MetricsAPIBaseTestCase, OrganizationMetricsIntegrationTestCase +from sentry.testutils.silo import region_silo_test +from sentry.testutils.skips import requires_snuba + +pytestmark = [pytest.mark.sentry_metrics, requires_snuba] + + +@region_silo_test +class OrganizationMetricsDetailsTest(OrganizationMetricsIntegrationTestCase): + + endpoint = "sentry-api-0-organization-metrics-details" + + @property + def 
now(self): + return MetricsAPIBaseTestCase.MOCK_DATETIME + + def test_metrics_details_sessions(self): + response = self.get_success_response( + self.organization.slug, project=self.project.id, useCase="sessions" + ) + + assert isinstance(response.data, list) + + def test_metrics_details_transactions(self): + response = self.get_success_response( + self.organization.slug, project=self.project.id, useCase="transactions" + ) + + assert isinstance(response.data, list) + + def test_metrics_details_invalid_use_case(self): + response = self.get_error_response( + self.organization.slug, project=self.project.id, useCase="not-a-use-case" + ) + + assert response.status_code == 400 + + def test_metrics_details_no_projects(self): + response = self.get_success_response(self.organization.slug, useCase="transactions") + + assert isinstance(response.data, list) + + def test_metrics_details_for_custom_metrics(self): + project_1 = self.create_project() + project_2 = self.create_project() + + block_metric("s:custom/user@none", [project_1]) + block_tags_of_metric("d:custom/page_load@millisecond", {"release"}, [project_2]) + + metrics = ( + ("s:custom/user@none", "set", project_1), + ("s:custom/user@none", "set", project_2), + ("c:custom/clicks@none", "counter", project_1), + ("d:custom/page_load@millisecond", "distribution", project_2), + ) + for mri, entity, project in metrics: + self.store_metric( + project.organization.id, + project.id, + entity, # type:ignore + mri, + {"transaction": "/hello"}, + int(self.now.timestamp()), + 10, + UseCaseID.CUSTOM, + ) + + response = self.get_success_response( + self.organization.slug, project=[project_1.id, project_2.id], useCase="custom" + ) + assert len(response.data) == 3 + + data = sorted(response.data, key=lambda d: d["mri"]) + assert data[0]["mri"] == "c:custom/clicks@none" + assert data[0]["projectIds"] == [project_1.id] + assert data[0]["blockingStatus"] == [] + assert data[1]["mri"] == "d:custom/page_load@millisecond" + assert data[1]["projectIds"] == [project_2.id] + assert data[1]["blockingStatus"] == [ + {"isBlocked": False, "blockedTags": ["release"], "projectId": project_2.id} + ] + assert data[2]["mri"] == "s:custom/user@none" + assert sorted(data[2]["projectIds"]) == sorted([project_1.id, project_2.id]) + assert data[2]["blockingStatus"] == [ + {"isBlocked": True, "blockedTags": [], "projectId": project_1.id} + ] + + def test_metrics_details_with_date_range(self): + metrics = ( + ("c:custom/clicks_1@none", 0), + ("c:custom/clicks_2@none", 1), + ("c:custom/clicks_3@none", 7), + ) + for mri, days in metrics: + self.store_metric( + self.project.organization.id, + self.project.id, + "counter", + mri, + {"transaction": "/hello"}, + int((self.now - timedelta(days=days)).timestamp()), + 10, + UseCaseID.CUSTOM, + ) + + for stats_period, expected_count in (("1d", 1), ("2d", 2), ("2w", 3)): + response = self.get_success_response( + self.organization.slug, + project=self.project.id, + useCase="custom", + statsPeriod=stats_period, + ) + assert len(response.data) == expected_count diff --git a/tests/sentry/api/endpoints/test_organization_metric_tag_details.py b/tests/sentry/api/endpoints/test_organization_metrics_tag_details.py similarity index 84% rename from tests/sentry/api/endpoints/test_organization_metric_tag_details.py rename to tests/sentry/api/endpoints/test_organization_metrics_tag_details.py index 855aed81f06a3d..c7eddf073ad9d7 100644 --- a/tests/sentry/api/endpoints/test_organization_metric_tag_details.py +++ 
b/tests/sentry/api/endpoints/test_organization_metrics_tag_details.py @@ -8,7 +8,7 @@ from sentry.sentry_metrics.use_case_id_registry import UseCaseID from sentry.snuba.metrics.naming_layer import get_mri from sentry.snuba.metrics.naming_layer.public import SessionMetricKey -from sentry.testutils.cases import OrganizationMetricsIntegrationTestCase +from sentry.testutils.cases import MetricsAPIBaseTestCase, OrganizationMetricsIntegrationTestCase from sentry.testutils.helpers.datetime import freeze_time from sentry.testutils.silo import region_silo_test from tests.sentry.api.endpoints.test_organization_metrics import ( @@ -24,10 +24,14 @@ def _indexer_record(org_id: int, string: str) -> None: @region_silo_test -class OrganizationMetricsTagDetailsIntegrationTest(OrganizationMetricsIntegrationTestCase): +class OrganizationMetricsTagDetailsTest(OrganizationMetricsIntegrationTestCase): endpoint = "sentry-api-0-organization-metrics-tag-details" + @property + def now(self): + return MetricsAPIBaseTestCase.MOCK_DATETIME + def test_unknown_tag(self): _indexer_record(self.organization.id, "bar") response = self.get_success_response(self.project.organization.slug, "bar") @@ -193,3 +197,33 @@ def test_incorrectly_setup_derived_metric(self, mocked_derived_metrics, mocked_m "The following metrics {'crash_free_fake'} cannot be computed from single entities. " "Please revise the definition of these singular entity derived metrics" ) + + def test_metric_tag_details_with_date_range(self): + mri = "c:custom/clicks@none" + transactions = ( + ("/hello", 0), + ("/world", 1), + ("/foo", 7), + ) + for transaction, days in transactions: + self.store_metric( + self.project.organization.id, + self.project.id, + "counter", + mri, + {"transaction": transaction}, + int((self.now - timedelta(days=days)).timestamp()), + 10, + UseCaseID.CUSTOM, + ) + + for stats_period, expected_count in (("1d", 1), ("2d", 2), ("2w", 3)): + response = self.get_success_response( + self.organization.slug, + "transaction", + metric=[mri], + project=self.project.id, + useCase="custom", + statsPeriod=stats_period, + ) + assert len(response.data) == expected_count diff --git a/tests/sentry/api/endpoints/test_organization_metric_tags.py b/tests/sentry/api/endpoints/test_organization_metrics_tags.py similarity index 84% rename from tests/sentry/api/endpoints/test_organization_metric_tags.py rename to tests/sentry/api/endpoints/test_organization_metrics_tags.py index c1d6eddccb9128..0f5203de28acb8 100644 --- a/tests/sentry/api/endpoints/test_organization_metric_tags.py +++ b/tests/sentry/api/endpoints/test_organization_metrics_tags.py @@ -1,5 +1,6 @@ import time from collections.abc import Collection +from datetime import timedelta from unittest.mock import patch import pytest @@ -9,7 +10,7 @@ from sentry.snuba.metrics.naming_layer import get_mri from sentry.snuba.metrics.naming_layer.mri import SessionMRI from sentry.snuba.metrics.naming_layer.public import SessionMetricKey -from sentry.testutils.cases import OrganizationMetricsIntegrationTestCase +from sentry.testutils.cases import MetricsAPIBaseTestCase, OrganizationMetricsIntegrationTestCase from sentry.testutils.silo import region_silo_test from tests.sentry.api.endpoints.test_organization_metrics import ( MOCKED_DERIVED_METRICS, @@ -24,10 +25,14 @@ def mocked_bulk_reverse_resolve(use_case_id, org_id: int, ids: Collection[int]): @region_silo_test -class OrganizationMetricsTagsIntegrationTest(OrganizationMetricsIntegrationTestCase): +class 
OrganizationMetricsTagsTest(OrganizationMetricsIntegrationTestCase): endpoint = "sentry-api-0-organization-metrics-tags" + @property + def now(self): + return MetricsAPIBaseTestCase.MOCK_DATETIME + @patch( "sentry.snuba.metrics.datasource.get_mri", mocked_mri_resolver(["metric1", "metric2", "metric3"], get_mri), @@ -223,3 +228,32 @@ def test_incorrectly_setup_derived_metric(self, mocked_derived_metrics, mocked_m "The following metrics {'crash_free_fake'} cannot be computed from single entities. " "Please revise the definition of these singular entity derived metrics" ) + + def test_metric_tags_with_date_range(self): + mri = "c:custom/clicks@none" + tags = ( + ("transaction", "/hello", 0), + ("release", "1.0", 1), + ("environment", "prod", 7), + ) + for tag_name, tag_value, days in tags: + self.store_metric( + self.project.organization.id, + self.project.id, + "counter", + mri, + {tag_name: tag_value}, + int((self.now - timedelta(days=days)).timestamp()), + 10, + UseCaseID.CUSTOM, + ) + + for stats_period, expected_count in (("1d", 1), ("2d", 2), ("2w", 3)): + response = self.get_success_response( + self.organization.slug, + metric=[mri], + project=self.project.id, + useCase="custom", + statsPeriod=stats_period, + ) + assert len(response.data) == expected_count From 9176304bdf585ad8b2f55838d08648ff1378b63e Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 7 Feb 2024 09:35:26 +0100 Subject: [PATCH 092/357] feat(ddm): Canvas charts (#64760) Enable canvas rendering for metrics charts as it is faster than using svg based rendering. --- static/app/views/ddm/chart.tsx | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/static/app/views/ddm/chart.tsx b/static/app/views/ddm/chart.tsx index 79859898ce6e11..f406e00d69f6ea 100644 --- a/static/app/views/ddm/chart.tsx +++ b/static/app/views/ddm/chart.tsx @@ -146,7 +146,8 @@ export const MetricChart = forwardRef( ...focusAreaBrush.options, forwardedRef: mergeRefs([forwardedRef, chartRef]), series: seriesToShow, - renderer: seriesToShow.length > 20 ? ('canvas' as const) : ('svg' as const), + devicePixelRatio: 2, + renderer: 'canvas' as const, isGroupedByDate: true, colors: seriesToShow.map(s => s.color), grid: {top: 5, bottom: 0, left: 0, right: 0}, @@ -315,7 +316,7 @@ const createFogOfWarLineSeries = (series: Series, fogBucketCnt = 0) => ({ // We include the last non-fog of war bucket so that the line is connected data: series.data.slice(-fogBucketCnt - 1), lineStyle: { - type: 'dashed', + type: 'dotted', }, }); @@ -326,7 +327,7 @@ const createFogOfWarAreaSeries = (series: Series, fogBucketCnt = 0) => ({ // We include the last non-fog of war bucket so that the line is connected data: series.data.slice(-fogBucketCnt - 1), lineStyle: { - type: 'dashed', + type: 'dotted', color: Color(series.color).lighten(0.3).string(), }, }); From e9f54126715989d2cddb7ea165a8d095136fe3ce Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 7 Feb 2024 09:58:04 +0100 Subject: [PATCH 093/357] feat(charts): Avoid overflowing tooltips (#64759) ### Problem 1. The current tooltip can exit the viewport on top. 2. Labels are never truncated -> tooltip can exit the viewport on the side ### Solution 1. Add a minimum "top" distance to the position calculation 2. Add `max-width` to the tooltip and an ellipsis to its labels. 
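To make the clamping concrete, here is a minimal sketch of the `top` calculation the diff below implements (Python is used only for illustration; the constant name and the 20px pointer gap are taken from the patch itself):

```python
CHART_TOOLTIP_VIEWPORT_OFFSET = 20  # minimum distance to the window edges, in px

def tooltip_top(pointer_y: float, tip_height: float, chart_top: float) -> float:
    """pointer_y is chart-relative; chart_top is the chart's bounding-rect top."""
    preferred = pointer_y - tip_height - 20  # place the tip above the pointer
    viewport_floor = CHART_TOOLTIP_VIEWPORT_OFFSET - chart_top  # window top in chart coords
    return max(preferred, viewport_floor)  # never cut off at the top edge
```

The same offset bounds the tooltip horizontally, and the new `max-width: calc(100vw - 2 * CHART_TOOLTIP_VIEWPORT_OFFSET px)` plus the ellipsis on `.tooltip-label` keep long labels from pushing the tooltip past the side edges.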
- closes https://github.com/getsentry/sentry/issues/63577 - closes https://github.com/getsentry/sentry/issues/64021 --- static/app/components/charts/baseChart.tsx | 4 +++- .../components/charts/components/tooltip.tsx | 22 ++++++++++++++----- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/static/app/components/charts/baseChart.tsx b/static/app/components/charts/baseChart.tsx index 241d1fbe8bf5ad..a8807329a3035e 100644 --- a/static/app/components/charts/baseChart.tsx +++ b/static/app/components/charts/baseChart.tsx @@ -51,7 +51,7 @@ import {defined} from 'sentry/utils'; import Grid from './components/grid'; import Legend from './components/legend'; import type {TooltipSubLabel} from './components/tooltip'; -import {computeChartTooltip} from './components/tooltip'; +import {CHART_TOOLTIP_VIEWPORT_OFFSET, computeChartTooltip} from './components/tooltip'; import XAxis from './components/xAxis'; import YAxis from './components/yAxis'; import LineSeries from './series/lineSeries'; @@ -676,12 +676,14 @@ const getTooltipStyles = (p: {theme: Theme}) => css` } .tooltip-series { border-bottom: none; + max-width: calc(100vw - 2 * ${CHART_TOOLTIP_VIEWPORT_OFFSET}px); } .tooltip-series-solo { border-radius: ${p.theme.borderRadius}; } .tooltip-label { margin-right: ${space(1)}; + ${p.theme.overflowEllipsis}; } .tooltip-label strong { font-weight: normal; diff --git a/static/app/components/charts/components/tooltip.tsx b/static/app/components/charts/components/tooltip.tsx index 24fa151d38ccd1..60329a0459ccd1 100644 --- a/static/app/components/charts/components/tooltip.tsx +++ b/static/app/components/charts/components/tooltip.tsx @@ -13,6 +13,8 @@ import toArray from 'sentry/utils/toArray'; import {truncationFormatter} from '../utils'; +export const CHART_TOOLTIP_VIEWPORT_OFFSET = 20; + type ChartProps = React.ComponentProps; export function defaultFormatAxisLabel( @@ -393,7 +395,8 @@ export function computeChartTooltip( // Get the left offset of the tip container (the chart) // so that we can estimate overflows - const chartLeft = chartElement.getBoundingClientRect().left ?? 0; + const chartBoundingRect = chartElement.getBoundingClientRect(); + const chartLeft = chartBoundingRect.left ?? 0; // Determine the new left edge. let leftPos = Number(pos[0]) - tipWidth / 2; @@ -401,13 +404,13 @@ export function computeChartTooltip( const rightEdge = chartLeft + Number(pos[0]) + tipWidth / 2; let arrowPosition: string | undefined; - if (rightEdge >= window.innerWidth - 20) { + if (rightEdge >= window.innerWidth - CHART_TOOLTIP_VIEWPORT_OFFSET) { // If the tooltip would leave viewport on the right, pin it. - leftPos -= rightEdge - window.innerWidth + 20; + leftPos -= rightEdge - window.innerWidth + CHART_TOOLTIP_VIEWPORT_OFFSET; arrowPosition = `${Number(pos[0]) - leftPos}px`; - } else if (leftPos + chartLeft - 20 <= 0) { + } else if (leftPos + chartLeft - CHART_TOOLTIP_VIEWPORT_OFFSET <= 0) { // If the tooltip would leave viewport on the left, pin it. 
- leftPos = chartLeft * -1 + 20; + leftPos = chartLeft * -1 + CHART_TOOLTIP_VIEWPORT_OFFSET; arrowPosition = `${Number(pos[0]) - leftPos}px`; } else { // Tooltip not near the window edge, reset position @@ -419,7 +422,14 @@ export function computeChartTooltip( arrow.style.left = arrowPosition; } - return {left: leftPos, top: Number(pos[1]) - tipHeight - 20}; + return { + left: leftPos, + top: Math.max( + Number(pos[1]) - tipHeight - 20, + // avoid tooltip from being cut off by the top edge of the window + CHART_TOOLTIP_VIEWPORT_OFFSET - chartBoundingRect.top + ), + }; }, formatter, ...props, From 36dcbe8b02eb65f87dd1e30f50074148cba64768 Mon Sep 17 00:00:00 2001 From: Alexander Tarasov Date: Wed, 7 Feb 2024 10:35:10 +0100 Subject: [PATCH 094/357] fix(security): deny fetching internal app tokens by auth token (#64262) Similar to https://github.com/getsentry/sentry/pull/53850 Internal app tokens should be accessible only via UI not auth tokens. --- .../sentry_apps/internal_app_token/index.py | 2 ++ .../api/endpoints/test_sentry_internal_app_tokens.py | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/index.py b/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/index.py index 3651580077598e..3d925e52ba89c6 100644 --- a/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/index.py +++ b/src/sentry/api/endpoints/integrations/sentry_apps/internal_app_token/index.py @@ -4,6 +4,7 @@ from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.authentication import SessionNoAuthTokenAuthentication from sentry.api.base import control_silo_endpoint from sentry.api.bases import SentryAppBaseEndpoint, SentryInternalAppTokenPermission from sentry.api.endpoints.integrations.sentry_apps.details import ( @@ -24,6 +25,7 @@ class SentryInternalAppTokensEndpoint(SentryAppBaseEndpoint): "GET": ApiPublishStatus.UNKNOWN, "POST": ApiPublishStatus.UNKNOWN, } + authentication_classes = (SessionNoAuthTokenAuthentication,) permission_classes = (SentryInternalAppTokenPermission,) def get(self, request: Request, sentry_app) -> Response: diff --git a/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py b/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py index 273d0b49b229d0..a209e7bfeb8c3c 100644 --- a/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py +++ b/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py @@ -113,3 +113,13 @@ def test_token_is_masked(self): assert response_content[0]["token"] == MASKED_VALUE assert response_content[0]["refreshToken"] == MASKED_VALUE + + def test_deny_token_access(self): + self.login_as(self.user) + token = ApiToken.objects.create(user=self.user, scope_list=["org:write"]) + + sentry_app = self.create_internal_integration(name="OtherInternal", organization=self.org) + + url = reverse("sentry-api-0-sentry-internal-app-tokens", args=[sentry_app.slug]) + response = self.client.get(url, format="json", HTTP_AUTHORIZATION=f"Bearer {token.token}") + assert response.status_code == 403, response.content From 948e528977449228d5d7d6576e282c76921f9223 Mon Sep 17 00:00:00 2001 From: Alexander Tarasov Date: Wed, 7 Feb 2024 10:35:34 +0100 Subject: [PATCH 095/357] fix(security): add feature permission check when adding sentry app (#64457) Another attempt of https://github.com/getsentry/sentry/pull/64054 Prevent enabling paid tier apps when no feature is enabled for the organization. 
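Concretely, installing a paid-feature app into such an organization now fails closed; a sketch of the expected behaviour (the app slug is a placeholder, and the response shape follows the new test added below):

```python
# `client` stands for a Django test client; "paid-app" is a hypothetical published
# app whose only feature is gated behind organizations:integrations-alert-rule.
response = client.post(
    f"/api/0/organizations/{org.slug}/sentry-app-installations/",
    data={"slug": "paid-app"},
)
assert response.status_code == 403
assert response.data["missing_features"] == ["organizations:integrations-alert-rule"]
```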
Similar to https://github.com/getsentry/sentry/pull/63737 but for sentry apps. --- .../sentry_apps/installation/index.py | 28 ++++++++++++++ ...t_organization_sentry_app_installations.py | 37 +++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py b/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py index 487a5a5653d059..68eaed6b584a61 100644 --- a/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py +++ b/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py @@ -2,6 +2,7 @@ from rest_framework.request import Request from rest_framework.response import Response +from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import control_silo_endpoint @@ -10,6 +11,8 @@ from sentry.api.paginator import OffsetPaginator from sentry.api.serializers import serialize from sentry.constants import SENTRY_APP_SLUG_MAX_LENGTH, SentryAppStatus +from sentry.features.exceptions import FeatureNotRegistered +from sentry.models.integrations.integration_feature import IntegrationFeature, IntegrationTypes from sentry.models.integrations.sentry_app import SentryApp from sentry.models.integrations.sentry_app_installation import SentryAppInstallation from sentry.sentry_apps.installations import SentryAppInstallationCreator @@ -53,6 +56,31 @@ def post(self, request: Request, organization) -> Response: ): return Response(status=404) + # feature check + app_features = IntegrationFeature.objects.filter( + target_id=app.id, target_type=IntegrationTypes.SENTRY_APP.value + ) + + is_feature_enabled = {} + for feature in app_features: + feature_flag_name = "organizations:%s" % feature.feature_str() + try: + features.get(feature_flag_name, None) + is_feature_enabled[feature_flag_name] = features.has( + feature_flag_name, organization + ) + except FeatureNotRegistered: + is_feature_enabled[feature_flag_name] = True + + if not any(is_feature_enabled.values()): + return Response( + { + "detail": "At least one feature from this list has to be enabled in order to install the app", + "missing_features": list(is_feature_enabled.keys()), + }, + status=403, + ) + try: # check for an exiting installation and return that if it exists install = SentryAppInstallation.objects.get( diff --git a/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py b/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py index c702a844e4c5a3..e7c8c31aac210b 100644 --- a/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py +++ b/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py @@ -1,6 +1,9 @@ from django.test import override_settings +from sentry.constants import SentryAppStatus +from sentry.models.integrations.integration_feature import Feature from sentry.models.integrations.sentry_app_installation import SentryAppInstallation +from sentry.sentry_apps.apps import SentryAppUpdater from sentry.slug.errors import DEFAULT_SLUG_ERROR_MESSAGE from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.features import with_feature @@ -128,6 +131,22 @@ def test_install_published_app(self): assert expected.items() <= response.data.items() + def test_install_published_app_by_other_org(self): + user2 = self.create_user("foo@example.com") + org2 = self.create_organization(owner=user2) + self.login_as(user=user2) + + response = 
self.get_success_response( + org2.slug, slug=self.published_app.slug, status_code=200 + ) + + expected = { + "app": {"slug": self.published_app.slug, "uuid": self.published_app.uuid}, + "organization": {"slug": org2.slug}, + } + + assert expected.items() <= response.data.items() + def test_install_superuser(self): self.login_as(user=self.superuser, superuser=True) app = self.create_sentry_app(name="Sample", organization=self.org) @@ -190,3 +209,21 @@ def test_cannot_install_other_org_internal_app(self): org2 = self.create_organization() internal_app = self.create_internal_integration(name="Internal App", organization=org2) self.get_error_response(self.org.slug, slug=internal_app.slug, status_code=404) + + @with_feature({"organizations:integrations-alert-rule": False}) + def test_disallow_app_with_all_features_disabled(self): + # prepare an app with paid features + app = self.unpublished_app + SentryAppUpdater(sentry_app=app, features=[Feature.ALERTS]).run(user=self.user) + app.update(status=SentryAppStatus.PUBLISHED) + + # test on a free-tier org + user2 = self.create_user("free@example.com") + org2 = self.create_organization(owner=user2) + self.login_as(user=user2) + + response = self.get_error_response(org2.slug, slug=app.slug, status_code=403) + assert response.data == { + "detail": "At least one feature from this list has to be enabled in order to install the app", + "missing_features": ["organizations:integrations-alert-rule"], + } From dd0edf1fcc309765d94505f1bac758d7831dcbbf Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Wed, 7 Feb 2024 12:15:12 +0100 Subject: [PATCH 096/357] Hardcode and remove usage of enabled/obsolete options (#64653) --- src/sentry/attachments/base.py | 5 +- src/sentry/grouping/enhancer/__init__.py | 13 +---- src/sentry/lang/native/symbolicator.py | 25 +-------- src/sentry/models/eventattachment.py | 7 +-- src/sentry/reprocessing2.py | 56 +++++++++---------- src/sentry/tasks/recap_servers.py | 9 ++- src/sentry/tasks/store.py | 8 +-- tests/relay_integration/test_integration.py | 27 ++------- tests/sentry/attachments/test_base.py | 12 +--- tests/sentry/grouping/test_enhancer.py | 11 ---- .../test_ingest_consumer_processing.py | 30 ++++------ .../full_config/REGION.pysnap | 4 +- tests/sentry/tasks/test_assemble.py | 1 - tests/sentry/tasks/test_reprocessing2.py | 1 + tests/symbolicator/test_minidump_full.py | 13 +---- tests/symbolicator/test_payload_full.py | 8 +-- tests/symbolicator/test_unreal_full.py | 41 ++++---------- 17 files changed, 76 insertions(+), 195 deletions(-) diff --git a/src/sentry/attachments/base.py b/src/sentry/attachments/base.py index c5e3f11de3adfb..81100b6f27503d 100644 --- a/src/sentry/attachments/base.py +++ b/src/sentry/attachments/base.py @@ -3,7 +3,6 @@ import sentry_sdk import zstandard -from sentry import options from sentry.utils import metrics from sentry.utils.json import prune_empty_keys @@ -182,6 +181,4 @@ def delete(self, key): def compress_chunk(chunk_data: bytes) -> bytes: - if options.get("attachment-cache.use-zstd"): - return zstandard.compress(chunk_data) - return zlib.compress(chunk_data) + return zstandard.compress(chunk_data) diff --git a/src/sentry/grouping/enhancer/__init__.py b/src/sentry/grouping/enhancer/__init__.py index 14f28e4da7d75f..eac0af6aad3d3b 100644 --- a/src/sentry/grouping/enhancer/__init__.py +++ b/src/sentry/grouping/enhancer/__init__.py @@ -440,18 +440,7 @@ def _to_config_structure(self): @sentry_sdk.tracing.trace def dumps(self) -> str: encoded = msgpack.dumps(self._to_config_structure()) - - try: 
- # I don’t want to put DB access into all of the tests ;-) - use_zstd = options.get("enhancers.use-zstd") - except Exception: - use_zstd = False - - if use_zstd: - compressed = zstandard.compress(encoded) - else: - compressed = zlib.compress(encoded) - + compressed = zstandard.compress(encoded) return base64.urlsafe_b64encode(compressed).decode("ascii").strip("=") @classmethod diff --git a/src/sentry/lang/native/symbolicator.py b/src/sentry/lang/native/symbolicator.py index 77756f9b5e9f45..942035cf8bb1a5 100644 --- a/src/sentry/lang/native/symbolicator.py +++ b/src/sentry/lang/native/symbolicator.py @@ -2,7 +2,6 @@ import dataclasses import logging -import random import time import uuid from collections.abc import Callable @@ -237,10 +236,6 @@ class SymbolicatorSession: - Otherwise, it retries failed requests. """ - # Used as the `x-sentry-worker-id` HTTP header which is the routing key of - # the Symbolicator load balancer. - _worker_id = None - def __init__( self, url=None, @@ -253,7 +248,7 @@ def __init__( self.event_id = event_id self.timeout = timeout self.session = None - self.worker_id = self._get_worker_id() + self.reset_worker_id() def __enter__(self): self.open() @@ -365,21 +360,5 @@ def query_task(self, task_id): with metrics.timer("events.symbolicator.query_task"): return self._request("get", task_url, params=params) - @classmethod - def _get_worker_id(cls) -> str: - if random.random() <= options.get("symbolicator.worker-id-randomization-sample-rate"): - return uuid.uuid4().hex - - # as class attribute to keep it static for life of process - if cls._worker_id is None: - # %5000 to reduce cardinality of metrics tagging with worker id - cls._worker_id = str(uuid.uuid4().int % 5000) - return cls._worker_id - - @classmethod - def _reset_worker_id(cls): - cls._worker_id = None - def reset_worker_id(self): - self._reset_worker_id() - self.worker_id = self._get_worker_id() + self.worker_id = uuid.uuid4().hex diff --git a/src/sentry/models/eventattachment.py b/src/sentry/models/eventattachment.py index 766d01923b96de..7598bbefc9c370 100644 --- a/src/sentry/models/eventattachment.py +++ b/src/sentry/models/eventattachment.py @@ -1,5 +1,4 @@ import mimetypes -import random from dataclasses import dataclass from hashlib import sha1 from io import BytesIO @@ -12,7 +11,6 @@ from django.db import models from django.utils import timezone -from sentry import options from sentry.attachments.base import CachedAttachment from sentry.backup.scopes import RelocationScope from sentry.db.models import BoundedBigIntegerField, Model, region_silo_only_model, sane_repr @@ -156,9 +154,8 @@ def putfile(cls, project_id: int, attachment: CachedAttachment) -> PutfileResult blob = BytesIO(attachment.data) - store_blobs = project_id in options.get("eventattachments.store-blobs.projects") or ( - random.random() < options.get("eventattachments.store-blobs.sample-rate") - ) + # NOTE: we still keep the old code around for a while before complete removing it + store_blobs = True if store_blobs: size, checksum = get_size_and_checksum(blob) diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py index f5212f8a7a37b5..0ccdba4669e468 100644 --- a/src/sentry/reprocessing2.py +++ b/src/sentry/reprocessing2.py @@ -96,6 +96,7 @@ from sentry.deletions.defaults.group import DIRECT_GROUP_RELATED_MODELS from sentry.eventstore.models import Event from sentry.eventstore.processing import event_processing_store +from sentry.models.eventattachment import EventAttachment from sentry.snuba.dataset import Dataset from 
sentry.utils import json, metrics, snuba from sentry.utils.dates import to_datetime, to_timestamp @@ -119,7 +120,7 @@ # 1. they are migrated as part of the processing pipeline (post-process/save-event) # 2. there are a lot of them per group. For remaining events, we need to chunk # up those queries for them to not get too slow -EVENT_MODELS_TO_MIGRATE = (models.EventAttachment, models.UserReport) +EVENT_MODELS_TO_MIGRATE = (EventAttachment, models.UserReport) # The amount of seconds after which we assume there was no progress during reprocessing, # and after which we just give up and mark the group as finished. @@ -160,7 +161,7 @@ def backup_unprocessed_event(data): class ReprocessableEvent: event: Event data: dict[str, Any] - attachments: list[models.EventAttachment] + attachments: list[EventAttachment] def pull_event_data(project_id, event_id) -> ReprocessableEvent: @@ -182,7 +183,7 @@ def pull_event_data(project_id, event_id) -> ReprocessableEvent: required_attachment_types = get_required_attachment_types(data) attachments = list( - models.EventAttachment.objects.filter( + EventAttachment.objects.filter( project_id=project_id, event_id=event_id, type__in=list(required_attachment_types) ) ) @@ -217,11 +218,6 @@ def reprocess_event(project_id, event_id, start_time): # (we simply update group_id on the EventAttachment models in post_process) attachment_objects = [] - files = { - f.id: f - for f in models.File.objects.filter(id__in=[ea.file_id for ea in attachments if ea.file_id]) - } - for attachment_id, attachment in enumerate(attachments): with sentry_sdk.start_span(op="reprocess_event._copy_attachment_into_cache") as span: span.set_data("attachment_id", attachment.id) @@ -229,7 +225,6 @@ def reprocess_event(project_id, event_id, start_time): _copy_attachment_into_cache( attachment_id=attachment_id, attachment=attachment, - file=files[attachment.file_id] if attachment.file_id else None, cache_key=cache_key, cache_timeout=CACHE_TIMEOUT, ) @@ -399,28 +394,29 @@ def buffered_delete_old_primary_hash( ) -def _copy_attachment_into_cache(attachment_id, attachment, file, cache_key, cache_timeout): - fp = file.getfile() - chunk_index = 0 - size = 0 - while True: - chunk = fp.read(settings.SENTRY_REPROCESSING_ATTACHMENT_CHUNK_SIZE) - if not chunk: - break - - size += len(chunk) - - attachment_cache.set_chunk( - key=cache_key, - id=attachment_id, - chunk_index=chunk_index, - chunk_data=chunk, - timeout=cache_timeout, - ) - chunk_index += 1 +def _copy_attachment_into_cache( + attachment_id, attachment: EventAttachment, cache_key, cache_timeout +): + with attachment.getfile() as fp: + chunk_index = 0 + size = 0 + while True: + chunk = fp.read(settings.SENTRY_REPROCESSING_ATTACHMENT_CHUNK_SIZE) + if not chunk: + break + + size += len(chunk) + + attachment_cache.set_chunk( + key=cache_key, + id=attachment_id, + chunk_index=chunk_index, + chunk_data=chunk, + timeout=cache_timeout, + ) + chunk_index += 1 - expected_size = attachment.size or file.size - assert size == expected_size + assert size == attachment.size return CachedAttachment( key=cache_key, diff --git a/src/sentry/tasks/recap_servers.py b/src/sentry/tasks/recap_servers.py index 6d8f996e166a78..7e6b06bb0af724 100644 --- a/src/sentry/tasks/recap_servers.py +++ b/src/sentry/tasks/recap_servers.py @@ -5,7 +5,7 @@ import uuid from typing import Any -from sentry import features, http, options +from sentry import features, http from sentry.datascrubbing import scrub_data from sentry.event_manager import EventManager from 
sentry.models.options.project_option import ProjectOption @@ -138,10 +138,9 @@ def store_crash(crash, project: Project, url: str) -> None: ) return - if options.get("processing.can-use-scrubbers"): - new_event = safe_execute(scrub_data, project=project, event=event, _with_transaction=False) - if new_event is not None: - event = new_event + new_event = safe_execute(scrub_data, project=project, event=event, _with_transaction=False) + if new_event is not None: + event = new_event event_manager = EventManager(event, project=project) event_manager.save(project_id=project.id) diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index 5389dac680ee0d..0e56e493a42828 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -391,7 +391,7 @@ def _continue_to_save_event() -> None: # We are fairly confident, however, that this should run *before* # re-normalization as it is hard to find sensitive data in partially # trimmed strings. - if has_changed and options.get("processing.can-use-scrubbers"): + if has_changed: with sentry_sdk.start_span(op="task.store.datascrubbers.scrub"): with metrics.timer( "tasks.store.datascrubbers.scrub", tags={"from_symbolicate": from_symbolicate} @@ -770,9 +770,9 @@ def _do_save_event( time() - start_time, instance=data["platform"], tags={ - "is_reprocessing2": "true" - if reprocessing2.is_reprocessed_event(data) - else "false", + "is_reprocessing2": ( + "true" if reprocessing2.is_reprocessed_event(data) else "false" + ), }, ) diff --git a/tests/relay_integration/test_integration.py b/tests/relay_integration/test_integration.py index 21400bf298a401..afa1f5abad5fab 100644 --- a/tests/relay_integration/test_integration.py +++ b/tests/relay_integration/test_integration.py @@ -127,33 +127,18 @@ def test_standalone_attachment(self): assert attachment.group_id == event.group_id def test_blob_only_attachment(self): - event_id1 = uuid4().hex - event_id2 = uuid4().hex + event_id = uuid4().hex files = {"some_file": ("hello.txt", BytesIO(b"Hello World! default"))} - self.post_and_retrieve_attachment(event_id1, files) - - # Again, but using direct blob storage - files = {"some_file": ("hello.txt", BytesIO(b"Hello World! direct"))} - with self.options( - { - "eventattachments.store-blobs.sample-rate": 1, - } - ): - self.post_and_retrieve_attachment(event_id2, files) + self.post_and_retrieve_attachment(event_id, files) attachments = EventAttachment.objects.filter(project_id=self.project.id) - assert len(attachments) == 2 + assert len(attachments) == 1 - attachment1 = EventAttachment.objects.get(event_id=event_id1) - with attachment1.getfile() as blob: + attachment = EventAttachment.objects.get(event_id=event_id) + with attachment.getfile() as blob: assert blob.read() == b"Hello World! default" - assert attachment1.file_id is not None - - attachment2 = EventAttachment.objects.get(event_id=event_id2) - with attachment2.getfile() as blob: - assert blob.read() == b"Hello World! 
direct" - assert attachment2.blob_path is not None + assert attachment.blob_path is not None def test_transaction(self): event_data = { diff --git a/tests/sentry/attachments/test_base.py b/tests/sentry/attachments/test_base.py index ff21b917a4a419..15b230121b2cec 100644 --- a/tests/sentry/attachments/test_base.py +++ b/tests/sentry/attachments/test_base.py @@ -1,8 +1,6 @@ import copy from sentry.attachments.base import BaseAttachmentCache, CachedAttachment -from sentry.testutils.helpers.options import override_options -from sentry.testutils.pytest.fixtures import django_db_all class InMemoryCache: @@ -60,7 +58,6 @@ def test_meta_rate_limited(): } -@django_db_all def test_basic_chunked(): data = InMemoryCache() cache = BaseAttachmentCache(data) @@ -82,7 +79,6 @@ def test_basic_chunked(): assert not list(cache.get("c:foo")) -@django_db_all def test_basic_unchunked(): data = InMemoryCache() cache = BaseAttachmentCache(data) @@ -100,28 +96,24 @@ def test_basic_unchunked(): assert not list(cache.get("c:foo")) -@django_db_all def test_zstd_chunks(): data = InMemoryCache() cache = BaseAttachmentCache(data) cache.set_chunk("mixed_chunks", 123, 0, b"Hello World! ") cache.set_chunk("mixed_chunks", 123, 1, b"Just visiting. ") - with override_options({"attachment-cache.use-zstd": True}): - cache.set_chunk("mixed_chunks", 123, 2, b"Bye.") + cache.set_chunk("mixed_chunks", 123, 2, b"Bye.") mixed_chunks = cache.get_from_chunks(key="mixed_chunks", id=123, chunks=3) assert mixed_chunks.data == b"Hello World! Just visiting. Bye." att = CachedAttachment(key="not_chunked", id=456, data=b"Hello World! Bye.") - with override_options({"attachment-cache.use-zstd": True}): - cache.set("not_chunked", [att]) + cache.set("not_chunked", [att]) (not_chunked,) = cache.get("not_chunked") assert not_chunked.data == b"Hello World! Bye." 
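
# (Illustrative sketch, not part of this diff: the mixed-chunk assertions above
# can only pass if reads transparently handle both legacy zlib chunks and new
# zstd chunks. zstd frames begin with the magic bytes b"\x28\xb5\x2f\xfd", so a
# reader can dispatch on that prefix -- assuming that is how the cache tells
# them apart.)
import zlib

import zstandard

ZSTD_MAGIC = b"\x28\xb5\x2f\xfd"


def decompress_chunk(chunk_data: bytes) -> bytes:
    # New chunks are written with zstandard.compress; anything without the
    # zstd magic is treated as a legacy zlib chunk.
    if chunk_data.startswith(ZSTD_MAGIC):
        return zstandard.decompress(chunk_data)
    return zlib.decompress(chunk_data)


assert decompress_chunk(zstandard.compress(b"payload")) == b"payload"
assert decompress_chunk(zlib.compress(b"payload")) == b"payload"
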
-@django_db_all def test_basic_rate_limited(): data = InMemoryCache() cache = BaseAttachmentCache(data) diff --git a/tests/sentry/grouping/test_enhancer.py b/tests/sentry/grouping/test_enhancer.py index 0a1193066c131e..49555561a2457f 100644 --- a/tests/sentry/grouping/test_enhancer.py +++ b/tests/sentry/grouping/test_enhancer.py @@ -62,17 +62,6 @@ def test_basic_parsing(insta_snapshot, version): ) assert isinstance(dumped, str) - with override_options({"enhancers.use-zstd": True}): - dumped_zstd = enhancement.dumps() - - assert dumped_zstd is not dumped - assert Enhancements.loads(dumped_zstd).dumps() == dumped_zstd - assert ( - Enhancements.loads(dumped_zstd)._to_config_structure() - == enhancement._to_config_structure() - ) - assert isinstance(dumped_zstd, str) - def test_parsing_errors(): with pytest.raises(InvalidEnhancerConfig): diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py index 7e2f936898906f..e21980ea8097b6 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py @@ -25,7 +25,6 @@ ) from sentry.models.debugfile import create_files_from_dif_zip from sentry.models.eventattachment import EventAttachment -from sentry.models.files.file import File from sentry.models.userreport import UserReport from sentry.options import set from sentry.testutils.pytest.fixtures import django_db_all @@ -316,12 +315,10 @@ def test_with_attachments(default_project, task_runner, missing_chunks, monkeypa if not missing_chunks: (attachment,) = persisted_attachments - file = File.objects.get(id=attachment.file_id) - assert file.type == "custom.attachment" - assert file.headers == {"Content-Type": "text/plain"} - file_contents = file.getfile() - assert file_contents.read() == b"Hello World!" - assert file_contents.name == "lol.txt" + assert attachment.content_type == "text/plain" + assert attachment.name == "lol.txt" + with attachment.getfile() as file: + assert file.read() == b"Hello World!" 
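        # `attachment.getfile()` returns a file-like object and is used as a
        # context manager here, replacing the previous
        # `File.objects.get(id=attachment.file_id)` lookup these tests
        # asserted against.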
else: assert not persisted_attachments @@ -397,12 +394,10 @@ def test_deobfuscate_view_hierarchy(default_project, task_runner): EventAttachment.objects.filter(project_id=project_id, event_id=event_id) ) (attachment,) = persisted_attachments - file = File.objects.get(id=attachment.file_id) - assert file.type == "event.view_hierarchy" - assert file.headers == {"Content-Type": "application/json"} - file_contents = file.getfile() - assert file_contents.read() == expected_response - assert file_contents.name == "view_hierarchy.json" + assert attachment.content_type == "application/json" + assert attachment.name == "view_hierarchy.json" + with attachment.getfile() as file: + assert file.read() == expected_response @django_db_all @@ -479,13 +474,8 @@ def test_individual_attachments( assert attachment.group_id == group_id assert attachment.content_type == chunks[2] - if attachment.file_id: - file = File.objects.get(id=attachment.file_id) - assert file.type == chunks[1] - assert file.headers == {"Content-Type": chunks[2]} - - file_contents = attachment.getfile() - assert file_contents.read() == b"".join(chunks[0]) + with attachment.getfile() as file_contents: + assert file_contents.read() == b"".join(chunks[0]) @django_db_all diff --git a/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap b/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap index f14a0c93ca9c43..f4bf8a92892297 100644 --- a/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap +++ b/tests/sentry/relay/snapshots/test_config/test_get_project_config/full_config/REGION.pysnap @@ -1,4 +1,6 @@ --- +created: '2024-02-07T10:46:28.389774Z' +creator: sentry source: tests/sentry/relay/test_config.py --- config: @@ -97,7 +99,7 @@ config: - '*/readyz' - '*/ping' groupingConfig: - enhancements: eJybzDRxc15qeXFJZU6qlZGBkbGugaGuoeEEAHJMCAM + enhancements: KLUv_SAYwQAAkwKRs25ld3N0eWxlOjIwMjMtMDEtMTGQ id: newstyle:2023-01-11 piiConfig: applications: diff --git a/tests/sentry/tasks/test_assemble.py b/tests/sentry/tasks/test_assemble.py index 610cc21b9537fe..99d0dcfcac81fa 100644 --- a/tests/sentry/tasks/test_assemble.py +++ b/tests/sentry/tasks/test_assemble.py @@ -836,7 +836,6 @@ def test_failing_update(self, _): with self.options( { - "processing.save-release-archives": True, "processing.release-archive-min-files": 1, } ): diff --git a/tests/sentry/tasks/test_reprocessing2.py b/tests/sentry/tasks/test_reprocessing2.py index 55745c450f9c41..b61132a27fc2b9 100644 --- a/tests/sentry/tasks/test_reprocessing2.py +++ b/tests/sentry/tasks/test_reprocessing2.py @@ -45,6 +45,7 @@ def _create_event_attachment(evt, type): file_id=file.id, type=file.type, name="foo", + size=file.size, ) diff --git a/tests/symbolicator/test_minidump_full.py b/tests/symbolicator/test_minidump_full.py index cc562b357693a7..f512f1ec970c9d 100644 --- a/tests/symbolicator/test_minidump_full.py +++ b/tests/symbolicator/test_minidump_full.py @@ -9,7 +9,6 @@ from sentry import eventstore from sentry.lang.native.utils import STORE_CRASH_REPORTS_ALL from sentry.models.eventattachment import EventAttachment -from sentry.models.files.file import File from sentry.testutils.cases import TransactionTestCase from sentry.testutils.factories import get_fixture_path from sentry.testutils.helpers.task_runner import BurstTaskRunner @@ -109,14 +108,10 @@ def test_full_minidump(self): hello, minidump = attachments assert hello.name == "hello.txt" - hello_file = 
File.objects.get(id=hello.file_id) - assert hello_file.type == "event.attachment" - assert hello_file.checksum == "2ef7bde608ce5404e97d5f042f95f89f1c232871" + assert hello.sha1 == "2ef7bde608ce5404e97d5f042f95f89f1c232871" assert minidump.name == "windows.dmp" - minidump_file = File.objects.get(id=minidump.file_id) - assert minidump_file.type == "event.minidump" - assert minidump_file.checksum == "74bb01c850e8d65d3ffbc5bad5cabc4668fce247" + assert minidump.sha1 == "74bb01c850e8d65d3ffbc5bad5cabc4668fce247" def test_full_minidump_json_extra(self): self.project.update_option("sentry:store_crash_reports", STORE_CRASH_REPORTS_ALL) @@ -202,9 +197,7 @@ def test_reprocessing(self): ) assert minidump.name == "windows.dmp" - minidump_file = File.objects.get(id=minidump.file_id) - assert minidump_file.type == "event.minidump" - assert minidump_file.checksum == "74bb01c850e8d65d3ffbc5bad5cabc4668fce247" + assert minidump.sha1 == "74bb01c850e8d65d3ffbc5bad5cabc4668fce247" def test_minidump_threadnames(self): self.project.update_option("sentry:store_crash_reports", STORE_CRASH_REPORTS_ALL) diff --git a/tests/symbolicator/test_payload_full.py b/tests/symbolicator/test_payload_full.py index 35e5fb9cf040cc..2997cef8c5ff89 100644 --- a/tests/symbolicator/test_payload_full.py +++ b/tests/symbolicator/test_payload_full.py @@ -20,7 +20,6 @@ from sentry.testutils.cases import TransactionTestCase from sentry.testutils.factories import get_fixture_path from sentry.testutils.helpers.datetime import before_now, iso_format -from sentry.testutils.helpers.options import override_options from sentry.testutils.relay import RelayStoreHelper from sentry.testutils.skips import requires_kafka, requires_symbolicator from sentry.utils import json @@ -398,12 +397,7 @@ def test_resolve_mixed_stack_trace(self): }, } - with override_options( - { - "symbolicator.sourcemaps-processing-sample-rate": 1.0, - } - ): - event = self.post_and_retrieve_event(data) + event = self.post_and_retrieve_event(data) exception = event.interfaces["exception"] frames = exception.values[0].stacktrace.frames diff --git a/tests/symbolicator/test_unreal_full.py b/tests/symbolicator/test_unreal_full.py index 4204c0f2f85b65..61331ef6970af9 100644 --- a/tests/symbolicator/test_unreal_full.py +++ b/tests/symbolicator/test_unreal_full.py @@ -8,7 +8,6 @@ from sentry.lang.native.utils import STORE_CRASH_REPORTS_ALL from sentry.models.eventattachment import EventAttachment -from sentry.models.files.file import File from sentry.testutils.cases import TransactionTestCase from sentry.testutils.factories import get_fixture_path from sentry.testutils.relay import RelayStoreHelper @@ -109,24 +108,16 @@ def test_unreal_crash_with_attachments(self): context, config, minidump, log = attachments assert context.name == "CrashContext.runtime-xml" - context_file = File.objects.get(id=context.file_id) - assert context_file.type == "unreal.context" - assert context_file.checksum == "835d3e10db5d1799dc625132c819c047261ddcfb" + assert context.sha1 == "835d3e10db5d1799dc625132c819c047261ddcfb" assert config.name == "CrashReportClient.ini" - config_file = File.objects.get(id=config.file_id) - assert config_file.type == "event.attachment" - assert config_file.checksum == "5839c750bdde8cba4d2a979ea857b8154cffdab5" + assert config.sha1 == "5839c750bdde8cba4d2a979ea857b8154cffdab5" assert minidump.name == "UE4Minidump.dmp" - minidump_file = File.objects.get(id=minidump.file_id) - assert minidump_file.type == "event.minidump" - assert minidump_file.checksum == 
"089d9fd3b5c0cc4426339ab46ec3835e4be83c0f" + assert minidump.sha1 == "089d9fd3b5c0cc4426339ab46ec3835e4be83c0f" assert log.name == "YetAnother.log" # Log file is named after the project - log_file = File.objects.get(id=log.file_id) - assert log_file.type == "unreal.logs" - assert log_file.checksum == "24d1c5f75334cd0912cc2670168d593d5fe6c081" + assert log.sha1 == "24d1c5f75334cd0912cc2670168d593d5fe6c081" def test_unreal_apple_crash_with_attachments(self): attachments = self.unreal_crash_test_impl(get_unreal_crash_apple_file()) @@ -135,31 +126,19 @@ def test_unreal_apple_crash_with_attachments(self): context, config, diagnostics, log, info, minidump = attachments assert context.name == "CrashContext.runtime-xml" - context_file = File.objects.get(id=context.file_id) - assert context_file.type == "unreal.context" - assert context_file.checksum == "5d2723a7d25111645702fcbbcb8e1d038db56c6e" + assert context.sha1 == "5d2723a7d25111645702fcbbcb8e1d038db56c6e" assert config.name == "CrashReportClient.ini" - config_file = File.objects.get(id=config.file_id) - assert config_file.type == "event.attachment" - assert config_file.checksum == "4d6a2736e3e4969a68b7adbe197b05c171c29ea0" + assert config.sha1 == "4d6a2736e3e4969a68b7adbe197b05c171c29ea0" assert diagnostics.name == "Diagnostics.txt" - diagnostics_file = File.objects.get(id=diagnostics.file_id) - assert diagnostics_file.type == "event.attachment" - assert diagnostics_file.checksum == "aa271bf4e307a78005410234081945352e8fb236" + assert diagnostics.sha1 == "aa271bf4e307a78005410234081945352e8fb236" assert log.name == "YetAnotherMac.log" # Log file is named after the project - log_file = File.objects.get(id=log.file_id) - assert log_file.type == "unreal.logs" - assert log_file.checksum == "735e751a8b6b943dbc0abce0e6d096f4d48a0c1e" + assert log.sha1 == "735e751a8b6b943dbc0abce0e6d096f4d48a0c1e" assert info.name == "info.txt" - info_file = File.objects.get(id=info.file_id) - assert info_file.type == "event.attachment" - assert info_file.checksum == "279b27ac5d0e6792d088e0662ce1a18413b772bc" + assert info.sha1 == "279b27ac5d0e6792d088e0662ce1a18413b772bc" assert minidump.name == "minidump.dmp" - minidump_file = File.objects.get(id=minidump.file_id) - assert minidump_file.type == "event.applecrashreport" - assert minidump_file.checksum == "728d0f4b09cf5a7942da3893b6db79ac842b701a" + assert minidump.sha1 == "728d0f4b09cf5a7942da3893b6db79ac842b701a" From 48768ae78d95f33bdfa63eac1e93d659244cfc85 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 7 Feb 2024 12:45:36 +0100 Subject: [PATCH 097/357] feat(ddm): Improve chart series names (#64758) ### Problem Our current series naming has multiple problems. 1. Dependent on both data and local component state leading to visual glitches when updating groupings. 2. Long names with the names of grouping values being repetitive elements e.g. `result_type:success, environment:prod` 3. if there is only a single group we simply showed the field name, not exposing any information about the returned group ### Solutions 1. Only format based on the timeseries response. It includes all needed information. 2. Remove grouping names from the series name -> `success, environment`. Keep the grouping names in the tooltip when hovering in the summary table. Adding groups is a conscious choice the user makes, expecting a certain outcome. So my assumption is that in most cases they won't need the name of the group value. If they need it, it is accessible via the tooltip. 3. 
Always show the grouping values, even if there is only a single group - relates to https://github.com/getsentry/sentry/issues/64021 --- static/app/utils/metrics/index.tsx | 14 +++---- .../dashboards/datasetConfig/metrics.tsx | 10 +++-- .../widgetCard/metricWidgetCard/index.tsx | 3 +- static/app/views/ddm/chart.tsx | 2 + static/app/views/ddm/createAlertModal.tsx | 1 - static/app/views/ddm/summaryTable.tsx | 39 +++++++++++++++---- static/app/views/ddm/widget.tsx | 15 ++----- 7 files changed, 49 insertions(+), 35 deletions(-) diff --git a/static/app/utils/metrics/index.tsx b/static/app/utils/metrics/index.tsx index bb5adca204b182..ad7465e26f50c5 100644 --- a/static/app/utils/metrics/index.tsx +++ b/static/app/utils/metrics/index.tsx @@ -256,13 +256,9 @@ export function useClearQuery() { }, [routerRef]); } -// TODO(ddm): there has to be a nicer way to do this -export function getSeriesName( - group: MetricsGroup, - isOnlyGroup = false, - groupBy: MetricsQuery['groupBy'] -) { - if (isOnlyGroup && !groupBy?.length) { +export function getMetricsSeriesName(group: MetricsGroup) { + const groupByEntries = Object.entries(group.by ?? {}); + if (!groupByEntries.length) { const field = Object.keys(group.series)?.[0]; const {mri} = parseField(field) ?? {mri: field}; const name = formatMRI(mri as MRI); @@ -270,8 +266,8 @@ export function getSeriesName( return name ?? '(none)'; } - return Object.entries(group.by) - .map(([key, value]) => `${key}:${String(value).length ? value : t('none')}`) + return groupByEntries + .map(([_key, value]) => `${String(value).length ? value : t('(none)')}`) .join(', '); } diff --git a/static/app/views/dashboards/datasetConfig/metrics.tsx b/static/app/views/dashboards/datasetConfig/metrics.tsx index 6315db2bdcb2fa..50b7d89bc8c66a 100644 --- a/static/app/views/dashboards/datasetConfig/metrics.tsx +++ b/static/app/views/dashboards/datasetConfig/metrics.tsx @@ -13,7 +13,11 @@ import type {CustomMeasurementCollection} from 'sentry/utils/customMeasurements/ import type {TableData} from 'sentry/utils/discover/discoverQuery'; import type {EventData} from 'sentry/utils/discover/eventView'; import {NumberContainer} from 'sentry/utils/discover/styles'; -import {getMetricsApiRequestQuery, getSeriesName, groupByOp} from 'sentry/utils/metrics'; +import { + getMetricsApiRequestQuery, + getMetricsSeriesName, + groupByOp, +} from 'sentry/utils/metrics'; import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters'; import { formatMRIField, @@ -352,9 +356,7 @@ export function transformMetricsResponseToSeries( data.groups.forEach(group => { Object.keys(group.series).forEach(field => { results.push({ - seriesName: - queryAlias || - getSeriesName(group, data.groups.length === 1, widgetQuery.columns), + seriesName: queryAlias || getMetricsSeriesName(group), data: data.intervals.map((interval, index) => ({ name: interval, value: group.series[field][index] ?? 0, diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx index 3acacb86871053..969f7a64a8843f 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx @@ -242,10 +242,9 @@ export function MetricWidgetChartContainer({ ? 
getChartTimeseries(timeseriesData, { getChartPalette: createChartPalette, mri, - groupBy, }) : []; - }, [timeseriesData, mri, groupBy]); + }, [timeseriesData, mri]); if (isError) { const errorMessage = diff --git a/static/app/views/ddm/chart.tsx b/static/app/views/ddm/chart.tsx index f406e00d69f6ea..e5390ed727ff89 100644 --- a/static/app/views/ddm/chart.tsx +++ b/static/app/views/ddm/chart.tsx @@ -160,6 +160,8 @@ export const MetricChart = forwardRef( if (!isChartHovered(chartRef?.current)) { return ''; } + + // Hovering a single correlated sample datapoint if (params.seriesType === 'scatter') { return getFormatter(samples.formatters)(params, asyncTicket); } diff --git a/static/app/views/ddm/createAlertModal.tsx b/static/app/views/ddm/createAlertModal.tsx index 0c29c669a985c9..e76693ae36a877 100644 --- a/static/app/views/ddm/createAlertModal.tsx +++ b/static/app/views/ddm/createAlertModal.tsx @@ -154,7 +154,6 @@ export function CreateAlertModal({Header, Body, Footer, metricsQuery}: Props) { getChartTimeseries(data, { mri: metricsQuery.mri, focusedSeries: undefined, - groupBy: [], // We are limited to one series in this chart, so we can just use the first color getChartPalette: createChartPalette, }), diff --git a/static/app/views/ddm/summaryTable.tsx b/static/app/views/ddm/summaryTable.tsx index 0b51ddf7e36c4b..31d97837e65e39 100644 --- a/static/app/views/ddm/summaryTable.tsx +++ b/static/app/views/ddm/summaryTable.tsx @@ -116,7 +116,6 @@ export const SummaryTable = memo(function SummaryTable({ return { ...s, ...getValues(s.data), - name: s.seriesName, }; }) .sort((a, b) => { @@ -127,8 +126,8 @@ export const SummaryTable = memo(function SummaryTable({ if (name === 'name') { return order === 'asc' - ? a.name.localeCompare(b.name) - : b.name.localeCompare(a.name); + ? a.seriesName.localeCompare(b.seriesName) + : b.seriesName.localeCompare(a.seriesName); } const aValue = a[name] ?? 0; const bValue = b[name] ?? 0; @@ -168,7 +167,6 @@ export const SummaryTable = memo(function SummaryTable({ > {rows.map( ({ - name, seriesName, groupBy, color, @@ -211,12 +209,11 @@ export const SummaryTable = memo(function SummaryTable({ } delay={500} overlayStyle={{maxWidth: '80vw'}} > - {name} + {seriesName} {/* TODO(ddm): Add a tooltip with the full value, don't add on click in case users want to copy the value */} @@ -259,6 +256,34 @@ export const SummaryTable = memo(function SummaryTable({ ); }); +function FullSeriesName({ + seriesName, + groupBy, +}: { + seriesName: string; + groupBy?: Record; +}) { + if (!groupBy || Object.keys(groupBy).length === 0) { + return {seriesName}; + } + + const goupByEntries = Object.entries(groupBy); + return ( + + {goupByEntries.map(([key, value], index) => { + const formattedValue = value || t('(none)'); + return ( + + {`${key}:`} +   + {index === goupByEntries.length - 1 ? 
formattedValue : `${formattedValue}, `} + + ); + })} + + ); +} + function SortableHeaderCell({ sortState, name, diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 8702388e9a652a..49594aeb0fda78 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -19,7 +19,7 @@ import type {MetricsApiResponse, MRI, PageFilters} from 'sentry/types'; import type {ReactEchartsRef} from 'sentry/types/echarts'; import { getDefaultMetricDisplayType, - getSeriesName, + getMetricsSeriesName, stringifyMetricWidget, } from 'sentry/utils/metrics'; import {metricDisplayTypeOptions} from 'sentry/utils/metrics/constants'; @@ -289,16 +289,9 @@ const MetricWidgetBody = memo( getChartPalette, mri, focusedSeries: focusedSeries?.seriesName, - groupBy: metricsQuery.groupBy, }) : []; - }, [ - timeseriesData, - getChartPalette, - mri, - focusedSeries?.seriesName, - metricsQuery.groupBy, - ]); + }, [timeseriesData, getChartPalette, mri, focusedSeries?.seriesName]); const handleSortChange = useCallback( newSort => { @@ -367,12 +360,10 @@ export function getChartTimeseries( getChartPalette, mri, focusedSeries, - groupBy, }: { getChartPalette: (seriesNames: string[]) => Record; mri: MRI; focusedSeries?: string; - groupBy?: string[]; } ) { // this assumes that all series have the same unit @@ -382,7 +373,7 @@ export function getChartTimeseries( const series = data.groups.map(g => { return { values: Object.values(g.series)[0], - name: getSeriesName(g, data.groups.length === 1, groupBy), + name: getMetricsSeriesName(g), groupBy: g.by, transaction: g.by.transaction, release: g.by.release, From 70684a750dcabcb841e0cd28b857a76a596dbb10 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 7 Feb 2024 12:50:44 +0100 Subject: [PATCH 098/357] feat(ddm): More visible bar charts fog of war (#64764) --- static/app/views/ddm/chart.tsx | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/static/app/views/ddm/chart.tsx b/static/app/views/ddm/chart.tsx index e5390ed727ff89..8d8e2d3b44ecd0 100644 --- a/static/app/views/ddm/chart.tsx +++ b/static/app/views/ddm/chart.tsx @@ -299,6 +299,9 @@ function transformToScatterSeries({ }); } +const EXTRAPOLATED_AREA_STRIPE_IMG = + 'image://data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAABkCAYAAAC/zKGXAAAAMUlEQVR4Ae3KoREAIAwEsMKgrMeYj8BzyIpEZyTZda16mPVJFEVRFEVRFEVRFMWO8QB4uATKpuU51gAAAABJRU5ErkJggg=='; + const createFogOfWarBarSeries = (series: Series, fogBucketCnt = 0) => ({ ...series, silent: true, @@ -308,7 +311,13 @@ const createFogOfWarBarSeries = (series: Series, fogBucketCnt = 0) => ({ value: index < series.data.length - fogBucketCnt ? 
0 : data.value, })), itemStyle: { - opacity: 0.5, + opacity: 1, + decal: { + symbol: EXTRAPOLATED_AREA_STRIPE_IMG, + dashArrayX: [6, 0], + dashArrayY: [6, 0], + rotation: Math.PI / 4, + }, }, }); From da335116af55c3745d765c0f458aff1b3b9eda5c Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 7 Feb 2024 13:01:58 +0100 Subject: [PATCH 099/357] fix(ddm-onboarding): PHP code locations snippet (#64765) - closes https://github.com/getsentry/sentry/issues/64763 --- static/app/gettingStartedDocs/php/laravel.tsx | 4 ++-- static/app/gettingStartedDocs/php/php.tsx | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/static/app/gettingStartedDocs/php/laravel.tsx b/static/app/gettingStartedDocs/php/laravel.tsx index 82da95444ff1a0..30c310de78014c 100644 --- a/static/app/gettingStartedDocs/php/laravel.tsx +++ b/static/app/gettingStartedDocs/php/laravel.tsx @@ -179,7 +179,7 @@ const customMetricsOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: tct( - 'Once the SDK is installed or updated, you can enable code locations being emitted with your metricsin your [code:config/sentry.php] file:', + 'Once the SDK is installed or updated, you can enable code locations being emitted with your metrics in your [code:config/sentry.php] file:', { code: , } @@ -191,7 +191,7 @@ const customMetricsOnboarding: OnboardingConfig = { label: 'PHP', value: 'php', language: 'php', - code: `'metric_code_locations' => true,`, + code: `'attach_metric_code_locations' => true,`, }, ], }, diff --git a/static/app/gettingStartedDocs/php/php.tsx b/static/app/gettingStartedDocs/php/php.tsx index d71692a59a2214..e15329bf19faf6 100644 --- a/static/app/gettingStartedDocs/php/php.tsx +++ b/static/app/gettingStartedDocs/php/php.tsx @@ -30,7 +30,7 @@ const getMetricsConfigureSnippet = () => ` use function \\Sentry\\init; \\Sentry\\init([ - 'metric_code_locations' => true, + 'attach_metric_code_locations' => true, ]);`; const getVerifySnippet = () => ` From 003dfe434157c82787e922a04211f2284542b1e2 Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Wed, 7 Feb 2024 13:29:39 +0100 Subject: [PATCH 100/357] Add a `payload_size` metric to the ingest consumer (#64646) In order to have better insights into the pipeline following INC-626, we would like to have a metric for the payload size that the ingest consumers receive from Kafka, because the existing metrics showed an increased read traffic. --- src/sentry/ingest/consumer/attachment_event.py | 8 +++++++- src/sentry/ingest/consumer/factory.py | 9 +++++---- src/sentry/ingest/consumer/simple_event.py | 8 +++++++- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/sentry/ingest/consumer/attachment_event.py b/src/sentry/ingest/consumer/attachment_event.py index 43843868f444b7..e4397ca921aa9b 100644 --- a/src/sentry/ingest/consumer/attachment_event.py +++ b/src/sentry/ingest/consumer/attachment_event.py @@ -21,7 +21,7 @@ def decode_and_process_chunks( - raw_message: Message[KafkaPayload], + raw_message: Message[KafkaPayload], consumer_type: str ) -> IngestMessage | None: """ The first pass for the `attachments` topic: @@ -31,6 +31,12 @@ def decode_and_process_chunks( - Process and save `attachment_chunk`s. 
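    - Record the size of the raw payload in the `ingest_consumer.payload_size`
      distribution metric (unit: bytes), tagged with the consumer type.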
""" raw_payload = raw_message.payload.value + metrics.distribution( + "ingest_consumer.payload_size", + len(raw_payload), + tags={"consumer": consumer_type}, + unit="byte", + ) message: IngestMessage = msgpack.unpackb(raw_payload, use_list=False) if message["type"] == "attachment_chunk": diff --git a/src/sentry/ingest/consumer/factory.py b/src/sentry/ingest/consumer/factory.py index e2ab171c4edbe0..dfdb0d56c844e0 100644 --- a/src/sentry/ingest/consumer/factory.py +++ b/src/sentry/ingest/consumer/factory.py @@ -1,6 +1,7 @@ from __future__ import annotations from collections.abc import Callable, Mapping, MutableMapping +from functools import partial from typing import Any, NamedTuple, TypeVar from arroyo import Topic @@ -102,9 +103,8 @@ def create_with_partitions( final_step = CommitOffsets(commit) if not self.is_attachment_topic: - next_step = maybe_multiprocess_step( - mp, process_simple_event_message, final_step, self._pool - ) + event_function = partial(process_simple_event_message, consumer_type=self.consumer_type) + next_step = maybe_multiprocess_step(mp, event_function, final_step, self._pool) return create_backpressure_step(health_checker=self.health_checker, next_step=next_step) # The `attachments` topic is a bit different, as it allows multiple event types: @@ -129,8 +129,9 @@ def create_with_partitions( # As the steps are defined (and types inferred) in reverse order, we would get a type error here, # as `step_1` outputs an `| None`, but the `filter_step` does not mention that in its type, # as it is inferred from the `step_2` input type which does not mention `| None`. + attachment_function = partial(decode_and_process_chunks, consumer_type=self.consumer_type) step_1 = maybe_multiprocess_step( - mp, decode_and_process_chunks, filter_step, self._pool # type:ignore + mp, attachment_function, filter_step, self._pool # type:ignore ) return create_backpressure_step(health_checker=self.health_checker, next_step=step_1) diff --git a/src/sentry/ingest/consumer/simple_event.py b/src/sentry/ingest/consumer/simple_event.py index caa24daaa43826..89c628fc923d16 100644 --- a/src/sentry/ingest/consumer/simple_event.py +++ b/src/sentry/ingest/consumer/simple_event.py @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) -def process_simple_event_message(raw_message: Message[KafkaPayload]) -> None: +def process_simple_event_message(raw_message: Message[KafkaPayload], consumer_type: str) -> None: """ Processes a single Kafka Message containing a "simple" Event payload. @@ -28,6 +28,12 @@ def process_simple_event_message(raw_message: Message[KafkaPayload]) -> None: """ raw_payload = raw_message.payload.value + metrics.distribution( + "ingest_consumer.payload_size", + len(raw_payload), + tags={"consumer": consumer_type}, + unit="byte", + ) message: IngestMessage = msgpack.unpackb(raw_payload, use_list=False) message_type = message["type"] From 4590e432159b7b00808670cf84ea99257272731c Mon Sep 17 00:00:00 2001 From: David Herberth Date: Wed, 7 Feb 2024 14:10:43 +0100 Subject: [PATCH 101/357] ref(relay): Stop exposing relay cardinality limiter feature flag (#64762) Cardinality Limits will only be added to the project config when the feature flag is enabled, we don't need to additionally send the feature flag. 
--- src/sentry/relay/config/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/sentry/relay/config/__init__.py b/src/sentry/relay/config/__init__.py index 1953d87ca4ebc7..6f1dd36f191609 100644 --- a/src/sentry/relay/config/__init__.py +++ b/src/sentry/relay/config/__init__.py @@ -58,7 +58,6 @@ "organizations:custom-metrics", "organizations:metric-meta", "organizations:standalone-span-ingestion", - "organizations:relay-cardinality-limiter", ] EXTRACT_METRICS_VERSION = 1 From 4c06b9dcc7c31ef3257298725f200a044a2ee17d Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Wed, 7 Feb 2024 08:38:24 -0500 Subject: [PATCH 102/357] ref(similarity-embedding): Use in app frames (#64715) Change similarity embeddings to only use in app frames of the stacktrace --- .../group_similar_issues_embeddings.py | 29 ++++++++++--------- .../test_group_similar_issues_embeddings.py | 13 ++++++++- 2 files changed, 27 insertions(+), 15 deletions(-) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index ba014eaa06678e..35ad703c23e434 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -40,20 +40,21 @@ def get_stacktrace_string(exception: Mapping[Any, Any], event: GroupEvent) -> st choices = [event.platform, "default"] if event.platform else ["default"] templates = [f"sentry/partial/frames/{choice}.txt" for choice in choices] for frame in exc["stacktrace"]["frames"]: - output.append( - render_to_string( - templates, - { - "abs_path": frame.get("abs_path"), - "filename": frame.get("filename"), - "function": frame.get("function"), - "module": frame.get("module"), - "lineno": frame.get("lineno"), - "colno": frame.get("colno"), - "context_line": frame.get("context_line"), - }, - ).strip("\n") - ) + if frame["in_app"]: + output.append( + render_to_string( + templates, + { + "abs_path": frame.get("abs_path"), + "filename": frame.get("filename"), + "function": frame.get("function"), + "module": frame.get("module"), + "lineno": frame.get("lineno"), + "colno": frame.get("colno"), + "context_line": frame.get("context_line"), + }, + ).strip("\n") + ) return "\n".join(output) diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 829cd5521c3de2..7f887f6068052d 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -40,7 +40,18 @@ def setUp(self): "abs_path": "/Users/jodi/python_onboarding/python_onboarding.py", "lineno": 20, "context_line": " divide_by_zero_another()", - } + "in_app": True, + }, + # The non-in-app frame should not be included in the stacktrace + { + "function": "another_function", + "module": "__main__", + "filename": "python_onboarding.py", + "abs_path": "/Users/jodi/python_onboarding/python_onboarding.py", + "lineno": 40, + "context_line": " another_function()", + "in_app": False, + }, ] }, "type": "ZeroDivisionError", From 98865c9b038637446180378bc7b9733fa6413f53 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 08:55:55 -0500 Subject: [PATCH 103/357] ref: upgrade grpcio (#64709) our current version does not build for 3.12 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 4 ++-- requirements-frozen.txt | 4 
++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 08f41718c14c18..170e5a52eacadb 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -88,7 +88,7 @@ cryptography>=38.0.3 # Note, grpcio>1.30.0 requires setting GRPC_POLL_STRATEGY=epoll1 # See https://github.com/grpc/grpc/issues/23796 and # https://github.com/grpc/grpc/blob/v1.35.x/doc/core/grpc-polling-engines.md#polling-engine-implementations-in-grpc -grpcio>=1.47.0 +grpcio>=1.59.0 # not directly used, but provides a speedup for redis hiredis>=0.3.1 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 528f7a94d5b801..5591dc19945801 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -69,8 +69,8 @@ google-crc32c==1.5.0 google-resumable-media==2.7.0 googleapis-common-protos==1.62.0 grpc-google-iam-v1==0.12.4 -grpcio==1.56.0 -grpcio-status==1.56.0 +grpcio==1.60.1 +grpcio-status==1.60.1 h11==0.13.0 hiredis==0.3.1 honcho==1.1.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index dfcc2187f611ae..a84ea961cbb8c1 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -55,8 +55,8 @@ google-crc32c==1.5.0 google-resumable-media==2.7.0 googleapis-common-protos==1.62.0 grpc-google-iam-v1==0.12.4 -grpcio==1.56.0 -grpcio-status==1.56.0 +grpcio==1.60.1 +grpcio-status==1.60.1 h11==0.14.0 hiredis==0.3.1 httpcore==1.0.2 From 231f56df1f3cdb54240e7a56809605c5fb6cbdb9 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 08:56:08 -0500 Subject: [PATCH 104/357] ref: upgrade pyyaml to 6.0.1 (#64710) our current version does not build for python 3.12 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 170e5a52eacadb..231d6b2e42de54 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -50,7 +50,7 @@ pymemcache python-u2flib-server>=5.0.0 fido2>=0.9.2 python3-saml>=1.15.0 -PyYAML>=5.4 +PyYAML>=6.0.1 rb>=1.9.0 redis-py-cluster>=2.1.0 redis>=3.4.1 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 5591dc19945801..3d11a5aad5bb30 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -156,7 +156,7 @@ pytz==2018.9 pyupgrade==3.15.0 pyuwsgi==2.0.23 pyvat==1.3.15 -pyyaml==5.4 +pyyaml==6.0.1 rb==1.10.0 redis==3.4.1 redis-py-cluster==2.1.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index a84ea961cbb8c1..c58baa04aa8be6 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -103,7 +103,7 @@ python3-saml==1.15.0 pytz==2018.9 pyuwsgi==2.0.23 pyvat==1.3.15 -pyyaml==5.4 +pyyaml==6.0.1 rb==1.10.0 redis==3.4.1 redis-py-cluster==2.1.0 From 5054360cb76dd3e807cefdcdd37841be046a72be Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 08:56:37 -0500 Subject: [PATCH 105/357] ref: upgrade charset-normalizer (#64712) the current version does not build for 3.12 --- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 3d11a5aad5bb30..b4527d3dfea46b 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -24,7 +24,7 @@ celery==5.3.5 certifi==2023.7.22 cffi==1.15.1 cfgv==3.3.1 
-charset-normalizer==3.0.1 +charset-normalizer==3.3.2 click==8.1.7 click-didyoumean==0.3.0 click-plugins==1.1.1 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index c58baa04aa8be6..6b37edd5b5c7f4 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -20,7 +20,7 @@ cachetools==5.3.0 celery==5.3.5 certifi==2023.7.22 cffi==1.15.1 -charset-normalizer==3.0.1 +charset-normalizer==3.3.2 click==8.1.7 click-didyoumean==0.3.0 click-plugins==1.1.1 From d66c885260449f84bacf80deba5bc514fd0f8bcc Mon Sep 17 00:00:00 2001 From: Francesco Novy Date: Wed, 7 Feb 2024 15:13:41 +0100 Subject: [PATCH 106/357] feat: Update browser tracing integration usage (#64761) This updates usage of browser tracing in our onboarding. Couldn't try it yet as I failed to setup local env so far... --- static/app/gettingStartedDocs/ionic/ionic.tsx | 14 ++++++-------- .../app/gettingStartedDocs/javascript/angular.tsx | 9 ++++----- static/app/gettingStartedDocs/javascript/ember.tsx | 12 +++--------- .../app/gettingStartedDocs/javascript/gatsby.tsx | 9 ++++----- .../gettingStartedDocs/javascript/javascript.tsx | 9 ++++----- static/app/gettingStartedDocs/javascript/react.tsx | 9 ++++----- .../app/gettingStartedDocs/javascript/svelte.tsx | 9 ++++----- static/app/gettingStartedDocs/javascript/vue.tsx | 9 ++++----- 8 files changed, 33 insertions(+), 47 deletions(-) diff --git a/static/app/gettingStartedDocs/ionic/ionic.tsx b/static/app/gettingStartedDocs/ionic/ionic.tsx index c1546aab6d061f..a00e498b2e4815 100644 --- a/static/app/gettingStartedDocs/ionic/ionic.tsx +++ b/static/app/gettingStartedDocs/ionic/ionic.tsx @@ -23,14 +23,12 @@ Sentry.init( // We recommend adjusting this value in production. tracesSampleRate: 1.0, integrations: [ - new SentrySibling.BrowserTracing({ - // Set "tracePropagationTargets" to control for which URLs distributed tracing should be enabled - tracePropagationTargets: [ - "localhost", - /^https:\/\/yourserver\.io\/api/, - ], - routingInstrumentation: SentrySibling.routingInstrumentation, - }), + SentrySibling.browserTracingIntegration(), + ], + // Set "tracePropagationTargets" to control for which URLs distributed tracing should be enabled + tracePropagationTargets: [ + "localhost", + /^https:\/\/yourserver\.io\/api/, ], }, // Forward the init method to the sibling Framework. diff --git a/static/app/gettingStartedDocs/javascript/angular.tsx b/static/app/gettingStartedDocs/javascript/angular.tsx index b5a64923a9ae76..3ea81971ab508c 100644 --- a/static/app/gettingStartedDocs/javascript/angular.tsx +++ b/static/app/gettingStartedDocs/javascript/angular.tsx @@ -206,10 +206,7 @@ function getSdkSetupSnippet(params: Params) { integrations: [${ params.isPerformanceSelected ? ` - new Sentry.BrowserTracing({ - // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled - tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/], - }),` + Sentry.browserTracingIntegration(),` : '' }${ params.isReplaySelected @@ -221,7 +218,9 @@ function getSdkSetupSnippet(params: Params) { params.isPerformanceSelected ? 
` // Performance Monitoring - tracesSampleRate: 1.0, // Capture 100% of the transactions` + tracesSampleRate: 1.0, // Capture 100% of the transactions + // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled + tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],` : '' }${ params.isReplaySelected diff --git a/static/app/gettingStartedDocs/javascript/ember.tsx b/static/app/gettingStartedDocs/javascript/ember.tsx index e82421c96c6c3d..76d4d786ded1a8 100644 --- a/static/app/gettingStartedDocs/javascript/ember.tsx +++ b/static/app/gettingStartedDocs/javascript/ember.tsx @@ -26,14 +26,6 @@ import * as Sentry from "@sentry/ember"; Sentry.init({ dsn: "${params.dsn}", integrations: [${ - params.isPerformanceSelected - ? ` - new Sentry.BrowserTracing({ - // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled - tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/], - }),` - : '' - }${ params.isReplaySelected ? ` Sentry.replayIntegration(${getReplayConfigOptions(params.replayOptions)}),` @@ -43,7 +35,9 @@ Sentry.init({ params.isPerformanceSelected ? ` // Performance Monitoring - tracesSampleRate: 1.0, // Capture 100% of the transactions` + tracesSampleRate: 1.0, // Capture 100% of the transactions + // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled + tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],` : '' }${ params.isReplaySelected diff --git a/static/app/gettingStartedDocs/javascript/gatsby.tsx b/static/app/gettingStartedDocs/javascript/gatsby.tsx index ae41859e519c19..31163ddf6e218b 100644 --- a/static/app/gettingStartedDocs/javascript/gatsby.tsx +++ b/static/app/gettingStartedDocs/javascript/gatsby.tsx @@ -25,10 +25,7 @@ Sentry.init({ integrations: [${ params.isPerformanceSelected ? ` - new Sentry.BrowserTracing({ - // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled - tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/], - }),` + Sentry.browserTracingIntegration(),` : '' }${ params.isReplaySelected @@ -40,7 +37,9 @@ Sentry.init({ params.isPerformanceSelected ? ` // Performance Monitoring - tracesSampleRate: 1.0, // Capture 100% of the transactions` + tracesSampleRate: 1.0, // Capture 100% of the transactions + // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled + tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],` : '' }${ params.isReplaySelected diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx index a58adbef3f517a..a9116fa594377e 100644 --- a/static/app/gettingStartedDocs/javascript/javascript.tsx +++ b/static/app/gettingStartedDocs/javascript/javascript.tsx @@ -24,10 +24,7 @@ Sentry.init({ integrations: [${ params.isPerformanceSelected ? ` - new Sentry.BrowserTracing({ - // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled - tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/], - }),` + Sentry.browserTracingIntegration(),` : '' }${ params.isReplaySelected @@ -39,7 +36,9 @@ Sentry.init({ params.isPerformanceSelected ? 
` // Performance Monitoring
-      tracesSampleRate: 1.0, // Capture 100% of the transactions`
+      tracesSampleRate: 1.0, // Capture 100% of the transactions
+      // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
+      tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],`
       : ''
   }${
     params.isReplaySelected
diff --git a/static/app/gettingStartedDocs/javascript/react.tsx b/static/app/gettingStartedDocs/javascript/react.tsx
index f06f2de4f4953a..138cec1fefe44d 100644
--- a/static/app/gettingStartedDocs/javascript/react.tsx
+++ b/static/app/gettingStartedDocs/javascript/react.tsx
@@ -24,10 +24,7 @@ Sentry.init({
   integrations: [${
     params.isPerformanceSelected
       ? `
-        new Sentry.BrowserTracing({
-          // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
-          tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],
-        }),`
+        Sentry.browserTracingIntegration(),`
       : ''
   }${
     params.isReplaySelected
@@ -39,7 +36,9 @@ Sentry.init({
     params.isPerformanceSelected
       ? `
       // Performance Monitoring
-      tracesSampleRate: 1.0, // Capture 100% of the transactions`
+      tracesSampleRate: 1.0, // Capture 100% of the transactions
+      // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
+      tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],`
       : ''
   }${
     params.isReplaySelected
diff --git a/static/app/gettingStartedDocs/javascript/svelte.tsx b/static/app/gettingStartedDocs/javascript/svelte.tsx
index 66503d6fd6ba29..b0fea483b1fa34 100644
--- a/static/app/gettingStartedDocs/javascript/svelte.tsx
+++ b/static/app/gettingStartedDocs/javascript/svelte.tsx
@@ -26,10 +26,7 @@ Sentry.init({
   integrations: [${
     params.isPerformanceSelected
       ? `
-        new Sentry.BrowserTracing({
-          // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
-          tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],
-        }),`
+        Sentry.browserTracingIntegration(),`
       : ''
   }${
     params.isReplaySelected
@@ -41,7 +38,9 @@ Sentry.init({
     params.isPerformanceSelected
       ? `
       // Performance Monitoring
-      tracesSampleRate: 1.0, // Capture 100% of the transactions`
+      tracesSampleRate: 1.0, // Capture 100% of the transactions
+      // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
+      tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],`
       : ''
   }${
     params.isReplaySelected
diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx
index fd63c9685a22fb..d04953da227685 100644
--- a/static/app/gettingStartedDocs/javascript/vue.tsx
+++ b/static/app/gettingStartedDocs/javascript/vue.tsx
@@ -47,10 +47,7 @@ const getSentryInitLayout = (params: Params, siblingOption: string): string => {
   integrations: [${
     params.isPerformanceSelected
       ? `
-        new Sentry.BrowserTracing({
-          // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
-          tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],
-        }),`
+        Sentry.browserTracingIntegration(),`
       : ''
   }${
     params.isReplaySelected
@@ -62,7 +59,9 @@ const getSentryInitLayout = (params: Params, siblingOption: string): string => {
     params.isPerformanceSelected
       ? 
` // Performance Monitoring - tracesSampleRate: 1.0, // Capture 100% of the transactions` + tracesSampleRate: 1.0, // Capture 100% of the transactions + // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled + tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],` : '' }${ params.isReplaySelected From 67cdb97dc47deb071539c40aeebe9fb587c797b2 Mon Sep 17 00:00:00 2001 From: Tor Date: Wed, 7 Feb 2024 15:20:13 +0100 Subject: [PATCH 107/357] feat(ddm): Implement global abuse limits for metrics (#64574) Implements global abuse limit for metric buckets. Relay: https://github.com/getsentry/relay/pull/2928 Epic: https://github.com/getsentry/relay/issues/2716 --- src/sentry/options/defaults.py | 8 ++++++++ src/sentry/quotas/base.py | 10 +++++++++- tests/sentry/quotas/test_base.py | 8 ++++++++ tests/sentry/quotas/test_redis.py | 15 +++++++++++++++ 4 files changed, 40 insertions(+), 1 deletion(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 3eccbc67e4b695..206c3a46a6d62a 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -998,6 +998,14 @@ flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE, ) + +register( + "global-abuse-quota.metric-bucket-limit", + type=Int, + default=0, + flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE, +) + # END ABUSE QUOTAS # Send event messages for specific project IDs to random partitions in Kafka diff --git a/src/sentry/quotas/base.py b/src/sentry/quotas/base.py index 66d9d1bf45c085..6c47bb2233be1f 100644 --- a/src/sentry/quotas/base.py +++ b/src/sentry/quotas/base.py @@ -22,6 +22,7 @@ class QuotaScope(IntEnum): ORGANIZATION = 1 PROJECT = 2 KEY = 3 + GLOBAL = 4 def api_name(self): return self.name.lower() @@ -36,7 +37,7 @@ class AbuseQuota: # Quota categories. categories: list[DataCategory] # Quota Scope. - scope: Literal[QuotaScope.ORGANIZATION, QuotaScope.PROJECT] + scope: Literal[QuotaScope.ORGANIZATION, QuotaScope.PROJECT, QuotaScope.GLOBAL] # Old org option name still used for compatibility reasons, # takes precedence over `option` and `compat_option_sentry`. 
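    # (Side note grounded in the serialization tests added by this patch: a
    # GLOBAL-scoped quota carries no org/project scope id; QuotaConfig(limit=0,
    # scope=QuotaScope.GLOBAL, reason_code="come back!") is serialized as
    # {"limit": 0, "scope": "global", "reasonCode": "come back!"}.)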
compat_option_org: str | None = None @@ -404,6 +405,12 @@ def get_abuse_quotas(self, org): categories=[DataCategory.METRIC_BUCKET], scope=QuotaScope.ORGANIZATION, ), + AbuseQuota( + id="gam", + option="global-abuse-quota.metric-bucket-limit", + categories=[DataCategory.METRIC_BUCKET], + scope=QuotaScope.GLOBAL, + ), ] # XXX: These reason codes are hardcoded in getsentry: @@ -412,6 +419,7 @@ def get_abuse_quotas(self, org): reason_codes = { QuotaScope.ORGANIZATION: "org_abuse_limit", QuotaScope.PROJECT: "project_abuse_limit", + QuotaScope.GLOBAL: "global_abuse_limit", } for quota in abuse_quotas: diff --git a/tests/sentry/quotas/test_base.py b/tests/sentry/quotas/test_base.py index 2b82de259d2968..5712ed50aaacde 100644 --- a/tests/sentry/quotas/test_base.py +++ b/tests/sentry/quotas/test_base.py @@ -159,6 +159,14 @@ def test_check_accept_monitor_checkin(self): "reasonCode": "go_away", }, ), + ( + QuotaConfig(limit=0, scope=QuotaScope.GLOBAL, reason_code="come back!"), + { + "limit": 0, + "scope": "global", + "reasonCode": "come back!", + }, + ), ], ) def test_quotas_to_json(obj, json): diff --git a/tests/sentry/quotas/test_redis.py b/tests/sentry/quotas/test_redis.py index a69431dbbb7062..fb3dc3e87988ae 100644 --- a/tests/sentry/quotas/test_redis.py +++ b/tests/sentry/quotas/test_redis.py @@ -71,6 +71,12 @@ class RedisQuotaTest(TestCase): def quota(self): return RedisQuota() + def test_redis_quota_serialize(self): + assert QuotaScope.ORGANIZATION.api_name() == "organization" + assert QuotaScope.PROJECT.api_name() == "project" + assert QuotaScope.KEY.api_name() == "key" + assert QuotaScope.GLOBAL.api_name() == "global" + def test_abuse_quotas(self): # These legacy options need to be set, otherwise we'll run into # AssertionError: reject-all quotas cannot be tracked @@ -113,6 +119,7 @@ def test_abuse_quotas(self): self.organization.update_option("project-abuse-quota.attachment-limit", 601) self.organization.update_option("project-abuse-quota.session-limit", 602) self.organization.update_option("organization-abuse-quota.metric-bucket-limit", 603) + self.organization.update_option("global-abuse-quota.metric-bucket-limit", 604) with self.feature("organizations:transaction-metrics-extraction"): quotas = self.quota.get_quotas(self.project) @@ -148,6 +155,14 @@ def test_abuse_quotas(self): assert quotas[4].window == 10 assert quotas[4].reason_code == "org_abuse_limit" + assert quotas[5].id == "gam" + assert quotas[5].scope == QuotaScope.GLOBAL + assert quotas[5].scope_id is None + assert quotas[5].categories == {DataCategory.METRIC_BUCKET} + assert quotas[5].limit == 6040 + assert quotas[5].window == 10 + assert quotas[5].reason_code == "global_abuse_limit" + # Let's set the global option for error limits. # Since we already have an org override for it, it shouldn't change anything. 
with self.options({"project-abuse-quota.error-limit": 3}): From bb1fc016bdbbb5fc359025973932adaefd209f25 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:26:46 +0100 Subject: [PATCH 108/357] feat(ddm): timerange limited meta queries (#64645) --- static/app/utils/metrics/index.tsx | 18 +++++++++ static/app/utils/metrics/useBlockMetric.tsx | 2 +- static/app/utils/metrics/useMetricsMeta.tsx | 38 +++++++++++-------- static/app/utils/metrics/useMetricsTags.tsx | 23 +++++++++-- .../views/alerts/rules/metric/mriField.tsx | 4 +- .../metricWidgetCard/inlineEditor.tsx | 3 +- .../dashboards/widgetCard/metricsContext.tsx | 2 +- static/app/views/ddm/context.tsx | 2 +- static/app/views/ddm/dashboardImportModal.tsx | 2 +- static/app/views/ddm/metricSearchBar.tsx | 9 +++-- static/app/views/ddm/queryBuilder.tsx | 8 ++-- .../discover/table/columnEditCollection.tsx | 2 +- .../projectMetrics/projectMetrics.tsx | 2 +- .../projectMetrics/projectMetricsDetails.tsx | 2 +- 14 files changed, 80 insertions(+), 37 deletions(-) diff --git a/static/app/utils/metrics/index.tsx b/static/app/utils/metrics/index.tsx index ad7465e26f50c5..56f60a969fc9b1 100644 --- a/static/app/utils/metrics/index.tsx +++ b/static/app/utils/metrics/index.tsx @@ -31,6 +31,7 @@ import type { MRI, UseCase, } from 'sentry/types/metrics'; +import {statsPeriodToDays} from 'sentry/utils/dates'; import {isMeasurement as isMeasurementName} from 'sentry/utils/discover/fields'; import {generateEventSlug} from 'sentry/utils/discover/urls'; import {getMeasurements} from 'sentry/utils/measurements/measurements'; @@ -419,3 +420,20 @@ export function getMetricsCorrelationSpanUrl( isTransaction ? undefined : spanId ); } + +export function getMetaDateTimeParams(datetime?: PageFilters['datetime']) { + if (datetime?.period) { + if (statsPeriodToDays(datetime.period) < 14) { + return {statsPeriod: '14d'}; + } + return {statsPeriod: datetime.period}; + } + if (datetime?.start && datetime?.end) { + return { + start: moment(datetime.start).toISOString(), + end: moment(datetime.end).toISOString(), + }; + } + + return {statsPeriod: '14d'}; +} diff --git a/static/app/utils/metrics/useBlockMetric.tsx b/static/app/utils/metrics/useBlockMetric.tsx index d1561ad1b2f1e3..2ea24fe1ca2db5 100644 --- a/static/app/utils/metrics/useBlockMetric.tsx +++ b/static/app/utils/metrics/useBlockMetric.tsx @@ -49,7 +49,7 @@ export const useBlockMetric = (project: Project) => { const useCase = getUseCaseFromMRI(data.metricMri); const metaQueryKey = getMetricsMetaQueryKey( slug, - [parseInt(project.id, 10)], + {projects: [parseInt(project.id, 10)]}, useCase ?? 'custom' ); queryClient.setQueryData( diff --git a/static/app/utils/metrics/useMetricsMeta.tsx b/static/app/utils/metrics/useMetricsMeta.tsx index f63e3f0004ea8e..5a635a868af312 100644 --- a/static/app/utils/metrics/useMetricsMeta.tsx +++ b/static/app/utils/metrics/useMetricsMeta.tsx @@ -6,6 +6,8 @@ import useOrganization from 'sentry/utils/useOrganization'; import type {MetricMeta, MRI, UseCase} from '../../types/metrics'; +import {getMetaDateTimeParams} from './index'; + const DEFAULT_USE_CASES = ['sessions', 'transactions', 'custom', 'spans']; export function getMetricsMetaQueryKeys( @@ -14,30 +16,30 @@ export function getMetricsMetaQueryKeys( useCases?: UseCase[] ): ApiQueryKey[] { return ( - useCases?.map(useCase => getMetricsMetaQueryKey(orgSlug, projects, useCase)) ?? [] + useCases?.map(useCase => getMetricsMetaQueryKey(orgSlug, {projects}, useCase)) ?? 
[] ); } export function getMetricsMetaQueryKey( orgSlug: string, - projects: PageFilters['projects'], + {projects, datetime}: Partial, useCase: UseCase ): ApiQueryKey { - return [ - `/organizations/${orgSlug}/metrics/meta/`, - {query: {useCase, project: projects}}, - ]; + const queryParams = projects?.length + ? {useCase, projects, ...getMetaDateTimeParams(datetime)} + : {useCase, ...getMetaDateTimeParams(datetime)}; + return [`/organizations/${orgSlug}/metrics/meta/`, {query: queryParams}]; } function useMetaUseCase( useCase: UseCase, - projects: PageFilters['projects'], + pageFilters: Partial, options: Omit, 'staleTime'> ) { const {slug} = useOrganization(); const apiQueryResult = useApiQuery( - getMetricsMetaQueryKey(slug, projects, useCase), + getMetricsMetaQueryKey(slug, pageFilters, useCase), { ...options, staleTime: 2000, // 2 seconds to cover page load @@ -48,22 +50,26 @@ function useMetaUseCase( } export function useMetricsMeta( - projects: PageFilters['projects'], + pageFilters: Partial, useCases?: UseCase[], filterBlockedMetrics = true ): {data: MetricMeta[]; isLoading: boolean} { const enabledUseCases = useCases ?? DEFAULT_USE_CASES; - const {data: sessionMeta = [], ...sessionsReq} = useMetaUseCase('sessions', projects, { - enabled: enabledUseCases.includes('sessions'), - }); - const {data: txnsMeta = [], ...txnsReq} = useMetaUseCase('transactions', projects, { + const {data: sessionMeta = [], ...sessionsReq} = useMetaUseCase( + 'sessions', + pageFilters, + { + enabled: enabledUseCases.includes('sessions'), + } + ); + const {data: txnsMeta = [], ...txnsReq} = useMetaUseCase('transactions', pageFilters, { enabled: enabledUseCases.includes('transactions'), }); - const {data: customMeta = [], ...customReq} = useMetaUseCase('custom', projects, { + const {data: customMeta = [], ...customReq} = useMetaUseCase('custom', pageFilters, { enabled: enabledUseCases.includes('custom'), }); - const {data: spansMeta = [], ...spansReq} = useMetaUseCase('spans', projects, { + const {data: spansMeta = [], ...spansReq} = useMetaUseCase('spans', pageFilters, { enabled: enabledUseCases.includes('spans'), }); @@ -94,7 +100,7 @@ export function useMetricsMeta( export function useProjectMetric(mri: MRI, projectId: number) { const useCase = getUseCaseFromMRI(mri); - const res = useMetricsMeta([projectId], [useCase ?? 'custom'], false); + const res = useMetricsMeta({projects: [projectId]}, [useCase ?? 'custom'], false); const metricMeta = res.data?.find(({mri: metaMri}) => metaMri === mri); const blockingStatus = metricMeta?.blockingStatus?.[0]; diff --git a/static/app/utils/metrics/useMetricsTags.tsx b/static/app/utils/metrics/useMetricsTags.tsx index beba2f574c4c8e..395818ebc2b9ad 100644 --- a/static/app/utils/metrics/useMetricsTags.tsx +++ b/static/app/utils/metrics/useMetricsTags.tsx @@ -5,18 +5,35 @@ import {useMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta'; import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; +import {getMetaDateTimeParams} from './index'; + export function useMetricsTags( mri: MRI | undefined, - projects: PageFilters['projects'], + pageFilters: Partial, filterBlockedTags = true ) { const {slug} = useOrganization(); const useCase = getUseCaseFromMRI(mri) ?? 'custom'; + const queryParams = pageFilters.projects?.length + ? 
{ + metric: mri, + useCase, + projects: pageFilters.projects, + ...getMetaDateTimeParams(pageFilters.datetime), + } + : { + metric: mri, + useCase, + ...getMetaDateTimeParams(pageFilters.datetime), + }; + const tagsQuery = useApiQuery( [ `/organizations/${slug}/metrics/tags/`, - {query: {metric: mri, useCase, project: projects}}, + { + query: queryParams, + }, ], { enabled: !!mri, @@ -24,7 +41,7 @@ export function useMetricsTags( } ); - const metricMeta = useMetricsMeta(projects, [useCase], false); + const metricMeta = useMetricsMeta(pageFilters, [useCase], false); const blockedTags = metricMeta.data ?.find(meta => meta.mri === mri) diff --git a/static/app/views/alerts/rules/metric/mriField.tsx b/static/app/views/alerts/rules/metric/mriField.tsx index 22a50164ad6ebb..a682fdfb62602a 100644 --- a/static/app/views/alerts/rules/metric/mriField.tsx +++ b/static/app/views/alerts/rules/metric/mriField.tsx @@ -29,7 +29,9 @@ function filterAndSortOperations(operations: string[]) { } function MriField({aggregate, project, onChange}: Props) { - const {data: meta, isLoading} = useMetricsMeta([parseInt(project.id, 10)], ['custom']); + const {data: meta, isLoading} = useMetricsMeta({projects: [parseInt(project.id, 10)]}, [ + 'custom', + ]); const metaArr = useMemo(() => { return meta.map( diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx index f3ba2c8675d82e..93d4e4dc1e0ca8 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/inlineEditor.tsx @@ -74,7 +74,7 @@ export const InlineEditor = memo(function InlineEditor({ const [editingName, setEditingName] = useState(false); const {metricsMeta: meta, isLoading: isMetaLoading} = useMetricsDashboardContext(); - const {data: tags = []} = useMetricsTags(metricsQuery.mri, projects); + const {data: tags = []} = useMetricsTags(metricsQuery.mri, {projects}); const displayedMetrics = useMemo(() => { const isSelected = (metric: MetricMeta) => metric.mri === metricsQuery.mri; @@ -239,7 +239,6 @@ export const InlineEditor = memo(function InlineEditor({ {!editingName && ( id.toString())} mri={metricsQuery.mri} disabled={!metricsQuery.mri} onChange={query => { diff --git a/static/app/views/dashboards/widgetCard/metricsContext.tsx b/static/app/views/dashboards/widgetCard/metricsContext.tsx index 80e5fb211571c3..46b2a22d0882ef 100644 --- a/static/app/views/dashboards/widgetCard/metricsContext.tsx +++ b/static/app/views/dashboards/widgetCard/metricsContext.tsx @@ -19,7 +19,7 @@ export function useMetricsDashboardContext() { export function MetricsDashboardContextProvider({children}: {children: React.ReactNode}) { const pageFilters = usePageFilters().selection; - const metricsMetaQuery = useMetricsMeta(pageFilters.projects); + const metricsMetaQuery = useMetricsMeta(pageFilters); const contextValue = useMemo(() => { return { diff --git a/static/app/views/ddm/context.tsx b/static/app/views/ddm/context.tsx index 9c029e4c79b72e..e6b81213ba78ee 100644 --- a/static/app/views/ddm/context.tsx +++ b/static/app/views/ddm/context.tsx @@ -210,7 +210,7 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { const [highlightedSampleId, setHighlightedSampleId] = useState(); const pageFilters = usePageFilters().selection; - const {data: metricsMeta, isLoading} = useMetricsMeta(pageFilters.projects); + const {data: metricsMeta, isLoading} = 
useMetricsMeta(pageFilters); const focusAreaSelection = useMemo( () => router.location.query.focusArea && JSON.parse(router.location.query.focusArea), diff --git a/static/app/views/ddm/dashboardImportModal.tsx b/static/app/views/ddm/dashboardImportModal.tsx index 3f6b32970c20c3..0223e56a542404 100644 --- a/static/app/views/ddm/dashboardImportModal.tsx +++ b/static/app/views/ddm/dashboardImportModal.tsx @@ -53,7 +53,7 @@ function DashboardImportModal({Header, Body, Footer}: ModalRenderProps) { const {selection} = usePageFilters(); // we want to get all custom metrics for organization - const {data: metricsMeta} = useMetricsMeta([-1], ['custom']); + const {data: metricsMeta} = useMetricsMeta({projects: [-1]}, ['custom']); const organization = useOrganization(); diff --git a/static/app/views/ddm/metricSearchBar.tsx b/static/app/views/ddm/metricSearchBar.tsx index fcdb59917a2aa2..25e3b40d15d65d 100644 --- a/static/app/views/ddm/metricSearchBar.tsx +++ b/static/app/views/ddm/metricSearchBar.tsx @@ -23,9 +23,9 @@ import usePageFilters from 'sentry/utils/usePageFilters'; interface MetricSearchBarProps extends Partial { onChange: (value: string) => void; - projectIds: string[]; disabled?: boolean; mri?: MRI; + projectIds?: string[]; query?: string; } @@ -73,11 +73,14 @@ export function MetricSearchBar({ const api = useApi(); const {selection} = usePageFilters(); const projectIdNumbers = useMemo( - () => projectIds.map(id => parseInt(id, 10)), + () => projectIds?.map(id => parseInt(id, 10)), [projectIds] ); - const {data: tags = EMPTY_ARRAY, isLoading} = useMetricsTags(mri, projectIdNumbers); + const {data: tags = EMPTY_ARRAY, isLoading} = useMetricsTags(mri, { + ...selection, + projects: projectIdNumbers, + }); const supportedTags: TagCollection = useMemo( () => tags.reduce((acc, tag) => ({...acc, [tag.key]: tag}), {}), diff --git a/static/app/views/ddm/queryBuilder.tsx b/static/app/views/ddm/queryBuilder.tsx index d8bef3372fcd48..d625c1245130d4 100644 --- a/static/app/views/ddm/queryBuilder.tsx +++ b/static/app/views/ddm/queryBuilder.tsx @@ -25,10 +25,10 @@ import type { } from 'sentry/utils/metrics/types'; import {useBreakpoints} from 'sentry/utils/metrics/useBreakpoints'; import {useIncrementQueryMetric} from 'sentry/utils/metrics/useIncrementQueryMetric'; -import {useMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta'; import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags'; import {middleEllipsis} from 'sentry/utils/middleEllipsis'; import useKeyPress from 'sentry/utils/useKeyPress'; +import {useDDMContext} from 'sentry/views/ddm/context'; import {MetricSearchBar} from 'sentry/views/ddm/metricSearchBar'; type QueryBuilderProps = { @@ -58,7 +58,7 @@ export const QueryBuilder = memo(function QueryBuilder({ powerUserMode, onChange, }: QueryBuilderProps) { - const {data: meta} = useMetricsMeta(projects); + const {metricsMeta: meta} = useDDMContext(); const mriModeKeyPressed = useKeyPress('`', undefined, true); const [mriMode, setMriMode] = useState(powerUserMode); // power user mode that shows raw MRI instead of metrics names const breakpoints = useBreakpoints(); @@ -70,7 +70,7 @@ export const QueryBuilder = memo(function QueryBuilder({ // eslint-disable-next-line react-hooks/exhaustive-deps }, [mriModeKeyPressed, powerUserMode]); - const {data: tags = []} = useMetricsTags(metricsQuery.mri, projects); + const {data: tags = []} = useMetricsTags(metricsQuery.mri, {projects}); const displayedMetrics = useMemo(() => { if (mriMode) { @@ -202,8 +202,6 @@ export const QueryBuilder = 
memo(function QueryBuilder({ id.toString())} mri={metricsQuery.mri} disabled={!metricsQuery.mri} onChange={query => { diff --git a/static/app/views/discover/table/columnEditCollection.tsx b/static/app/views/discover/table/columnEditCollection.tsx index 3bd4f924ca668d..7de9faef50d901 100644 --- a/static/app/views/discover/table/columnEditCollection.tsx +++ b/static/app/views/discover/table/columnEditCollection.tsx @@ -693,7 +693,7 @@ interface MetricTagQueryFieldProps const EMPTY_ARRAY = []; function MetricTagQueryField({mri, ...props}: MetricTagQueryFieldProps) { const {projects} = usePageFilters().selection; - const {data = EMPTY_ARRAY} = useMetricsTags(mri as MRI | undefined, projects); + const {data = EMPTY_ARRAY} = useMetricsTags(mri as MRI | undefined, {projects}); const fieldOptions = useMemo(() => { return data.reduce( diff --git a/static/app/views/settings/projectMetrics/projectMetrics.tsx b/static/app/views/settings/projectMetrics/projectMetrics.tsx index de7973e11e4718..7d773b18ad3a3f 100644 --- a/static/app/views/settings/projectMetrics/projectMetrics.tsx +++ b/static/app/views/settings/projectMetrics/projectMetrics.tsx @@ -44,7 +44,7 @@ enum BlockingStatusTab { function ProjectMetrics({project, location}: Props) { const {data: meta, isLoading} = useMetricsMeta( - [parseInt(project.id, 10)], + {projects: [parseInt(project.id, 10)]}, ['custom'], false ); diff --git a/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx b/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx index 471c1d95c8e1e5..ff0dc008a56984 100644 --- a/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx +++ b/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx @@ -67,7 +67,7 @@ function ProjectMetricsDetails({project, params, organization}: Props) { const { data: {blockingStatus}, } = useProjectMetric(mri, projectId); - const {data: tagsData = []} = useMetricsTags(mri, projectIds, false); + const {data: tagsData = []} = useMetricsTags(mri, {projects: projectIds}, false); const isBlockedMetric = blockingStatus?.isBlocked ?? 
false; const blockMetricMutation = useBlockMetric(project); From d2ff2e24e1cb40017ac3239cc3c95ad8f64e55ef Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Wed, 7 Feb 2024 09:30:27 -0500 Subject: [PATCH 109/357] ref(similarity-embedding): Add analytics for similar issue count (#64604) Add analytic event for number of similar issues returned with exception similarity value of over 0.99 --- src/sentry/api/analytics.py | 13 ++++ .../group_similar_issues_embeddings.py | 19 +++++- .../test_group_similar_issues_embeddings.py | 67 ++++++++++++++++++- 3 files changed, 97 insertions(+), 2 deletions(-) diff --git a/src/sentry/api/analytics.py b/src/sentry/api/analytics.py index 581abd17ce91dc..e515a7a4b6abe8 100644 --- a/src/sentry/api/analytics.py +++ b/src/sentry/api/analytics.py @@ -34,6 +34,19 @@ class FunctionTimerEvent(analytics.Event): ) +class GroupSimilarIssuesEmbeddingsCountEvent(analytics.Event): + type = "group_similar_issues_embeddings.count" + + attributes = ( + analytics.Attribute("organization_id"), + analytics.Attribute("project_id"), + analytics.Attribute("group_id"), + analytics.Attribute("user_id"), + analytics.Attribute("count_over_threshold", required=False), + ) + + analytics.register(OrganizationSavedSearchCreatedEvent) analytics.register(OrganizationSavedSearchDeletedEvent) analytics.register(FunctionTimerEvent) +analytics.register(GroupSimilarIssuesEmbeddingsCountEvent) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index 35ad703c23e434..78b89ff970c84c 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -6,7 +6,7 @@ from rest_framework.request import Request from rest_framework.response import Response -from sentry import features +from sentry import analytics, features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint @@ -122,6 +122,23 @@ def get(self, request: Request, group) -> Response: results = get_similar_issues_embeddings(similar_issues_params) + analytics.record( + "group_similar_issues_embeddings.count", + organization_id=group.organization.id, + project_id=group.project.id, + group_id=group.id, + count_over_threshold=len( + [ + result["stacktrace_similarity"] # type: ignore + for result in results["responses"] + if result["stacktrace_similarity"] > 0.99 # type: ignore + ] + ) + if results["responses"] + else 0, + user_id=request.user.id, + ) + if not results["responses"]: return Response([]) formatted_results = self.get_formatted_results(results["responses"], request.user) diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 7f887f6068052d..772552070db65e 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -165,6 +165,61 @@ def test_simple(self, mock_seer_request): headers={"Content-Type": "application/json;charset=utf-8"}, ) + @with_feature("projects:similarity-embeddings") + @mock.patch("sentry.analytics.record") + @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + def test_multiple(self, mock_seer_request, mock_record): + similar_group_over_threshold = self.create_group(project=self.project) + similar_group_under_threshold = 
self.create_group(project=self.project) + seer_return_value: SimilarIssuesEmbeddingsResponse = { + "responses": [ + { + "message_similarity": 0.95, + "parent_group_id": self.similar_group.id, + "should_group": True, + "stacktrace_similarity": 0.998, # Over threshold + }, + { + "message_similarity": 0.95, + "parent_group_id": similar_group_over_threshold.id, + "should_group": True, + "stacktrace_similarity": 0.998, + }, + { + "message_similarity": 0.95, + "parent_group_id": similar_group_under_threshold.id, + "should_group": False, + "stacktrace_similarity": 0.95, + }, + ] + } + mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8")) + + response = self.client.get( + self.path, + data={"k": "1", "threshold": "0.99"}, + ) + + assert response.data == self.get_expected_response( + [ + self.similar_group.id, + similar_group_over_threshold.id, + similar_group_under_threshold.id, + ], + [0.95, 0.95, 0.95], + [0.998, 0.998, 0.95], + ["Yes", "Yes", "No"], + ) + + mock_record.assert_called_with( + "group_similar_issues_embeddings.count", + organization_id=self.org.id, + project_id=self.project.id, + group_id=self.group.id, + count_over_threshold=2, + user_id=self.user.id, + ) + @with_feature("projects:similarity-embeddings") @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") def test_invalid_return(self, mock_seer_request): @@ -195,12 +250,22 @@ def test_invalid_return(self, mock_seer_request): ) @with_feature("projects:similarity-embeddings") + @mock.patch("sentry.analytics.record") @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") - def test_empty_return(self, mock_seer_request): + def test_empty_return(self, mock_seer_request, mock_record): mock_seer_request.return_value = HTTPResponse([]) response = self.client.get(self.path) assert response.data == [] + mock_record.assert_called_with( + "group_similar_issues_embeddings.count", + organization_id=self.org.id, + project_id=self.project.id, + group_id=self.group.id, + count_over_threshold=0, + user_id=self.user.id, + ) + @with_feature("projects:similarity-embeddings") @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") def test_no_optional_params(self, mock_seer_request): From fd6083f7f6d635501356eb45d448262a0c9613bc Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 7 Feb 2024 09:53:09 -0500 Subject: [PATCH 110/357] fix(hybridcloud) Fix relation traversal in alert details (#64596) Much like the changes in #61435 the organization level endpoint needs similar changes. 
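Concretely, the traversal problem and the workaround reduce to this pattern (a
minimal sketch, not the endpoint code itself; `app_service.get_many` matches
the call in the diff below, while the import paths and the stub `action` are
assumptions for illustration):

```python
from sentry.models.integrations.sentry_app_installation import SentryAppInstallation
from sentry.services.hybrid_cloud.app import app_service  # import path assumed

# `action` stands in for one serialized trigger action from the alert rule
# payload; in the endpoint it comes from serialized_rule["triggers"].
action: dict = {"_sentry_app_installation": {"sentry_app_id": 1}}

# Rebuilding the installation from serialized data yields a detached ORM object.
installation = SentryAppInstallation(**action["_sentry_app_installation"])

# In a region silo, `installation.sentry_app` is a cross-silo relation, so
# traversing it (e.g. installation.sentry_app.application.client_id) raises.
# Resolve the app over RPC instead, then stitch detached objects together:
sentry_app_map = {
    install.sentry_app.id: install.sentry_app
    for install in app_service.get_many(
        filter=dict(app_ids=[installation.sentry_app_id])
    )
}
```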
Fixes SENTRY-2J54 --- .../organization_alert_rule_details.py | 45 ++++++++++++++++ .../test_organization_alert_rule_details.py | 53 +++++++++++++++++++ 2 files changed, 98 insertions(+) diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_details.py b/src/sentry/incidents/endpoints/organization_alert_rule_details.py index 08e268c1056b1d..9e7a49d20b100f 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_details.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_details.py @@ -22,6 +22,7 @@ ) from sentry.apidocs.examples.metric_alert_examples import MetricAlertExamples from sentry.apidocs.parameters import GlobalParams, MetricAlertParams +from sentry.constants import SentryAppStatus from sentry.incidents.endpoints.bases import OrganizationAlertRuleEndpoint from sentry.incidents.logic import ( AlreadyDeletedError, @@ -31,6 +32,8 @@ from sentry.incidents.serializers import AlertRuleSerializer as DrfAlertRuleSerializer from sentry.incidents.utils.sentry_apps import trigger_sentry_app_action_creators_for_incidents from sentry.integrations.slack.utils import RedisRuleStatus +from sentry.models.apiapplication import ApiApplication +from sentry.models.integrations.sentry_app import SentryApp from sentry.models.integrations.sentry_app_component import SentryAppComponent from sentry.models.integrations.sentry_app_installation import SentryAppInstallation from sentry.models.project import Project @@ -47,12 +50,54 @@ def fetch_alert_rule(request: Request, organization, alert_rule): alert_rule, request.user, DetailedAlertRuleSerializer(expand=expand) ) + # Fetch sentryapp instances to avoid impossible relationship traversal in region silo mode. + sentry_app_ids: list[int] = [] + for trigger in serialized_rule.get("triggers", []): + for action in trigger.get("actions", []): + if action.get("_sentry_app_installation"): + sentry_app_ids.append( + action.get("_sentry_app_installation", {}).get("sentry_app_id", None) + ) + if sentry_app_ids: + sentry_app_map = { + install.sentry_app.id: install.sentry_app + for install in app_service.get_many(filter=dict(app_ids=sentry_app_ids)) + } + # Prepare AlertRuleTriggerActions that are SentryApp components errors = [] for trigger in serialized_rule.get("triggers", []): for action in trigger.get("actions", []): if action.get("_sentry_app_installation") and action.get("_sentry_app_component"): + # TODO(hybridcloud) This is nasty and should be fixed. + # Because all of the prepare_* functions currently operate on ORM + # records we need to convert our RpcSentryApp and dict data into detached + # ORM models and stitch together relations used in preparing UI components. installation = SentryAppInstallation(**action.get("_sentry_app_installation", {})) + rpc_app = sentry_app_map.get(installation.sentry_app_id) + installation.sentry_app = SentryApp( + id=rpc_app.id, + scope_list=rpc_app.scope_list, + application_id=rpc_app.application_id, + application=ApiApplication( + id=rpc_app.application.id, + client_id=rpc_app.application.client_id, + client_secret=rpc_app.application.client_secret, + ), + proxy_user_id=rpc_app.proxy_user_id, + owner_id=rpc_app.owner_id, + name=rpc_app.name, + slug=rpc_app.slug, + uuid=rpc_app.uuid, + events=rpc_app.events, + webhook_url=rpc_app.webhook_url, + status=SentryAppStatus.as_int(rpc_app.status), + metadata=rpc_app.metadata, + ) + # The api_token_id field is nulled out to prevent relation traversal as these + # ORM objects are turned back into RPC objects. 
+ installation.api_token_id = None + component = installation.prepare_ui_component( SentryAppComponent(**action.get("_sentry_app_component")), None, diff --git a/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py b/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py index 847c81c050b824..b6e29406820616 100644 --- a/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py +++ b/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py @@ -191,6 +191,59 @@ def test_expand_latest_incident(self): assert resp.data["latestIncident"]["id"] == str(incident.id) assert "latestIncident" not in no_expand_resp.data + @responses.activate + def test_with_sentryapp_success(self): + self.superuser = self.create_user("admin@localhost", is_superuser=True) + self.login_as(user=self.superuser) + self.create_team(organization=self.organization, members=[self.superuser]) + + sentry_app = self.create_sentry_app( + organization=self.organization, + published=True, + verify_install=False, + name="Super Awesome App", + schema={"elements": [self.create_alert_rule_action_schema()]}, + ) + self.create_sentry_app_installation( + slug=sentry_app.slug, organization=self.organization, user=self.superuser + ) + rule = self.create_alert_rule() + trigger = self.create_alert_rule_trigger(rule, "hi", 1000) + self.create_alert_rule_trigger_action( + alert_rule_trigger=trigger, + target_identifier=sentry_app.id, + type=AlertRuleTriggerAction.Type.SENTRY_APP, + target_type=AlertRuleTriggerAction.TargetType.SENTRY_APP, + sentry_app=sentry_app, + sentry_app_config=[ + {"name": "title", "value": "An alert"}, + {"summary": "Something happened here..."}, + {"name": "points", "value": "3"}, + {"name": "assignee", "value": "Nisanthan"}, + ], + ) + + responses.add( + responses.GET, + "https://example.com/sentry/members", + json=[ + {"value": "bob", "label": "Bob"}, + {"value": "jess", "label": "Jess"}, + ], + status=200, + ) + with self.feature("organizations:incidents"): + resp = self.get_response(self.organization.slug, rule.id) + + assert resp.status_code == 200 + assert len(responses.calls) == 1 + assert "errors" not in resp.data + + action = resp.data["triggers"][0]["actions"][0] + assert "select" == action["formFields"]["optional_fields"][-1]["type"] + assert "sentry/members" in action["formFields"]["optional_fields"][-1]["uri"] + assert "bob" == action["formFields"]["optional_fields"][-1]["choices"][0][0] + @responses.activate def test_with_unresponsive_sentryapp(self): self.superuser = self.create_user("admin@localhost", is_superuser=True) From 0de481fe5bba4f1924efa04854bef25a351411a2 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 7 Feb 2024 16:03:09 +0100 Subject: [PATCH 111/357] fix(onboarding): vue duplicated source maps instructions (#64780) - closes https://github.com/getsentry/sentry/issues/64772 --- static/app/gettingStartedDocs/javascript/vue.tsx | 6 ------ 1 file changed, 6 deletions(-) diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx index d04953da227685..b6801a88b1ee6e 100644 --- a/static/app/gettingStartedDocs/javascript/vue.tsx +++ b/static/app/gettingStartedDocs/javascript/vue.tsx @@ -167,12 +167,6 @@ const onboarding: OnboardingConfig = { }; export const nextSteps = [ - { - id: 'source-maps', - name: t('Source Maps'), - description: t('Learn how to enable readable stack traces in your Sentry errors.'), - link: 
'https://docs.sentry.io/platforms/javascript/guides/vue/sourcemaps/', - }, { id: 'vue-features', name: t('Vue Features'), From 09cc8bce41443e10912d5409fa7e7579f9ecf40d Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 7 Feb 2024 10:17:03 -0500 Subject: [PATCH 112/357] feat(spans): Add scaffolding to buffer spans for perf issues (#64435) Performance issue detection requires parent-child-sibling relationships of spans. To facilitate this in a span streaming world, the plan is to hold spans in a redis buffer. The first time we see a segment id on a span, we create a new key in redis and push the span to a list with a TTL of 5 minutes. If it's the first time we see that segment, we also queue a celery task with a countdown of two minutes. The task is currently a no-op but will eventually fetch the spans for a given segment, build a "transaction" type payload and put it through the performance detection pipeline. --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- src/sentry/conf/server.py | 6 + src/sentry/consumers/__init__.py | 4 + src/sentry/runner/commands/devserver.py | 3 + src/sentry/spans/buffer/redis.py | 36 +++++ src/sentry/spans/consumers/__init__.py | 0 .../spans/consumers/process/__init__.py | 0 src/sentry/spans/consumers/process/factory.py | 56 ++++++++ src/sentry/tasks/spans.py | 10 ++ src/sentry/utils/sdk.py | 1 + tests/sentry/spans/buffer/__init__.py | 0 tests/sentry/spans/buffer/test_redis.py | 22 +++ tests/sentry/spans/consumers/__init__.py | 0 .../spans/consumers/process/__init__.py | 0 .../spans/consumers/process/test_factory.py | 128 ++++++++++++++++++ 14 files changed, 266 insertions(+) create mode 100644 src/sentry/spans/buffer/redis.py create mode 100644 src/sentry/spans/consumers/__init__.py create mode 100644 src/sentry/spans/consumers/process/__init__.py create mode 100644 src/sentry/spans/consumers/process/factory.py create mode 100644 src/sentry/tasks/spans.py create mode 100644 tests/sentry/spans/buffer/__init__.py create mode 100644 tests/sentry/spans/buffer/test_redis.py create mode 100644 tests/sentry/spans/consumers/__init__.py create mode 100644 tests/sentry/spans/consumers/process/__init__.py create mode 100644 tests/sentry/spans/consumers/process/test_factory.py diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 59bc207504ad1e..d30f94113d572c 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -127,6 +127,7 @@ def env( SENTRY_STATISTICAL_DETECTORS_REDIS_CLUSTER = "default" SENTRY_METRIC_META_REDIS_CLUSTER = "default" SENTRY_ESCALATION_THRESHOLDS_REDIS_CLUSTER = "default" +SENTRY_SPAN_BUFFER_CLUSTER = "default" # Hosts that are allowed to use system token authentication. 
# http://en.wikipedia.org/wiki/Reserved_IP_addresses @@ -764,6 +765,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "sentry.tasks.reprocessing2", "sentry.tasks.sentry_apps", "sentry.tasks.servicehooks", + "sentry.tasks.spans", "sentry.tasks.store", "sentry.tasks.symbolication", "sentry.tasks.unmerge", @@ -925,6 +927,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: Queue("nudge.invite_missing_org_members", routing_key="invite_missing_org_members"), Queue("auto_resolve_issues", routing_key="auto_resolve_issues"), Queue("on_demand_metrics", routing_key="on_demand_metrics"), + Queue("spans.process_segment", routing_key="spans.process_segment"), ] from celery.schedules import crontab @@ -2748,6 +2751,9 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # This flag activates indexed spans backend in the development environment SENTRY_USE_SPANS = False +# This flag activates spans consumer in the sentry backend in development environment +SENTRY_USE_SPANS_BUFFER = False + # This flag activates consuming issue platform occurrence data in the development environment SENTRY_USE_ISSUE_OCCURRENCE = False diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index a7ddbabaf59bb8..3b0c485d9d6e56 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -323,6 +323,10 @@ def ingest_monitors_options() -> list[click.Option]: "synchronize_commit_group_default": "snuba-consumers", "click_options": _POST_PROCESS_FORWARDER_OPTIONS, }, + "process-spans": { + "topic": settings.KAFKA_SNUBA_SPANS, + "strategy_factory": "sentry.spans.consumers.process.factory.ProcessSpansStrategyFactory", + }, **settings.SENTRY_KAFKA_CONSUMERS, } diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index 4e9358ab577e23..99e2b1b3f76bca 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -331,6 +331,9 @@ def devserver( if settings.SENTRY_USE_PROFILING: kafka_consumers.add("ingest-profiles") + if settings.SENTRY_USE_SPANS_BUFFER: + kafka_consumers.add("process-spans") + if occurrence_ingest: kafka_consumers.add("ingest-occurrences") diff --git a/src/sentry/spans/buffer/redis.py b/src/sentry/spans/buffer/redis.py new file mode 100644 index 00000000000000..aa9e036e5c65f9 --- /dev/null +++ b/src/sentry/spans/buffer/redis.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from django.conf import settings +from sentry_redis_tools.clients import RedisCluster, StrictRedis + +from sentry.utils import redis + +SEGMENT_TTL = 5 * 60 # 5 min TTL in seconds + + +def get_redis_client() -> RedisCluster | StrictRedis: + return redis.redis_clusters.get(settings.SENTRY_SPAN_BUFFER_CLUSTER) + + +def get_segment_key(project_id: str | int, segment_id: str) -> str: + return f"segment:{segment_id}:{project_id}:process-segment" + + +class RedisSpansBuffer: + def __init__(self): + self.client: RedisCluster | StrictRedis = get_redis_client() + + def read_segment(self, project_id: str | int, segment_id: str) -> list[str | bytes]: + key = get_segment_key(project_id, segment_id) + + return self.client.lrange(key, 0, -1) or [] + + def write_span(self, project_id: str | int, segment_id: str, span: bytes) -> bool: + key = get_segment_key(project_id, segment_id) + length = self.client.rpush(key, span) + new_key = length == 1 + + if new_key: + self.client.expire(key, SEGMENT_TTL) + + return new_key diff --git a/src/sentry/spans/consumers/__init__.py b/src/sentry/spans/consumers/__init__.py new file 
mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/spans/consumers/process/__init__.py b/src/sentry/spans/consumers/process/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/spans/consumers/process/factory.py b/src/sentry/spans/consumers/process/factory.py new file mode 100644 index 00000000000000..4ace1e43510f7f --- /dev/null +++ b/src/sentry/spans/consumers/process/factory.py @@ -0,0 +1,56 @@ +import logging +from collections.abc import Mapping +from typing import Any + +from arroyo.backends.kafka.consumer import KafkaPayload +from arroyo.processing.strategies.abstract import ProcessingStrategy, ProcessingStrategyFactory +from arroyo.processing.strategies.commit import CommitOffsets +from arroyo.processing.strategies.run_task import RunTask +from arroyo.types import BrokerValue, Commit, Message, Partition +from sentry_kafka_schemas import get_codec +from sentry_kafka_schemas.codecs import Codec +from sentry_kafka_schemas.schema_types.snuba_spans_v1 import SpanEvent + +from sentry.spans.buffer.redis import RedisSpansBuffer +from sentry.tasks.spans import process_segment + +logger = logging.getLogger(__name__) +SPAN_SCHEMA: Codec[SpanEvent] = get_codec("snuba-spans") + +PROCESS_SEGMENT_DELAY = 2 * 60 # 2 minutes + + +def _deserialize_span(value: bytes) -> Mapping[str, Any]: + return SPAN_SCHEMA.decode(value) + + +def process_message(message: Message[KafkaPayload]): + assert isinstance(message.value, BrokerValue) + try: + span = _deserialize_span(message.payload.value) + segment_id = span["segment_id"] + project_id = span["project_id"] + except Exception: + logger.exception("Failed to process span payload") + return + + client = RedisSpansBuffer() + new_segment = client.write_span(project_id, segment_id, message.payload.value) + if new_segment: + # This function currently does nothing. + process_segment.apply_async( + args=[project_id, segment_id], + countdown=PROCESS_SEGMENT_DELAY, + ) + + +class ProcessSpansStrategyFactory(ProcessingStrategyFactory[KafkaPayload]): + def create_with_partitions( + self, + commit: Commit, + partitions: Mapping[Partition, int], + ) -> ProcessingStrategy[KafkaPayload]: + return RunTask( + function=process_message, + next_step=CommitOffsets(commit), + ) diff --git a/src/sentry/tasks/spans.py b/src/sentry/tasks/spans.py new file mode 100644 index 00000000000000..fa665b6e0cfd1f --- /dev/null +++ b/src/sentry/tasks/spans.py @@ -0,0 +1,10 @@ +from sentry.tasks.base import instrumented_task + + +@instrumented_task( + name="sentry.tasks.spans.process_segment", + queue="spans.process_segment", + max_retries=0, +) +def process_segment(project_id, segment_id): + return diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 9a9978f20585c6..6f02c598f3faef 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -39,6 +39,7 @@ "sentry/event_manager.py", "sentry/tasks/process_buffer.py", "sentry/ingest/consumer/processors.py", + "sentry/tasks/spans.py", # This consumer lives outside of sentry but is just as unsafe. 
"outcomes_consumer.py", ) diff --git a/tests/sentry/spans/buffer/__init__.py b/tests/sentry/spans/buffer/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/spans/buffer/test_redis.py b/tests/sentry/spans/buffer/test_redis.py new file mode 100644 index 00000000000000..a5e968a527d327 --- /dev/null +++ b/tests/sentry/spans/buffer/test_redis.py @@ -0,0 +1,22 @@ +from unittest import mock + +from sentry.spans.buffer.redis import RedisSpansBuffer, get_redis_client + + +class TestRedisSpansBuffer: + def test_first_span_in_segment_calls_expire(self): + buffer = RedisSpansBuffer() + with mock.patch.object(buffer, "client", new=get_redis_client()) as mock_client: + mock_client.expire = mock.Mock() + + buffer.write_span("bar", "foo", b"span data") + mock_client.expire.assert_called_once_with("segment:foo:bar:process-segment", 300) + + def test_ttl_not_set_repeatedly(self): + buffer = RedisSpansBuffer() + buffer.write_span("bar", "foo", b"span data") + with mock.patch.object(buffer, "client", new=get_redis_client()) as mock_client: + mock_client.expire = mock.Mock() + buffer.write_span("bar", "foo", b"other span data") + + mock_client.expire.assert_not_called diff --git a/tests/sentry/spans/consumers/__init__.py b/tests/sentry/spans/consumers/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/spans/consumers/process/__init__.py b/tests/sentry/spans/consumers/process/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/spans/consumers/process/test_factory.py b/tests/sentry/spans/consumers/process/test_factory.py new file mode 100644 index 00000000000000..fba932c604909b --- /dev/null +++ b/tests/sentry/spans/consumers/process/test_factory.py @@ -0,0 +1,128 @@ +from datetime import datetime +from unittest import mock + +from arroyo.backends.kafka import KafkaPayload +from arroyo.types import BrokerValue, Message, Partition, Topic +from django.conf import settings + +from sentry.spans.buffer.redis import get_redis_client +from sentry.spans.consumers.process.factory import ProcessSpansStrategyFactory +from sentry.utils import json + + +def build_mock_span(**kwargs): + span = { + "duration_ms": 0, + "event_id": "72fcea47d44a444fb132f8d462eeb0b4", + "exclusive_time_ms": 0.006, + "is_segment": False, + "parent_span_id": "93f0e87ad9cc709e", + "profile_id": "7ce060d7ea62432b8355bc9e612676e4", + "project_id": 1, + "received": 1706734067.029479, + "retention_days": 90, + "segment_id": "ace31e54d65652aa", + "sentry_tags": { + "environment": "development", + "op": "relay_fetch_org_options", + "release": "backend@24.2.0.dev0+df7615f2ff7dc3c8802f806477f920bb934bd198", + "transaction": "/api/0/relays/projectconfigs/", + "transaction.method": "POST", + "transaction.op": "http.server", + "user": "ip:127.0.0.1", + }, + "span_id": "95acbe6d30a66717", + "start_timestamp_ms": 1706734066840, + "trace_id": "8e6f22e6169545cc963255d0f29cb76b", + } + + span.update(**kwargs) + return span + + +def build_mock_message(data, topic=None): + message = mock.Mock() + message.value.return_value = json.dumps(data) + if topic: + message.topic.return_value = topic + return message + + +@mock.patch("sentry.spans.consumers.process.factory.process_segment") +def test_consumer_pushes_to_redis_and_schedules_task(process_segment): + redis_client = get_redis_client() + + topic = Topic(settings.KAFKA_SNUBA_SPANS) + partition = Partition(topic, 0) + strategy = ProcessSpansStrategyFactory().create_with_partitions( + 
+        commit=mock.Mock(),
+        partitions={},
+    )
+
+    span_data = build_mock_span()
+    message = build_mock_message(span_data, topic)
+
+    strategy.submit(
+        Message(
+            BrokerValue(
+                KafkaPayload(b"key", message.value().encode("utf-8"), []),
+                partition,
+                1,
+                datetime.now(),
+            )
+        )
+    )
+
+    strategy.poll()
+    strategy.join(1)
+    strategy.terminate()
+    assert redis_client.lrange("segment:ace31e54d65652aa:1:process-segment", 0, -1) == [
+        message.value()
+    ]
+    process_segment.apply_async.assert_called_once_with(args=[1, "ace31e54d65652aa"], countdown=120)
+
+
+@mock.patch("sentry.spans.consumers.process.factory.process_segment")
+def test_second_span_in_segment_does_not_queue_task(process_segment):
+    redis_client = get_redis_client()
+
+    topic = Topic(settings.KAFKA_SNUBA_SPANS)
+    partition = Partition(topic, 0)
+    strategy = ProcessSpansStrategyFactory().create_with_partitions(
+        commit=mock.Mock(),
+        partitions={},
+    )
+
+    span_data = build_mock_span()
+    message = build_mock_message(span_data, topic)
+
+    strategy.submit(
+        Message(
+            BrokerValue(
+                KafkaPayload(b"key", message.value().encode("utf-8"), []),
+                partition,
+                1,
+                datetime.now(),
+            )
+        )
+    )
+
+    strategy.submit(
+        Message(
+            BrokerValue(
+                KafkaPayload(b"key", message.value().encode("utf-8"), []),
+                partition,
+                1,
+                datetime.now(),
+            )
+        )
+    )
+
+    strategy.poll()
+    strategy.join(1)
+    strategy.terminate()
+    assert redis_client.lrange("segment:ace31e54d65652aa:1:process-segment", 0, -1) == [
+        message.value(),
+        message.value(),
+    ]
+    process_segment.apply_async.assert_called_once_with(args=[1, "ace31e54d65652aa"], countdown=120)

From aaa760531b9d19027276e2cfe18bdff2563728ba Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Wed, 7 Feb 2024 10:26:37 -0500
Subject: [PATCH 113/357] ref(metrics-extraction): Modify metric for 0 on
 demand rows (#64674)

### Summary
This splits up the metric so we can tell whether specs were skipped for a
recently modified widget query or for an older one.

---
 src/sentry/relay/config/metric_extraction.py | 19 ++++++++++++++-----
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py
index d6aa52f9a04d9f..d0a4dba6c44e1b 100644
--- a/src/sentry/relay/config/metric_extraction.py
+++ b/src/sentry/relay/config/metric_extraction.py
@@ -3,10 +3,12 @@
 from collections import defaultdict
 from collections.abc import Sequence
 from dataclasses import dataclass
+from datetime import timedelta
 from typing import Any, Literal, TypedDict
 
 import sentry_sdk
 from celery.exceptions import SoftTimeLimitExceeded
+from django.utils import timezone
 from sentry_relay.processing import validate_sampling_condition
 
 from sentry import features, options
@@ -465,11 +467,18 @@ def _can_widget_query_use_stateful_extraction(
 
     if len(on_demand_entries) == 0:
         # 0 on-demand entries is expected, and happens when the on-demand task hasn't caught up yet for newly created widgets or widgets recently modified to have on-demand state.
-        metrics.incr(
-            "on_demand_metrics.on_demand_spec.skip_recently_modified",
-            amount=len(metrics_specs),
-            sample_rate=1.0,
-        )
+        if widget_query.date_modified > timezone.now() - timedelta(days=1):
+            metrics.incr(
+                "on_demand_metrics.on_demand_spec.skip_recently_modified",
+                amount=len(metrics_specs),
+                sample_rate=1.0,
+            )
+        else:
+            metrics.incr(
+                "on_demand_metrics.on_demand_spec.older_widget_query",
+                amount=len(metrics_specs),
+                sample_rate=1.0,
+            )
         return False
     elif len(on_demand_entries) > 1:
         # There should only be one on demand entry.
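The branch added above boils down to the following (a condensed restatement of
the same logic for readers skimming the hunk; `_record_skip` is a hypothetical
helper name, while the metric names and the one-day cutoff come straight from
the diff):

```python
from datetime import timedelta

from django.utils import timezone

from sentry.utils import metrics


def _record_skip(widget_query, metrics_specs):
    # Widget queries modified within the last day are expected to have no
    # on-demand entries yet (the task hasn't caught up); older ones are the
    # more surprising case and now get their own metric.
    if widget_query.date_modified > timezone.now() - timedelta(days=1):
        name = "on_demand_metrics.on_demand_spec.skip_recently_modified"
    else:
        name = "on_demand_metrics.on_demand_spec.older_widget_query"
    metrics.incr(name, amount=len(metrics_specs), sample_rate=1.0)
```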
From 3fe111331629742a0d3b150c3352b38159f0796a Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 10:45:34 -0500 Subject: [PATCH 114/357] fix(webvitals): Fix page overview slideout panel not querying for INP samples properly (#64754) Fixes page overview slideout panel not querying for INP samples properly: - fixes inp p90 and median thresholds flipped incorrectly - properly map inp query filters back to fid to avoid invalid queries - map fid response to inp for the samples query --- .../pageOverviewWebVitalsDetailPanel.tsx | 17 +++++++++++------ .../calculatePerformanceScore.tsx | 4 ++-- ...seTransactionSamplesWebVitalsScoresQuery.tsx | 8 ++++++-- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx b/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx index 3d0a57d0ed070a..5cf10ca6bf5bea 100644 --- a/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx +++ b/static/app/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel.tsx @@ -38,6 +38,7 @@ import type { TransactionSampleRowWithScore, WebVitals, } from 'sentry/views/performance/browser/webVitals/utils/types'; +import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; import {generateReplayLink} from 'sentry/views/performance/transactionSummary/utils'; import DetailPanel from 'sentry/views/starfish/components/detailPanel'; @@ -69,6 +70,7 @@ export function PageOverviewWebVitalsDetailPanel({ const organization = useOrganization(); const routes = useRoutes(); const shouldUseStoredScores = useStoredScoresSetting(); + const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); const replayLinkGenerator = generateReplayLink(routes); @@ -94,6 +96,9 @@ export function PageOverviewWebVitalsDetailPanel({ ? calculatePerformanceScoreFromStoredTableDataRow(projectScoresData?.data?.[0]) : calculatePerformanceScoreFromTableDataRow(projectData?.data?.[0]); + // TODO: remove this when INP is queryable. Need to map inp back to fid for search filters. + const webVitalFilter = shouldReplaceFidWithInp && webVital === 'inp' ? 'fid' : webVital; + // Do 3 queries filtering on LCP to get a spread of good, meh, and poor events // We can't query by performance score yet, so we're using LCP as a best estimate const {data: goodData, isLoading: isGoodTransactionWebVitalsQueryLoading} = @@ -101,12 +106,12 @@ export function PageOverviewWebVitalsDetailPanel({ limit: 3, transaction: transaction ?? '', query: webVital - ? `measurements.${webVital}:<${PERFORMANCE_SCORE_P90S[webVital]}` + ? `measurements.${webVitalFilter}:<${PERFORMANCE_SCORE_P90S[webVital]}` : undefined, enabled: Boolean(webVital), withProfiles: true, sortName: 'webVitalSort', - webVital: webVital ?? undefined, + webVital: webVitalFilter ?? undefined, }); const {data: mehData, isLoading: isMehTransactionWebVitalsQueryLoading} = @@ -114,12 +119,12 @@ export function PageOverviewWebVitalsDetailPanel({ limit: 3, transaction: transaction ?? '', query: webVital - ? `measurements.${webVital}:<${PERFORMANCE_SCORE_MEDIANS[webVital]} measurements.${webVital}:>=${PERFORMANCE_SCORE_P90S[webVital]}` + ? 
`measurements.${webVitalFilter}:<${PERFORMANCE_SCORE_MEDIANS[webVital]} measurements.${webVitalFilter}:>=${PERFORMANCE_SCORE_P90S[webVital]}` : undefined, enabled: Boolean(webVital), withProfiles: true, sortName: 'webVitalSort', - webVital: webVital ?? undefined, + webVital: webVitalFilter ?? undefined, }); const {data: poorData, isLoading: isPoorTransactionWebVitalsQueryLoading} = @@ -127,12 +132,12 @@ export function PageOverviewWebVitalsDetailPanel({ limit: 3, transaction: transaction ?? '', query: webVital - ? `measurements.${webVital}:>=${PERFORMANCE_SCORE_MEDIANS[webVital]}` + ? `measurements.${webVitalFilter}:>=${PERFORMANCE_SCORE_MEDIANS[webVital]}` : undefined, enabled: Boolean(webVital), withProfiles: true, sortName: 'webVitalSort', - webVital: webVital ?? undefined, + webVital: webVitalFilter ?? undefined, }); const data = [...goodData, ...mehData, ...poorData]; diff --git a/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/calculatePerformanceScore.tsx b/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/calculatePerformanceScore.tsx index e4107c5e97bd77..0affaa95b3185a 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/calculatePerformanceScore.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/calculatePerformanceScore.tsx @@ -18,7 +18,7 @@ export const PERFORMANCE_SCORE_MEDIANS = { cls: 0.25, fid: 300, ttfb: 400, - inp: 200, + inp: 500, }; export const PERFORMANCE_SCORE_P90S = { @@ -27,7 +27,7 @@ export const PERFORMANCE_SCORE_P90S = { cls: 0.1, fid: 100, ttfb: 200, - inp: 500, + inp: 200, }; export type Vitals = { diff --git a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx index edf4e6831eefa5..0ced6cfcc84d43 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx @@ -15,6 +15,7 @@ import { SORTABLE_INDEXED_FIELDS, SORTABLE_INDEXED_SCORE_FIELDS, } from 'sentry/views/performance/browser/webVitals/utils/types'; +import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; import {useWebVitalsSort} from 'sentry/views/performance/browser/webVitals/utils/useWebVitalsSort'; @@ -43,6 +44,7 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ const pageFilters = usePageFilters(); const location = useLocation(); const shouldUseStoredScores = useStoredScoresSetting(); + const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); const filteredSortableFields = shouldUseStoredScores ? SORTABLE_INDEXED_FIELDS @@ -104,6 +106,8 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ referrer: 'api.performance.browser.web-vitals.transaction', }); + // TODO: Remove this once we can query for INP. + const webVitalKey = shouldReplaceFidWithInp && webVital === 'fid' ? 'inp' : webVital; const toNumber = (item: ReactText) => (item ? 
parseFloat(item.toString()) : undefined); const tableData: TransactionSampleRowWithScore[] = !isLoading && data?.data.length @@ -128,12 +132,12 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ ), ...(webVital ? { - [`${webVital}Score`]: Math.round( + [`${webVitalKey}Score`]: Math.round( ((toNumber(row[`measurements.score.${webVital}`]) ?? 0) / (toNumber(row[`measurements.score.weight.${webVital}`]) ?? 0)) * 100 ), - [`${webVital}Weight`]: Math.round( + [`${webVitalKey}Weight`]: Math.round( (toNumber(row[`measurements.score.weight.${webVital}`]) ?? 0) * 100 ), } From d31e3ac708dc11a6c75c6d5bae926befe1fbea91 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Wed, 7 Feb 2024 10:49:08 -0500 Subject: [PATCH 115/357] feat(app-start): Merge samples tables (#64681) This PR merges the span ops table with the event samples table. In doing so I've also made the following changes: - Add a change column for comparing cold/warm start durations in the span op table - Apply the span op, start type, and device class filters to the span op table and start type and device class for the event samples - I had to leave out the span op filter from the event samples table because it's going to be more difficult to implement and I felt like this PR was big enough - Add props to shared components so we can style them differently before the new style is propagated to the screens module (since it's in prod currently) --- static/app/views/starfish/types.tsx | 2 + .../appStartup/screenSummary/eventSamples.tsx | 16 ++- .../views/appStartup/screenSummary/index.tsx | 45 +------ .../appStartup/screenSummary/samples.tsx | 112 ++++++++++++++++++ .../screenSummary/spanOpSelector.tsx | 10 +- .../screenSummary/spanOperationTable.spec.tsx | 14 +-- .../screenSummary/spanOperationTable.tsx | 49 ++++---- .../screenSummary/startTypeSelector.tsx | 43 +++++++ .../screenLoadSpans/deviceClassSelector.tsx | 12 +- .../screenLoadSpans/eventSamplesTable.tsx | 18 ++- 10 files changed, 237 insertions(+), 84 deletions(-) create mode 100644 static/app/views/starfish/views/appStartup/screenSummary/samples.tsx create mode 100644 static/app/views/starfish/views/appStartup/screenSummary/startTypeSelector.tsx diff --git a/static/app/views/starfish/types.tsx b/static/app/views/starfish/types.tsx index f2937d0de56079..8f2c1cc77227c1 100644 --- a/static/app/views/starfish/types.tsx +++ b/static/app/views/starfish/types.tsx @@ -35,6 +35,8 @@ export enum SpanMetricsField { HTTP_RESPONSE_TRANSFER_SIZE = 'http.response_transfer_size', FILE_EXTENSION = 'file_extension', OS_NAME = 'os.name', + APP_START_TYPE = 'app_start_type', + DEVICE_CLASS = 'device.class', } export type SpanNumberFields = diff --git a/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx b/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx index 2dc0adacaccde0..c6b4ed581228fc 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx @@ -12,6 +12,11 @@ import { SECONDARY_RELEASE_ALIAS, } from 'sentry/views/starfish/components/releaseSelector'; import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; +import {SpanMetricsField} from 'sentry/views/starfish/types'; +import { + COLD_START_TYPE, + WARM_START_TYPE, +} from 'sentry/views/starfish/views/appStartup/screenSummary/startTypeSelector'; import {EventSamplesTable} from 'sentry/views/starfish/views/screens/screenLoadSpans/eventSamplesTable'; import 
{useTableQuery} from 'sentry/views/starfish/views/screens/screensTable'; @@ -25,6 +30,7 @@ type Props = { release: string; sortKey: string; transaction: string; + footerAlignedPagination?: boolean; showDeviceClassSelector?: boolean; }; @@ -34,12 +40,16 @@ export function EventSamples({ release, sortKey, showDeviceClassSelector, + footerAlignedPagination, }: Props) { const location = useLocation(); const {selection} = usePageFilters(); const {primaryRelease} = useReleaseSelection(); const cursor = decodeScalar(location.query?.[cursorName]); + const deviceClass = decodeScalar(location.query[SpanMetricsField.DEVICE_CLASS]) ?? ''; + const startType = decodeScalar(location.query[SpanMetricsField.APP_START_TYPE]) ?? ''; + const searchQuery = new MutableSearch([ `transaction:${transaction}`, `release:${release}`, @@ -49,10 +59,11 @@ export function EventSamples({ 'OR', 'span.description:"Warm Start"', ')', + `${SpanMetricsField.APP_START_TYPE}:${ + startType || `[${COLD_START_TYPE},${WARM_START_TYPE}]` + }`, ]); - const deviceClass = decodeScalar(location.query['device.class']); - if (deviceClass) { if (deviceClass === 'Unknown') { searchQuery.addFilterValue('!has', 'device.class'); @@ -112,6 +123,7 @@ export function EventSamples({ showDeviceClassSelector={showDeviceClassSelector} columnNameMap={columnNameMap} sort={sort} + footerAlignedPagination={footerAlignedPagination} /> ); } diff --git a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx index cce544e98416bb..6894ab74c32bcd 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx @@ -23,13 +23,8 @@ import { SECONDARY_RELEASE_ALIAS, } from 'sentry/views/starfish/components/releaseSelector'; import {SpanMetricsField} from 'sentry/views/starfish/types'; -import {EventSamples} from 'sentry/views/starfish/views/appStartup/screenSummary/eventSamples'; -import {SpanOperationTable} from 'sentry/views/starfish/views/appStartup/screenSummary/spanOperationTable'; +import {SamplesTables} from 'sentry/views/starfish/views/appStartup/screenSummary/samples'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; -import { - MobileCursors, - MobileSortKeys, -} from 'sentry/views/starfish/views/screens/constants'; import {MetricsRibbon} from 'sentry/views/starfish/views/screens/screenLoadSpans/metricsRibbon'; import {ScreenLoadSpanSamples} from 'sentry/views/starfish/views/screens/screenLoadSpans/samples'; @@ -194,36 +189,9 @@ function ScreenSummary() { - - -
    - -
    -
    - -
    - -
    -
    -
    - - - + + + {spanGroup && spanOp && ( (SPANS); + const {primaryRelease, secondaryRelease} = useReleaseSelection(); + + const content = useMemo(() => { + if (sampleType === EVENT) { + return ( + + + {primaryRelease && ( +
    + +
    + )} +
    + + {secondaryRelease && ( +
    + +
    + )} +
    +
    + ); + } + + return ( + + + + ); + }, [primaryRelease, sampleType, secondaryRelease, transactionName]); + + return ( +
    + + + {sampleType === SPANS && ( + + )} + + + + setSampleType(value)} defaultValue={SPANS}> + {t('By Spans')} + {t('By Event')} + + + {content} +
    + ); +} + +const EventSplitContainer = styled('div')` + display: grid; + grid-template-columns: 1fr 1fr; + gap: ${space(1.5)} +`; + +const Controls = styled('div')` + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: ${space(1)}; +`; + +const FiltersContainer = styled('div')` + display: flex; + gap: ${space(1)}; + align-items: center; +`; diff --git a/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx b/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx index 9f5c1fee14225c..c911d1a1539806 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx @@ -1,9 +1,7 @@ import {browserHistory} from 'react-router'; -import styled from '@emotion/styled'; import {CompactSelect} from 'sentry/components/compactSelect'; import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; import type {NewQuery} from 'sentry/types'; import EventView from 'sentry/utils/discover/eventView'; import {DiscoverDatasets} from 'sentry/utils/discover/types'; @@ -73,8 +71,8 @@ export function SpanOpSelector({transaction, primaryRelease, secondaryRelease}: ]; return ( - { @@ -90,7 +88,3 @@ export function SpanOpSelector({transaction, primaryRelease, secondaryRelease}: /> ); } - -const StyledCompactSelect = styled(CompactSelect)` - margin-bottom: ${space(1)}; -`; diff --git a/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.spec.tsx b/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.spec.tsx index 18fae0365158b9..e60287fd5c16fa 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.spec.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.spec.tsx @@ -48,7 +48,7 @@ describe('SpanOpSelector', function () { 'span.description': 'string', 'span.group': 'string', 'avg_if(span.self_time,release,release1)': 'duration', - 'time_spent_percentage()': 'percentage', + 'avg_compare(span.self_time,release,release1,release2)': 'percent_change', 'count()': 'integer', 'avg_if(span.self_time,release,release2)': 'duration', 'sum(span.self_time)': 'duration', @@ -61,7 +61,7 @@ describe('SpanOpSelector', function () { 'span.description': 'Application Init', 'span.group': '7f4be68f08c0455f', 'avg_if(span.self_time,release,release1)': 22.549867, - 'time_spent_percentage()': 0.003017625053431528, + 'avg_compare(span.self_time,release,release1,release2)': 0.5, 'count()': 14, 'avg_if(span.self_time,release,release2)': 12504.931908384617, 'sum(span.self_time)': 162586.66467600001, @@ -82,17 +82,15 @@ describe('SpanOpSelector', function () { expect(await screen.findByRole('link', {name: 'Operation'})).toBeInTheDocument(); expect(screen.getByRole('link', {name: 'Span Description'})).toBeInTheDocument(); - expect(screen.getByRole('link', {name: 'Duration (R1)'})).toBeInTheDocument(); - expect(screen.getByRole('link', {name: 'Duration (R2)'})).toBeInTheDocument(); - expect(screen.getByRole('link', {name: 'Total Count'})).toBeInTheDocument(); - expect(screen.getByRole('link', {name: 'Total Time Spent'})).toBeInTheDocument(); + expect(screen.getByRole('link', {name: 'Avg Duration (R1)'})).toBeInTheDocument(); + expect(screen.getByRole('link', {name: 'Avg Duration (R2)'})).toBeInTheDocument(); + expect(screen.getByRole('link', {name: 'Change'})).toBeInTheDocument(); expect(await screen.findByRole('cell', {name: 
'app.start.warm'})).toBeInTheDocument(); expect(screen.getByRole('cell', {name: 'Application Init'})).toBeInTheDocument(); expect(screen.getByRole('cell', {name: '22.55ms'})).toBeInTheDocument(); expect(screen.getByRole('cell', {name: '12.50s'})).toBeInTheDocument(); - expect(screen.getByRole('cell', {name: '14'})).toBeInTheDocument(); - expect(screen.getByRole('cell', {name: '2.71min'})).toBeInTheDocument(); + expect(screen.getByRole('cell', {name: '+50%'})).toBeInTheDocument(); expect(screen.getByRole('link', {name: 'Application Init'})).toHaveAttribute( 'href', diff --git a/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx b/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx index 0ee6d801194da1..6838ac8a9f450b 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx @@ -32,13 +32,22 @@ import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/te import {SpanMetricsField} from 'sentry/views/starfish/types'; import {STARFISH_CHART_INTERVAL_FIDELITY} from 'sentry/views/starfish/utils/constants'; import {appendReleaseFilters} from 'sentry/views/starfish/utils/releaseComparison'; -import {SpanOpSelector} from 'sentry/views/starfish/views/appStartup/screenSummary/spanOpSelector'; +import { + COLD_START_TYPE, + WARM_START_TYPE, +} from 'sentry/views/starfish/views/appStartup/screenSummary/startTypeSelector'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; import {MobileCursors} from 'sentry/views/starfish/views/screens/constants'; import {useTableQuery} from 'sentry/views/starfish/views/screens/screensTable'; -const {SPAN_SELF_TIME, SPAN_DESCRIPTION, SPAN_GROUP, SPAN_OP, PROJECT_ID} = - SpanMetricsField; +const { + SPAN_SELF_TIME, + SPAN_DESCRIPTION, + SPAN_GROUP, + SPAN_OP, + PROJECT_ID, + APP_START_TYPE, +} = SpanMetricsField; type Props = { primaryRelease?: string; @@ -67,6 +76,8 @@ export function SpanOperationTable({ const cursor = decodeScalar(location.query?.[MobileCursors.SPANS_TABLE]); const spanOp = decodeScalar(location.query[SpanMetricsField.SPAN_OP]) ?? ''; + const startType = decodeScalar(location.query[SpanMetricsField.APP_START_TYPE]) ?? ''; + const deviceClass = decodeScalar(location.query[SpanMetricsField.DEVICE_CLASS]) ?? ''; const searchQuery = new MutableSearch([ 'transaction.op:ui.load', @@ -75,9 +86,11 @@ export function SpanOperationTable({ // Exclude root level spans because they're comprised of nested operations '!span.description:"Cold Start"', '!span.description:"Warm Start"', - ...(spanOp - ? [`${SpanMetricsField.SPAN_OP}:${spanOp}`] - : [`span.op:[${[...STARTUP_SPANS].join(',')}]`]), + `${SpanMetricsField.APP_START_TYPE}:${ + startType || `[${COLD_START_TYPE},${WARM_START_TYPE}]` + }`, + `${SpanMetricsField.SPAN_OP}:${spanOp || `[${[...STARTUP_SPANS].join(',')}]`}`, + ...(deviceClass ? [`${SpanMetricsField.DEVICE_CLASS}:${deviceClass}`] : []), ]); const queryStringPrimary = appendReleaseFilters( searchQuery, @@ -89,7 +102,7 @@ export function SpanOperationTable({ decodeScalar(location.query[QueryParameterNames.SPANS_SORT]) )[0] ?? 
{ kind: 'desc', - field: 'time_spent_percentage()', + field: `avg_compare(${SPAN_SELF_TIME},release,${primaryRelease},${secondaryRelease})`, }; const newQuery: NewQuery = { @@ -101,8 +114,8 @@ export function SpanOperationTable({ SPAN_DESCRIPTION, `avg_if(${SPAN_SELF_TIME},release,${primaryRelease})`, `avg_if(${SPAN_SELF_TIME},release,${secondaryRelease})`, - 'count()', - 'time_spent_percentage()', + `avg_compare(${SPAN_SELF_TIME},release,${primaryRelease},${secondaryRelease})`, + SpanMetricsField.APP_START_TYPE, `sum(${SPAN_SELF_TIME})`, ], query: queryStringPrimary, @@ -125,16 +138,17 @@ export function SpanOperationTable({ const columnNameMap = { [SPAN_OP]: t('Operation'), [SPAN_DESCRIPTION]: t('Span Description'), - 'count()': t('Total Count'), [`avg_if(${SPAN_SELF_TIME},release,${primaryRelease})`]: t( - 'Duration (%s)', + 'Avg Duration (%s)', PRIMARY_RELEASE_ALIAS ), [`avg_if(${SPAN_SELF_TIME},release,${secondaryRelease})`]: t( - 'Duration (%s)', + 'Avg Duration (%s)', SECONDARY_RELEASE_ALIAS ), - ['time_spent_percentage()']: t('Total Time Spent'), + [`avg_compare(${SPAN_SELF_TIME},release,${primaryRelease},${secondaryRelease})`]: + t('Change'), + [APP_START_TYPE]: t('Start Type'), }; function renderBodyCell(column, row): React.ReactNode { @@ -231,21 +245,16 @@ export function SpanOperationTable({ return ( - { return {key: col, name: columnNameMap[col] ?? col, width: COL_WIDTH_UNDEFINED}; })} diff --git a/static/app/views/starfish/views/appStartup/screenSummary/startTypeSelector.tsx b/static/app/views/starfish/views/appStartup/screenSummary/startTypeSelector.tsx new file mode 100644 index 00000000000000..977cd14d727de9 --- /dev/null +++ b/static/app/views/starfish/views/appStartup/screenSummary/startTypeSelector.tsx @@ -0,0 +1,43 @@ +import {browserHistory} from 'react-router'; + +import {CompactSelect} from 'sentry/components/compactSelect'; +import {t} from 'sentry/locale'; +import {decodeScalar} from 'sentry/utils/queryString'; +import {useLocation} from 'sentry/utils/useLocation'; +import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {MobileCursors} from 'sentry/views/starfish/views/screens/constants'; + +export const COLD_START_TYPE = 'cold'; +export const WARM_START_TYPE = 'warm'; + +export function StartTypeSelector() { + const location = useLocation(); + + const value = decodeScalar(location.query[SpanMetricsField.APP_START_TYPE]) ?? 
''; + + const options = [ + {value: '', label: t('All')}, + {value: COLD_START_TYPE, label: t('Cold')}, + {value: WARM_START_TYPE, label: t('Warm')}, + ]; + + return ( + { + browserHistory.push({ + ...location, + query: { + ...location.query, + [SpanMetricsField.APP_START_TYPE]: newValue.value, + [MobileCursors.RELEASE_1_EVENT_SAMPLE_TABLE]: undefined, + [MobileCursors.RELEASE_2_EVENT_SAMPLE_TABLE]: undefined, + [MobileCursors.SPANS_TABLE]: undefined, + }, + }); + }} + /> + ); +} diff --git a/static/app/views/starfish/views/screens/screenLoadSpans/deviceClassSelector.tsx b/static/app/views/starfish/views/screens/screenLoadSpans/deviceClassSelector.tsx index 406bc730e011e8..92fba11b3b0700 100644 --- a/static/app/views/starfish/views/screens/screenLoadSpans/deviceClassSelector.tsx +++ b/static/app/views/starfish/views/screens/screenLoadSpans/deviceClassSelector.tsx @@ -1,3 +1,4 @@ +import type {ComponentProps} from 'react'; import {browserHistory} from 'react-router'; import {CompactSelect} from 'sentry/components/compactSelect'; @@ -6,7 +7,12 @@ import {decodeScalar} from 'sentry/utils/queryString'; import {useLocation} from 'sentry/utils/useLocation'; import {MobileCursors} from 'sentry/views/starfish/views/screens/constants'; -export function DeviceClassSelector() { +interface Props { + clearSpansTableCursor?: boolean; + size?: ComponentProps['size']; +} + +export function DeviceClassSelector({size = 'xs', clearSpansTableCursor}: Props) { const location = useLocation(); const value = decodeScalar(location.query['device.class']) ?? ''; @@ -21,7 +27,8 @@ export function DeviceClassSelector() { return ( { @@ -32,6 +39,7 @@ export function DeviceClassSelector() { ['device.class']: newValue.value, [MobileCursors.RELEASE_1_EVENT_SAMPLE_TABLE]: undefined, [MobileCursors.RELEASE_2_EVENT_SAMPLE_TABLE]: undefined, + ...(clearSpansTableCursor ? {[MobileCursors.SPANS_TABLE]: undefined} : {}), }, }); }} diff --git a/static/app/views/starfish/views/screens/screenLoadSpans/eventSamplesTable.tsx b/static/app/views/starfish/views/screens/screenLoadSpans/eventSamplesTable.tsx index b6dd0807c31118..3392856acdf0cc 100644 --- a/static/app/views/starfish/views/screens/screenLoadSpans/eventSamplesTable.tsx +++ b/static/app/views/starfish/views/screens/screenLoadSpans/eventSamplesTable.tsx @@ -38,6 +38,7 @@ type Props = { sort: Sort; sortKey: string; data?: TableData; + footerAlignedPagination?: boolean; pageLinks?: string; showDeviceClassSelector?: boolean; }; @@ -56,6 +57,7 @@ export function EventSamplesTable({ profileIdKey, columnNameMap, sort, + footerAlignedPagination = false, }: Props) { const location = useLocation(); const organization = useOrganization(); @@ -169,10 +171,13 @@ export function EventSamplesTable({ return ( -
    - {showDeviceClassSelector && } - -
    + {!footerAlignedPagination && ( +
    + {showDeviceClassSelector && } + + +
    + )} +
    + {footerAlignedPagination && ( + + )} +
    ); } From 3ce1e8ddaaf6d6ec42389ce74bd26dc42c399c83 Mon Sep 17 00:00:00 2001 From: Colton Allen Date: Wed, 7 Feb 2024 10:03:18 -0600 Subject: [PATCH 116/357] docs(replays): Add API documentation for the replay-counts endpoint (#64781) Related: https://github.com/getsentry/team-replay/issues/353 --- .../apidocs/examples/replay_examples.py | 15 +++++++++++ .../endpoints/organization_replay_count.py | 27 ++++++++++++++++++- 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/src/sentry/apidocs/examples/replay_examples.py b/src/sentry/apidocs/examples/replay_examples.py index 19d3f47fa8996a..15a0ca3c6319ab 100644 --- a/src/sentry/apidocs/examples/replay_examples.py +++ b/src/sentry/apidocs/examples/replay_examples.py @@ -58,3 +58,18 @@ class ReplayExamples: response_only=True, ), ] + + GET_REPLAY_COUNTS = [ + OpenApiExample( + "Query replay count by issue or transaction id", + value={ + 1: 9, + 2: 0, + 5: 0, + 9: 1, + 10: 29, + }, + status_codes=["200"], + response_only=True, + ) + ] diff --git a/src/sentry/replays/endpoints/organization_replay_count.py b/src/sentry/replays/endpoints/organization_replay_count.py index 01ad22c81ce112..95e91eedf33cef 100644 --- a/src/sentry/replays/endpoints/organization_replay_count.py +++ b/src/sentry/replays/endpoints/organization_replay_count.py @@ -1,6 +1,7 @@ from __future__ import annotations from django.db.models import F +from drf_spectacular.utils import extend_schema from rest_framework import serializers, status from rest_framework.exceptions import ParseError from rest_framework.response import Response @@ -12,6 +13,10 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases import NoProjects from sentry.api.bases.organization_events import OrganizationEventsV2EndpointBase +from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN +from sentry.apidocs.examples.replay_examples import ReplayExamples +from sentry.apidocs.parameters import GlobalParams, OrganizationParams, VisibilityParams +from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.exceptions import InvalidSearchQuery from sentry.models.organization import Organization from sentry.models.project import Project @@ -37,6 +42,7 @@ class ReplayDataSourceValidator(serializers.Serializer): @region_silo_endpoint +@extend_schema(tags=["Replays"]) class OrganizationReplayCountEndpoint(OrganizationEventsV2EndpointBase): """ Get all the replay ids associated with a set of issues/transactions in discover, @@ -45,7 +51,7 @@ class OrganizationReplayCountEndpoint(OrganizationEventsV2EndpointBase): owner = ApiOwner.REPLAY publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PUBLIC, } enforce_rate_limit = True @@ -57,7 +63,26 @@ class OrganizationReplayCountEndpoint(OrganizationEventsV2EndpointBase): } } + @extend_schema( + examples=ReplayExamples.GET_REPLAY_COUNTS, + operation_id="Return a Count of Replays", + parameters=[ + GlobalParams.END, + GlobalParams.ENVIRONMENT, + GlobalParams.ORG_SLUG, + GlobalParams.START, + GlobalParams.STATS_PERIOD, + OrganizationParams.PROJECT, + VisibilityParams.QUERY, + ], + responses={ + 200: inline_sentry_response_serializer("ReplayCounts", dict[int, int]), + 400: RESPONSE_BAD_REQUEST, + 403: RESPONSE_FORBIDDEN, + }, + ) def get(self, request: Request, organization: Organization) -> Response: + """Return a count of replays for the given issue or transaction id.""" if not features.has("organizations:session-replay", organization, actor=request.user): return 
Response(status=404) From ee126fe55538bf5de7665a17148fdb8fea99f06b Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:12:47 -0500 Subject: [PATCH 117/357] fix(webvitals): fix sorting issues with inp columns (#64755) Fixes sorting and pagination issues with inp columns and tables: - map back fid to inp when calling `useWebVitalsSort`, to resolve rendering issues with column key mismatch - update pagination in page overview samples table to use appropriate pageLinks and loading states depending on dataset - update dataset selector to reset pagination and sort when toggled - update useProjectWebVitalsScoresQuery to map fid to inp key --- .../webVitals/pagePerformanceTable.tsx | 6 ++++- .../webVitals/pageSamplePerformanceTable.tsx | 25 +++++++++++++++---- .../useProjectWebVitalsScoresQuery.tsx | 11 +++++++- .../webVitals/utils/useWebVitalsSort.tsx | 9 ++++--- 4 files changed, 41 insertions(+), 10 deletions(-) diff --git a/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx b/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx index 5d3718ee931108..5b01c06d917eb3 100644 --- a/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx +++ b/static/app/views/performance/browser/webVitals/pagePerformanceTable.tsx @@ -82,7 +82,11 @@ export function PagePerformanceTable() { [projects, location.query.project] ); - const sort = useWebVitalsSort(); + let sort = useWebVitalsSort(); + // Need to map fid back to inp for rendering + if (shouldReplaceFidWithInp && sort.field === 'p75(measurements.fid)') { + sort = {...sort, field: 'p75(measurements.inp)'}; + } const {data: projectData, isLoading: isProjectWebVitalsQueryLoading} = useProjectRawWebVitalsQuery({transaction: query}); diff --git a/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx b/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx index e87eb58e290547..c95e250f90bfba 100644 --- a/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx +++ b/static/app/views/performance/browser/webVitals/pageSamplePerformanceTable.tsx @@ -108,10 +108,14 @@ export function PageSamplePerformanceTable({transaction, search, limit = 9}: Pro field => !SORTABLE_INDEXED_SCORE_FIELDS.includes(field) ); - const sort = useWebVitalsSort({ + let sort = useWebVitalsSort({ defaultSort: DEFAULT_INDEXED_SORT, sortableFields: sortableFields as unknown as string[], }); + // Need to map fid back to inp for rendering + if (shouldReplaceFidWithInp && sort.field === 'measurements.fid') { + sort = {...sort, field: 'measurements.inp'}; + } const replayLinkGenerator = generateReplayLink(routes); const project = useMemo( @@ -368,7 +372,18 @@ export function PageSamplePerformanceTable({transaction, search, limit = 9}: Pro {shouldReplaceFidWithInp && ( - + { + // Reset pagination and sort when switching datasets + router.replace({ + ...location, + query: {...location.query, sort: undefined, cursor: undefined}, + }); + setDataset(newDataSet); + }} + > {t('Pageloads')} @@ -389,14 +404,14 @@ export function PageSamplePerformanceTable({transaction, search, limit = 9}: Pro } /> {/* The Pagination component disappears if pageLinks is not defined, which happens any time the table data is loading. So we render a disabled button bar if pageLinks is not defined to minimize ui shifting */} - {!pageLinks && ( + {!(dataset === Dataset.INTERACTIONS ? interactionsPageLinks : pageLinks) && (
    )} + {hasSimilarityEmbeddingsFeature && ( + 0 = Not Similar, 1 = Similar + )} - + {itemsWithFiltered.map(item => ( @@ -105,6 +115,13 @@ const Header = styled('div')` margin-bottom: ${space(1)}; `; +const LegendSmall = styled('div')` + display: flex; + justify-content: flex-end; + margin-bottom: ${space(1)}; + font-size: ${p => p.theme.fontSizeSmall}; +`; + const Footer = styled('div')` display: flex; justify-content: center; diff --git a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx index 6968b65d12a95b..837a0bad03f0d7 100644 --- a/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx +++ b/static/app/views/issueDetails/groupSimilarIssues/similarStackTrace/toolbar.tsx @@ -1,6 +1,7 @@ -import {Component} from 'react'; +import {Component, Fragment} from 'react'; import styled from '@emotion/styled'; +import {addSuccessMessage} from 'sentry/actionCreators/indicator'; import {Button} from 'sentry/components/button'; import Confirm from 'sentry/components/confirm'; import PanelHeader from 'sentry/components/panels/panelHeader'; @@ -8,15 +9,19 @@ import ToolbarHeader from 'sentry/components/toolbarHeader'; import {t} from 'sentry/locale'; import GroupingStore from 'sentry/stores/groupingStore'; import {space} from 'sentry/styles/space'; -import type {Project} from 'sentry/types'; +import type {Organization, Project} from 'sentry/types'; +import {trackAnalytics} from 'sentry/utils/analytics'; type Props = { onMerge: () => void; + groupId?: string; + organization?: Organization; project?: Project; }; const initialState = { mergeCount: 0, + mergeList: [] as string[], }; type State = typeof initialState; @@ -35,12 +40,35 @@ class SimilarToolbar extends Component { } if (mergeList.length !== this.state.mergeCount) { - this.setState({mergeCount: mergeList.length}); + this.setState({mergeCount: mergeList.length, mergeList}); } }; listener = GroupingStore.listen(this.onGroupChange, undefined); + handleSimilarityEmbeddings = (value: string) => { + if ( + this.state.mergeList.length === 0 || + !this.props.organization || + !this.props.groupId + ) { + return; + } + for (const parentGroupId of this.state.mergeList) { + trackAnalytics( + 'issue_details.similar_issues.similarity_embeddings_feedback_recieved', + { + organization: this.props.organization, + projectId: this.props.project?.id, + parentGroupId, + groupId: this.props.groupId, + value, + } + ); + } + addSuccessMessage('Sent analytic for similarity embeddings grouping'); + }; + render() { const {onMerge, project} = this.props; const {mergeCount} = this.state; @@ -50,15 +78,41 @@ class SimilarToolbar extends Component { return ( - - - + + + + + {hasSimilarityEmbeddingsFeature && ( + + + + + )} + {t('Events')} @@ -89,3 +143,9 @@ const StyledToolbarHeader = styled(ToolbarHeader)` justify-content: center; padding: ${space(0.5)} 0; `; + +const ButtonPanel = styled('div')` + display: flex; + align-items: left; + gap: ${space(1)}; +`; From f8597f834ce1a02bb96a51fc638fe8e1d74abf97 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Wed, 7 Feb 2024 10:09:23 -0800 Subject: [PATCH 128/357] feat(issues): Add more timestamps to timeline (#64739) --- .../traceTimeline/traceTimeline.spec.tsx | 2 +- .../traceTimeline/traceTimeline.tsx | 2 +- .../traceTimeline/traceTimelineEvents.tsx | 34 +++++++------------ 3 files changed, 14 insertions(+), 24 deletions(-) diff --git 
a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx index f769c79a17b1e8..c03ee43da8fd69 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx @@ -130,6 +130,6 @@ describe('TraceTimeline', () => { }); render(, {organization}); // Checking for the presence of seconds - expect(await screen.findAllByText(/\d{1,2}:\d{2}:\d{2} (AM|PM)/)).toHaveLength(3); + expect(await screen.findAllByText(/\d{1,2}:\d{2}:\d{2} (AM|PM)/)).toHaveLength(5); }); }); diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index fcbe333deaa1cf..6301917c0971cb 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -33,7 +33,7 @@ export function TraceTimeline({event}: TraceTimelineProps) { !isLoading && data.length > 0 && data.every(item => item.id === event.id); if (isError || noEvents || onlySelfEvent) { // display empty placeholder to reduce layout shift - return
    ; + return
    ; } return ( diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index 70fcf209a9c5d3..3b2986745ab677 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -46,6 +46,9 @@ export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { // If the duration is less than 2 minutes, show seconds const showTimelineSeconds = durationMs < 120 * 1000; + const middleTimestamp = paddedStartTime + Math.floor(durationMs / 2); + const leftMiddleTimestamp = paddedStartTime + Math.floor(durationMs / 4); + const rightMiddleTimestamp = paddedStartTime + Math.floor((durationMs / 4) * 3); return ( @@ -77,19 +80,11 @@ export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { })} - - - - - - - - - + + + + + ); @@ -118,16 +113,11 @@ const TimelineColumns = styled('div')` `; const TimestampColumns = styled('div')` - display: grid; - grid-template-columns: repeat(3, 1fr); - margin-top: ${space(1)}; -`; - -const TimestampItem = styled('div')` - place-items: stretch; - display: grid; + display: flex; align-items: center; - position: relative; + justify-content: space-between; + margin-top: ${space(1)}; + text-align: center; color: ${p => p.theme.subText}; font-size: ${p => p.theme.fontSizeSmall}; `; From 96c3f64d22312291a09b105cc25f302893c6e32f Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Wed, 7 Feb 2024 13:17:04 -0500 Subject: [PATCH 129/357] feat(metrics-extraction): Make stateful check permissive, add 2 more metrics (#64783) ### Summary Adds two more metrics to track every outcome of the extraction, make the extraction_enabled check permissive by default --- src/sentry/relay/config/metric_extraction.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index d0a4dba6c44e1b..74932361cf15af 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -248,9 +248,15 @@ def _get_widget_metric_specs( if not extraction_enabled: # Return no specs if any extraction is blocked for a widget that should have specs. ignored_widget_ids[widget_query.widget.id] = True + metrics.incr( + "on_demand_metrics.widgets.can_use_stateful_extraction", sample_rate=1.0 + ) else: # Stateful extraction cannot be used in some cases (eg. newly created or recently modified widgets). # We skip cardinality checks for those cases, however, and assume extraction is allowed temporarily. + metrics.incr( + "on_demand_metrics.widgets.cannot_use_stateful_extraction", sample_rate=1.0 + ) continue else: # TODO: Remove this cardinality check after above option is enabled permanently. @@ -540,7 +546,8 @@ def _widget_query_stateful_extraction_enabled(widget_query: DashboardWidgetQuery sentry_sdk.capture_exception( Exception("Skipped extraction due to mismatched on_demand entries") ) - return False + # We default to allowed extraction if something unexpected occurs otherwise customers lose data. 
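+        # Failing open still leaves a breadcrumb: the mismatched on_demand entries are
+        # captured via sentry_sdk.capture_exception above, so we keep visibility into the
+        # inconsistency while extraction continues for the customer.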
+ return True on_demand_entry = on_demand_entries[0] From e995350d36be4fbd657a7fcb8fd7d5188b76bc4c Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Wed, 7 Feb 2024 10:26:59 -0800 Subject: [PATCH 130/357] feat(issue-priority): Add priority dropdown to issue details page (#64734) --- fixtures/js-stubs/group.ts | 2 + static/app/types/group.tsx | 1 + .../utils/analytics/issueAnalyticsEvents.tsx | 9 ++- static/app/utils/events.tsx | 2 + .../app/views/issueDetails/groupPriority.tsx | 56 +++++++++++++++++++ static/app/views/issueDetails/header.spec.tsx | 38 ++++++++++++- static/app/views/issueDetails/header.tsx | 15 ++++- 7 files changed, 118 insertions(+), 5 deletions(-) create mode 100644 static/app/views/issueDetails/groupPriority.tsx diff --git a/fixtures/js-stubs/group.ts b/fixtures/js-stubs/group.ts index 6ac6dc13b51777..6da2c6da9c79b0 100644 --- a/fixtures/js-stubs/group.ts +++ b/fixtures/js-stubs/group.ts @@ -7,6 +7,7 @@ import { GroupUnresolved, IssueCategory, IssueType, + PriorityLevel, } from 'sentry/types'; export function GroupFixture(params: Partial = {}): Group { @@ -37,6 +38,7 @@ export function GroupFixture(params: Partial = {}): Group { pluginActions: [], pluginContexts: [], pluginIssues: [], + priority: PriorityLevel.MEDIUM, project: ProjectFixture({ platform: 'javascript', }), diff --git a/static/app/types/group.tsx b/static/app/types/group.tsx index 61e38877e4684d..15026ed75fe371 100644 --- a/static/app/types/group.tsx +++ b/static/app/types/group.tsx @@ -729,6 +729,7 @@ export interface BaseGroup { pluginActions: TitledPlugin[]; pluginContexts: any[]; // TODO(ts) pluginIssues: TitledPlugin[]; + priority: PriorityLevel; project: Project; seenBy: User[]; shareId: string; diff --git a/static/app/utils/analytics/issueAnalyticsEvents.tsx b/static/app/utils/analytics/issueAnalyticsEvents.tsx index 51876aa0b08e45..e964689bc9e047 100644 --- a/static/app/utils/analytics/issueAnalyticsEvents.tsx +++ b/static/app/utils/analytics/issueAnalyticsEvents.tsx @@ -1,6 +1,6 @@ import type {SourceMapProcessingIssueType} from 'sentry/components/events/interfaces/crashContent/exception/useSourceMapDebug'; import type {FieldValue} from 'sentry/components/forms/model'; -import type {IntegrationType} from 'sentry/types'; +import type {IntegrationType, PriorityLevel} from 'sentry/types'; import type {BaseEventAnalyticsParams} from 'sentry/utils/analytics/workflowAnalyticsEvents'; import type {CommonGroupAnalyticsData} from 'sentry/utils/events'; @@ -39,6 +39,11 @@ interface ExternalIssueParams extends CommonGroupAnalyticsData { external_issue_type: IntegrationType; } +interface SetPriorityParams extends CommonGroupAnalyticsData { + from_priority: PriorityLevel; + to_priority: PriorityLevel; +} + export type IssueEventParameters = { 'actionable_items.expand_clicked': ActionableItemDebugParam; 'device.classification.high.end.android.device': { @@ -83,6 +88,7 @@ export type IssueEventParameters = { 'issue_details.issue_status_docs_clicked': {}; 'issue_details.performance.autogrouped_siblings_toggle': {}; 'issue_details.performance.hidden_spans_expanded': {}; + 'issue_details.set_priority': SetPriorityParams; 'issue_details.similar_issues.similarity_embeddings_feedback_recieved': { groupId: string; parentGroupId: string; @@ -332,4 +338,5 @@ export const issueEventMap: Record = { 'issue_details.sourcemap_wizard_copy': 'Issue Details: Sourcemap Wizard Copy', 'issue_details.sourcemap_wizard_learn_more': 'Issue Details: Sourcemap Wizard Learn More', + 'issue_details.set_priority': 'Issue Details: Set 
Priority', }; diff --git a/static/app/utils/events.tsx b/static/app/utils/events.tsx index c55fb1b11c255a..3cc45f9a1fa183 100644 --- a/static/app/utils/events.tsx +++ b/static/app/utils/events.tsx @@ -481,6 +481,7 @@ export type CommonGroupAnalyticsData = { issue_level?: string; issue_status?: string; issue_substatus?: string | null; + priority?: string; }; export function getAnalyticsDataForGroup(group?: Group | null): CommonGroupAnalyticsData { @@ -507,6 +508,7 @@ export function getAnalyticsDataForGroup(group?: Group | null): CommonGroupAnaly num_participants: group?.participants?.length ?? 0, num_viewers: group?.seenBy?.filter(user => user.id !== activeUser?.id).length ?? 0, group_num_user_feedback: group?.userReportCount ?? 0, + priority: group?.priority, }; } diff --git a/static/app/views/issueDetails/groupPriority.tsx b/static/app/views/issueDetails/groupPriority.tsx new file mode 100644 index 00000000000000..a4cc950038528a --- /dev/null +++ b/static/app/views/issueDetails/groupPriority.tsx @@ -0,0 +1,56 @@ +import {bulkUpdate} from 'sentry/actionCreators/group'; +import {addLoadingMessage, clearIndicators} from 'sentry/actionCreators/indicator'; +import {GroupPriorityDropdown} from 'sentry/components/group/groupPriority'; +import {t} from 'sentry/locale'; +import IssueListCacheStore from 'sentry/stores/IssueListCacheStore'; +import {type Group, PriorityLevel} from 'sentry/types'; +import {trackAnalytics} from 'sentry/utils/analytics'; +import {getAnalyticsDataForGroup} from 'sentry/utils/events'; +import useApi from 'sentry/utils/useApi'; +import useOrganization from 'sentry/utils/useOrganization'; + +type GroupDetailsPriorityProps = { + group: Group; +}; + +function GroupPriority({group}: GroupDetailsPriorityProps) { + const api = useApi({persistInFlight: true}); + const organization = useOrganization(); + + const onChange = (priority: PriorityLevel) => { + if (priority === group.priority) { + return; + } + + trackAnalytics('issue_details.set_priority', { + organization, + ...getAnalyticsDataForGroup(group), + from_priority: group.priority, + to_priority: priority, + }); + + addLoadingMessage(t('Saving changes\u2026')); + + bulkUpdate( + api, + { + orgId: organization.slug, + projectId: group.project.slug, + itemIds: [group.id], + data: {priority}, + }, + {complete: clearIndicators} + ); + + IssueListCacheStore.reset(); + }; + + return ( + + ); +} + +export default GroupPriority; diff --git a/static/app/views/issueDetails/header.spec.tsx b/static/app/views/issueDetails/header.spec.tsx index 96678b485bfae9..945fa9cdd5aea2 100644 --- a/static/app/views/issueDetails/header.spec.tsx +++ b/static/app/views/issueDetails/header.spec.tsx @@ -6,11 +6,11 @@ import {TeamFixture} from 'sentry-fixture/team'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; -import {IssueCategory} from 'sentry/types'; +import {IssueCategory, PriorityLevel} from 'sentry/types'; import GroupHeader from 'sentry/views/issueDetails/header'; import {ReprocessingStatus} from 'sentry/views/issueDetails/utils'; -describe('groupDetails', () => { +describe('GroupHeader', () => { const baseUrl = 'BASE_URL/'; const organization = OrganizationFixture(); const project = ProjectFixture({ @@ -187,4 +187,38 @@ describe('groupDetails', () => { expect(screen.queryByRole('tab', {name: /replays/i})).not.toBeInTheDocument(); }); }); + + describe('priority', () => { + it('can change priority', async function () { + const mockModifyIssue = MockApiClient.addMockResponse({ + url: 
`/projects/org-slug/project-slug/issues/`, + method: 'PUT', + body: {}, + }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/replay-count/', + body: {}, + }); + + render( + + ); + + await userEvent.click(screen.getByRole('button', {name: 'Modify issue priority'})); + await userEvent.click(screen.getByRole('menuitemradio', {name: 'High'})); + + expect(mockModifyIssue).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + data: {priority: PriorityLevel.HIGH}, + }) + ); + }); + }); }); diff --git a/static/app/views/issueDetails/header.tsx b/static/app/views/issueDetails/header.tsx index 72be0e6107abe6..8fd712d2ffd674 100644 --- a/static/app/views/issueDetails/header.tsx +++ b/static/app/views/issueDetails/header.tsx @@ -28,6 +28,7 @@ import {projectCanLinkToReplay} from 'sentry/utils/replays/projectSupportsReplay import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; +import GroupPriority from 'sentry/views/issueDetails/groupPriority'; import GroupActions from './actions'; import {ShortIdBreadrcumb} from './shortIdBreadcrumb'; @@ -298,6 +299,12 @@ function GroupHeader({ 0 )}
+          {organization.features.includes('issue-priority-ui') && group.priority ? (
+            <PriorityContainer>
+              <h6 className="nav-header">{t('Priority')}</h6>
+              <GroupPriority group={group} />
+            </PriorityContainer>
    + ) : null} )} @@ -349,8 +356,7 @@ const StyledEventOrGroupTitle = styled(EventOrGroupTitle)` `; const StatsWrapper = styled('div')` - display: grid; - grid-template-columns: repeat(2, min-content); + display: flex; gap: calc(${space(3)} + ${space(3)}); @media (min-width: ${p => p.theme.breakpoints.small}) { @@ -374,3 +380,8 @@ const IconBadge = styled(Badge)` const StyledTabList = styled(TabList)` margin-top: ${space(2)}; `; + +const PriorityContainer = styled('div')` + /* Ensures that the layout doesn't shift when changing priority */ + min-width: 80px; +`; From 8a39d3d8fd9a5229bd9b219144cbb638e17ad944 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Wed, 7 Feb 2024 10:52:19 -0800 Subject: [PATCH 131/357] feat(issue-priority): Add new `GroupType.default_priority` field (#64687) Pre-work for https://github.com/getsentry/sentry/pull/64231 We're adding GroupType.default_priority, which will be used in the case where an occurrence is created without providing initial_issue_priority. --- src/sentry/event_manager.py | 3 +- src/sentry/issues/grouptype.py | 35 +++++++++++++++++-- src/sentry/issues/priority.py | 9 ++--- src/sentry/types/group.py | 7 ++++ tests/sentry/event_manager/test_priority.py | 2 +- tests/sentry/issues/test_priority.py | 2 +- tests/sentry/models/test_activity.py | 3 +- tests/sentry/tasks/test_post_process.py | 3 +- .../snuba/api/endpoints/test_group_details.py | 2 +- tests/snuba/api/serializers/test_group.py | 2 +- 10 files changed, 50 insertions(+), 18 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 39f6065333f58e..a23bcb4838c4ce 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -54,7 +54,6 @@ from sentry.ingest.inbound_filters import FilterStatKeys from sentry.issues.grouptype import GroupCategory from sentry.issues.issue_occurrence import IssueOccurrence -from sentry.issues.priority import PriorityLevel from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka from sentry.killswitches import killswitch_matches_context from sentry.lang.native.utils import STORE_CRASH_REPORTS_ALL, convert_crashreport_count @@ -97,7 +96,7 @@ from sentry.tasks.relay import schedule_invalidate_project_config from sentry.tsdb.base import TSDBModel from sentry.types.activity import ActivityType -from sentry.types.group import GroupSubStatus +from sentry.types.group import GroupSubStatus, PriorityLevel from sentry.usage_accountant import record from sentry.utils import json, metrics from sentry.utils.cache import cache_key_for_event diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index e329224894db9a..a72a13fbd2d309 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -11,6 +11,7 @@ from sentry import features from sentry.features.base import OrganizationFeature from sentry.ratelimits.sliding_windows import Quota +from sentry.types.group import PriorityLevel from sentry.utils import metrics if TYPE_CHECKING: @@ -116,6 +117,7 @@ class GroupType: description: str category: int noise_config: NoiseConfig | None = None + default_priority: int = PriorityLevel.MEDIUM # If True this group type should be released everywhere. If False, fall back to features to # decide if this is released. 
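    # Sketch of the intended consumer (an assumption from the PR description, not
    # code in this diff): an occurrence created without `initial_issue_priority`
    # would fall back to its group type's default, roughly:
    #
    #   group_type = get_group_type_by_type_id(occurrence.type)
    #   priority = occurrence.initial_issue_priority or group_type.default_priority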
released: bool = False @@ -217,6 +219,7 @@ class ErrorGroupType(GroupType): slug = "error" description = "Error" category = GroupCategory.ERROR.value + default_priority = PriorityLevel.MEDIUM released = True @@ -232,6 +235,7 @@ class PerformanceSlowDBQueryGroupType(PerformanceGroupTypeDefaults, GroupType): description = "Slow DB Query" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=100) + default_priority = PriorityLevel.LOW released = True @@ -241,6 +245,7 @@ class PerformanceRenderBlockingAssetSpanGroupType(PerformanceGroupTypeDefaults, slug = "performance_render_blocking_asset_span" description = "Large Render Blocking Asset" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -250,6 +255,7 @@ class PerformanceNPlusOneGroupType(PerformanceGroupTypeDefaults, GroupType): slug = "performance_n_plus_one_db_queries" description = "N+1 Query" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -260,6 +266,7 @@ class PerformanceConsecutiveDBQueriesGroupType(PerformanceGroupTypeDefaults, Gro description = "Consecutive DB Queries" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=15) + default_priority = PriorityLevel.LOW released = True @@ -269,6 +276,7 @@ class PerformanceFileIOMainThreadGroupType(PerformanceGroupTypeDefaults, GroupTy slug = "performance_file_io_main_thread" description = "File IO on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -279,6 +287,7 @@ class PerformanceConsecutiveHTTPQueriesGroupType(PerformanceGroupTypeDefaults, G description = "Consecutive HTTP" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=5) + default_priority = PriorityLevel.LOW released = True @@ -288,6 +297,7 @@ class PerformanceNPlusOneAPICallsGroupType(GroupType): slug = "performance_n_plus_one_api_calls" description = "N+1 API Call" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -297,6 +307,7 @@ class PerformanceMNPlusOneDBQueriesGroupType(PerformanceGroupTypeDefaults, Group slug = "performance_m_n_plus_one_db_queries" description = "MN+1 Query" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -307,6 +318,7 @@ class PerformanceUncompressedAssetsGroupType(PerformanceGroupTypeDefaults, Group description = "Uncompressed Asset" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=100) + default_priority = PriorityLevel.LOW released = True @@ -316,6 +328,7 @@ class PerformanceDBMainThreadGroupType(PerformanceGroupTypeDefaults, GroupType): slug = "performance_db_main_thread" description = "DB on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -325,6 +338,7 @@ class PerformanceLargeHTTPPayloadGroupType(PerformanceGroupTypeDefaults, GroupTy slug = "performance_large_http_payload" description = "Large HTTP payload" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -335,6 +349,7 @@ class PerformanceHTTPOverheadGroupType(PerformanceGroupTypeDefaults, GroupType): description = "HTTP/1.1 Overhead" noise_config = NoiseConfig(ignore_limit=20) category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW # experimental @@ -346,6 +361,7 @@ class 
PerformanceDurationRegressionGroupType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False enable_escalation_detection = False + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -356,6 +372,7 @@ class PerformanceP95EndpointRegressionGroupType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False enable_escalation_detection = False + default_priority = PriorityLevel.MEDIUM released = True @@ -366,6 +383,7 @@ class ProfileFileIOGroupType(GroupType): slug = "profile_file_io_main_thread" description = "File I/O on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -374,6 +392,7 @@ class ProfileImageDecodeGroupType(GroupType): slug = "profile_image_decode_main_thread" description = "Image Decoding on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -382,6 +401,7 @@ class ProfileJSONDecodeType(GroupType): slug = "profile_json_decode_main_thread" description = "JSON Decoding on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -390,17 +410,17 @@ class ProfileCoreDataExperimentalType(GroupType): slug = "profile_core_data_main_exp" description = "Core Data on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW # 2005 was ProfileRegexExperimentalType - - @dataclass(frozen=True) class ProfileViewIsSlowExperimentalType(GroupType): type_id = 2006 slug = "profile_view_is_slow_experimental" description = "View Render/Layout/Update is slow" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -410,6 +430,7 @@ class ProfileRegexType(GroupType): description = "Regex on Main Thread" category = GroupCategory.PERFORMANCE.value released = True + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -418,6 +439,7 @@ class ProfileFrameDropExperimentalType(GroupType): slug = "profile_frame_drop_experimental" description = "Frame Drop" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -428,6 +450,7 @@ class ProfileFrameDropType(GroupType): category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=2000) released = True + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -437,6 +460,7 @@ class ProfileFunctionRegressionExperimentalType(GroupType): description = "Function Duration Regression (Experimental)" category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -447,6 +471,7 @@ class ProfileFunctionRegressionType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False released = True + default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -457,6 +482,7 @@ class MonitorCheckInFailure(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -467,6 +493,7 @@ class MonitorCheckInTimeout(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -477,6 +504,7 @@ class 
MonitorCheckInMissed(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -486,6 +514,7 @@ class ReplayDeadClickType(GroupType): slug = "replay_click_dead" description = "Dead Click Detected" category = GroupCategory.REPLAY.value + default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -494,6 +523,7 @@ class ReplayRageClickType(GroupType): slug = "replay_click_rage" description = "Rage Click Detected" category = GroupCategory.REPLAY.value + default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -503,6 +533,7 @@ class FeedbackGroup(GroupType): description = "Feedback" category = GroupCategory.FEEDBACK.value creation_quota = Quota(3600, 60, 1000) # 1000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.MEDIUM @metrics.wraps("noise_reduction.should_create_group", sample_rate=1.0) diff --git a/src/sentry/issues/priority.py b/src/sentry/issues/priority.py index b7dc224e64924e..b999af69d5885e 100644 --- a/src/sentry/issues/priority.py +++ b/src/sentry/issues/priority.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from enum import Enum, IntEnum +from enum import Enum from typing import TYPE_CHECKING from sentry import features @@ -10,17 +10,12 @@ from sentry.models.user import User from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel if TYPE_CHECKING: from sentry.models.group import Group -class PriorityLevel(IntEnum): - LOW = 25 - MEDIUM = 50 - HIGH = 75 - - PRIORITY_LEVEL_TO_STR: dict[int, str] = { PriorityLevel.LOW: "low", PriorityLevel.MEDIUM: "medium", diff --git a/src/sentry/types/group.py b/src/sentry/types/group.py index a781ed769206f8..096daa63b306b8 100644 --- a/src/sentry/types/group.py +++ b/src/sentry/types/group.py @@ -1,4 +1,5 @@ from collections.abc import Mapping +from enum import IntEnum class GroupSubStatus: @@ -65,3 +66,9 @@ class GroupSubStatus: GroupSubStatus.FOREVER: "archived_forever", GroupSubStatus.UNTIL_CONDITION_MET: "archived_until_condition_met", } + + +class PriorityLevel(IntEnum): + LOW = 25 + MEDIUM = 50 + HIGH = 75 diff --git a/tests/sentry/event_manager/test_priority.py b/tests/sentry/event_manager/test_priority.py index 2fbd97988a43b2..fa25e59b753276 100644 --- a/tests/sentry/event_manager/test_priority.py +++ b/tests/sentry/event_manager/test_priority.py @@ -4,11 +4,11 @@ from unittest.mock import MagicMock, patch from sentry.event_manager import EventManager -from sentry.issues.priority import PriorityLevel from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba +from sentry.types.group import PriorityLevel from tests.sentry.event_manager.test_severity import make_event pytestmark = [requires_snuba] diff --git a/tests/sentry/issues/test_priority.py b/tests/sentry/issues/test_priority.py index 55d4c4ab63d59d..07b1145796106c 100644 --- a/tests/sentry/issues/test_priority.py +++ b/tests/sentry/issues/test_priority.py @@ -4,7 +4,6 @@ PRIORITY_LEVEL_TO_STR, PRIORITY_TO_GROUP_HISTORY_STATUS, PriorityChangeReason, - PriorityLevel, auto_update_priority, ) from sentry.models.activity import Activity @@ -14,6 +13,7 @@ from sentry.testutils.helpers.datetime import before_now from 
sentry.testutils.helpers.features import apply_feature_flag_on_cls from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel @apply_feature_flag_on_cls("projects:issue-priority") diff --git a/tests/sentry/models/test_activity.py b/tests/sentry/models/test_activity.py index 90028503315523..1f9152f3b3d629 100644 --- a/tests/sentry/models/test_activity.py +++ b/tests/sentry/models/test_activity.py @@ -1,12 +1,13 @@ import logging from sentry.event_manager import EventManager -from sentry.issues.priority import PRIORITY_LEVEL_TO_STR, PriorityLevel +from sentry.issues.priority import PRIORITY_LEVEL_TO_STR from sentry.models.activity import Activity from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel from sentry.utils.iterators import chunked from tests.sentry.event_manager.test_event_manager import make_event diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index 8f23f525a3c22e..b16b0d5df9b658 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -29,7 +29,6 @@ ProfileFileIOGroupType, ) from sentry.issues.ingest import save_issue_occurrence -from sentry.issues.priority import PriorityLevel from sentry.models.activity import Activity, ActivityIntegration from sentry.models.group import GROUP_SUBSTATUS_TO_STATUS_MAP, Group, GroupStatus from sentry.models.groupassignee import GroupAssignee @@ -72,7 +71,7 @@ from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType -from sentry.types.group import GroupSubStatus +from sentry.types.group import GroupSubStatus, PriorityLevel from sentry.utils import json from sentry.utils.cache import cache from sentry.utils.sdk_crashes.sdk_crash_detection_config import SdkName diff --git a/tests/snuba/api/endpoints/test_group_details.py b/tests/snuba/api/endpoints/test_group_details.py index 48d4a6816ad8d5..b849a01d2bfaae 100644 --- a/tests/snuba/api/endpoints/test_group_details.py +++ b/tests/snuba/api/endpoints/test_group_details.py @@ -4,7 +4,6 @@ from sentry import tsdb from sentry.issues.forecasts import generate_and_save_forecasts -from sentry.issues.priority import PriorityLevel from sentry.models.activity import Activity from sentry.models.environment import Environment from sentry.models.group import GroupStatus @@ -16,6 +15,7 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel @region_silo_test diff --git a/tests/snuba/api/serializers/test_group.py b/tests/snuba/api/serializers/test_group.py index 63244dc6e5f9bb..8109a38335d465 100644 --- a/tests/snuba/api/serializers/test_group.py +++ b/tests/snuba/api/serializers/test_group.py @@ -7,7 +7,6 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.group import GroupSerializerSnuba from sentry.issues.grouptype import PerformanceNPlusOneGroupType, ProfileFileIOGroupType -from sentry.issues.priority import PriorityLevel from sentry.models.group import Group, GroupStatus from sentry.models.groupenvironment import GroupEnvironment from sentry.models.grouplink import GroupLink @@ -23,6 +22,7 @@ from 
sentry.testutils.helpers.features import with_feature from sentry.testutils.performance_issues.store_transaction import PerfIssueTransactionTestMixin from sentry.testutils.silo import assume_test_silo_mode, region_silo_test +from sentry.types.group import PriorityLevel from sentry.utils.samples import load_data from tests.sentry.issues.test_utils import SearchIssueTestMixin From eb62c1e8f3902d778472b1938ca185a686c7adb6 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Wed, 7 Feb 2024 10:56:24 -0800 Subject: [PATCH 132/357] feat(issue-priority): Register option to enable issue priority (#64753) We will use this to enable/disable the feature during today's GA rollout. The project-based option will be removed in a followup. --- src/sentry/options/defaults.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 206c3a46a6d62a..1761e706713d7c 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -773,6 +773,15 @@ ) +# Killswitch for issue priority +register( + "issues.priority.enabled", + default=False, + type=Bool, + flags=FLAG_MODIFIABLE_BOOL | FLAG_AUTOMATOR_MODIFIABLE, +) + + # ## sentry.killswitches # # The following options are documented in sentry.killswitches in more detail From 3c2486b89052a967a8c2a51314cb5a40f9bb082e Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Wed, 7 Feb 2024 14:01:37 -0500 Subject: [PATCH 133/357] feat(metrics-extraction): Add flag specifically for widgets UI (#64802) ### Summary This adds a flag (unfortunately separate from on-demand-metrics-ui as that is tied to alerts) that controls whether a customer sees on-demand related ui in widgets and dashboards. --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index d30f94113d572c..38a024dad6201a 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1679,6 +1679,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:on-demand-metrics-prefill": False, # Display on demand metrics related UI elements "organizations:on-demand-metrics-ui": False, + # Display on demand metrics related UI elements, for dashboards and widgets. The other flag is for alerts. 
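+    # (Illustrative usage, not part of this diff: frontend code would gate the
+    # widget UI with `organization.features.includes('on-demand-metrics-ui-widgets')`,
+    # the same pattern used for other organization feature checks.)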
+ "organizations:on-demand-metrics-ui-widgets": False, # This spec version includes the environment in the query hash "organizations:on-demand-metrics-query-spec-version-two": False, # Enable the SDK selection feature in the onboarding diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 7cfaf32d70d9a5..1d191a7949fd7b 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -163,6 +163,7 @@ default_manager.add("organizations:on-demand-metrics-extraction-widgets", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:on-demand-metrics-extraction", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:on-demand-metrics-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:on-demand-metrics-ui-widgets", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:on-demand-metrics-query-spec-version-two", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:onboarding-sdk-selection", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:onboarding", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) # Only enabled in sentry.io to enable onboarding flows. From ab4723b77d5f69b1431bcd000ff4123995b7d275 Mon Sep 17 00:00:00 2001 From: Isabella Enriquez Date: Wed, 7 Feb 2024 14:18:59 -0500 Subject: [PATCH 134/357] ref(slack): Move View Replays to footer in block kit (#64794) Moves the View Replays link to the footer instead of keeping it in the rich text block (otherwise it gets code formatted and is not hyperlinked). image --- src/sentry/integrations/message_builder.py | 2 +- src/sentry/integrations/slack/message_builder/issues.py | 9 +++++++-- tests/sentry/integrations/slack/test_message_builder.py | 4 ++-- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/src/sentry/integrations/message_builder.py b/src/sentry/integrations/message_builder.py index 1cacf632071b63..9f4e43bb3a50b9 100644 --- a/src/sentry/integrations/message_builder.py +++ b/src/sentry/integrations/message_builder.py @@ -166,7 +166,7 @@ def build_attachment_replay_link( referrer = EXTERNAL_PROVIDERS[ExternalProviders.SLACK] replay_url = f"{group.get_absolute_url()}replays/?referrer={referrer}" - return f"\n\n{url_format.format(text='View Replays', url=absolute_uri(replay_url))}" + return f"{url_format.format(text='View Replays', url=absolute_uri(replay_url))}" return None diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index 33e265cbf3ff0f..9cce6b23ef30f6 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -548,7 +548,7 @@ def build(self, notification_uuid: str | None = None) -> SlackBlock | SlackAttac text = escape_slack_text(text) # This link does not contain user input (it's a static label and a url), must not escape it. - text += build_attachment_replay_link(self.group, self.event) or "" + replay_link = build_attachment_replay_link(self.group, self.event) project = Project.objects.get_from_cache(id=self.group.project_id) # If an event is unspecified, use the tags of the latest event (if one exists). 
@@ -589,6 +589,8 @@ def build(self, notification_uuid: str | None = None) -> SlackBlock | SlackAttac title = build_attachment_title(obj) if not features.has("organizations:slack-block-kit", self.group.project.organization): + if replay_link: + text += f"\n\n{replay_link}" if action_text and self.identity: text += "\n" + action_text @@ -713,7 +715,10 @@ def build(self, notification_uuid: str | None = None) -> SlackBlock | SlackAttac for k, v in footer_data.items(): footer_text += f"{k}: {v} " - footer_text = footer_text[:-4] # chop off the empty space + if replay_link: + footer_text += replay_link + else: + footer_text = footer_text[:-4] # chop off the empty space blocks.append(self.get_context_block(text=footer_text)) else: blocks.append(self.get_context_block(text=footer, timestamp=timestamp)) diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index 0fd21fab6b3d6b..d5186ea2807645 100644 --- a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -948,8 +948,8 @@ def test_build_replay_issue_block_kit(self, has_replays): blocks = SlackIssuesMessageBuilder(event.group, event.for_group(event.group)).build() assert isinstance(blocks, dict) assert ( - f"\n\n" - in blocks["blocks"][1]["elements"][0]["elements"][0]["text"] + f"" + in blocks["blocks"][4]["elements"][0]["text"] ) From 92e82484456fb7a47b1ba14f983304182fd6c99a Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 7 Feb 2024 19:22:48 +0000 Subject: [PATCH 135/357] Revert "feat(issue-priority): Add new `GroupType.default_priority` field (#64687)" This reverts commit 8a39d3d8fd9a5229bd9b219144cbb638e17ad944. Co-authored-by: asottile-sentry <103459774+asottile-sentry@users.noreply.github.com> --- src/sentry/event_manager.py | 3 +- src/sentry/issues/grouptype.py | 35 ++----------------- src/sentry/issues/priority.py | 9 +++-- src/sentry/types/group.py | 7 ---- tests/sentry/event_manager/test_priority.py | 2 +- tests/sentry/issues/test_priority.py | 2 +- tests/sentry/models/test_activity.py | 3 +- tests/sentry/tasks/test_post_process.py | 3 +- .../snuba/api/endpoints/test_group_details.py | 2 +- tests/snuba/api/serializers/test_group.py | 2 +- 10 files changed, 18 insertions(+), 50 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index a23bcb4838c4ce..39f6065333f58e 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -54,6 +54,7 @@ from sentry.ingest.inbound_filters import FilterStatKeys from sentry.issues.grouptype import GroupCategory from sentry.issues.issue_occurrence import IssueOccurrence +from sentry.issues.priority import PriorityLevel from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka from sentry.killswitches import killswitch_matches_context from sentry.lang.native.utils import STORE_CRASH_REPORTS_ALL, convert_crashreport_count @@ -96,7 +97,7 @@ from sentry.tasks.relay import schedule_invalidate_project_config from sentry.tsdb.base import TSDBModel from sentry.types.activity import ActivityType -from sentry.types.group import GroupSubStatus, PriorityLevel +from sentry.types.group import GroupSubStatus from sentry.usage_accountant import record from sentry.utils import json, metrics from sentry.utils.cache import cache_key_for_event diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index a72a13fbd2d309..e329224894db9a 100644 --- a/src/sentry/issues/grouptype.py +++ 
b/src/sentry/issues/grouptype.py @@ -11,7 +11,6 @@ from sentry import features from sentry.features.base import OrganizationFeature from sentry.ratelimits.sliding_windows import Quota -from sentry.types.group import PriorityLevel from sentry.utils import metrics if TYPE_CHECKING: @@ -117,7 +116,6 @@ class GroupType: description: str category: int noise_config: NoiseConfig | None = None - default_priority: int = PriorityLevel.MEDIUM # If True this group type should be released everywhere. If False, fall back to features to # decide if this is released. released: bool = False @@ -219,7 +217,6 @@ class ErrorGroupType(GroupType): slug = "error" description = "Error" category = GroupCategory.ERROR.value - default_priority = PriorityLevel.MEDIUM released = True @@ -235,7 +232,6 @@ class PerformanceSlowDBQueryGroupType(PerformanceGroupTypeDefaults, GroupType): description = "Slow DB Query" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=100) - default_priority = PriorityLevel.LOW released = True @@ -245,7 +241,6 @@ class PerformanceRenderBlockingAssetSpanGroupType(PerformanceGroupTypeDefaults, slug = "performance_render_blocking_asset_span" description = "Large Render Blocking Asset" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW released = True @@ -255,7 +250,6 @@ class PerformanceNPlusOneGroupType(PerformanceGroupTypeDefaults, GroupType): slug = "performance_n_plus_one_db_queries" description = "N+1 Query" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW released = True @@ -266,7 +260,6 @@ class PerformanceConsecutiveDBQueriesGroupType(PerformanceGroupTypeDefaults, Gro description = "Consecutive DB Queries" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=15) - default_priority = PriorityLevel.LOW released = True @@ -276,7 +269,6 @@ class PerformanceFileIOMainThreadGroupType(PerformanceGroupTypeDefaults, GroupTy slug = "performance_file_io_main_thread" description = "File IO on Main Thread" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW released = True @@ -287,7 +279,6 @@ class PerformanceConsecutiveHTTPQueriesGroupType(PerformanceGroupTypeDefaults, G description = "Consecutive HTTP" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=5) - default_priority = PriorityLevel.LOW released = True @@ -297,7 +288,6 @@ class PerformanceNPlusOneAPICallsGroupType(GroupType): slug = "performance_n_plus_one_api_calls" description = "N+1 API Call" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW released = True @@ -307,7 +297,6 @@ class PerformanceMNPlusOneDBQueriesGroupType(PerformanceGroupTypeDefaults, Group slug = "performance_m_n_plus_one_db_queries" description = "MN+1 Query" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW released = True @@ -318,7 +307,6 @@ class PerformanceUncompressedAssetsGroupType(PerformanceGroupTypeDefaults, Group description = "Uncompressed Asset" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=100) - default_priority = PriorityLevel.LOW released = True @@ -328,7 +316,6 @@ class PerformanceDBMainThreadGroupType(PerformanceGroupTypeDefaults, GroupType): slug = "performance_db_main_thread" description = "DB on Main Thread" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW released = True @@ -338,7 +325,6 @@ class 
PerformanceLargeHTTPPayloadGroupType(PerformanceGroupTypeDefaults, GroupTy slug = "performance_large_http_payload" description = "Large HTTP payload" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW released = True @@ -349,7 +335,6 @@ class PerformanceHTTPOverheadGroupType(PerformanceGroupTypeDefaults, GroupType): description = "HTTP/1.1 Overhead" noise_config = NoiseConfig(ignore_limit=20) category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW # experimental @@ -361,7 +346,6 @@ class PerformanceDurationRegressionGroupType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False enable_escalation_detection = False - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -372,7 +356,6 @@ class PerformanceP95EndpointRegressionGroupType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False enable_escalation_detection = False - default_priority = PriorityLevel.MEDIUM released = True @@ -383,7 +366,6 @@ class ProfileFileIOGroupType(GroupType): slug = "profile_file_io_main_thread" description = "File I/O on Main Thread" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -392,7 +374,6 @@ class ProfileImageDecodeGroupType(GroupType): slug = "profile_image_decode_main_thread" description = "Image Decoding on Main Thread" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -401,7 +382,6 @@ class ProfileJSONDecodeType(GroupType): slug = "profile_json_decode_main_thread" description = "JSON Decoding on Main Thread" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -410,17 +390,17 @@ class ProfileCoreDataExperimentalType(GroupType): slug = "profile_core_data_main_exp" description = "Core Data on Main Thread" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW # 2005 was ProfileRegexExperimentalType + + @dataclass(frozen=True) class ProfileViewIsSlowExperimentalType(GroupType): type_id = 2006 slug = "profile_view_is_slow_experimental" description = "View Render/Layout/Update is slow" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -430,7 +410,6 @@ class ProfileRegexType(GroupType): description = "Regex on Main Thread" category = GroupCategory.PERFORMANCE.value released = True - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -439,7 +418,6 @@ class ProfileFrameDropExperimentalType(GroupType): slug = "profile_frame_drop_experimental" description = "Frame Drop" category = GroupCategory.PERFORMANCE.value - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -450,7 +428,6 @@ class ProfileFrameDropType(GroupType): category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=2000) released = True - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -460,7 +437,6 @@ class ProfileFunctionRegressionExperimentalType(GroupType): description = "Function Duration Regression (Experimental)" category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False - default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -471,7 +447,6 @@ class ProfileFunctionRegressionType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False released = True - default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -482,7 +457,6 @@ class 
MonitorCheckInFailure(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds - default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -493,7 +467,6 @@ class MonitorCheckInTimeout(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds - default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -504,7 +477,6 @@ class MonitorCheckInMissed(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds - default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -514,7 +486,6 @@ class ReplayDeadClickType(GroupType): slug = "replay_click_dead" description = "Dead Click Detected" category = GroupCategory.REPLAY.value - default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -523,7 +494,6 @@ class ReplayRageClickType(GroupType): slug = "replay_click_rage" description = "Rage Click Detected" category = GroupCategory.REPLAY.value - default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -533,7 +503,6 @@ class FeedbackGroup(GroupType): description = "Feedback" category = GroupCategory.FEEDBACK.value creation_quota = Quota(3600, 60, 1000) # 1000 per hour, sliding window of 60 seconds - default_priority = PriorityLevel.MEDIUM @metrics.wraps("noise_reduction.should_create_group", sample_rate=1.0) diff --git a/src/sentry/issues/priority.py b/src/sentry/issues/priority.py index b999af69d5885e..b7dc224e64924e 100644 --- a/src/sentry/issues/priority.py +++ b/src/sentry/issues/priority.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from enum import Enum +from enum import Enum, IntEnum from typing import TYPE_CHECKING from sentry import features @@ -10,12 +10,17 @@ from sentry.models.user import User from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.types.activity import ActivityType -from sentry.types.group import PriorityLevel if TYPE_CHECKING: from sentry.models.group import Group +class PriorityLevel(IntEnum): + LOW = 25 + MEDIUM = 50 + HIGH = 75 + + PRIORITY_LEVEL_TO_STR: dict[int, str] = { PriorityLevel.LOW: "low", PriorityLevel.MEDIUM: "medium", diff --git a/src/sentry/types/group.py b/src/sentry/types/group.py index 096daa63b306b8..a781ed769206f8 100644 --- a/src/sentry/types/group.py +++ b/src/sentry/types/group.py @@ -1,5 +1,4 @@ from collections.abc import Mapping -from enum import IntEnum class GroupSubStatus: @@ -66,9 +65,3 @@ class GroupSubStatus: GroupSubStatus.FOREVER: "archived_forever", GroupSubStatus.UNTIL_CONDITION_MET: "archived_until_condition_met", } - - -class PriorityLevel(IntEnum): - LOW = 25 - MEDIUM = 50 - HIGH = 75 diff --git a/tests/sentry/event_manager/test_priority.py b/tests/sentry/event_manager/test_priority.py index fa25e59b753276..2fbd97988a43b2 100644 --- a/tests/sentry/event_manager/test_priority.py +++ b/tests/sentry/event_manager/test_priority.py @@ -4,11 +4,11 @@ from unittest.mock import MagicMock, patch from sentry.event_manager import EventManager +from sentry.issues.priority import PriorityLevel from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba -from sentry.types.group import PriorityLevel from tests.sentry.event_manager.test_severity import 
make_event pytestmark = [requires_snuba] diff --git a/tests/sentry/issues/test_priority.py b/tests/sentry/issues/test_priority.py index 07b1145796106c..55d4c4ab63d59d 100644 --- a/tests/sentry/issues/test_priority.py +++ b/tests/sentry/issues/test_priority.py @@ -4,6 +4,7 @@ PRIORITY_LEVEL_TO_STR, PRIORITY_TO_GROUP_HISTORY_STATUS, PriorityChangeReason, + PriorityLevel, auto_update_priority, ) from sentry.models.activity import Activity @@ -13,7 +14,6 @@ from sentry.testutils.helpers.datetime import before_now from sentry.testutils.helpers.features import apply_feature_flag_on_cls from sentry.types.activity import ActivityType -from sentry.types.group import PriorityLevel @apply_feature_flag_on_cls("projects:issue-priority") diff --git a/tests/sentry/models/test_activity.py b/tests/sentry/models/test_activity.py index 1f9152f3b3d629..90028503315523 100644 --- a/tests/sentry/models/test_activity.py +++ b/tests/sentry/models/test_activity.py @@ -1,13 +1,12 @@ import logging from sentry.event_manager import EventManager -from sentry.issues.priority import PRIORITY_LEVEL_TO_STR +from sentry.issues.priority import PRIORITY_LEVEL_TO_STR, PriorityLevel from sentry.models.activity import Activity from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType -from sentry.types.group import PriorityLevel from sentry.utils.iterators import chunked from tests.sentry.event_manager.test_event_manager import make_event diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index b16b0d5df9b658..8f23f525a3c22e 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -29,6 +29,7 @@ ProfileFileIOGroupType, ) from sentry.issues.ingest import save_issue_occurrence +from sentry.issues.priority import PriorityLevel from sentry.models.activity import Activity, ActivityIntegration from sentry.models.group import GROUP_SUBSTATUS_TO_STATUS_MAP, Group, GroupStatus from sentry.models.groupassignee import GroupAssignee @@ -71,7 +72,7 @@ from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType -from sentry.types.group import GroupSubStatus, PriorityLevel +from sentry.types.group import GroupSubStatus from sentry.utils import json from sentry.utils.cache import cache from sentry.utils.sdk_crashes.sdk_crash_detection_config import SdkName diff --git a/tests/snuba/api/endpoints/test_group_details.py b/tests/snuba/api/endpoints/test_group_details.py index b849a01d2bfaae..48d4a6816ad8d5 100644 --- a/tests/snuba/api/endpoints/test_group_details.py +++ b/tests/snuba/api/endpoints/test_group_details.py @@ -4,6 +4,7 @@ from sentry import tsdb from sentry.issues.forecasts import generate_and_save_forecasts +from sentry.issues.priority import PriorityLevel from sentry.models.activity import Activity from sentry.models.environment import Environment from sentry.models.group import GroupStatus @@ -15,7 +16,6 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType -from sentry.types.group import PriorityLevel @region_silo_test diff --git a/tests/snuba/api/serializers/test_group.py b/tests/snuba/api/serializers/test_group.py index 8109a38335d465..63244dc6e5f9bb 100644 --- 
a/tests/snuba/api/serializers/test_group.py +++ b/tests/snuba/api/serializers/test_group.py @@ -7,6 +7,7 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.group import GroupSerializerSnuba from sentry.issues.grouptype import PerformanceNPlusOneGroupType, ProfileFileIOGroupType +from sentry.issues.priority import PriorityLevel from sentry.models.group import Group, GroupStatus from sentry.models.groupenvironment import GroupEnvironment from sentry.models.grouplink import GroupLink @@ -22,7 +23,6 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.performance_issues.store_transaction import PerfIssueTransactionTestMixin from sentry.testutils.silo import assume_test_silo_mode, region_silo_test -from sentry.types.group import PriorityLevel from sentry.utils.samples import load_data from tests.sentry.issues.test_utils import SearchIssueTestMixin From bce02f415555500f01a4207ca59489c3112a4a11 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 14:24:53 -0500 Subject: [PATCH 136/357] ref: remove SentryScript hack (#64804) since we pass in the script as bytes, the eager encoding lookup does not happen. this was failing in python 2 due to `basestring` and a lack of strict encodings (#19678) --- src/sentry/tsdb/redis.py | 5 +++-- src/sentry/utils/redis.py | 35 +---------------------------------- 2 files changed, 4 insertions(+), 36 deletions(-) diff --git a/src/sentry/tsdb/redis.py b/src/sentry/tsdb/redis.py index c096259fa6e843..afbe8e068f64a3 100644 --- a/src/sentry/tsdb/redis.py +++ b/src/sentry/tsdb/redis.py @@ -11,11 +11,12 @@ from django.utils import timezone from django.utils.encoding import force_bytes +from redis.client import Script from sentry.tsdb.base import BaseTSDB from sentry.utils.compat import crc32 from sentry.utils.dates import to_datetime, to_timestamp -from sentry.utils.redis import SentryScript, check_cluster_versions, get_cluster_from_options +from sentry.utils.redis import check_cluster_versions, get_cluster_from_options from sentry.utils.versioning import Version logger = logging.getLogger(__name__) @@ -24,7 +25,7 @@ SketchParameters = namedtuple("SketchParameters", "depth width capacity") -CountMinScript = SentryScript( +CountMinScript = Script( None, importlib.resources.files("sentry").joinpath("scripts/tsdb/cmsketch.lua").read_bytes() ) diff --git a/src/sentry/utils/redis.py b/src/sentry/utils/redis.py index 708de2d18f72a3..16ae7f1dda1967 100644 --- a/src/sentry/utils/redis.py +++ b/src/sentry/utils/redis.py @@ -10,7 +10,7 @@ import rb from django.utils.functional import SimpleLazyObject from redis.client import Script -from redis.connection import ConnectionPool, Encoder +from redis.connection import ConnectionPool from redis.exceptions import BusyLoadingError, ConnectionError from rediscluster import RedisCluster from rediscluster.exceptions import ClusterError @@ -314,36 +314,3 @@ def call_script(client, keys, args): return script[0](keys, args, client) return call_script - - -class SentryScript(Script): - """ - XXX: This is a gross workaround to fix a breaking api change in redis-py. When we - instantiate a script, we've historically been passing `None` as the client. Then - when we call the script we pass the actual client, which Redis uses as an override. - The breaking changes relies on there being a client passed in the constructor to - determine the encoding of the script before generating the sha. 
- - To work around this, we create a fake client with a fake connection pool that just - returns an encoder that will work. Once this has been done we then set the - `registered_client` back to None, so that the behaviour is the same as before. - - This is only needed when we can't use `load_script`, since we have the client - available there and can pass it through. So once we remove `RedisTSDB` we can also - kill this hack. - """ - - class FakeConnectionPool: - def get_encoder(self): - return Encoder(encoding="utf-8", encoding_errors="strict", decode_responses=False) - - class FakeEncoderClient: - def __init__(self): - self.connection_pool = SentryScript.FakeConnectionPool() - - def __init__(self, registered_client, script): - if registered_client is None: - registered_client = self.FakeEncoderClient() - super().__init__(registered_client, script) - if isinstance(self.registered_client, self.FakeEncoderClient): - self.registered_client = None From 9588474f24d700d6411e26adedac62c041a64525 Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 7 Feb 2024 14:36:01 -0500 Subject: [PATCH 137/357] Revert "build: update corejs to v3.35.1 (#64695)" (#64799) This reverts commit 24dd87cc46b75a064457904548c3e21a992a6d91. This update changed the compiled output of the config [file](https://sentry.io/_chartcuterie-config.js) we serve to chartcuterie - it now includes some URLSearchParam references that chartcuterie can't seem to handle, causing new pods to crash loop --- babel.config.ts | 2 +- package.json | 2 +- yarn.lock | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/babel.config.ts b/babel.config.ts index 0e29d6603ee058..b7ba2975e6f28a 100644 --- a/babel.config.ts +++ b/babel.config.ts @@ -15,7 +15,7 @@ const config: TransformOptions = { '@babel/preset-env', { useBuiltIns: 'usage', - corejs: '3.35.1', + corejs: '3.27', }, ], '@babel/preset-typescript', diff --git a/package.json b/package.json index 043ee7b977e0d5..3e770de06be568 100644 --- a/package.json +++ b/package.json @@ -101,7 +101,7 @@ "color": "^4.2.3", "compression-webpack-plugin": "10.0.0", "copy-webpack-plugin": "^11.0.0", - "core-js": "^3.35.1", + "core-js": "^3.33.0", "cronstrue": "^2.26.0", "crypto-browserify": "^3.12.0", "crypto-js": "4.2.0", diff --git a/yarn.lock b/yarn.lock index 95ba2aa1d1d45f..2db61ec20bca7b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5131,10 +5131,10 @@ core-js@^1.0.0: resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636" integrity sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY= -core-js@^3.35.1: - version "3.35.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.35.1.tgz#9c28f8b7ccee482796f8590cc8d15739eaaf980c" - integrity sha512-IgdsbxNyMskrTFxa9lWHyMwAJU5gXOPP+1yO+K59d50VLVAIDAbs7gIv705KzALModfK3ZrSZTPNpC0PQgIZuw== +core-js@^3.33.0: + version "3.33.0" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.33.0.tgz#70366dbf737134761edb017990cf5ce6c6369c40" + integrity sha512-HoZr92+ZjFEKar5HS6MC776gYslNOKHt75mEBKWKnPeFDpZ6nH5OeF3S6HFT1mUAUZKrzkez05VboaX8myjSuw== core-util-is@~1.0.0: version "1.0.2" From 6e0408c8da7305f4e36498d77fd7adff703bb6b0 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Wed, 7 Feb 2024 14:42:06 -0500 Subject: [PATCH 138/357] fix(mobile-starfish): Broken links to profiles (#64803) The URL incorrectly uses `/flamechart` instead of `/flamegraph` --- .../views/starfish/components/samplesTable/spanSamplesTable.tsx | 2 +- .../components/samplesTable/transactionSamplesTable.tsx | 2 +- 2 files changed, 2 
insertions(+), 2 deletions(-) diff --git a/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx b/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx index ab91ee466996f4..3319333cfd1d72 100644 --- a/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx +++ b/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx @@ -143,7 +143,7 @@ export function SpanSamplesTable({ diff --git a/static/app/views/starfish/components/samplesTable/transactionSamplesTable.tsx b/static/app/views/starfish/components/samplesTable/transactionSamplesTable.tsx index a846267a77048b..56a4b6c1eed1b0 100644 --- a/static/app/views/starfish/components/samplesTable/transactionSamplesTable.tsx +++ b/static/app/views/starfish/components/samplesTable/transactionSamplesTable.tsx @@ -195,7 +195,7 @@ export function TransactionSamplesTable({ {row.profile_id.slice(0, 8)} From b9adc3b9a1d126079c58afbb98aa921060cc4f28 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Wed, 7 Feb 2024 11:43:58 -0800 Subject: [PATCH 139/357] feat(issues): Add analytics to issue details page for timeline (#64806) --- .../traceTimeline/traceTimeline.spec.tsx | 13 +++++++++++++ .../issueDetails/traceTimeline/traceTimeline.tsx | 15 ++++++++++++++- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx index c03ee43da8fd69..c96641dfc59dd1 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx @@ -5,10 +5,13 @@ import {ProjectFixture} from 'sentry-fixture/project'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; import ProjectsStore from 'sentry/stores/projectsStore'; +import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; import {TraceTimeline} from './traceTimeline'; import type {TraceEventResponse} from './useTraceTimelineEvents'; +jest.mock('sentry/utils/routeAnalytics/useRouteAnalyticsParams'); + describe('TraceTimeline', () => { const organization = OrganizationFixture({features: ['issues-trace-timeline']}); const event = EventFixture({ @@ -55,6 +58,7 @@ describe('TraceTimeline', () => { beforeEach(() => { ProjectsStore.loadInitialData([project]); + jest.clearAllMocks(); }); it('renders items and highlights the current event', async () => { @@ -73,6 +77,9 @@ describe('TraceTimeline', () => { await userEvent.hover(screen.getByTestId('trace-timeline-tooltip-1')); expect(await screen.findByText('You are here')).toBeInTheDocument(); + expect(useRouteAnalyticsParams).toHaveBeenCalledWith({ + trace_timeline_status: 'shown', + }); }); it('displays nothing if the only event is the current event', async () => { @@ -91,6 +98,9 @@ describe('TraceTimeline', () => { }); render(, {organization}); expect(await screen.findByTestId('trace-timeline-empty')).toBeInTheDocument(); + expect(useRouteAnalyticsParams).toHaveBeenCalledWith({ + trace_timeline_status: 'empty', + }); }); it('displays nothing if there are no events', async () => { @@ -112,6 +122,9 @@ describe('TraceTimeline', () => { }); render(, {organization}); expect(await screen.findByTestId('trace-timeline-empty')).toBeInTheDocument(); + expect(useRouteAnalyticsParams).toHaveBeenCalledWith({ + trace_timeline_status: 'empty', + }); }); it('shows seconds for very short timelines', async () => { diff --git 
a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index 6301917c0971cb..d7b8a0e3efc623 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -5,6 +5,7 @@ import ErrorBoundary from 'sentry/components/errorBoundary'; import Placeholder from 'sentry/components/placeholder'; import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types'; +import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; import {useDimensions} from 'sentry/utils/useDimensions'; import useOrganization from 'sentry/utils/useOrganization'; import {hasTraceTimelineFeature} from 'sentry/views/issueDetails/traceTimeline/utils'; @@ -23,7 +24,19 @@ export function TraceTimeline({event}: TraceTimelineProps) { const hasFeature = hasTraceTimelineFeature(organization); const {isError, isLoading, data} = useTraceTimelineEvents({event}, hasFeature); - if (!hasFeature || !event.contexts?.trace?.trace_id) { + const hasTraceId = !!event.contexts?.trace?.trace_id; + + let timelineStatus: string | undefined; + if (hasFeature) { + if (hasTraceId && !isLoading) { + timelineStatus = data.length > 1 ? 'shown' : 'empty'; + } else if (!hasTraceId) { + timelineStatus = 'no_trace_id'; + } + } + useRouteAnalyticsParams(timelineStatus ? {trace_timeline_status: timelineStatus} : {}); + + if (!hasFeature || !hasTraceId) { return null; } From 004a2c37c314187f223f4a1b6567ac25aaef15f6 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Wed, 7 Feb 2024 14:47:04 -0500 Subject: [PATCH 140/357] fix(app-start): Use the span op to filter event samples (#64812) For some reason the app_start_type tag isn't populating on the duration metric. Since we're clamped down to specific span ops and descriptions for the event samples, just replacing the span op in the filter works for now. --- .../appStartup/screenSummary/eventSamples.tsx | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx b/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx index c6b4ed581228fc..22cc64f7e2b35a 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx @@ -13,10 +13,7 @@ import { } from 'sentry/views/starfish/components/releaseSelector'; import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; import {SpanMetricsField} from 'sentry/views/starfish/types'; -import { - COLD_START_TYPE, - WARM_START_TYPE, -} from 'sentry/views/starfish/views/appStartup/screenSummary/startTypeSelector'; +import {COLD_START_TYPE} from 'sentry/views/starfish/views/appStartup/screenSummary/startTypeSelector'; import {EventSamplesTable} from 'sentry/views/starfish/views/screens/screenLoadSpans/eventSamplesTable'; import {useTableQuery} from 'sentry/views/starfish/views/screens/screensTable'; @@ -53,15 +50,20 @@ export function EventSamples({ const searchQuery = new MutableSearch([ `transaction:${transaction}`, `release:${release}`, - 'span.op:[app.start.cold,app.start.warm]', + startType + ? `${SpanMetricsField.SPAN_OP}:${ + startType === COLD_START_TYPE ? 
'app.start.cold' : 'app.start.warm' + }` + : 'span.op:[app.start.cold,app.start.warm]', '(', 'span.description:"Cold Start"', 'OR', 'span.description:"Warm Start"', ')', - `${SpanMetricsField.APP_START_TYPE}:${ - startType || `[${COLD_START_TYPE},${WARM_START_TYPE}]` - }`, + // TODO: Add this back in once we have the ability to filter by start type + // `${SpanMetricsField.APP_START_TYPE}:${ + // startType || `[${COLD_START_TYPE},${WARM_START_TYPE}]` + // }`, ]); if (deviceClass) { From 322d5c3b2654fff739d6923574f4bee012b15451 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 7 Feb 2024 14:49:48 -0500 Subject: [PATCH 141/357] style(integrations): Fix typo in class name (#64768) `Reposity` --> `Repository` --- src/sentry/integrations/utils/codecov.py | 4 ++-- src/sentry/integrations/utils/stacktrace_link.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/sentry/integrations/utils/codecov.py b/src/sentry/integrations/utils/codecov.py index 4e7e0fbe2c4384..142bfe4bdf6625 100644 --- a/src/sentry/integrations/utils/codecov.py +++ b/src/sentry/integrations/utils/codecov.py @@ -10,7 +10,7 @@ from sentry_sdk import configure_scope from sentry import options -from sentry.integrations.utils.stacktrace_link import ReposityLinkOutcome +from sentry.integrations.utils.stacktrace_link import RepositoryLinkOutcome from sentry.models.organization import Organization from sentry.models.repository import Repository from sentry.services.hybrid_cloud.integration import integration_service @@ -141,7 +141,7 @@ class CodecovConfig(TypedDict): repository: Repository # Config is a serialized RepositoryProjectPathConfig config: Any - outcome: ReposityLinkOutcome + outcome: RepositoryLinkOutcome class CodecovData(TypedDict): diff --git a/src/sentry/integrations/utils/stacktrace_link.py b/src/sentry/integrations/utils/stacktrace_link.py index 51f6be5b0784d4..ad730dc81674cd 100644 --- a/src/sentry/integrations/utils/stacktrace_link.py +++ b/src/sentry/integrations/utils/stacktrace_link.py @@ -19,7 +19,7 @@ logger = logging.getLogger(__name__) -class ReposityLinkOutcome(TypedDict): +class RepositoryLinkOutcome(TypedDict): sourceUrl: NotRequired[str] error: NotRequired[str] attemptedUrl: NotRequired[str] @@ -32,8 +32,8 @@ def get_link( version: str | None = None, group_id: str | None = None, frame_abs_path: str | None = None, -) -> ReposityLinkOutcome: - result: ReposityLinkOutcome = {} +) -> RepositoryLinkOutcome: + result: RepositoryLinkOutcome = {} integration = integration_service.get_integration( organization_integration_id=config.organization_integration_id @@ -82,7 +82,7 @@ def get_link( class StacktraceLinkConfig(TypedDict): config: RepositoryProjectPathConfig - outcome: ReposityLinkOutcome + outcome: RepositoryLinkOutcome repository: Repository From 90a4a8a2efdf755fb8f14f5a741a0745243dccfc Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:05:40 -0500 Subject: [PATCH 142/357] ref: upgrade django-stubs to prevent incorrect django timezone.utc access (#64813) --- requirements-dev-frozen.txt | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index b4527d3dfea46b..f3e67ade792021 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -175,7 +175,7 @@ selenium==4.16.0 sentry-arroyo==2.16.0 sentry-cli==2.16.0 sentry-devenv==1.2.2 
-sentry-forked-django-stubs==4.2.7.post2 +sentry-forked-django-stubs==4.2.7.post3 sentry-forked-djangorestframework-stubs==3.14.5.post1 sentry-kafka-schemas==0.1.38 sentry-ophio==0.1.5 diff --git a/requirements-dev.txt b/requirements-dev.txt index 0002af8fe126f9..0f70b4a7685e23 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -34,7 +34,7 @@ pip-tools>=7.1.0 packaging>=21.3 # for type checking -sentry-forked-django-stubs>=4.2.7.post2 +sentry-forked-django-stubs>=4.2.7.post3 sentry-forked-djangorestframework-stubs>=3.14.5.post1 lxml-stubs msgpack-types>=0.2.0 From c59f9a0f306966262186228492e792527f41ce38 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:19:48 -0500 Subject: [PATCH 143/357] ref: upgrade msgpack (#64808) this version is built for 3.12 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 231d6b2e42de54..40eacc76e5ea15 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -82,7 +82,7 @@ pyuwsgi==2.0.23 zstandard>=0.18.0 sentry-usage-accountant==0.0.10 -msgpack>=1.0.4 +msgpack>=1.0.7 cryptography>=38.0.3 # Note, grpcio>1.30.0 requires setting GRPC_POLL_STRATEGY=epoll1 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index f3e67ade792021..a3b372c4e9813f 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -98,7 +98,7 @@ milksnake==0.1.6 mistune==2.0.4 mmh3==4.0.0 more-itertools==8.13.0 -msgpack==1.0.4 +msgpack==1.0.7 msgpack-types==0.2.0 mypy==1.8.0 mypy-extensions==1.0.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 6b37edd5b5c7f4..93c91e0369c8c9 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -73,7 +73,7 @@ maxminddb==2.3.0 milksnake==0.1.6 mistune==2.0.4 mmh3==4.0.0 -msgpack==1.0.4 +msgpack==1.0.7 oauthlib==3.1.0 openai==1.3.5 packaging==21.3 From 211cdcbbeed93eeb3e63cd63408637372912d38e Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:20:48 -0500 Subject: [PATCH 144/357] ref(similarity-embedding): Add analytics for similar issue diff (#64685) Add analytics when user clicks on similar issues diff Co-authored-by: Scott Cooper --- .../app/components/issueDiff/index.spec.tsx | 8 +- static/app/components/issueDiff/index.tsx | 75 +++++++++++++++---- static/app/components/modals/diffModal.tsx | 4 +- .../utils/analytics/issueAnalyticsEvents.tsx | 13 ++++ 4 files changed, 85 insertions(+), 15 deletions(-) diff --git a/static/app/components/issueDiff/index.spec.tsx b/static/app/components/issueDiff/index.spec.tsx index e2b7c1cf64a958..e3c9bef4297862 100644 --- a/static/app/components/issueDiff/index.spec.tsx +++ b/static/app/components/issueDiff/index.spec.tsx @@ -1,17 +1,21 @@ import {Entries123Base, Entries123Target} from 'sentry-fixture/entries'; +import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {render, screen} from 'sentry-test/reactTestingLibrary'; import {IssueDiff} from 'sentry/components/issueDiff'; +import {trackAnalytics} from 'sentry/utils/analytics'; jest.mock('sentry/api'); +jest.mock('sentry/utils/analytics'); describe('IssueDiff', function () { const entries123Target = Entries123Target(); const entries123Base = Entries123Base(); const api = new MockApiClient(); - const project = 
ProjectFixture(); + const organization = OrganizationFixture(); + const project = ProjectFixture({features: ['similarity-embeddings']}); beforeEach(function () { MockApiClient.addMockResponse({ @@ -67,10 +71,12 @@ describe('IssueDiff', function () { targetIssueId="target" orgId="org-slug" project={project} + organization={organization} /> ); expect(await screen.findByTestId('split-diff')).toBeInTheDocument(); + expect(trackAnalytics).toHaveBeenCalled(); }); it('can diff message', async function () { diff --git a/static/app/components/issueDiff/index.tsx b/static/app/components/issueDiff/index.tsx index 77248f8e876db7..e9be5fd5fb19e1 100644 --- a/static/app/components/issueDiff/index.tsx +++ b/static/app/components/issueDiff/index.tsx @@ -8,7 +8,8 @@ import LoadingIndicator from 'sentry/components/loadingIndicator'; import type SplitDiff from 'sentry/components/splitDiff'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Project} from 'sentry/types'; +import type {Organization, Project} from 'sentry/types'; +import {trackAnalytics} from 'sentry/utils/analytics'; import getStacktraceBody from 'sentry/utils/getStacktraceBody'; import withApi from 'sentry/utils/withApi'; @@ -27,6 +28,7 @@ type Props = { targetIssueId: string; baseEventId?: string; className?: string; + organization?: Organization; targetEventId?: string; }; @@ -55,28 +57,71 @@ class IssueDiff extends Component { } fetchData() { - const {baseIssueId, targetIssueId, baseEventId, targetEventId} = this.props; + const { + baseIssueId, + targetIssueId, + baseEventId, + targetEventId, + organization, + project, + } = this.props; + const hasSimilarityEmbeddingsFeature = project.features.includes( + 'similarity-embeddings' + ); // Fetch component and event data - Promise.all([ - import('../splitDiff'), - this.fetchEventData(baseIssueId, baseEventId ?? 'latest'), - this.fetchEventData(targetIssueId, targetEventId ?? 'latest'), - ]) - .then(([{default: SplitDiffAsync}, baseEvent, targetEvent]) => { + const asyncFetch = async () => { + try { + const splitdiffPromise = import('../splitDiff'); + const {default: SplitDiffAsync} = await splitdiffPromise; + + const [baseEventData, targetEventData] = await Promise.all([ + this.fetchEvent(baseIssueId, baseEventId ?? 'latest'), + this.fetchEvent(targetIssueId, targetEventId ?? 'latest'), + ]); + + const [baseEvent, targetEvent] = await Promise.all([ + getStacktraceBody(baseEventData), + getStacktraceBody(targetEventData), + ]); + this.setState({ SplitDiffAsync, baseEvent, targetEvent, loading: false, }); - }) - .catch(() => { + if (organization && hasSimilarityEmbeddingsFeature) { + trackAnalytics('issue_details.similar_issues.diff_clicked', { + organization, + project_id: baseEventData?.projectID, + group_id: baseEventData?.groupID, + error_message: baseEventData?.message + ? baseEventData.message + : baseEventData?.title, + stacktrace: baseEvent.join('/n '), + transaction: this.getTransaction( + baseEventData?.tags ? baseEventData.tags : [] + ), + parent_group_id: targetEventData?.groupID, + parent_error_message: targetEventData?.message + ? targetEventData.message + : targetEventData?.title, + parent_stacktrace: targetEvent.join('/n '), + parent_transaction: this.getTransaction( + targetEventData?.tags ? 
targetEventData.tags : [] + ), + }); + } + } catch { addErrorMessage(t('Error loading events')); - }); + } + }; + + asyncFetch(); } - fetchEventData = async (issueId: string, eventId: string) => { + fetchEvent = async (issueId: string, eventId: string) => { const {orgId, project, api} = this.props; let paramEventId = eventId; @@ -89,7 +134,11 @@ class IssueDiff extends Component { const event = await api.requestPromise( `/projects/${orgId}/${project.slug}/events/${paramEventId}/` ); - return getStacktraceBody(event); + return event; + }; + + getTransaction = (tags: any[]) => { + return tags.find(tag => tag.key === 'transaction'); }; render() { diff --git a/static/app/components/modals/diffModal.tsx b/static/app/components/modals/diffModal.tsx index 094670b4dbded4..e472aea915c71d 100644 --- a/static/app/components/modals/diffModal.tsx +++ b/static/app/components/modals/diffModal.tsx @@ -2,14 +2,16 @@ import {css} from '@emotion/react'; import type {ModalRenderProps} from 'sentry/actionCreators/modal'; import IssueDiff from 'sentry/components/issueDiff'; +import useOrganization from 'sentry/utils/useOrganization'; type Props = ModalRenderProps & React.ComponentProps; function DiffModal({className, Body, CloseButton, ...props}: Props) { + const organization = useOrganization(); return ( - + ); } diff --git a/static/app/utils/analytics/issueAnalyticsEvents.tsx b/static/app/utils/analytics/issueAnalyticsEvents.tsx index e964689bc9e047..ac3f122f69c626 100644 --- a/static/app/utils/analytics/issueAnalyticsEvents.tsx +++ b/static/app/utils/analytics/issueAnalyticsEvents.tsx @@ -89,6 +89,17 @@ export type IssueEventParameters = { 'issue_details.performance.autogrouped_siblings_toggle': {}; 'issue_details.performance.hidden_spans_expanded': {}; 'issue_details.set_priority': SetPriorityParams; + 'issue_details.similar_issues.diff_clicked': { + error_message?: string; + group_id?: string; + parent_error_message?: string; + parent_group_id?: string; + parent_stacktrace?: string; + parent_transaction?: string; + project_id?: string; + stacktrace?: string; + transaction?: string; + }; 'issue_details.similar_issues.similarity_embeddings_feedback_recieved': { groupId: string; parentGroupId: string; @@ -261,6 +272,8 @@ export const issueEventMap: Record = { 'Issue Details: Escalating Feedback Received', 'issue_details.escalating_issues_banner_feedback_received': 'Issue Details: Escalating Issues Banner Feedback Received', + 'issue_details.similar_issues.diff_clicked': + 'Issue Details: Similar Issues: Diff Clicked', 'issue_details.similar_issues.similarity_embeddings_feedback_recieved': 'Issue Details: Similar Issues: Similarity Embeddings Feedback Recieved', 'issue_details.view_hierarchy.hover_rendering_system': From 76cb5d4d87535877a67d6dbf9bd6a5d80d9d208f Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:37:10 -0500 Subject: [PATCH 145/357] feat(replays): Add `click.component_name` to searchable fields (#64786) Adds a field to the smart searchbar for replays, to suggest searching by the name of the component that was clicked on ![image](https://github.com/getsentry/sentry/assets/16740047/62daca03-4297-415e-bf8d-5f5eb76600da) --- static/app/utils/fields/index.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/static/app/utils/fields/index.ts b/static/app/utils/fields/index.ts index c834577bc3d5c2..379bbf5aaef4f3 100644 --- a/static/app/utils/fields/index.ts +++ b/static/app/utils/fields/index.ts @@ -1245,6 +1245,7 @@ export enum ReplayClickFieldKey { 
CLICK_TESTID = 'click.testid', CLICK_TEXT_CONTENT = 'click.textContent', CLICK_TITLE = 'click.title', + CLICK_COMPONENT_NAME = 'click.component_name', } /** @@ -1368,6 +1369,7 @@ export const REPLAY_CLICK_FIELDS = [ ReplayClickFieldKey.CLICK_TEXT_CONTENT, ReplayClickFieldKey.CLICK_TITLE, ReplayClickFieldKey.CLICK_TESTID, + ReplayClickFieldKey.CLICK_COMPONENT_NAME, ]; // This is separated out from REPLAY_FIELD_DEFINITIONS so that it is feature-flaggable @@ -1438,6 +1440,11 @@ const REPLAY_CLICK_FIELD_DEFINITIONS: Record Date: Wed, 7 Feb 2024 15:45:59 -0500 Subject: [PATCH 146/357] ref: projectconfig_cache stores binary in redis so use decode_responses=False (#64816) when upgrading either `redis` or `hiredis` these become `UnicodeDecodeError`s: ``` __________________________ test_invalidate_hierarchy ___________________________ tests/sentry/tasks/test_relay.py:538: in test_invalidate_hierarchy run(max_jobs=10) src/sentry/testutils/helpers/task_runner.py:52: in work self(*args, **kwargs) .venv/lib/python3.11/site-packages/celery/app/task.py:411: in __call__ return self.run(*args, **kwargs) .venv/lib/python3.11/site-packages/sentry_sdk/integrations/celery.py:306: in _inner reraise(*exc_info) .venv/lib/python3.11/site-packages/sentry_sdk/_compat.py:115: in reraise raise value .venv/lib/python3.11/site-packages/sentry_sdk/integrations/celery.py:301: in _inner return f(*args, **kwargs) src/sentry/silo/base.py:146: in override return original_method(*args, **kwargs) src/sentry/tasks/base.py:118: in _wrapped result = func(*args, **kwargs) src/sentry/tasks/relay.py:242: in invalidate_project_config updated_configs = compute_configs( src/sentry/tasks/relay.py:135: in compute_configs if projectconfig_cache.backend.get(key.public_key) is not None: src/sentry/relay/projectconfig_cache/redis.py:60: in get rv = self.cluster_read.get(self.__get_redis_key(public_key)) .venv/lib/python3.11/site-packages/redis/client.py:1579: in get return self.execute_command('GET', name) .venv/lib/python3.11/site-packages/sentry_redis_tools/failover_redis.py:28: in wrapper return get_wrapped_fn()(*args, **kwargs) .venv/lib/python3.11/site-packages/sentry_sdk/integrations/redis/__init__.py:221: in sentry_patched_execute_command return old_execute_command(self, name, *args, **kwargs) .venv/lib/python3.11/site-packages/redis/client.py:878: in execute_command return self.parse_response(conn, command_name, **options) .venv/lib/python3.11/site-packages/redis/client.py:892: in parse_response response = connection.read_response() .venv/lib/python3.11/site-packages/redis/connection.py:734: in read_response response = self._parser.read_response() .venv/lib/python3.11/site-packages/redis/connection.py:464: in read_response response = self._reader.gets() E UnicodeDecodeError: 'utf-8' codec can't decode byte 0xb5 in position 1: invalid start byte ``` --- src/sentry/relay/projectconfig_cache/redis.py | 4 +-- src/sentry/utils/redis.py | 34 +++++++++++-------- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/src/sentry/relay/projectconfig_cache/redis.py b/src/sentry/relay/projectconfig_cache/redis.py index f540fd2d0a20d5..b3ae6237096afb 100644 --- a/src/sentry/relay/projectconfig_cache/redis.py +++ b/src/sentry/relay/projectconfig_cache/redis.py @@ -15,10 +15,10 @@ class RedisProjectConfigCache(ProjectConfigCache): def __init__(self, **options): cluster_key = options.get("cluster", "default") - self.cluster = redis.redis_clusters.get(cluster_key) + self.cluster = redis.redis_clusters.get(cluster_key, decode_responses=False) 
read_cluster_key = options.get("read_cluster", cluster_key) - self.cluster_read = redis.redis_clusters.get(read_cluster_key) + self.cluster_read = redis.redis_clusters.get(read_cluster_key, decode_responses=False) super().__init__(**options) diff --git a/src/sentry/utils/redis.py b/src/sentry/utils/redis.py index 16ae7f1dda1967..f9f1984af865d2 100644 --- a/src/sentry/utils/redis.py +++ b/src/sentry/utils/redis.py @@ -60,7 +60,9 @@ class _RBCluster: def supports(self, config): return not config.get("is_redis_cluster", False) - def factory(self, **config): + def factory(self, *, decode_responses: bool, **config): + if not decode_responses: + raise NotImplementedError("decode_responses=False mode is not implemented for `rb`") # rb expects a dict of { host, port } dicts where the key is the host # ID. Coerce the configuration into the correct format if necessary. hosts = config["hosts"] @@ -107,7 +109,7 @@ def supports(self, config): # in non-cluster mode. return config.get("is_redis_cluster", False) or len(config.get("hosts")) == 1 - def factory(self, **config): + def factory(self, *, decode_responses: bool, **config): # StrictRedisCluster expects a list of { host, port } dicts. Coerce the # configuration into the correct format if necessary. hosts = config.get("hosts") @@ -133,7 +135,7 @@ def cluster_factory(): # # https://github.com/Grokzen/redis-py-cluster/blob/73f27edf7ceb4a408b3008ef7d82dac570ab9c6a/rediscluster/nodemanager.py#L385 startup_nodes=deepcopy(hosts), - decode_responses=True, + decode_responses=decode_responses, skip_full_coverage_check=True, max_connections=16, max_connections_per_node=True, @@ -142,7 +144,7 @@ def cluster_factory(): ) else: host = hosts[0].copy() - host["decode_responses"] = True + host["decode_responses"] = decode_responses return ( import_string(config["client_class"]) if "client_class" in config @@ -170,17 +172,19 @@ def __init__( ... def __init__(self, options_manager, cluster_type=_RBCluster): - self.__clusters = {} + self.__clusters: dict[tuple[str, bool], TCluster] = {} self.__options_manager = options_manager self.__cluster_type = cluster_type() - def get(self, key) -> TCluster: - cluster = self.__clusters.get(key) + def get(self, key: str, *, decode_responses: bool = True) -> TCluster: + cache_key = (key, decode_responses) + try: + return self.__clusters[cache_key] + except KeyError: + # Do not access attributes of the `cluster` object to prevent + # setup/init of lazy objects. The _RedisCluster type will try to + # connect to the cluster during initialization. - # Do not access attributes of the `cluster` object to prevent - # setup/init of lazy objects. The _RedisCluster type will try to - # connect to the cluster during initialization. - if cluster is None: # TODO: This would probably be safer with a lock, but I'm not sure # that it's necessary. 
configuration = self.__options_manager.get("redis.clusters").get(key) @@ -190,9 +194,11 @@ def get(self, key) -> TCluster: if not self.__cluster_type.supports(configuration): raise KeyError(f"Invalid cluster type, expected: {self.__cluster_type}") - cluster = self.__clusters[key] = self.__cluster_type.factory(**configuration) - - return cluster + ret = self.__clusters[cache_key] = self.__cluster_type.factory( + **configuration, + decode_responses=decode_responses, + ) + return ret # TODO(epurkhiser): When migration of all rb cluster to true redis clusters has From 32adb3d7b4a9f5503157ece21d9a5cdc10e6d531 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Wed, 7 Feb 2024 13:03:08 -0800 Subject: [PATCH 147/357] feat(issue-priority): Add new `GroupType.default_priority` field (#64817) (Trying this again from a clean branch) Pre-work for https://github.com/getsentry/sentry/pull/64231 We're adding GroupType.default_priority, which will be used in the case where an occurrence is created without providing initial_issue_priority. --- src/sentry/event_manager.py | 9 +++-- src/sentry/issues/grouptype.py | 35 +++++++++++++++++-- src/sentry/issues/priority.py | 9 ++--- src/sentry/types/group.py | 7 ++++ tests/sentry/event_manager/test_priority.py | 2 +- tests/sentry/issues/test_priority.py | 2 +- .../issues/test_status_change_consumer.py | 3 +- tests/sentry/models/test_activity.py | 3 +- tests/sentry/tasks/test_post_process.py | 3 +- .../snuba/api/endpoints/test_group_details.py | 2 +- tests/snuba/api/serializers/test_group.py | 2 +- 11 files changed, 54 insertions(+), 23 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 39f6065333f58e..67267bbb9dff81 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -54,7 +54,6 @@ from sentry.ingest.inbound_filters import FilterStatKeys from sentry.issues.grouptype import GroupCategory from sentry.issues.issue_occurrence import IssueOccurrence -from sentry.issues.priority import PriorityLevel from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka from sentry.killswitches import killswitch_matches_context from sentry.lang.native.utils import STORE_CRASH_REPORTS_ALL, convert_crashreport_count @@ -97,7 +96,7 @@ from sentry.tasks.relay import schedule_invalidate_project_config from sentry.tsdb.base import TSDBModel from sentry.types.activity import ActivityType -from sentry.types.group import GroupSubStatus +from sentry.types.group import GroupSubStatus, PriorityLevel from sentry.usage_accountant import record from sentry.utils import json, metrics from sentry.utils.cache import cache_key_for_event @@ -728,9 +727,9 @@ def _associate_commits_with_release(release: Release, project: Project) -> None: "release_id": release.id, "user_id": None, "refs": [{"repository": target_repo.name, "commit": release.version}], - "prev_release_id": previous_release.id - if previous_release is not None - else None, + "prev_release_id": ( + previous_release.id if previous_release is not None else None + ), } ) diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index e329224894db9a..a72a13fbd2d309 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -11,6 +11,7 @@ from sentry import features from sentry.features.base import OrganizationFeature from sentry.ratelimits.sliding_windows import Quota +from sentry.types.group import PriorityLevel from sentry.utils import metrics if TYPE_CHECKING: @@ -116,6 +117,7 @@ class GroupType: description: str 
category: int noise_config: NoiseConfig | None = None + default_priority: int = PriorityLevel.MEDIUM # If True this group type should be released everywhere. If False, fall back to features to # decide if this is released. released: bool = False @@ -217,6 +219,7 @@ class ErrorGroupType(GroupType): slug = "error" description = "Error" category = GroupCategory.ERROR.value + default_priority = PriorityLevel.MEDIUM released = True @@ -232,6 +235,7 @@ class PerformanceSlowDBQueryGroupType(PerformanceGroupTypeDefaults, GroupType): description = "Slow DB Query" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=100) + default_priority = PriorityLevel.LOW released = True @@ -241,6 +245,7 @@ class PerformanceRenderBlockingAssetSpanGroupType(PerformanceGroupTypeDefaults, slug = "performance_render_blocking_asset_span" description = "Large Render Blocking Asset" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -250,6 +255,7 @@ class PerformanceNPlusOneGroupType(PerformanceGroupTypeDefaults, GroupType): slug = "performance_n_plus_one_db_queries" description = "N+1 Query" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -260,6 +266,7 @@ class PerformanceConsecutiveDBQueriesGroupType(PerformanceGroupTypeDefaults, Gro description = "Consecutive DB Queries" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=15) + default_priority = PriorityLevel.LOW released = True @@ -269,6 +276,7 @@ class PerformanceFileIOMainThreadGroupType(PerformanceGroupTypeDefaults, GroupTy slug = "performance_file_io_main_thread" description = "File IO on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -279,6 +287,7 @@ class PerformanceConsecutiveHTTPQueriesGroupType(PerformanceGroupTypeDefaults, G description = "Consecutive HTTP" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=5) + default_priority = PriorityLevel.LOW released = True @@ -288,6 +297,7 @@ class PerformanceNPlusOneAPICallsGroupType(GroupType): slug = "performance_n_plus_one_api_calls" description = "N+1 API Call" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -297,6 +307,7 @@ class PerformanceMNPlusOneDBQueriesGroupType(PerformanceGroupTypeDefaults, Group slug = "performance_m_n_plus_one_db_queries" description = "MN+1 Query" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -307,6 +318,7 @@ class PerformanceUncompressedAssetsGroupType(PerformanceGroupTypeDefaults, Group description = "Uncompressed Asset" category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=100) + default_priority = PriorityLevel.LOW released = True @@ -316,6 +328,7 @@ class PerformanceDBMainThreadGroupType(PerformanceGroupTypeDefaults, GroupType): slug = "performance_db_main_thread" description = "DB on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -325,6 +338,7 @@ class PerformanceLargeHTTPPayloadGroupType(PerformanceGroupTypeDefaults, GroupTy slug = "performance_large_http_payload" description = "Large HTTP payload" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW released = True @@ -335,6 +349,7 @@ class PerformanceHTTPOverheadGroupType(PerformanceGroupTypeDefaults, GroupType): description = 
"HTTP/1.1 Overhead" noise_config = NoiseConfig(ignore_limit=20) category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW # experimental @@ -346,6 +361,7 @@ class PerformanceDurationRegressionGroupType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False enable_escalation_detection = False + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -356,6 +372,7 @@ class PerformanceP95EndpointRegressionGroupType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False enable_escalation_detection = False + default_priority = PriorityLevel.MEDIUM released = True @@ -366,6 +383,7 @@ class ProfileFileIOGroupType(GroupType): slug = "profile_file_io_main_thread" description = "File I/O on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -374,6 +392,7 @@ class ProfileImageDecodeGroupType(GroupType): slug = "profile_image_decode_main_thread" description = "Image Decoding on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -382,6 +401,7 @@ class ProfileJSONDecodeType(GroupType): slug = "profile_json_decode_main_thread" description = "JSON Decoding on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -390,17 +410,17 @@ class ProfileCoreDataExperimentalType(GroupType): slug = "profile_core_data_main_exp" description = "Core Data on Main Thread" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW # 2005 was ProfileRegexExperimentalType - - @dataclass(frozen=True) class ProfileViewIsSlowExperimentalType(GroupType): type_id = 2006 slug = "profile_view_is_slow_experimental" description = "View Render/Layout/Update is slow" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -410,6 +430,7 @@ class ProfileRegexType(GroupType): description = "Regex on Main Thread" category = GroupCategory.PERFORMANCE.value released = True + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -418,6 +439,7 @@ class ProfileFrameDropExperimentalType(GroupType): slug = "profile_frame_drop_experimental" description = "Frame Drop" category = GroupCategory.PERFORMANCE.value + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -428,6 +450,7 @@ class ProfileFrameDropType(GroupType): category = GroupCategory.PERFORMANCE.value noise_config = NoiseConfig(ignore_limit=2000) released = True + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -437,6 +460,7 @@ class ProfileFunctionRegressionExperimentalType(GroupType): description = "Function Duration Regression (Experimental)" category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False + default_priority = PriorityLevel.LOW @dataclass(frozen=True) @@ -447,6 +471,7 @@ class ProfileFunctionRegressionType(GroupType): category = GroupCategory.PERFORMANCE.value enable_auto_resolve = False released = True + default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -457,6 +482,7 @@ class MonitorCheckInFailure(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -467,6 +493,7 @@ class MonitorCheckInTimeout(GroupType): category = GroupCategory.CRON.value released = True creation_quota = 
Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -477,6 +504,7 @@ class MonitorCheckInMissed(GroupType): category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) @@ -486,6 +514,7 @@ class ReplayDeadClickType(GroupType): slug = "replay_click_dead" description = "Dead Click Detected" category = GroupCategory.REPLAY.value + default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -494,6 +523,7 @@ class ReplayRageClickType(GroupType): slug = "replay_click_rage" description = "Rage Click Detected" category = GroupCategory.REPLAY.value + default_priority = PriorityLevel.MEDIUM @dataclass(frozen=True) @@ -503,6 +533,7 @@ class FeedbackGroup(GroupType): description = "Feedback" category = GroupCategory.FEEDBACK.value creation_quota = Quota(3600, 60, 1000) # 1000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.MEDIUM @metrics.wraps("noise_reduction.should_create_group", sample_rate=1.0) diff --git a/src/sentry/issues/priority.py b/src/sentry/issues/priority.py index b7dc224e64924e..b999af69d5885e 100644 --- a/src/sentry/issues/priority.py +++ b/src/sentry/issues/priority.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from enum import Enum, IntEnum +from enum import Enum from typing import TYPE_CHECKING from sentry import features @@ -10,17 +10,12 @@ from sentry.models.user import User from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel if TYPE_CHECKING: from sentry.models.group import Group -class PriorityLevel(IntEnum): - LOW = 25 - MEDIUM = 50 - HIGH = 75 - - PRIORITY_LEVEL_TO_STR: dict[int, str] = { PriorityLevel.LOW: "low", PriorityLevel.MEDIUM: "medium", diff --git a/src/sentry/types/group.py b/src/sentry/types/group.py index a781ed769206f8..096daa63b306b8 100644 --- a/src/sentry/types/group.py +++ b/src/sentry/types/group.py @@ -1,4 +1,5 @@ from collections.abc import Mapping +from enum import IntEnum class GroupSubStatus: @@ -65,3 +66,9 @@ class GroupSubStatus: GroupSubStatus.FOREVER: "archived_forever", GroupSubStatus.UNTIL_CONDITION_MET: "archived_until_condition_met", } + + +class PriorityLevel(IntEnum): + LOW = 25 + MEDIUM = 50 + HIGH = 75 diff --git a/tests/sentry/event_manager/test_priority.py b/tests/sentry/event_manager/test_priority.py index 2fbd97988a43b2..fa25e59b753276 100644 --- a/tests/sentry/event_manager/test_priority.py +++ b/tests/sentry/event_manager/test_priority.py @@ -4,11 +4,11 @@ from unittest.mock import MagicMock, patch from sentry.event_manager import EventManager -from sentry.issues.priority import PriorityLevel from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba +from sentry.types.group import PriorityLevel from tests.sentry.event_manager.test_severity import make_event pytestmark = [requires_snuba] diff --git a/tests/sentry/issues/test_priority.py b/tests/sentry/issues/test_priority.py index 55d4c4ab63d59d..07b1145796106c 100644 --- a/tests/sentry/issues/test_priority.py +++ b/tests/sentry/issues/test_priority.py @@ -4,7 +4,6 @@ PRIORITY_LEVEL_TO_STR, PRIORITY_TO_GROUP_HISTORY_STATUS, PriorityChangeReason, - PriorityLevel, 
auto_update_priority, ) from sentry.models.activity import Activity @@ -14,6 +13,7 @@ from sentry.testutils.helpers.datetime import before_now from sentry.testutils.helpers.features import apply_feature_flag_on_cls from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel @apply_feature_flag_on_cls("projects:issue-priority") diff --git a/tests/sentry/issues/test_status_change_consumer.py b/tests/sentry/issues/test_status_change_consumer.py index c16cc93fc34b0d..54a69e0d891e6c 100644 --- a/tests/sentry/issues/test_status_change_consumer.py +++ b/tests/sentry/issues/test_status_change_consumer.py @@ -4,7 +4,6 @@ from unittest.mock import MagicMock, patch from sentry.issues.occurrence_consumer import _process_message -from sentry.issues.priority import PriorityLevel from sentry.issues.status_change_consumer import bulk_get_groups_from_fingerprints from sentry.models.activity import Activity from sentry.models.group import Group, GroupStatus @@ -12,7 +11,7 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.pytest.fixtures import django_db_all from sentry.types.activity import ActivityType -from sentry.types.group import GroupSubStatus +from sentry.types.group import GroupSubStatus, PriorityLevel from tests.sentry.issues.test_occurrence_consumer import IssueOccurrenceTestBase, get_test_message diff --git a/tests/sentry/models/test_activity.py b/tests/sentry/models/test_activity.py index 90028503315523..1f9152f3b3d629 100644 --- a/tests/sentry/models/test_activity.py +++ b/tests/sentry/models/test_activity.py @@ -1,12 +1,13 @@ import logging from sentry.event_manager import EventManager -from sentry.issues.priority import PRIORITY_LEVEL_TO_STR, PriorityLevel +from sentry.issues.priority import PRIORITY_LEVEL_TO_STR from sentry.models.activity import Activity from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel from sentry.utils.iterators import chunked from tests.sentry.event_manager.test_event_manager import make_event diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index 8f23f525a3c22e..b16b0d5df9b658 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -29,7 +29,6 @@ ProfileFileIOGroupType, ) from sentry.issues.ingest import save_issue_occurrence -from sentry.issues.priority import PriorityLevel from sentry.models.activity import Activity, ActivityIntegration from sentry.models.group import GROUP_SUBSTATUS_TO_STATUS_MAP, Group, GroupStatus from sentry.models.groupassignee import GroupAssignee @@ -72,7 +71,7 @@ from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType -from sentry.types.group import GroupSubStatus +from sentry.types.group import GroupSubStatus, PriorityLevel from sentry.utils import json from sentry.utils.cache import cache from sentry.utils.sdk_crashes.sdk_crash_detection_config import SdkName diff --git a/tests/snuba/api/endpoints/test_group_details.py b/tests/snuba/api/endpoints/test_group_details.py index 48d4a6816ad8d5..b849a01d2bfaae 100644 --- a/tests/snuba/api/endpoints/test_group_details.py +++ b/tests/snuba/api/endpoints/test_group_details.py @@ -4,7 +4,6 @@ from sentry import tsdb from 
sentry.issues.forecasts import generate_and_save_forecasts -from sentry.issues.priority import PriorityLevel from sentry.models.activity import Activity from sentry.models.environment import Environment from sentry.models.group import GroupStatus @@ -16,6 +15,7 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType +from sentry.types.group import PriorityLevel @region_silo_test diff --git a/tests/snuba/api/serializers/test_group.py b/tests/snuba/api/serializers/test_group.py index 63244dc6e5f9bb..8109a38335d465 100644 --- a/tests/snuba/api/serializers/test_group.py +++ b/tests/snuba/api/serializers/test_group.py @@ -7,7 +7,6 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.group import GroupSerializerSnuba from sentry.issues.grouptype import PerformanceNPlusOneGroupType, ProfileFileIOGroupType -from sentry.issues.priority import PriorityLevel from sentry.models.group import Group, GroupStatus from sentry.models.groupenvironment import GroupEnvironment from sentry.models.grouplink import GroupLink @@ -23,6 +22,7 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.performance_issues.store_transaction import PerfIssueTransactionTestMixin from sentry.testutils.silo import assume_test_silo_mode, region_silo_test +from sentry.types.group import PriorityLevel from sentry.utils.samples import load_data from tests.sentry.issues.test_utils import SearchIssueTestMixin From 4210d268d231abc7d46839cacfc1b7ef98714604 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Wed, 7 Feb 2024 16:14:12 -0500 Subject: [PATCH 148/357] feat(app-start): Update module names (#64801) Since the App Start module is also a mobile module, we need to be more specific for the screens module and rename Mobile to Screen Loads and App Startup to App Starts --- static/app/components/sidebar/index.tsx | 4 ++-- static/app/views/starfish/modules/mobile/pageload.tsx | 4 ++-- static/app/views/starfish/utils/routeNames.tsx | 4 ++-- .../views/starfish/views/appStartup/screenSummary/index.tsx | 2 +- .../views/starfish/views/screens/screenLoadSpans/index.tsx | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx index 1ebd69aabe7467..61dabd90c93464 100644 --- a/static/app/components/sidebar/index.tsx +++ b/static/app/components/sidebar/index.tsx @@ -270,7 +270,7 @@ function Sidebar() { } @@ -279,7 +279,7 @@ function Sidebar() { } diff --git a/static/app/views/starfish/modules/mobile/pageload.tsx b/static/app/views/starfish/modules/mobile/pageload.tsx index 59eeff0477741b..5b4ba5ffe343c2 100644 --- a/static/app/views/starfish/modules/mobile/pageload.tsx +++ b/static/app/views/starfish/modules/mobile/pageload.tsx @@ -37,13 +37,13 @@ export default function PageloadModule() { }, [projects, selection.projects]); return ( - + - {t('Mobile')} + {t('Screen Loads')} {organization.features.includes( 'performance-screens-platform-selector' ) && diff --git a/static/app/views/starfish/utils/routeNames.tsx b/static/app/views/starfish/utils/routeNames.tsx index c7120af834fa0d..4844f354b1bf0a 100644 --- a/static/app/views/starfish/utils/routeNames.tsx +++ b/static/app/views/starfish/utils/routeNames.tsx @@ -6,7 +6,7 @@ export const ROUTE_NAMES = { 'endpoint-overview': t('Endpoint Overview'), 'span-summary': t('Span Summary'), 'web-service': t('Web Service'), - 'app-startup': t('App Startup'), 
- pageload: t('Mobile'), + 'app-startup': t('App Starts'), + pageload: t('Screen Loads'), responsiveness: t('Responsiveness'), }; diff --git a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx index 6894ab74c32bcd..1648b8b41aef86 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx @@ -68,7 +68,7 @@ function ScreenSummary() { const crumbs: Crumb[] = [ { to: startupModule, - label: t('App Startup'), + label: t('App Starts'), preservePageFilters: true, }, { diff --git a/static/app/views/starfish/views/screens/screenLoadSpans/index.tsx b/static/app/views/starfish/views/screens/screenLoadSpans/index.tsx index 760fd88ee06fa2..5cc3bd5f946243 100644 --- a/static/app/views/starfish/views/screens/screenLoadSpans/index.tsx +++ b/static/app/views/starfish/views/screens/screenLoadSpans/index.tsx @@ -76,7 +76,7 @@ function ScreenLoadSpans() { const crumbs: Crumb[] = [ { to: screenLoadModule, - label: t('Mobile'), + label: t('Screen Loads'), preservePageFilters: true, }, { From 1666c88491682b25976456bded4c05fb04b51346 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 7 Feb 2024 13:35:22 -0800 Subject: [PATCH 149/357] docs(replays): add API documentation for replay-selectors endpoint (#64713) Related: https://github.com/getsentry/team-replay/issues/353 --- .../apidocs/examples/replay_examples.py | 26 +++++++++ .../organization_replay_selector_index.py | 54 ++++++++++++++++++- 2 files changed, 78 insertions(+), 2 deletions(-) diff --git a/src/sentry/apidocs/examples/replay_examples.py b/src/sentry/apidocs/examples/replay_examples.py index 15a0ca3c6319ab..ae8238dcb67e38 100644 --- a/src/sentry/apidocs/examples/replay_examples.py +++ b/src/sentry/apidocs/examples/replay_examples.py @@ -59,6 +59,32 @@ class ReplayExamples: ), ] + GET_SELECTORS = [ + OpenApiExample( + "Retrieve a collection of selectors for an organization.", + value={ + "data": [ + { + "count_dead_clicks": 2, + "count_rage_clicks": 1, + "dom_element": "div#myid.class1.class2", + "element": { + "alt": "", + "aria_label": "", + "class": ["class1", "class2"], + "id": "myid", + "role": "", + "tag": "div", + "testid": "", + "title": "", + }, + "project_id": "1", + } + ] + }, + ) + ] + GET_REPLAY_COUNTS = [ OpenApiExample( "Query replay count by issue or transaction id", diff --git a/src/sentry/replays/endpoints/organization_replay_selector_index.py b/src/sentry/replays/endpoints/organization_replay_selector_index.py index b9142591da8636..0b5dcbbb0ed1e6 100644 --- a/src/sentry/replays/endpoints/organization_replay_selector_index.py +++ b/src/sentry/replays/endpoints/organization_replay_selector_index.py @@ -1,8 +1,9 @@ from __future__ import annotations from datetime import datetime -from typing import Any +from typing import Any, TypedDict +from drf_spectacular.utils import extend_schema from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response @@ -28,6 +29,10 @@ from sentry.api.bases.organization import NoProjects, OrganizationEndpoint from sentry.api.event_search import ParenExpression, SearchFilter, parse_search_query from sentry.api.paginator import GenericOffsetPaginator +from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN +from sentry.apidocs.examples.replay_examples import ReplayExamples +from 
sentry.apidocs.parameters import CursorQueryParam, GlobalParams, VisibilityParams +from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.exceptions import InvalidSearchQuery from sentry.models.organization import Organization from sentry.replays.lib.new_query.conditions import IntegerScalar @@ -39,12 +44,39 @@ from sentry.replays.validators import ReplaySelectorValidator from sentry.utils.snuba import raw_snql_query +ElementResponseType = TypedDict( + "ElementResponseType", + { + "alt": str, + "aria_label": str, + "class": list[str], + "id": str, + "role": str, + "tag": str, + "testid": str, + "title": str, + }, +) + + +class ReplaySelectorResponseData(TypedDict, total=False): + count_dead_clicks: int + count_rage_clicks: int + dom_element: str + element: ElementResponseType + project_id: str + + +class ReplaySelectorResponse(TypedDict): + data: list[ReplaySelectorResponseData] + @region_silo_endpoint +@extend_schema(tags=["Replays"]) class OrganizationReplaySelectorIndexEndpoint(OrganizationEndpoint): owner = ApiOwner.REPLAY publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PUBLIC, } def get_replay_filter_params(self, request, organization): @@ -59,7 +91,25 @@ def get_replay_filter_params(self, request, organization): return filter_params @handled_snuba_exceptions + @extend_schema( + operation_id="List an Organization's Selectors", + parameters=[ + GlobalParams.ORG_SLUG, + GlobalParams.ENVIRONMENT, + ReplaySelectorValidator, + CursorQueryParam, + VisibilityParams.PER_PAGE, + VisibilityParams.QUERY, + ], + responses={ + 200: inline_sentry_response_serializer("ListSelectors", ReplaySelectorResponse), + 400: RESPONSE_BAD_REQUEST, + 403: RESPONSE_FORBIDDEN, + }, + examples=ReplayExamples.GET_SELECTORS, + ) def get(self, request: Request, organization: Organization) -> Response: + """Return a list of selectors for a given organization.""" if not features.has("organizations:session-replay", organization, actor=request.user): return Response(status=404) try: From fd63af8351e0bc62ff54482af6a5b7569ec750ee Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Wed, 7 Feb 2024 13:35:56 -0800 Subject: [PATCH 150/357] fix(open-pr-comments): skip files that don't have a patch (#64811) --- .../tasks/integrations/github/open_pr_comment.py | 13 ++++++------- .../integrations/github/test_open_pr_comment.py | 16 ++++++++-------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/src/sentry/tasks/integrations/github/open_pr_comment.py b/src/sentry/tasks/integrations/github/open_pr_comment.py index 00b2c88a3c4fc2..b6b511cb8de5f6 100644 --- a/src/sentry/tasks/integrations/github/open_pr_comment.py +++ b/src/sentry/tasks/integrations/github/open_pr_comment.py @@ -208,12 +208,9 @@ def safe_for_comment( for file in pr_files: filename = file["filename"] - # don't count the file if it was added or is not a Python file - if ( - file["status"] == "added" - or file["status"] == "renamed" - or filename.split(".")[-1] not in patch_parsers - ): + # we only count the file if it's modified and if the file extension is in the list of supported file extensions + # we cannot look at deleted or newly added files because we cannot extract functions from the diffs + if file["status"] != "modified" or filename.split(".")[-1] not in patch_parsers: continue changed_file_count += 1 @@ -240,7 +237,9 @@ def get_pr_files(pr_files: list[dict[str, str]]) -> list[PullRequestFile]: # new files will not have sentry issues associated 
with them # only fetch Python files pullrequest_files = [ - PullRequestFile(filename=file["filename"], patch=file["patch"]) for file in pr_files + PullRequestFile(filename=file["filename"], patch=file["patch"]) + for file in pr_files + if "patch" in file ] logger.info("github.open_pr_comment.pr_filenames", extra={"count": len(pullrequest_files)}) diff --git a/tests/sentry/tasks/integrations/github/test_open_pr_comment.py b/tests/sentry/tasks/integrations/github/test_open_pr_comment.py index 4650103d7242db..e82bc86405fdd8 100644 --- a/tests/sentry/tasks/integrations/github/test_open_pr_comment.py +++ b/tests/sentry/tasks/integrations/github/test_open_pr_comment.py @@ -102,6 +102,7 @@ def test_simple(self): {"filename": "bar.js", "changes": 100, "status": "modified"}, {"filename": "baz.py", "changes": 100, "status": "added"}, {"filename": "bee.py", "changes": 100, "status": "deleted"}, + {"filename": "hi.py", "changes": 100, "status": "removed"}, {"filename": "boo.py", "changes": 0, "status": "renamed"}, ] responses.add( @@ -114,7 +115,6 @@ def test_simple(self): pr_files = safe_for_comment(self.gh_client, self.gh_repo, self.pr) assert pr_files == [ {"filename": "foo.py", "changes": 100, "status": "modified"}, - {"filename": "bee.py", "changes": 100, "status": "deleted"}, ] @responses.activate @@ -138,7 +138,6 @@ def test_simple_with_javascript(self): assert pr_files == [ {"filename": "foo.py", "changes": 100, "status": "modified"}, {"filename": "bar.js", "changes": 100, "status": "modified"}, - {"filename": "bee.py", "changes": 100, "status": "deleted"}, ] @responses.activate @@ -174,7 +173,7 @@ def test_too_many_lines(self): status=200, json=[ {"filename": "foo.py", "changes": 300, "status": "modified"}, - {"filename": "bar.py", "changes": 300, "status": "deleted"}, + {"filename": "bar.py", "changes": 300, "status": "modified"}, ], ) @@ -267,14 +266,15 @@ def setUp(self): def test_get_pr_files(self): data: JSONData = [ {"filename": "bar.py", "status": "modified", "patch": "b"}, - {"filename": "baz.py", "status": "deleted", "patch": "c"}, + {"filename": "baz.py", "status": "modified"}, ] pr_files = get_pr_files(data) - for i, pr_file in enumerate(pr_files): - file = data[i] - assert pr_file.filename == file["filename"] - assert pr_file.patch == file["patch"] + assert len(pr_files) == 1 + + pr_file = pr_files[0] + assert pr_file.filename == data[0]["filename"] + assert pr_file.patch == data[0]["patch"] def test_get_projects_and_filenames_from_source_file(self): projects = [self.create_project() for _ in range(4)] From 2046ea1e585125f6267b78fbff01c5e34f7121bb Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Wed, 7 Feb 2024 16:39:18 -0500 Subject: [PATCH 151/357] feat(metrics-extraction): Add on-demand state to dashboard serializer (#62651) This adds on-demand state to the dashboard serializer so the frontend can indicate what state on-demand extraction is in (eg. 
show if extraction is limited thereby affecting your dashboard) --- .../api/serializers/models/dashboard.py | 37 +++++++++++++++++-- .../serializers/rest_framework/dashboard.py | 10 +++++ 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/src/sentry/api/serializers/models/dashboard.py b/src/sentry/api/serializers/models/dashboard.py index 9e921b59aa387a..f65e067d7c2781 100644 --- a/src/sentry/api/serializers/models/dashboard.py +++ b/src/sentry/api/serializers/models/dashboard.py @@ -7,6 +7,7 @@ DashboardWidget, DashboardWidgetDisplayTypes, DashboardWidgetQuery, + DashboardWidgetQueryOnDemand, DashboardWidgetTypes, ) from sentry.services.hybrid_cloud.user.service import user_service @@ -20,9 +21,9 @@ def get_attrs(self, item_list, user): result = {} data_sources = serialize( list( - DashboardWidgetQuery.objects.filter( - widget_id__in=[i.id for i in item_list] - ).order_by("order") + DashboardWidgetQuery.objects.filter(widget_id__in=[i.id for i in item_list]) + .prefetch_related("dashboardwidgetqueryondemand_set") + .order_by("order") ) ) @@ -52,8 +53,37 @@ def serialize(self, obj, attrs, user, **kwargs): } +@register(DashboardWidgetQueryOnDemand) +class DashboardWidgetQueryOnDemandSerializer(Serializer): + def serialize(self, obj, attrs, user, **kwargs): + return { + "enabled": obj.extraction_enabled(), + "extractionState": obj.extraction_state, + "dashboardWidgetQueryId": obj.dashboard_widget_query_id, + } + + @register(DashboardWidgetQuery) class DashboardWidgetQuerySerializer(Serializer): + def get_attrs(self, item_list, user): + result = {} + + data_sources = serialize( + list( + DashboardWidgetQueryOnDemand.objects.filter( + dashboard_widget_query_id__in=[i.id for i in item_list] + ) + ) + ) + + for widget_query in item_list: + widget_data_sources = [ + d for d in data_sources if d["dashboardWidgetQueryId"] == widget_query.id + ] + result[widget_query] = {"onDemand": widget_data_sources} + + return result + def serialize(self, obj, attrs, user, **kwargs): return { "id": str(obj.id), @@ -65,6 +95,7 @@ def serialize(self, obj, attrs, user, **kwargs): "conditions": str(obj.conditions), "orderby": str(obj.orderby), "widgetId": str(obj.widget_id), + "onDemand": attrs["onDemand"], } diff --git a/src/sentry/api/serializers/rest_framework/dashboard.py b/src/sentry/api/serializers/rest_framework/dashboard.py index 1fa4af21881ed4..91cf99813d0606 100644 --- a/src/sentry/api/serializers/rest_framework/dashboard.py +++ b/src/sentry/api/serializers/rest_framework/dashboard.py @@ -113,6 +113,14 @@ def to_internal_value(self, data): return convert_dict_key_case(layout_to_store, snake_to_camel_case) +class DashboardWidgetQueryOnDemandSerializer(CamelSnakeSerializer[Dashboard]): + extraction_state = serializers.CharField(required=False) + enabled = serializers.BooleanField(required=False) + + def validate(self, data): + return data + + class DashboardWidgetQuerySerializer(CamelSnakeSerializer[Dashboard]): # Is a string because output serializers also make it a string. 
id = serializers.CharField(required=False) @@ -128,6 +136,8 @@ class DashboardWidgetQuerySerializer(CamelSnakeSerializer[Dashboard]): conditions = serializers.CharField(required=False, allow_blank=True) orderby = serializers.CharField(required=False, allow_blank=True) + on_demand_extraction = DashboardWidgetQueryOnDemandSerializer(many=False, required=False) + required_for_create = {"fields", "conditions"} validate_id = validate_id From 6634a02b0ca8242c496a5925c7b108c80be5e094 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Wed, 7 Feb 2024 14:52:50 -0800 Subject: [PATCH 152/357] feat(issue-priority): Add menu title to priority dropdown (#64822) --- static/app/components/group/groupPriority.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/static/app/components/group/groupPriority.tsx b/static/app/components/group/groupPriority.tsx index 9ba8112192dc92..ffdf5f834ca7bd 100644 --- a/static/app/components/group/groupPriority.tsx +++ b/static/app/components/group/groupPriority.tsx @@ -62,6 +62,8 @@ export function GroupPriorityDropdown({value, onChange}: GroupPriorityDropdownPr return ( ( Date: Wed, 7 Feb 2024 16:57:35 -0600 Subject: [PATCH 153/357] fix: redirect to login when confirming email if user is not authenticated (#64818) Fixes an issue where users receive an error when trying to confirm their emails if they are not already logged in. This redirects the user to login and then to the email confirmation. --- src/sentry/web/frontend/accounts.py | 3 ++- tests/sentry/web/frontend/test_accounts.py | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/sentry/web/frontend/accounts.py b/src/sentry/web/frontend/accounts.py index bf0df0bf96fd2a..7504c756e59819 100644 --- a/src/sentry/web/frontend/accounts.py +++ b/src/sentry/web/frontend/accounts.py @@ -241,6 +241,7 @@ def start_confirm_email(request): @set_referrer_policy("strict-origin-when-cross-origin") +@login_required @control_silo_function def confirm_email(request, user_id, hash): msg = _("Thanks for confirming your email") @@ -261,7 +262,7 @@ def confirm_email(request, user_id, hash): else: email.is_verified = True email.validation_hash = "" - email.save() + email.save(update_fields=["is_verified", "validation_hash"]) email_verified.send(email=email.email, sender=email) logger.info( "user.email.confirm", diff --git a/tests/sentry/web/frontend/test_accounts.py b/tests/sentry/web/frontend/test_accounts.py index 07be6f54520eb2..8da315419d49e1 100644 --- a/tests/sentry/web/frontend/test_accounts.py +++ b/tests/sentry/web/frontend/test_accounts.py @@ -357,3 +357,20 @@ def test_confirm_email_invalid_hash(self): messages[0].message == "There was an error confirming your email. Please try again or visit your Account Settings to resend the verification email." 
) + + def test_confirm_email_unauthenticated(self): + useremail = UserEmail(user=self.user, email="new@example.com") + useremail.save() + + assert not useremail.is_verified + + url = reverse( + "sentry-account-confirm-email", + kwargs={"user_id": self.user.id, "hash": useremail.validation_hash}, + ) + + resp = self.client.get(url) + + assert resp.status_code == 302 + assert resp.headers["location"] == "/auth/login/" + assert self.client.session["_next"] == url From 67b611d478177796a3f1c5da418778db21cfc536 Mon Sep 17 00:00:00 2001 From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:01:41 -0800 Subject: [PATCH 154/357] limit envs to only selected projects, or my projects (#64824) Currently we list _every_ environment if you are a superuser. This PR modifies the filter to only present environments for the selected project, or "my projects" if selected. --- .../views/releases/thresholdsList/index.tsx | 56 ++++++++++++++++--- 1 file changed, 49 insertions(+), 7 deletions(-) diff --git a/static/app/views/releases/thresholdsList/index.tsx b/static/app/views/releases/thresholdsList/index.tsx index fd05800626f007..12fe3a46917d51 100644 --- a/static/app/views/releases/thresholdsList/index.tsx +++ b/static/app/views/releases/thresholdsList/index.tsx @@ -86,16 +86,58 @@ function ReleaseThresholdList({}: Props) { projects.flatMap(project => { /** * Include environments from: + * all projects I can access if -1 is the only selected project. + * all member projects if 'my projects' (empty list) is selected. * all projects if the user is a superuser * the requested projects - * all member projects if 'my projects' (empty list) is selected.
+ * the requested projects */ + const allProjectsSelected = + selectedProjectIds.length === 1 && + selectedProjectIds[0] === String(ALL_ACCESS_PROJECTS) && + project.hasAccess; + const myProjectsSelected = selectedProjectIds.length === 0 && project.isMember; if ( - (selectedProjectIds.length === 1 && - selectedProjectIds[0] === String(ALL_ACCESS_PROJECTS) && - project.hasAccess) || - (selectedProjectIds.length === 0 && (project.isMember || user.isSuperuser)) || + allProjectsSelected || + myProjectsSelected || selectedProjectIds.includes(project.id) ) { return project.environments; @@ -178,7 +220,7 @@ function ReleaseThresholdList({}: Props) { isError={isError} refetch={refetch} setTempError={setTempError} - allEnvironmentNames={getAllEnvironmentNames} // TODO: determine whether to move down to threshold group table + allEnvironmentNames={getEnvironmentsAvailableToProject} /> ))} {projectsWithoutThresholds.length > 0 && ( @@ -193,7 +235,7 @@ function ReleaseThresholdList({}: Props) { From 1182b7de83ba43221ffadca97d1fc5aae51cc113 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Wed, 7 Feb 2024 15:22:32 -0800 Subject: [PATCH 155/357] fix(issues): Add analytics for trace timeline "3 more events" (#64830) --- .../app/utils/analytics/workflowAnalyticsEvents.tsx | 5 +++++ .../traceTimeline/traceTimelineTooltip.tsx | 13 ++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/static/app/utils/analytics/workflowAnalyticsEvents.tsx b/static/app/utils/analytics/workflowAnalyticsEvents.tsx index e148bb3642576f..a05c80458e5c68 100644 --- a/static/app/utils/analytics/workflowAnalyticsEvents.tsx +++ b/static/app/utils/analytics/workflowAnalyticsEvents.tsx @@ -107,6 +107,9 @@ export type TeamInsightsEventParameters = { event_id: string; group_id: string; }; + 'issue_details.issue_tab.trace_timeline_more_events_clicked': { + num_hidden: number; + }; 'issue_details.merged_tab.unmerge_clicked': { /** * comma separated list of event ids that were unmerged @@ -189,6 +192,8 @@ export const workflowEventMap: Record = { 'Issue Details: Screenshot modal opened', 'issue_details.issue_tab.trace_timeline_clicked': 'Issue Details: Trace Timeline Clicked', + 'issue_details.issue_tab.trace_timeline_more_events_clicked': + 'Issue Details: Trace Timeline More Events Clicked', 'issue_details.resources_link_clicked': 'Issue Details: Resources Link Clicked', 'issue_details.suspect_commits.commit_clicked': 'Issue Details: Suspect Commit Clicked', 'issue_details.suspect_commits.pull_request_clicked': diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx index 4b3b916a482b1e..3861103fa92b2e 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx @@ -81,7 +81,18 @@ export function TraceTimelineTooltip({event, timelineEvents}: TraceTimelineToolt {filteredTimelineEvents.length > 3 && ( - + { + trackAnalytics( + 'issue_details.issue_tab.trace_timeline_more_events_clicked', + { + organization, + num_hidden: filteredTimelineEvents.length - 3, + } + ); + }} + > {tn( 'View %s more event', 'View %s more events', From 523b2ed729a45336124d333db046af8f5c33f1a4 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Wed, 7 Feb 2024 18:29:48 -0500 Subject: [PATCH 156/357] feat(app-start): Show more spans in span table (#64692) Surface more span ops in the operations table and selector. 
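For orientation, the op lists involved are roughly the following (a condensed sketch of the constants this patch adds; see the two `spanOpSelector.tsx` files in the diff for the exact definitions):

```typescript
// Ops that can contribute to TTID, already shown on the screen-load table
// (screenLoadSpans/spanOpSelector.tsx):
const TTID_CONTRIBUTING_SPAN_OPS = [
  'file.read', 'file.write', 'ui.load', 'http.client',
  'db', 'db.sql.room', 'db.sql.query', 'db.sql.transaction',
];

// The App Starts tables additionally surface the start-up ops
// (appStartup/screenSummary/spanOpSelector.tsx):
const APP_START_SPANS = [
  ...TTID_CONTRIBUTING_SPAN_OPS,
  'app.start.cold', 'app.start.warm',
  'contentprovider.load', 'application.load', 'activity.load',
];
```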
Since app start is everything up to TTID, we can show the other spans that contribute to a delayed start time. We have to ignore a span with the description "Initial Frame Render" in this case because iOS creates that to encompass the time to run the application code to get TTID and if we showed it, we'd essentially be duplicating the results in the table. --- .../screenSummary/spanOpSelector.tsx | 23 +++++++++++++++---- .../screenSummary/spanOperationTable.tsx | 23 ++++++++----------- .../screenLoadSpans/spanOpSelector.tsx | 13 ++++++++++- .../views/screens/screenLoadSpans/table.tsx | 9 ++++---- 4 files changed, 44 insertions(+), 24 deletions(-) diff --git a/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx b/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx index c911d1a1539806..385c1e0c2f2543 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/spanOpSelector.tsx @@ -11,10 +11,19 @@ import {useLocation} from 'sentry/utils/useLocation'; import usePageFilters from 'sentry/utils/usePageFilters'; import {SpanMetricsField} from 'sentry/views/starfish/types'; import {appendReleaseFilters} from 'sentry/views/starfish/utils/releaseComparison'; -import {STARTUP_SPANS} from 'sentry/views/starfish/views/appStartup/screenSummary/spanOperationTable'; import {MobileCursors} from 'sentry/views/starfish/views/screens/constants'; +import {TTID_CONTRIBUTING_SPAN_OPS} from 'sentry/views/starfish/views/screens/screenLoadSpans/spanOpSelector'; import {useTableQuery} from 'sentry/views/starfish/views/screens/screensTable'; +export const APP_START_SPANS = [ + ...TTID_CONTRIBUTING_SPAN_OPS, + 'app.start.cold', + 'app.start.warm', + 'contentprovider.load', + 'application.load', + 'activity.load', +]; + type Props = { primaryRelease?: string; secondaryRelease?: string; @@ -28,12 +37,16 @@ export function SpanOpSelector({transaction, primaryRelease, secondaryRelease}: const value = decodeScalar(location.query[SpanMetricsField.SPAN_OP]) ?? 
''; const searchQuery = new MutableSearch([ - 'transaction.op:ui.load', - `transaction:${transaction}`, - `span.op:[${[...STARTUP_SPANS].join(',')}]`, - 'has:span.description', + // Exclude root level spans because they're comprised of nested operations '!span.description:"Cold Start"', '!span.description:"Warm Start"', + // Exclude this span because we can get TTID contributing spans instead + '!span.description:"Initial Frame Render"', + 'has:span.description', + 'transaction.op:ui.load', + `transaction:${transaction}`, + `has:ttid`, + `span.op:[${APP_START_SPANS.join(',')}]`, ]); const queryStringPrimary = appendReleaseFilters( searchQuery, diff --git a/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx b/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx index 6838ac8a9f450b..43906c7c8eee27 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/spanOperationTable.tsx @@ -32,6 +32,7 @@ import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/te import {SpanMetricsField} from 'sentry/views/starfish/types'; import {STARFISH_CHART_INTERVAL_FIDELITY} from 'sentry/views/starfish/utils/constants'; import {appendReleaseFilters} from 'sentry/views/starfish/utils/releaseComparison'; +import {APP_START_SPANS} from 'sentry/views/starfish/views/appStartup/screenSummary/spanOpSelector'; import { COLD_START_TYPE, WARM_START_TYPE, @@ -55,16 +56,6 @@ type Props = { transaction?: string; }; -const IOS_STARTUP_SPANS = ['app.start.cold', 'app.start.warm']; -const ANDROID_STARTUP_SPANS = [ - 'app.start.cold', - 'app.start.warm', - 'contentprovider.load', - 'application.load', - 'activity.load', -]; -export const STARTUP_SPANS = new Set([...IOS_STARTUP_SPANS, ...ANDROID_STARTUP_SPANS]); - export function SpanOperationTable({ transaction, primaryRelease, @@ -80,16 +71,20 @@ export function SpanOperationTable({ const deviceClass = decodeScalar(location.query[SpanMetricsField.DEVICE_CLASS]) ?? ''; const searchQuery = new MutableSearch([ - 'transaction.op:ui.load', - `transaction:${transaction}`, - 'has:span.description', // Exclude root level spans because they're comprised of nested operations '!span.description:"Cold Start"', '!span.description:"Warm Start"', + // Exclude this span because we can get TTID contributing spans instead + '!span.description:"Initial Frame Render"', + 'has:span.description', + 'transaction.op:ui.load', + `transaction:${transaction}`, + `has:ttid`, `${SpanMetricsField.APP_START_TYPE}:${ startType || `[${COLD_START_TYPE},${WARM_START_TYPE}]` }`, - `${SpanMetricsField.SPAN_OP}:${spanOp || `[${[...STARTUP_SPANS].join(',')}]`}`, + `${SpanMetricsField.SPAN_OP}:${spanOp ? spanOp : `[${APP_START_SPANS.join(',')}]`}`, + ...(spanOp ? [`${SpanMetricsField.SPAN_OP}:${spanOp}`] : []), ...(deviceClass ? 
[`${SpanMetricsField.DEVICE_CLASS}:${deviceClass}`] : []), ]); const queryStringPrimary = appendReleaseFilters( diff --git a/static/app/views/starfish/views/screens/screenLoadSpans/spanOpSelector.tsx b/static/app/views/starfish/views/screens/screenLoadSpans/spanOpSelector.tsx index 1e00d17d89c9af..40dd76e4b8d2b4 100644 --- a/static/app/views/starfish/views/screens/screenLoadSpans/spanOpSelector.tsx +++ b/static/app/views/starfish/views/screens/screenLoadSpans/spanOpSelector.tsx @@ -16,6 +16,17 @@ import {appendReleaseFilters} from 'sentry/views/starfish/utils/releaseCompariso import {MobileCursors} from 'sentry/views/starfish/views/screens/constants'; import {useTableQuery} from 'sentry/views/starfish/views/screens/screensTable'; +export const TTID_CONTRIBUTING_SPAN_OPS = [ + 'file.read', + 'file.write', + 'ui.load', + 'http.client', + 'db', + 'db.sql.room', + 'db.sql.query', + 'db.sql.transaction', +]; + type Props = { primaryRelease?: string; secondaryRelease?: string; @@ -31,7 +42,7 @@ export function SpanOpSelector({transaction, primaryRelease, secondaryRelease}: const searchQuery = new MutableSearch([ 'transaction.op:ui.load', `transaction:${transaction}`, - 'span.op:[file.read,file.write,ui.load,http.client,db,db.sql.room,db.sql.query,db.sql.transaction]', + `span.op:[${TTID_CONTRIBUTING_SPAN_OPS.join(',')}]`, 'has:span.description', ]); const queryStringPrimary = appendReleaseFilters( diff --git a/static/app/views/starfish/views/screens/screenLoadSpans/table.tsx b/static/app/views/starfish/views/screens/screenLoadSpans/table.tsx index eafdeeabf572ff..98aa3ed1522ac0 100644 --- a/static/app/views/starfish/views/screens/screenLoadSpans/table.tsx +++ b/static/app/views/starfish/views/screens/screenLoadSpans/table.tsx @@ -43,7 +43,10 @@ import { PLATFORM_LOCAL_STORAGE_KEY, PLATFORM_QUERY_PARAM, } from 'sentry/views/starfish/views/screens/platformSelector'; -import {SpanOpSelector} from 'sentry/views/starfish/views/screens/screenLoadSpans/spanOpSelector'; +import { + SpanOpSelector, + TTID_CONTRIBUTING_SPAN_OPS, +} from 'sentry/views/starfish/views/screens/screenLoadSpans/spanOpSelector'; import {useTableQuery} from 'sentry/views/starfish/views/screens/screensTable'; import {isCrossPlatform} from 'sentry/views/starfish/views/screens/utils'; @@ -88,9 +91,7 @@ export function ScreenLoadSpansTable({ 'has:span.description', ...(spanOp ? [`${SpanMetricsField.SPAN_OP}:${spanOp}`] - : [ - 'span.op:[file.read,file.write,ui.load,http.client,db,db.sql.room,db.sql.query,db.sql.transaction]', - ]), + : [`span.op:[${TTID_CONTRIBUTING_SPAN_OPS.join(',')}]`]), ]); if (project && isCrossPlatform(project) && hasPlatformSelectFeature) { From cc0fb58417ac05f72892be5dd8a69bd132dcaffa Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Wed, 7 Feb 2024 15:46:31 -0800 Subject: [PATCH 157/357] bug(replay): Show the correct NotFound screen after a 404 when loading a replay (#64826) I think this regressed with https://github.com/getsentry/sentry/pull/63455, the error type/message might've changed or become compressed. It seemed like it's still working in dev, which makes testing the change a bit harder :( Checking the status code could be more reliable. ~~I'll open the broken replay (https://sentry.sentry.io/replays/a7fcc2b504d34db296f2c7293bd47f28/) in the preview env to check again before landing.~~ Edit: checked, it's showing Not Found in the preview env. 
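In short, the check moves from the reason phrase to the numeric status code. A before/after sketch of the one-line change in `details.tsx` (one plausible reason `statusText` stopped matching is that HTTP/2 responses carry no reason phrase, so it can be an empty string even for a 404):

```typescript
// Before: brittle, statusText may be '' even when the request 404s.
if (fetchError.statusText === 'Not Found') {
  // render the NotFound screen
}

// After: compare the numeric code, which is present regardless of protocol.
if (fetchError.status === 404) {
  // render the NotFound screen
}
```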
Fixes [JAVASCRIPT-2RKD](https://sentry.io/organizations/sentry/issues/?project=11276&query=JAVASCRIPT-2RKD) --- static/app/utils/replays/hooks/useReplayData.tsx | 1 + static/app/views/replays/details.tsx | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/static/app/utils/replays/hooks/useReplayData.tsx b/static/app/utils/replays/hooks/useReplayData.tsx index 33d56e0737e05d..bcde117ad3dbe2 100644 --- a/static/app/utils/replays/hooks/useReplayData.tsx +++ b/static/app/utils/replays/hooks/useReplayData.tsx @@ -89,6 +89,7 @@ function useReplayData({ error: fetchReplayError, } = useApiQuery<{data: unknown}>([`/organizations/${orgSlug}/replays/${replayId}/`], { staleTime: Infinity, + retry: false, }); const replayRecord = useMemo( () => (replayData?.data ? mapResponseToReplayRecord(replayData.data) : undefined), diff --git a/static/app/views/replays/details.tsx b/static/app/views/replays/details.tsx index 1a54e477ecf39d..e42828a0c19464 100644 --- a/static/app/views/replays/details.tsx +++ b/static/app/views/replays/details.tsx @@ -95,7 +95,7 @@ function ReplayDetails({params: {replaySlug}}: Props) { ); } if (fetchError) { - if (fetchError.statusText === 'Not Found') { + if (fetchError.status === 404) { return ( Date: Wed, 7 Feb 2024 15:59:47 -0800 Subject: [PATCH 158/357] fix(api): Stop making a default auth token (#64814) We no longer expose auth token values, so there's no point in creating a default auth token if the user can't read the value. The default token is also an increased security risk if the integration is not used or users are not aware of the side effects. Instead, incentivize the user to create the token when they are comfortable, and in the future, push them towards OAuth 2.0 instead of the long lived tokens. https://github.com/getsentry/sentry/assets/5581484/dc43d44b-01f6-4254-a63a-b54c48b71b63 Resolves: https://github.com/getsentry/sentry/issues/64798 --- .../integrations/sentry_apps/index.py | 3 ++- src/sentry/sentry_apps/apps.py | 11 ++++++++-- .../sentry/api/endpoints/test_sentry_apps.py | 20 ++++++++++--------- .../sentry_apps/test_sentry_app_creator.py | 18 +++++++++++++++++ 4 files changed, 40 insertions(+), 12 deletions(-) diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/index.py b/src/sentry/api/endpoints/integrations/sentry_apps/index.py index fbcb0b3583861c..97f4b5cd327408 100644 --- a/src/sentry/api/endpoints/integrations/sentry_apps/index.py +++ b/src/sentry/api/endpoints/integrations/sentry_apps/index.py @@ -128,7 +128,8 @@ def post(self, request: Request, organization) -> Response: overview=data["overview"], allowed_origins=data["allowedOrigins"], popularity=data["popularity"], - ).run(user=request.user, request=request) + ).run(user=request.user, request=request, skip_default_auth_token=True) + # We want to stop creating the default auth token for new apps and installations through the API except ValidationError as e: # we generate and validate the slug here instead of the serializer since the slug never changes return Response(e.detail, status=400) diff --git a/src/sentry/sentry_apps/apps.py b/src/sentry/sentry_apps/apps.py index 24f8a0362fdcd6..b4e376fb4f8af5 100644 --- a/src/sentry/sentry_apps/apps.py +++ b/src/sentry/sentry_apps/apps.py @@ -289,7 +289,13 @@ def __post_init__(self) -> None: not self.verify_install ), "Internal apps should not require installation verification" - def run(self, *, user: User | RpcUser, request: HttpRequest | None = None) -> SentryApp: + def run( + self, + *, + user: User | RpcUser, + request: 
HttpRequest | None = None, + skip_default_auth_token: bool = False, + ) -> SentryApp: with transaction.atomic(router.db_for_write(User)), in_test_hide_transaction_boundary(): slug = self._generate_and_validate_slug() proxy = self._create_proxy_user(slug=slug) @@ -300,7 +306,8 @@ def run(self, *, user: User | RpcUser, request: HttpRequest | None = None) -> Se if self.is_internal: install = self._install(slug=slug, user=user, request=request) - self._create_access_token(user=user, install=install, request=request) + if not skip_default_auth_token: + self._create_access_token(user=user, install=install, request=request) self.audit(request=request, sentry_app=sentry_app) self.record_analytics(user=user, sentry_app=sentry_app) diff --git a/tests/sentry/api/endpoints/test_sentry_apps.py b/tests/sentry/api/endpoints/test_sentry_apps.py index d7a382201cc794..8dffedd32bad91 100644 --- a/tests/sentry/api/endpoints/test_sentry_apps.py +++ b/tests/sentry/api/endpoints/test_sentry_apps.py @@ -5,6 +5,7 @@ from typing import Any from unittest.mock import patch +import pytest from django.test import override_settings from django.urls import reverse from rest_framework.response import Response @@ -398,8 +399,13 @@ def setUp(self): super().setUp() self.login_as(self.user) - def assert_sentry_app_status_code(self, sentry_app: SentryApp, status_code: int): - token = ApiToken.objects.get(application=sentry_app.application) + def assert_sentry_app_status_code(self, sentry_app: SentryApp, status_code: int) -> None: + token = ApiToken.objects.create( + application=sentry_app.application, + user_id=self.user.id, + refresh_token=None, + scope_list=["project:read", "event:read", "org:read"], + ) with assume_test_silo_mode(SiloMode.REGION): url = reverse("sentry-api-0-organization-projects", args=[self.organization.slug]) @@ -603,16 +609,12 @@ def test_creates_internal_integration(self): assert response.data["status"] == SentryAppStatus.as_str(SentryAppStatus.INTERNAL) assert not response.data["verifyInstall"] - # Verify tokens are created properly. + # Verify no tokens are created. sentry_app = SentryApp.objects.get(slug=response.data["slug"]) sentry_app_installation = SentryAppInstallation.objects.get(sentry_app=sentry_app) - sentry_app_installation_token = SentryAppInstallationToken.objects.get( - sentry_app_installation=sentry_app_installation - ) - - # Below line will fail once we stop assigning api_token on the sentry_app_installation. 
- assert sentry_app_installation_token.api_token == sentry_app_installation.api_token + with pytest.raises(SentryAppInstallationToken.DoesNotExist): + SentryAppInstallationToken.objects.get(sentry_app_installation=sentry_app_installation) def test_no_author_public_integration(self): response = self.get_error_response(**self.get_data(author=None), status_code=400) diff --git a/tests/sentry/sentry_apps/test_sentry_app_creator.py b/tests/sentry/sentry_apps/test_sentry_app_creator.py index 2463e90d22d118..942c69a35942e2 100644 --- a/tests/sentry/sentry_apps/test_sentry_app_creator.py +++ b/tests/sentry/sentry_apps/test_sentry_app_creator.py @@ -217,6 +217,24 @@ def test_creates_access_token(self): assert install.api_token + def test_skips_creating_auth_token_when_flag_is_true(self) -> None: + app = SentryAppCreator( + is_internal=True, + verify_install=False, + author=self.org.name, + name="nulldb", + organization_id=self.org.id, + scopes=[ + "project:read", + ], + webhook_url="http://example.com", + schema={"elements": [self.create_issue_link_schema()]}, + ).run(user=self.user, request=None, skip_default_auth_token=True) + + install = SentryAppInstallation.objects.get(organization_id=self.org.id, sentry_app=app) + + assert install.api_token is None + @patch("sentry.utils.audit.create_audit_entry") def test_audits(self, create_audit_entry): SentryAppCreator( From 536214954ae17af303dafba2e3a64aae7d3c9b49 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Wed, 7 Feb 2024 16:04:55 -0800 Subject: [PATCH 159/357] feat(issue-priority): Add `initial_issue_priority` to IssueOccurrence (#64231) Adding a field, `initial_issue_priority` to IssueOccurrence. This field needs to be optional for backwards compatibility. `initial_issue_priority` will be used to set the `Group.priority` and also the `Group.data["metadata"]["initial_priority"]` on group creation. 
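For reference, a hypothetical occurrence message fragment (values are illustrative only; as the `occurrence_consumer` diff below shows, when the field is absent the consumer falls back to the group type's `default_priority` added in #64817):

```typescript
// Illustrative fragment of an occurrence payload, not a complete message:
const occurrence = {
  type: 2006, // a GroupType type_id (ProfileViewIsSlowExperimentalType here)
  initial_issue_priority: 75, // PriorityLevel.HIGH; optional for backwards compatibility
};
```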
FIXES https://github.com/getsentry/sentry/issues/63917 FIXES https://github.com/getsentry/sentry/issues/63918 --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> Co-authored-by: Bartek Ogryczak --- src/sentry/issues/ingest.py | 3 +++ src/sentry/issues/issue_occurrence.py | 6 +++++- src/sentry/issues/occurrence_consumer.py | 6 ++++++ tests/sentry/issues/test_ingest.py | 2 ++ .../sentry/issues/test_occurrence_consumer.py | 18 ++++++++++++++++++ .../issues/test_status_change_consumer.py | 6 +++++- tests/sentry/issues/test_utils.py | 1 + 7 files changed, 40 insertions(+), 2 deletions(-) diff --git a/src/sentry/issues/ingest.py b/src/sentry/issues/ingest.py index d4d7ff0ea571e1..80a88832fdf35d 100644 --- a/src/sentry/issues/ingest.py +++ b/src/sentry/issues/ingest.py @@ -86,6 +86,7 @@ class IssueArgs(TypedDict): type: int data: OccurrenceMetadata first_release: Release | None + priority: int | None def _create_issue_kwargs( @@ -104,6 +105,7 @@ def _create_issue_kwargs( "type": occurrence.type.type_id, "first_release": release, "data": materialize_metadata(occurrence, event), + "priority": occurrence.initial_issue_priority, } kwargs["data"]["last_received"] = json.datetime_to_str(event.datetime) return kwargs @@ -147,6 +149,7 @@ def materialize_metadata(occurrence: IssueOccurrence, event: Event) -> Occurrenc "metadata": event_metadata, "location": event.location, "last_received": json.datetime_to_str(event.datetime), + "initial_priority": occurrence.initial_issue_priority, } diff --git a/src/sentry/issues/issue_occurrence.py b/src/sentry/issues/issue_occurrence.py index 458d79fc4dd8bd..69b8c692da24c8 100644 --- a/src/sentry/issues/issue_occurrence.py +++ b/src/sentry/issues/issue_occurrence.py @@ -4,7 +4,7 @@ from collections.abc import Mapping, Sequence from dataclasses import dataclass from datetime import datetime -from typing import Any, TypedDict, cast +from typing import Any, NotRequired, TypedDict, cast from django.utils.timezone import is_aware @@ -35,6 +35,7 @@ class IssueOccurrenceData(TypedDict): detection_time: float level: str | None culprit: str | None + initial_issue_priority: NotRequired[int] @dataclass(frozen=True) @@ -86,6 +87,7 @@ class IssueOccurrence: detection_time: datetime level: str culprit: str + initial_issue_priority: int | None = None def __post_init__(self) -> None: if not is_aware(self.detection_time): @@ -108,6 +110,7 @@ def to_dict( "detection_time": self.detection_time.timestamp(), "level": self.level, "culprit": self.culprit, + "initial_issue_priority": self.initial_issue_priority, } @classmethod @@ -137,6 +140,7 @@ def from_dict(cls, data: IssueOccurrenceData) -> IssueOccurrence: cast(datetime, parse_timestamp(data["detection_time"])), level, culprit, + data.get("initial_issue_priority"), ) @property diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py index ad4f1c2b71cdad..e9699900c3bbe9 100644 --- a/src/sentry/issues/occurrence_consumer.py +++ b/src/sentry/issues/occurrence_consumer.py @@ -125,6 +125,12 @@ def _get_kwargs(payload: Mapping[str, Any]) -> Mapping[str, Any]: if payload.get("culprit"): occurrence_data["culprit"] = payload["culprit"] + if payload.get("initial_issue_priority") is not None: + occurrence_data["initial_issue_priority"] = payload["initial_issue_priority"] + else: + group_type = get_group_type_by_type_id(occurrence_data["type"]) + occurrence_data["initial_issue_priority"] = group_type.default_priority + if "event" in payload: event_payload = 
payload["event"] if payload["project_id"] != event_payload.get("project_id"): diff --git a/tests/sentry/issues/test_ingest.py b/tests/sentry/issues/test_ingest.py index fbab5c47b10a21..352967552ce405 100644 --- a/tests/sentry/issues/test_ingest.py +++ b/tests/sentry/issues/test_ingest.py @@ -325,6 +325,7 @@ def test(self) -> None: "type": occurrence.type.type_id, "first_release": None, "data": materialize_metadata(occurrence, event), + "priority": occurrence.initial_issue_priority, } @@ -339,6 +340,7 @@ def test_simple(self) -> None: "title": occurrence.issue_title, "location": event.location, "last_received": json.datetime_to_str(event.datetime), + "initial_priority": occurrence.initial_issue_priority, } def test_preserves_existing_metadata(self) -> None: diff --git a/tests/sentry/issues/test_occurrence_consumer.py b/tests/sentry/issues/test_occurrence_consumer.py index cd6164779bcabc..9dbcd0eb529890 100644 --- a/tests/sentry/issues/test_occurrence_consumer.py +++ b/tests/sentry/issues/test_occurrence_consumer.py @@ -25,6 +25,7 @@ from sentry.testutils.cases import SnubaTestCase, TestCase from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.pytest.fixtures import django_db_all +from sentry.types.group import PriorityLevel from sentry.utils.samples import load_data from tests.sentry.issues.test_utils import OccurrenceTestMixin @@ -406,3 +407,20 @@ def test_culprit(self) -> None: message["culprit"] = "i did it" kwargs = _get_kwargs(message) assert kwargs["occurrence_data"]["culprit"] == "i did it" + + def test_priority(self) -> None: + message = deepcopy(get_test_message(self.project.id)) + kwargs = _get_kwargs(message) + assert kwargs["occurrence_data"]["initial_issue_priority"] == PriorityLevel.LOW + + def test_priority_defaults_to_grouptype(self) -> None: + message = deepcopy(get_test_message(self.project.id)) + message["initial_issue_priority"] = None + kwargs = _get_kwargs(message) + assert kwargs["occurrence_data"]["initial_issue_priority"] == PriorityLevel.LOW + + def test_priority_overrides_defaults(self) -> None: + message = deepcopy(get_test_message(self.project.id)) + message["initial_issue_priority"] = PriorityLevel.HIGH + kwargs = _get_kwargs(message) + assert kwargs["occurrence_data"]["initial_issue_priority"] == PriorityLevel.HIGH diff --git a/tests/sentry/issues/test_status_change_consumer.py b/tests/sentry/issues/test_status_change_consumer.py index 54a69e0d891e6c..de362a0e1b5f74 100644 --- a/tests/sentry/issues/test_status_change_consumer.py +++ b/tests/sentry/issues/test_status_change_consumer.py @@ -98,7 +98,11 @@ def test_valid_payload_archived_forever(self) -> None: @with_feature("projects:issue-priority") def test_valid_payload_unresolved_escalating(self) -> None: - self.group.update(status=GroupStatus.IGNORED, substatus=GroupSubStatus.UNTIL_ESCALATING) + self.group.update( + status=GroupStatus.IGNORED, + substatus=GroupSubStatus.UNTIL_ESCALATING, + priority=PriorityLevel.MEDIUM, + ) message = get_test_message_status_change( self.project.id, fingerprint=self.fingerprint, diff --git a/tests/sentry/issues/test_utils.py b/tests/sentry/issues/test_utils.py index 2c3dd818b4a658..81ddd63ab5e7a3 100644 --- a/tests/sentry/issues/test_utils.py +++ b/tests/sentry/issues/test_utils.py @@ -31,6 +31,7 @@ def assert_occurrences_identical(self, o1: IssueOccurrence, o2: IssueOccurrence) assert o1.evidence_display == o2.evidence_display assert o1.type == o2.type assert o1.detection_time == o2.detection_time + assert o1.initial_issue_priority == 
o2.initial_issue_priority def build_occurrence_data(self, **overrides: Any) -> IssueOccurrenceData: kwargs: IssueOccurrenceData = { From e4664d418bf65b824f65e712d2e55bfc67175473 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 7 Feb 2024 17:29:12 -0800 Subject: [PATCH 160/357] ref(feedback): add analytics for mark as spam buttons (#64850) track when users click the manual "mark as spam" buttons (both details and bulk) --- .../feedback/feedbackItem/feedbackActions.tsx | 8 ++++++++ .../feedback/list/useBulkEditFeedbacks.tsx | 12 ++++++++++-- .../app/utils/analytics/feedbackAnalyticsEvents.tsx | 2 ++ 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/static/app/components/feedback/feedbackItem/feedbackActions.tsx b/static/app/components/feedback/feedbackItem/feedbackActions.tsx index 04863f7b0be0a3..f1382781b4c96e 100644 --- a/static/app/components/feedback/feedbackItem/feedbackActions.tsx +++ b/static/app/components/feedback/feedbackItem/feedbackActions.tsx @@ -14,6 +14,7 @@ import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types'; import {GroupStatus} from 'sentry/types'; +import {trackAnalytics} from 'sentry/utils/analytics'; import type {FeedbackIssue} from 'sentry/utils/feedback/types'; import useOrganization from 'sentry/utils/useOrganization'; @@ -72,6 +73,13 @@ export default function FeedbackActions({ addLoadingMessage(t('Updating feedback...')); const newStatus = isSpam ? GroupStatus.UNRESOLVED : GroupStatus.IGNORED; resolve(newStatus, mutationOptions); + if (!isSpam) { + // not currently spam, clicking the button will turn it into spam + trackAnalytics('feedback.mark-spam-clicked', { + organization, + type: 'details', + }); + } }} > {isSpam ? 
t('Move to Inbox') : t('Mark as Spam')} diff --git a/static/app/components/feedback/list/useBulkEditFeedbacks.tsx b/static/app/components/feedback/list/useBulkEditFeedbacks.tsx index ed3bf38cf05ee2..065490a486a0d1 100644 --- a/static/app/components/feedback/list/useBulkEditFeedbacks.tsx +++ b/static/app/components/feedback/list/useBulkEditFeedbacks.tsx @@ -9,7 +9,8 @@ import {openConfirmModal} from 'sentry/components/confirm'; import type useListItemCheckboxState from 'sentry/components/feedback/list/useListItemCheckboxState'; import useMutateFeedback from 'sentry/components/feedback/useMutateFeedback'; import {t, tct} from 'sentry/locale'; -import type {GroupStatus} from 'sentry/types'; +import {GroupStatus} from 'sentry/types'; +import {trackAnalytics} from 'sentry/utils/analytics'; import useOrganization from 'sentry/utils/useOrganization'; const statusToButtonLabel: Record = { @@ -42,6 +43,13 @@ export default function useBulkEditFeedbacks({deselectAll, selectedIds}: Props) openConfirmModal({ bypass: Array.isArray(selectedIds) && selectedIds.length === 1, onConfirm: () => { + if (newMailbox === GroupStatus.IGNORED) { + // target action is marking as spam aka ignored + trackAnalytics('feedback.mark-spam-clicked', { + organization, + type: 'bulk', + }); + } addLoadingMessage(t('Updating feedbacks...')); resolve(newMailbox, { onError: () => { @@ -62,7 +70,7 @@ export default function useBulkEditFeedbacks({deselectAll, selectedIds}: Props) withoutBold: true, }); }, - [deselectAll, resolve, selectedIds] + [deselectAll, resolve, selectedIds, organization] ); const onMarkAsRead = useCallback( diff --git a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx index 3a5d2540c9e3c8..642d525390dc30 100644 --- a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx +++ b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx @@ -4,6 +4,7 @@ export type FeedbackEventParameters = { }; 'feedback.index-setup-viewed': {}; 'feedback.list-item-selected': {}; + 'feedback.mark-spam-clicked': {type: 'bulk' | 'details'}; 'feedback.whats-new-banner-dismissed': {}; 'feedback.whats-new-banner-viewed': {}; }; @@ -17,4 +18,5 @@ export const feedbackEventMap: Record = { 'Clicked Integration Issue Button in Feedback Details', 'feedback.whats-new-banner-dismissed': 'Dismissed Feedback Whatss New Banner', 'feedback.whats-new-banner-viewed': 'Viewed Feedback Whats New Banner', + 'feedback.mark-spam-clicked': 'Marked Feedback as Spam', }; From bd5b3c75ec5cfa91e5ec11065e8d4df73f7e5a0c Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:33:57 +0100 Subject: [PATCH 161/357] feat(stats): metrics stats (#64790) --- static/app/constants/index.tsx | 10 +++++++++- static/app/types/core.tsx | 3 ++- .../app/views/organizationStats/usageChart/index.tsx | 7 +++++++ 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx index 3e3a1fa91bd9ee..8b7958c6cf3f81 100644 --- a/static/app/constants/index.tsx +++ b/static/app/constants/index.tsx @@ -238,7 +238,7 @@ export const DEFAULT_RELATIVE_PERIODS_PAGE_FILTER = { '30d': t('30D'), }; -// https://github.com/getsentry/relay/blob/master/relay-common/src/constants.rs +// https://github.com/getsentry/relay/blob/master/relay-base-schema/src/data_category.rs export const DATA_CATEGORY_INFO = { [DataCategoryExact.ERROR]: { name: DataCategoryExact.ERROR, @@ -312,6 +312,14 @@ export const DATA_CATEGORY_INFO = { 
titleName: t('Cron Monitors'), uid: 13, }, + [DataCategoryExact.METRICS]: { + name: DataCategoryExact.METRICS, + apiName: 'metric_bucket', + plural: 'metric_buckets', + displayName: 'metrics', + titleName: t('Metrics'), + uid: 15, + }, } as const satisfies Record; // Special Search characters diff --git a/static/app/types/core.tsx b/static/app/types/core.tsx index 37d739f7bdec00..1b763f21606faf 100644 --- a/static/app/types/core.tsx +++ b/static/app/types/core.tsx @@ -83,7 +83,7 @@ export enum DataCategory { } /** - * https://github.com/getsentry/relay/blob/master/relay-common/src/constants.rs + * https://github.com/getsentry/relay/blob/master/relay-base-schema/src/data_category.rs * Matches the backend singular backend enum directly. * For display variations, refer to `DATA_CATEGORY_INFO` rather than manipulating these strings */ @@ -97,6 +97,7 @@ export enum DataCategoryExact { TRANSACTION_INDEXED = 'transaction_indexed', MONITOR = 'monitor', MONITOR_SEAT = 'monitorSeat', + METRICS = 'metric_bucket', } export interface DataCategoryInfo { diff --git a/static/app/views/organizationStats/usageChart/index.tsx b/static/app/views/organizationStats/usageChart/index.tsx index 9814a303d48af2..e6435d919f1281 100644 --- a/static/app/views/organizationStats/usageChart/index.tsx +++ b/static/app/views/organizationStats/usageChart/index.tsx @@ -89,6 +89,12 @@ export const CHART_OPTIONS_DATACATEGORY: CategoryOption[] = [ disabled: false, yAxisMinInterval: 100, }, + { + label: DATA_CATEGORY_INFO.metric_bucket.titleName, + value: DATA_CATEGORY_INFO.metric_bucket.plural, + disabled: false, + yAxisMinInterval: 100, + }, ]; export enum ChartDataTransform { @@ -384,6 +390,7 @@ export class UsageChart extends Component { get chartLegendData() { const {chartSeries} = this.props; const {chartData} = this.chartMetadata; + const legend: LegendComponentOption['data'] = [ { name: SeriesTypes.ACCEPTED, From f4541c8dc7b7a30d327a37d47754e1f741d49693 Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Thu, 8 Feb 2024 10:45:40 +0100 Subject: [PATCH 162/357] feat(ddm): Add new metrics/query endpoint base code (#64785) --- src/sentry/api/bases/organization.py | 9 + .../api/endpoints/organization_metrics.py | 108 ++- src/sentry/api/urls.py | 6 + .../querying/data_v2/__init__.py | 3 + .../sentry_metrics/querying/data_v2/api.py | 61 ++ .../querying/data_v2/execution.py | 669 ++++++++++++++++++ .../querying/data_v2/parsing.py | 173 +++++ .../querying/data_v2/transformation.py | 212 ++++++ .../sentry_metrics/querying/data_v2/utils.py | 45 ++ src/sentry/snuba/referrer.py | 1 + .../endpoints/test_organization_metrics.py | 37 +- .../test_organization_metrics_data.py | 4 +- .../test_organization_metrics_query.py | 38 + .../querying/data_v2/__init__.py | 0 .../querying/data_v2/test_api.py | 70 ++ 15 files changed, 1422 insertions(+), 14 deletions(-) create mode 100644 src/sentry/sentry_metrics/querying/data_v2/__init__.py create mode 100644 src/sentry/sentry_metrics/querying/data_v2/api.py create mode 100644 src/sentry/sentry_metrics/querying/data_v2/execution.py create mode 100644 src/sentry/sentry_metrics/querying/data_v2/parsing.py create mode 100644 src/sentry/sentry_metrics/querying/data_v2/transformation.py create mode 100644 src/sentry/sentry_metrics/querying/data_v2/utils.py create mode 100644 tests/sentry/api/endpoints/test_organization_metrics_query.py create mode 100644 tests/sentry/sentry_metrics/querying/data_v2/__init__.py create mode 100644 tests/sentry/sentry_metrics/querying/data_v2/test_api.py diff --git 
a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py index 7f824c317d9caa..ee58564ea16107 100644 --- a/src/sentry/api/bases/organization.py +++ b/src/sentry/api/bases/organization.py @@ -201,6 +201,15 @@ class OrgAuthTokenPermission(OrganizationPermission): } +class OrganizationMetricsPermission(OrganizationPermission): + scope_map = { + "GET": ["org:read", "org:write", "org:admin"], + "POST": ["org:read", "org:write", "org:admin"], + "PUT": ["org:write", "org:admin"], + "DELETE": ["org:admin"], + } + + class ControlSiloOrganizationEndpoint(Endpoint): """ A base class for endpoints that use an organization scoping but lives in the control silo diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py index b07b927a557cc8..7886a629ac7bc9 100644 --- a/src/sentry/api/endpoints/organization_metrics.py +++ b/src/sentry/api/endpoints/organization_metrics.py @@ -5,12 +5,14 @@ from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint -from sentry.api.bases.organization import OrganizationEndpoint +from sentry.api.bases.organization import OrganizationEndpoint, OrganizationMetricsPermission from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.paginator import GenericOffsetPaginator from sentry.api.utils import get_date_range_from_params from sentry.exceptions import InvalidParams from sentry.sentry_metrics.querying.data import run_metrics_query +from sentry.sentry_metrics.querying.data_v2 import run_metrics_queries_plan +from sentry.sentry_metrics.querying.data_v2.api import FormulaOrder, MetricsQueriesPlan from sentry.sentry_metrics.querying.errors import ( InvalidMetricsQueryError, LatestReleaseNotFoundError, @@ -304,3 +306,107 @@ def get_result(self, limit, cursor=None): prev=Cursor(0, max(0, offset - limit), True, offset > 0), next=Cursor(0, max(0, offset + limit), False, has_more), ) + + +@region_silo_endpoint +class OrganizationMetricsQueryEndpoint(OrganizationEndpoint): + publish_status = { + "POST": ApiPublishStatus.EXPERIMENTAL, + } + owner = ApiOwner.TELEMETRY_EXPERIENCE + permission_classes = (OrganizationMetricsPermission,) + + """ + Queries one or more metrics over a time range. + """ + + # still 40 req/s but allows for bursts of 200 up to req/s for dashboard loading + default_rate_limit = RateLimit(200, 5) + + rate_limits = { + "POST": { + RateLimitCategory.IP: default_rate_limit, + RateLimitCategory.USER: default_rate_limit, + RateLimitCategory.ORGANIZATION: default_rate_limit, + }, + } + + # Number of groups returned by default for each query. + default_limit = 20 + + def _validate_order(self, order: str | None) -> FormulaOrder | None: + if order is None: + return None + + formula_order = FormulaOrder.from_string(order) + if formula_order is None: + order_choices = [v.value for v in FormulaOrder] + raise InvalidMetricsQueryError( + f"The provided `order` is not a valid, only {order_choices} are supported" + ) + + return formula_order + + def _validate_limit(self, limit: str | None) -> int: + if not limit: + return self.default_limit + + try: + return int(limit) + except ValueError: + raise InvalidMetricsQueryError( + "The provided `limit` is not valid, an integer is required" + ) + + def _interval_from_request(self, request: Request) -> int: + """ + Extracts the interval of the query from the request payload. 
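+ Falls back to a one-hour interval (3600 seconds) when no interval is supplied or the value cannot be parsed.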
+ """ + interval = parse_stats_period(request.data.get("interval", "1h")) + return int(3600 if interval is None else interval.total_seconds()) + + def _metrics_queries_plan_from_request(self, request: Request) -> MetricsQueriesPlan: + """ + Extracts the metrics queries plan from the request payload. + """ + metrics_queries_plan = MetricsQueriesPlan() + + queries = request.data.get("queries") or [] + for query in queries: + metrics_queries_plan.declare_query(name=query["name"], mql=query["mql"]) + + formulas = request.data.get("formulas") or [] + for formula in formulas: + metrics_queries_plan.apply_formula( + mql=formula["mql"], + order=self._validate_order(formula.get("order")), + limit=self._validate_limit(formula.get("limit")), + ) + + return metrics_queries_plan + + def post(self, request: Request, organization) -> Response: + try: + start, end = get_date_range_from_params(request.GET) + interval = self._interval_from_request(request) + metrics_queries_plan = self._metrics_queries_plan_from_request(request) + + results = run_metrics_queries_plan( + metrics_queries_plan=metrics_queries_plan, + start=start, + end=end, + interval=interval, + organization=organization, + # TODO: figure out how to make these methods work with HTTP body. + projects=self.get_projects(request, organization), + environments=self.get_environments(request, organization), + referrer=Referrer.API_DDM_METRICS_QUERY.value, + ) + except InvalidMetricsQueryError as e: + return Response(status=400, data={"detail": str(e)}) + except LatestReleaseNotFoundError as e: + return Response(status=404, data={"detail": str(e)}) + except MetricsQueryExecutionError as e: + return Response(status=500, data={"detail": str(e)}) + + return Response(status=200, data=results) diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 904cbc5bfeaac7..33149e53acb44b 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -385,6 +385,7 @@ OrganizationMetricDetailsEndpoint, OrganizationMetricsDataEndpoint, OrganizationMetricsDetailsEndpoint, + OrganizationMetricsQueryEndpoint, OrganizationMetricsTagDetailsEndpoint, OrganizationMetricsTagsEndpoint, ) @@ -1981,6 +1982,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationMetricsDataEndpoint.as_view(), name="sentry-api-0-organization-metrics-data", ), + re_path( + r"^(?P[^/]+)/metrics/query/$", + OrganizationMetricsQueryEndpoint.as_view(), + name="sentry-api-0-organization-metrics-query", + ), re_path( r"^(?P[^/]+)/metrics/tags/$", OrganizationMetricsTagsEndpoint.as_view(), diff --git a/src/sentry/sentry_metrics/querying/data_v2/__init__.py b/src/sentry/sentry_metrics/querying/data_v2/__init__.py new file mode 100644 index 00000000000000..0df37c30e0f53b --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/__init__.py @@ -0,0 +1,3 @@ +from .api import run_metrics_queries_plan + +__all__ = ["run_metrics_queries_plan"] diff --git a/src/sentry/sentry_metrics/querying/data_v2/api.py b/src/sentry/sentry_metrics/querying/data_v2/api.py new file mode 100644 index 00000000000000..ad61f12a7b11c3 --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/api.py @@ -0,0 +1,61 @@ +from collections.abc import Sequence +from dataclasses import dataclass +from datetime import datetime +from enum import Enum +from typing import Union + +from sentry.models.environment import Environment +from sentry.models.organization import Organization +from sentry.models.project import Project + + +# TODO: lift out types in `types.py` once endpoint is 
finished. +class FormulaOrder(Enum): + ASC = "asc" + DESC = "desc" + + @classmethod + # Used `Union` because `|` conflicts with the parser. + def from_string(cls, value: str) -> Union["FormulaOrder", None]: + for v in cls: + if v.value == value: + return v + + return None + + +@dataclass(frozen=True) +class FormulaDefinition: + mql: str + order: FormulaOrder | None + limit: int | None + + +class MetricsQueriesPlan: + def __init__(self): + self._queries: dict[str, str] = {} + self._formulas: list[FormulaDefinition] = [] + + def declare_query(self, name: str, mql: str) -> "MetricsQueriesPlan": + self._queries[name] = mql + return self + + def apply_formula( + self, mql: str, order: FormulaOrder | None = None, limit: int | None = None + ) -> "MetricsQueriesPlan": + self._formulas.append(FormulaDefinition(mql=mql, order=order, limit=limit)) + return self + + +def run_metrics_queries_plan( + metrics_queries_plan: MetricsQueriesPlan, + start: datetime, + end: datetime, + interval: int, + organization: Organization, + projects: Sequence[Project], + environments: Sequence[Environment], + referrer: str, +): + # TODO: implement new querying logic. + return None diff --git a/src/sentry/sentry_metrics/querying/data_v2/execution.py b/src/sentry/sentry_metrics/querying/data_v2/execution.py new file mode 100644 index 00000000000000..da585c9cd7c304 --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/execution.py @@ -0,0 +1,669 @@ +import math +from collections.abc import Mapping, Sequence +from dataclasses import dataclass, replace +from datetime import datetime +from typing import Any, Optional, cast + +import sentry_sdk +from snuba_sdk import Column, Direction, MetricsQuery, MetricsScope, Request +from snuba_sdk.conditions import BooleanCondition, BooleanOp, Condition, Op + +from sentry.models.organization import Organization +from sentry.models.project import Project +from sentry.sentry_metrics.querying.common import DEFAULT_QUERY_INTERVALS, SNUBA_QUERY_LIMIT +from sentry.sentry_metrics.querying.errors import ( + InvalidMetricsQueryError, + MetricsQueryExecutionError, +) +from sentry.sentry_metrics.querying.types import GroupKey, GroupsCollection +from sentry.sentry_metrics.querying.visitors import QueriedMetricsVisitor +from sentry.sentry_metrics.visibility import get_metrics_blocking_state +from sentry.snuba.dataset import Dataset +from sentry.snuba.metrics import to_intervals +from sentry.snuba.metrics_layer.query import run_query +from sentry.utils import metrics +from sentry.utils.snuba import SnubaError + + +def _extract_groups_from_seq(seq: Sequence[Mapping[str, Any]]) -> GroupsCollection: + """ + Returns the groups from a sequence of rows returned by Snuba. + + Rows from Snuba are in the form [{"time": x, "aggregate_value": y, "group_1": z, "group_2": a}]. + """ + groups = [] + for data in seq: + inner_group = [] + for key, value in data.items(): + # TODO: check if time can be used as a tag key. + if key not in ["aggregate_value", "time"]: + inner_group.append((key, value)) + + if inner_group: + groups.append(inner_group) + + return groups + + +def _build_composite_key_from_dict( + data: Mapping[str, Any], alignment_keys: Sequence[str] +) -> tuple[tuple[str, str], ...]: + """ + Builds a hashable composite key given a series of keys that are looked up in the supplied data. 
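+ The result is a tuple of (key, value) pairs, e.g. (("platform", "ios"), ("release", "1.0")), so it is hashable and can be used as a dictionary key by the indexing helpers below.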
+ """ + composite_key = [] + for key in alignment_keys: + if (value := data.get(key)) is not None: + composite_key.append((key, value)) + + return tuple(composite_key) + + +def _build_indexed_seq( + seq: Sequence[Mapping[str, Any]], alignment_keys: Sequence[str] +) -> Mapping[GroupKey, int]: + """ + Creates an inverted index on the supplied sequence of Snuba rows. The index is keyed by the composite key which is + computed from a set of alignment keys that define the order in which the key is built. + """ + indexed_seq = {} + for index, data in enumerate(seq): + composite_key = _build_composite_key_from_dict(data, alignment_keys) + indexed_seq[composite_key] = index + + return indexed_seq + + +def _build_aligned_seq( + seq: Sequence[Mapping[str, Any]], + reference_seq: Sequence[Mapping[str, Any]], + alignment_keys: Sequence[str], + indexed_seq: Mapping[GroupKey, int], +) -> Sequence[Mapping[str, Any]]: + """ + Aligns a sequence of rows to a reference sequence of rows by using reverse index which was built to speed up the + alignment process. + """ + aligned_seq = [] + + for data in reference_seq: + composite_key = _build_composite_key_from_dict(data, alignment_keys) + index = indexed_seq.get(composite_key) + if index is not None: + aligned_seq.append(seq[index]) + + return aligned_seq + + +@dataclass(frozen=True) +class ExecutableQuery: + with_series: bool + with_totals: bool + + identifier: str + metrics_query: MetricsQuery + group_bys: Sequence[str] | None + order_by: str | None + limit: int | None + + def is_empty(self) -> bool: + return not self.metrics_query.scope.org_ids or not self.metrics_query.scope.project_ids + + def replace_date_range(self, start: datetime, end: datetime) -> "ExecutableQuery": + return replace( + self, + metrics_query=self.metrics_query.set_start(start).set_end(end), + ) + + def replace_limit(self, limit: int = SNUBA_QUERY_LIMIT) -> "ExecutableQuery": + return replace( + self, + metrics_query=self.metrics_query.set_limit(limit), + ) + + def replace_interval(self, new_interval: int) -> "ExecutableQuery": + return replace( + self, + metrics_query=self.metrics_query.set_rollup( + replace(self.metrics_query.rollup, interval=new_interval) + ), + ) + + def replace_order_by(self, direction: Direction) -> "ExecutableQuery": + return replace( + self, + metrics_query=self.metrics_query.set_rollup( + replace(self.metrics_query.rollup, interval=None, totals=True, orderby=direction) + ), + ) + + def to_totals_query(self) -> "ExecutableQuery": + return replace( + self, + metrics_query=self.metrics_query.set_rollup( + # If an order_by is used, we must run a totals query. + replace(self.metrics_query.rollup, interval=None, totals=True) + ), + ) + + def add_group_filters( + self, + groups_collection: GroupsCollection | None, + ) -> "ExecutableQuery": + """ + Returns a new `ExecutableQuery` with a series of filters that ensure that the new query will have the same + groups returned. Keep in mind that there is no guarantee that all the groups will be returned, since data might + change in the meanwhile, so the guarantee of this method is that the returned groups will all be belonging to + `groups_collection`. + + The need for this filter arises because when executing multiple queries, we want to have the same groups + returned, in order to make results consistent. Note that in case queries have different groups, some results + might be missing, since the reference query dictates which values are returned during the alignment process. 
+ """ + if not groups_collection: + return self + + # We perform a transformation in the form [(key_1 = value_1 AND key_2 = value_2) OR (key_3 = value_3)]. + snuba_filters = [] + for groups in groups_collection: + inner_snuba_filters = [] + for filter_key, filter_value in groups: + inner_snuba_filters.append(Condition(Column(filter_key), Op.EQ, filter_value)) + + # In case we have more than one filter, we have to group them into an `AND`. + if len(inner_snuba_filters) > 1: + snuba_filters.append(BooleanCondition(BooleanOp.AND, inner_snuba_filters)) + else: + snuba_filters.append(inner_snuba_filters[0]) + + # In case we have more than one filter, we have to group them into an `OR`. + if len(snuba_filters) > 1: + snuba_filters = [BooleanCondition(BooleanOp.OR, snuba_filters)] + + original_filters = self.metrics_query.query.filters or [] + return replace( + self, + metrics_query=self.metrics_query.set_query( + self.metrics_query.query.set_filters(original_filters + snuba_filters) + ), + ) + + def filter_blocked_projects( + self, + organization: Organization, + projects: set[Project], + blocked_metrics_for_projects: Mapping[str, set[int]], + ) -> "ExecutableQuery": + """ + Returns a new `ExecutableQuery` with the projects for which all the queries are not blocked. In case no projects + exist, the query will be returned with empty projects, signaling the executor to not run the query. + """ + intersected_projects: set[int] = {project.id for project in projects} + + for queried_metric in QueriedMetricsVisitor().visit(self.metrics_query.query): + blocked_for_projects = blocked_metrics_for_projects.get(queried_metric) + if blocked_for_projects: + metrics.incr(key="ddm.metrics_api.blocked_metric_queried", amount=1) + intersected_projects -= blocked_for_projects + + return replace( + self, + metrics_query=self.metrics_query.set_scope( + MetricsScope( + org_ids=[organization.id], + project_ids=list(intersected_projects), + ) + ), + ) + + +@dataclass(frozen=True) +class QueryResult: + series_executable_query: ExecutableQuery | None + totals_executable_query: ExecutableQuery | None + result: Mapping[str, Any] + + def __post_init__(self): + assert self.series_executable_query or self.totals_executable_query + + @classmethod + def empty_from(cls, executable_query: ExecutableQuery) -> "QueryResult": + return QueryResult( + series_executable_query=executable_query, + totals_executable_query=executable_query, + result={ + "series": {"data": {}, "meta": {}}, + "totals": {"data": {}, "meta": {}}, + # We want to honor the date ranges of the supplied query. 
+ "modified_start": executable_query.metrics_query.start, + "modified_end": executable_query.metrics_query.end, + }, + ) + + @property + def query_name(self) -> str: + if self.series_executable_query: + return self.series_executable_query.identifier + + if self.totals_executable_query: + return self.totals_executable_query.identifier + + raise InvalidMetricsQueryError( + "Unable to determine the query name for a result with no queries" + ) + + @property + def modified_start(self) -> datetime: + return self.result["modified_start"] + + @property + def modified_end(self) -> datetime: + return self.result["modified_end"] + + @property + def interval(self) -> int: + if not self.series_executable_query: + raise MetricsQueryExecutionError( + "You have to run a timeseries query in order to use the interval" + ) + + return self.series_executable_query.metrics_query.rollup.interval + + @property + def series(self) -> Sequence[Mapping[str, Any]]: + return self.result["series"]["data"] + + @property + def totals(self) -> Sequence[Mapping[str, Any]]: + return self.result["totals"]["data"] + + @property + def meta(self) -> Sequence[Mapping[str, str]]: + # By default, we extract the metadata from the totals query, if that is not there we extract from the series + # query. + meta_source = "totals" if "totals" in self.result else "series" + return self.result[meta_source]["meta"] + + @property + def groups(self) -> GroupsCollection: + # We prefer to use totals to determine the groups that we received, since those are less likely to hit the limit + # , and thus they will be more comprehensive. In case the query doesn't have totals, we have to use series. + return _extract_groups_from_seq(self.totals or self.series) + + @property + def group_bys(self) -> list[str] | None: + # We return the groups directly from the query and not the actual groups returned by the query. This is done so + # that we can correctly render groups in case they are not returned from the db. + return cast( + Optional[list[str]], + ( + cast(ExecutableQuery, self.series_executable_query or self.totals_executable_query) + ).group_bys, + ) + + @property + def length(self) -> int: + # We try to see how many series results we got, since that is the query which is likely to surpass the limit. + if "series" in self.result: + return len(self.series) + + # If we have no series, totals will give us a hint of the size of the dataset. + if "totals" in self.result: + return len(self.totals) + + return 0 + + def align_with(self, reference_query_result: "QueryResult") -> "QueryResult": + """ + Aligns the series and totals results with a reference query. + + Note that the alignment performs a mutation of the current object. + """ + # Alignment keys define the order in which fields are used for indexing purposes when aligning different + # sequences. + alignment_keys = reference_query_result.group_bys + if not alignment_keys: + return self + + # For timeseries, we want to align based on the time also, since group bys + time are the common values + # across separate queries. 
+ indexed_series = _build_indexed_seq(self.series, alignment_keys + ["time"]) + indexed_totals = _build_indexed_seq(self.totals, alignment_keys) + + aligned_series = _build_aligned_seq( + self.series, reference_query_result.series, alignment_keys + ["time"], indexed_series + ) + aligned_totals = _build_aligned_seq( + self.totals, reference_query_result.totals, alignment_keys, indexed_totals + ) + + # We only mutate with the aligned data, only if we have data, since if it's empty it could be that we are + # trying to align on a query that has no data, and we want to avoid deleting the data of this query. + if aligned_series: + self.result["series"]["data"] = aligned_series + if aligned_totals: + self.result["totals"]["data"] = aligned_totals + + return self + + def align_series_to_totals(self) -> "QueryResult": + """ + Aligns the series to the totals of the same query. + + Note that the alignment performs a mutation of the current object. + """ + alignment_keys = self.group_bys + if not alignment_keys: + return self + + indexed_series: dict[tuple[tuple[str, str], ...], list[int]] = {} + for index, data in enumerate(self.series): + composite_key = _build_composite_key_from_dict(data, alignment_keys) + # Since serieses have also the time component, we store multiple indexes of multiple times for the same + # group. + indexed_series.setdefault(composite_key, []).append(index) + + aligned_series = [] + for data in self.totals: + composite_key = _build_composite_key_from_dict(data, alignment_keys) + indexes = indexed_series.get(composite_key) + for index in indexes or (): + aligned_series.append(self.series[index]) + + if aligned_series: + self.result["series"]["data"] = aligned_series + + return self + + +class QueryExecutor: + def __init__(self, organization: Organization, projects: Sequence[Project], referrer: str): + self._organization = organization + self._projects = projects + self._referrer = referrer + + # Ordered list of the intervals that can be chosen by the executor. They are removed when tried, in order + # to avoid an infinite recursion. + self._interval_choices = sorted(DEFAULT_QUERY_INTERVALS) + # List of queries scheduled for execution. + self._scheduled_queries: list[ExecutableQuery] = [] + # Tracks the number of queries that have been executed (for measuring purposes). + self._number_of_executed_queries = 0 + + # We load the blocked metrics for the supplied projects. + self._blocked_metrics_for_projects = self._load_blocked_metrics_for_projects() + + def _load_blocked_metrics_for_projects(self) -> Mapping[str, set[int]]: + """ + Load the blocked metrics for the supplied projects and stores them in the executor in an efficient way that + speeds up the determining of the projects to exclude from the query. + """ + blocked_metrics_for_projects: dict[str, set[int]] = {} + + for project_id, metrics_blocking_state in get_metrics_blocking_state( + self._projects + ).items(): + for metric_blocking in metrics_blocking_state.metrics.values(): + blocked_metrics_for_projects.setdefault(metric_blocking.metric_mri, set()).add( + project_id + ) + + return blocked_metrics_for_projects + + def _build_request(self, query: MetricsQuery) -> Request: + """ + Builds a Snuba request given a MetricsQuery to execute. + """ + return Request( + # The dataset used here is arbitrary, since the `run_query` function will infer it internally. 
+ dataset=Dataset.Metrics.value, + query=query, + app_id="default", + tenant_ids={"referrer": self._referrer, "organization_id": self._organization.id}, + ) + + def _execute( + self, executable_query: ExecutableQuery, is_reference_query: bool = False + ) -> QueryResult: + """ + Executes a query as series and/or totals and returns the result. + """ + try: + # We merge the query with the blocked projects, in order to obtain a new query with only the projects that + # all have the queried metrics unblocked. + executable_query = executable_query.filter_blocked_projects( + organization=self._organization, + projects=set(self._projects), + blocked_metrics_for_projects=self._blocked_metrics_for_projects, + ) + + # We try to determine the interval of the query, which will be used to define clear time bounds for both + # queries. This is done here since the metrics layer doesn't adjust the time for totals queries. + interval = executable_query.metrics_query.rollup.interval + if interval: + modified_start, modified_end, _ = to_intervals( + executable_query.metrics_query.start, + executable_query.metrics_query.end, + interval, + ) + if modified_start and modified_end: + executable_query = executable_query.replace_date_range( + modified_start, modified_end + ) + + # If, after merging the query with the blocked projects, the query becomes empty, we will return an empty + # result. + if executable_query.is_empty(): + return QueryResult.empty_from(executable_query) + + totals_executable_query = executable_query + totals_result = None + if executable_query.with_totals: + # For totals queries, if there is a limit passed by the user, we will honor that and apply it only for + # the reference query, since we want to load the data for all groups that are decided by the reference + # query. + if is_reference_query and executable_query.limit: + totals_executable_query = totals_executable_query.replace_limit( + executable_query.limit + ) + else: + totals_executable_query = totals_executable_query.replace_limit() + + if executable_query.order_by: + order_by_direction = Direction.ASC + if executable_query.order_by.startswith("-"): + order_by_direction = Direction.DESC + + totals_executable_query = totals_executable_query.replace_order_by( + order_by_direction + ) + + self._number_of_executed_queries += 1 + totals_result = run_query( + request=self._build_request( + totals_executable_query.to_totals_query().metrics_query + ) + ) + + series_executable_query = executable_query + series_result = None + if executable_query.with_series: + # For series queries, we always want to use the default limit. + series_executable_query = series_executable_query.replace_limit() + + # There is a case in which we need to apply the totals groups directly on the series, which happens only + # when the reference queries are executed. The reason for this is that if we don't filter the values, + # we might hit the limit in the series query and lose data. 
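+ # In other words, the series query below is filtered down to the groups that the totals query returned.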
+ if is_reference_query and totals_result: + series_executable_query = series_executable_query.add_group_filters( + _extract_groups_from_seq(totals_result["data"]) + ) + + self._number_of_executed_queries += 1 + series_result = run_query( + request=self._build_request(series_executable_query.metrics_query) + ) + + result = {} + if series_result and totals_result: + result = { + "series": series_result, + "totals": totals_result, + "modified_start": series_result["modified_start"], + "modified_end": series_result["modified_end"], + } + elif series_result: + result = { + "series": series_result, + "modified_start": series_result["modified_start"], + "modified_end": series_result["modified_end"], + } + elif totals_result: + result = { + "totals": totals_result, + "modified_start": totals_result["modified_start"], + "modified_end": totals_result["modified_end"], + } + + return QueryResult( + series_executable_query=series_executable_query, + totals_executable_query=totals_executable_query, + result=result, + ) + except SnubaError as e: + sentry_sdk.capture_exception(e) + raise MetricsQueryExecutionError("An error occurred while executing the query") + + def _derive_next_interval(self, result: QueryResult) -> int: + """ + Computes the best possible interval, given a fixed set of available intervals, which can fit in the limit + of rows that Snuba can return. + """ + # We try to estimate the number of groups. + groups_number = len(result.groups) + + # We compute the ideal number of intervals that can fit with a given number of groups. + intervals_number = math.floor(SNUBA_QUERY_LIMIT / groups_number) + + # We compute the optimal size of each interval in seconds. + optimal_interval_size = math.floor( + (result.modified_end - result.modified_start).total_seconds() / intervals_number + ) + + # Get the smallest interval that is larger than optimal out of a set of defined intervals in the product. + for index, interval in enumerate(self._interval_choices): + if interval >= optimal_interval_size: + # We have to put the choice, otherwise we end up in an infinite recursion. + self._interval_choices.pop(index) + return interval + + raise MetricsQueryExecutionError( + "Unable to find an interval to satisfy the query because too many results " + "are returned" + ) + + def _find_reference_query(self) -> int: + """ + Finds the reference query among the _schedule_queries. + + A reference query is the first query which is run, and it's used to determine the ordering of the follow-up + queries. + """ + if not self._scheduled_queries: + raise InvalidMetricsQueryError( + "Can't find a reference query because no queries were supplied" + ) + + for index, query in enumerate(self._scheduled_queries): + if query.order_by: + return index + + return 0 + + def _serial_execute(self) -> Sequence[QueryResult]: + """ + Executes serially all the queries that are supplied to the QueryExecutor. + + The execution will try to satisfy the query by dynamically changing its interval, in the case in which the + Snuba limit is reached. + """ + if not self._scheduled_queries: + return [] + + # We execute the first reference query which will dictate the order of the follow-up queries. + reference_query = self._scheduled_queries.pop(self._find_reference_query()) + reference_query_result = self._execute( + executable_query=reference_query, is_reference_query=True + ) + + # Case 1: we have fewer results that the limit. 
In this case we are free to run the follow-up queries under the + # assumption that data doesn't change much between queries. + if reference_query_result.length < SNUBA_QUERY_LIMIT: + # Snuba supports order by only for totals, thus we need to align the series to the totals ordering before + # we can run the other queries and align them on this reference query. + reference_query_result.align_series_to_totals() + + results = [reference_query_result] + reference_groups = reference_query_result.groups + metrics.distribution( + key="ddm.metrics_api.groups_cardinality", value=len(reference_groups) + ) + + for query in self._scheduled_queries: + query_result = self._execute( + executable_query=query.add_group_filters(reference_groups), + is_reference_query=False, + ) + + query_result.align_with(reference_query_result) + results.append(query_result) + + return results + + # Case 2: we have more results than the limit. In this case we want to determine a new interval that + # will result in less than limit data points. + new_interval = self._derive_next_interval(reference_query_result) + + # We update the scheduled queries to use the new interval. It's important to note that we also add back the + # reference query, since we need to execute it again. + self._scheduled_queries = [ + query.replace_interval(new_interval) + for query in [reference_query] + self._scheduled_queries + ] + + return self._serial_execute() + + def execute(self) -> Sequence[QueryResult]: + """ + Executes the scheduled queries serially. + """ + results = self._serial_execute() + metrics.distribution( + key="ddm.metrics_api.queries_executed", value=self._number_of_executed_queries + ) + + return results + + def schedule( + self, + identifier: str, + query: MetricsQuery, + group_bys: Sequence[str] | None, + order_by: str | None, + limit: int | None, + ): + """ + Lazily schedules a query for execution. + + Note that this method won't execute the query, since it's lazy in nature. 
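+ Execution only happens when `execute()` is called, which lets the executor pick a reference query first and propagate its groups to the follow-up queries.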
+ """ + executable_query = ExecutableQuery( + with_series=True, + with_totals=True, + identifier=identifier, + metrics_query=query, + group_bys=group_bys, + order_by=order_by, + limit=limit, + ) + self._scheduled_queries.append(executable_query) diff --git a/src/sentry/sentry_metrics/querying/data_v2/parsing.py b/src/sentry/sentry_metrics/querying/data_v2/parsing.py new file mode 100644 index 00000000000000..e18481307fbe72 --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/parsing.py @@ -0,0 +1,173 @@ +import re +from collections.abc import Generator, Sequence + +from parsimonious.exceptions import IncompleteParseError +from snuba_sdk import Timeseries +from snuba_sdk.mql.mql import parse_mql +from snuba_sdk.query_visitors import InvalidQueryError + +from sentry.models.environment import Environment +from sentry.models.project import Project +from sentry.sentry_metrics.querying.errors import InvalidMetricsQueryError +from sentry.sentry_metrics.querying.types import QueryExpression +from sentry.sentry_metrics.querying.utils import remove_if_match +from sentry.sentry_metrics.querying.visitors import ( + EnvironmentsInjectionVisitor, + FiltersCompositeVisitor, + LatestReleaseTransformationVisitor, + QueryExpressionVisitor, + ValidationVisitor, +) + + +class VisitableQueryExpression: + def __init__(self, query: QueryExpression): + self._query = query + self._visitors: list[QueryExpressionVisitor[QueryExpression]] = [] + + def add_visitor( + self, visitor: QueryExpressionVisitor[QueryExpression] + ) -> "VisitableQueryExpression": + """ + Adds a visitor to the query expression. + + The visitor can both perform mutations or not on the expression tree. + """ + self._visitors.append(visitor) + + return self + + def get(self) -> QueryExpression: + """ + Returns the mutated query expression after running all the visitors + in the order of definition. + + Order preservation does matter, since downstream visitors might work under the + assumption that upstream visitors have already been run. + """ + query = self._query + for visitor in self._visitors: + query = visitor.visit(query) + + return query + + +class QueryParser: + # We avoid having the filters expression to be closed or opened. + FILTERS_SANITIZATION_PATTERN = re.compile(r"[{}]$") + # We avoid to have any way of opening and closing other expressions. + GROUP_BYS_SANITIZATION_PATTERN = re.compile(r"[(){}\[\]]") + + def __init__( + self, + projects: Sequence[Project], + fields: Sequence[str], + query: str | None, + group_bys: Sequence[str] | None, + ): + self._projects = projects + self._fields = fields + self._query = query + self._group_bys = group_bys + + # We want to sanitize the input in order to avoid any injection attacks due to the string interpolation that + # it's performed when building the MQL query. + self._sanitize() + + def _sanitize(self): + """ + Sanitizes the query and group bys before using them to build the MQL query. + """ + if self._query: + self._query = remove_if_match(self.FILTERS_SANITIZATION_PATTERN, self._query) + + if self._group_bys: + self._group_bys = [ + remove_if_match(self.GROUP_BYS_SANITIZATION_PATTERN, group_by) + for group_by in self._group_bys + ] + + def _build_mql_filters(self) -> str | None: + """ + Builds a set of MQL filters from a single query string. + + In this case the query passed, is assumed to be already compatible with the filters grammar of MQL, thus no + transformation are performed. 
+ """ + if not self._query: + return None + + return self._query + + def _build_mql_group_bys(self) -> str | None: + """ + Builds a set of MQL group by filters from a list of strings. + """ + if not self._group_bys: + return None + + return ",".join(self._group_bys) + + def _build_mql_query(self, field: str, filters: str | None, group_bys: str | None) -> str: + """ + Builds an MQL query string in the form `aggregate(metric){tag_key:tag_value} by (group_by_1, group_by_2). + """ + mql = field + + if filters is not None: + mql += f"{{{filters}}}" + + if group_bys is not None: + mql += f" by ({group_bys})" + + return mql + + def _parse_mql(self, mql: str) -> VisitableQueryExpression: + """ + Parses the field with the MQL grammar. + """ + try: + query = parse_mql(mql).query + except InvalidQueryError as e: + cause = e.__cause__ + if cause and isinstance(cause, IncompleteParseError): + error_context = cause.text[cause.pos : cause.pos + 20] + # We expose the entire MQL string to give more context when solving the error, since in the future we + # expect that MQL will be directly fed into the endpoint instead of being built from the supplied + # fields. + raise InvalidMetricsQueryError( + f"The query '{mql}' could not be matched starting from '{error_context}...'" + ) + + raise InvalidMetricsQueryError("The supplied query is not valid") + + return VisitableQueryExpression(query=query) + + def generate_queries( + self, environments: Sequence[Environment] + ) -> Generator[tuple[str, Timeseries], None, None]: + """ + Generates multiple timeseries queries given a base query. + """ + if not self._fields: + raise InvalidMetricsQueryError("You must query at least one field") + + # We first parse the filters and group bys, which are then going to be applied on each individual query + # that is executed. + mql_filters = self._build_mql_filters() + mql_group_bys = self._build_mql_group_bys() + + for field in self._fields: + mql_query = self._build_mql_query(field, mql_filters, mql_group_bys) + yield ( + field, + self._parse_mql(mql_query) + # We validate the query. + .add_visitor(ValidationVisitor()) + # We inject the environment filter in each timeseries. + .add_visitor(EnvironmentsInjectionVisitor(environments)) + # We transform all `release:latest` filters into the actual latest releases. 
+ .add_visitor( + FiltersCompositeVisitor(LatestReleaseTransformationVisitor(self._projects)) + ).get(), + ) diff --git a/src/sentry/sentry_metrics/querying/data_v2/transformation.py b/src/sentry/sentry_metrics/querying/data_v2/transformation.py new file mode 100644 index 00000000000000..cbd72b3b2d7937 --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/transformation.py @@ -0,0 +1,212 @@ +from collections import OrderedDict +from collections.abc import Mapping, Sequence +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Any + +from sentry.search.utils import parse_datetime_string +from sentry.sentry_metrics.querying.data.execution import QueryResult +from sentry.sentry_metrics.querying.data.utils import get_identity, nan_to_none +from sentry.sentry_metrics.querying.errors import MetricsQueryExecutionError +from sentry.sentry_metrics.querying.types import GroupKey, ResultValue, Series, Total + + +@dataclass +class GroupValue: + series: Series + total: Total + + @classmethod + def empty(cls) -> "GroupValue": + return GroupValue(series=[], total=None) + + def add_series_entry(self, time: str, aggregate_value: ResultValue): + self.series.append((time, self._transform_aggregate_value(aggregate_value))) + + def add_total(self, aggregate_value: ResultValue): + self.total = self._transform_aggregate_value(aggregate_value) + + def _transform_aggregate_value(self, aggregate_value: ResultValue): + # For now, we don't support the array return type, since the set of operations that the API can support + # won't lead to multiple values in a single aggregate value. For this reason, we extract the first value + # in case we get back an array of values, which can happen for multiple quantiles. + if isinstance(aggregate_value, list): + if aggregate_value: + return aggregate_value[0] + + raise MetricsQueryExecutionError("Received an empty array as aggregate value") + + return aggregate_value + + +@dataclass +class QueryMeta: + name: str + type: str + + def __post_init__(self): + self._transform_meta_type() + + def _transform_meta_type(self): + # Since we don't support the array aggregate value, and we return the first element, we just return the type of + # the values of the array. + if self.type.startswith("Array("): + self.type = self.type[6 : len(self.type) - 1] + + +def _build_intervals(start: datetime, end: datetime, interval: int) -> Sequence[datetime]: + """ + Builds a list of all the intervals that are queried by the metrics layer. + """ + start_seconds = start.timestamp() + end_seconds = end.timestamp() + + current_time = start_seconds + intervals = [] + while current_time < end_seconds: + intervals.append(datetime.fromtimestamp(current_time, timezone.utc)) + current_time = current_time + interval + + return intervals + + +def _generate_full_series( + start_seconds: int, + num_intervals: int, + interval: int, + series: Series, + null_value: ResultValue = None, +) -> Sequence[ResultValue]: + """ + Computes a full series over the entire requested interval with None set where there are no data points. 
+ """ + full_series = [null_value] * num_intervals + for time, value in series: + time_seconds = parse_datetime_string(time).timestamp() + index = int((time_seconds - start_seconds) / interval) + full_series[index] = value + + return full_series + + +class QueryTransformer: + def __init__(self, query_results: list[QueryResult]): + self._query_results = query_results + + self._start: datetime | None = None + self._end: datetime | None = None + self._interval: int | None = None + + def _assert_transformation_preconditions(self) -> tuple[datetime, datetime, int]: + assert self._start is not None and self._end is not None and self._interval is not None + return self._start, self._end, self._interval + + def _build_intermediate_results( + self, + ) -> tuple[OrderedDict[GroupKey, OrderedDict[str, GroupValue]], list[QueryMeta]]: + """ + Builds a tuple of intermediate groups and metadata which is used to efficiently transform the query results. + """ + intermediate_groups: OrderedDict[GroupKey, OrderedDict[str, GroupValue]] = OrderedDict() + intermediate_meta: list[QueryMeta] = [] + + def _add_to_intermediate_groups(values, block): + for value in values: + # We compute a list containing all the group values. + grouped_values = [] + for group_by in query_result.group_bys or (): + grouped_values.append((group_by, value.get(group_by))) + + group_metrics = intermediate_groups.setdefault(tuple(grouped_values), OrderedDict()) + group_value = group_metrics.setdefault(query_result.query_name, GroupValue.empty()) + + block(value, group_value) + + for query_result in self._query_results: + # All queries must have the same timerange, so under this assumption we take the first occurrence of each. + if self._start is None: + self._start = query_result.modified_start + if self._end is None: + self._end = query_result.modified_end + if self._interval is None: + self._interval = query_result.interval + + # We group the totals data first, since we want the order to be set by the totals. + _add_to_intermediate_groups( + query_result.totals, + lambda value, group: group.add_total(value.get("aggregate_value")), + ) + + # We group the series data second, which will use the already ordered dictionary entries added by the + # totals. + _add_to_intermediate_groups( + query_result.series, + lambda value, group: group.add_series_entry( + value.get("time"), value.get("aggregate_value") + ), + ) + + meta = query_result.meta + for meta_item in meta: + meta_name = meta_item["name"] + meta_type = meta_item["type"] + + # The meta of each query, contains the metadata for each field in the result. In this case, + # we want to map the aggregate value type to the actual query name, which is used from the outside to + # recognize the query. + name = query_result.query_name if meta_name == "aggregate_value" else meta_name + intermediate_meta.append(QueryMeta(name=name, type=meta_type)) + + return intermediate_groups, intermediate_meta + + def transform(self) -> Mapping[str, Any]: + """ + Transforms the query results into the Sentry's API format. + """ + # We first build intermediate results that we can work efficiently with. + intermediate_groups, intermediate_meta = self._build_intermediate_results() + + # We assert that all the data we require for the transformation has been found during the building of + # intermediate results. + start, end, interval = self._assert_transformation_preconditions() + + # We build the intervals that we will return to the API user. 
+ intervals = _build_intervals(start, end, interval) + + # We build the translated groups given the intermediate groups. + translated_groups = [] + for group_key, group_metrics in intermediate_groups.items(): + translated_serieses: dict[str, Sequence[ResultValue]] = {} + translated_totals: dict[str, ResultValue] = {} + for metric_name, metric_values in group_metrics.items(): + series = metric_values.series + total = metric_values.total + + # We generate the full series by passing as default value the identity of the totals, which is the + # default value applied in the timeseries. This function already aligns the series by sorting it in + # ascending order so there is no need to have the series elements sorted beforehand. + translated_serieses[metric_name] = _generate_full_series( + int(start.timestamp()), len(intervals), interval, series, get_identity(total) + ) + # In case we get nan, we will cast it to None but this can be changed in case there is the need. + translated_totals[metric_name] = nan_to_none(total) + + # The order of the keys is not deterministic in the nested dictionaries. + inner_group = { + "by": {name: value for name, value in group_key}, + "series": translated_serieses, + "totals": translated_totals, + } + + translated_groups.append(inner_group) + + # We build the translated meta given the intermediate meta. + translated_meta = [{"name": meta.name, "type": meta.type} for meta in intermediate_meta] + + return { + "intervals": intervals, + "groups": translated_groups, + "meta": translated_meta, + "start": start, + "end": end, + } diff --git a/src/sentry/sentry_metrics/querying/data_v2/utils.py b/src/sentry/sentry_metrics/querying/data_v2/utils.py new file mode 100644 index 00000000000000..4d9b8a3d9f285c --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/utils.py @@ -0,0 +1,45 @@ +import math + +from sentry.sentry_metrics.querying.types import ResultValue + + +def get_identity(value: ResultValue) -> ResultValue: + """ + Computes the identity of a value. + + For nan, we want to return None instead of 0.0 but this is just a design decision that conforms + to the previous implementation of the layer. + """ + if value is None: + return None + + if is_nan(value): + return None + + # We might decide in the future to have identity values specific to each aggregate. + return type(value)() + + +def nan_to_none(value: ResultValue) -> ResultValue: + """ + Converts a nan value to None or returns the original value. + """ + if value is None: + return None + + if is_nan(value): + return None + + return value + + +def is_nan(value: ResultValue) -> bool: + """ + Returns whether the result of a query is nan. 
+ """ + if value is None: + return False + elif isinstance(value, list): + return any(map(lambda e: e is not None and math.isnan(e), value)) + + return math.isnan(value) diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py index 179af75dc6899c..aafee046b1e783 100644 --- a/src/sentry/snuba/referrer.py +++ b/src/sentry/snuba/referrer.py @@ -69,6 +69,7 @@ class Referrer(Enum): API_DDM_FETCH_SPANS = "api.ddm.fetch.spans" API_DDM_FETCH_METRICS_SUMMARIES = "api.ddm.fetch.metrics_summaries" API_DDM_METRICS_DATA = "api.ddm.metrics.data" + API_DDM_METRICS_QUERY = "api.ddm.metrics.query" API_DISCOVER_TOTAL_COUNT_FIELD = "api.discover.total-events-field" API_DISCOVER_TOTAL_SUM_TRANSACTION_DURATION_FIELD = ( diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py index dda6c2a4507895..20b5a6d00dd74b 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics.py +++ b/tests/sentry/api/endpoints/test_organization_metrics.py @@ -57,28 +57,43 @@ def indexer_record(use_case_id: UseCaseID, org_id: int, string: str) -> int: class OrganizationMetricsPermissionTest(APITestCase): endpoints = ( - ("sentry-api-0-organization-metrics-details",), - ("sentry-api-0-organization-metric-details", "foo"), - ("sentry-api-0-organization-metrics-tags",), - ("sentry-api-0-organization-metrics-tag-details", "foo"), - ("sentry-api-0-organization-metrics-data",), + ( + "get", + "sentry-api-0-organization-metrics-details", + ), + ("get", "sentry-api-0-organization-metric-details", "foo"), + ( + "get", + "sentry-api-0-organization-metrics-tags", + ), + ("get", "sentry-api-0-organization-metrics-tag-details", "foo"), + ( + "get", + "sentry-api-0-organization-metrics-data", + ), + ( + "post", + "sentry-api-0-organization-metrics-query", + ), ) - def send_get_request(self, token, endpoint, *args): + def send_request(self, token, method, endpoint, *args): url = reverse(endpoint, args=(self.project.organization.slug,) + args) - return self.client.get(url, HTTP_AUTHORIZATION=f"Bearer {token.token}", format="json") + return getattr(self.client, method)( + url, HTTP_AUTHORIZATION=f"Bearer {token.token}", format="json" + ) def test_permissions(self): with assume_test_silo_mode(SiloMode.CONTROL): token = ApiToken.objects.create(user=self.user, scope_list=[]) - for endpoint in self.endpoints: - response = self.send_get_request(token, *endpoint) + for method, endpoint, *rest in self.endpoints: + response = self.send_request(token, method, endpoint, *rest) assert response.status_code == 403 with assume_test_silo_mode(SiloMode.CONTROL): token = ApiToken.objects.create(user=self.user, scope_list=["org:read"]) - for endpoint in self.endpoints: - response = self.send_get_request(token, *endpoint) + for method, endpoint, *rest in self.endpoints: + response = self.send_request(token, method, endpoint, *rest) assert response.status_code in (200, 400, 404) diff --git a/tests/sentry/api/endpoints/test_organization_metrics_data.py b/tests/sentry/api/endpoints/test_organization_metrics_data.py index 145a9dd2d61909..e640283b299619 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics_data.py +++ b/tests/sentry/api/endpoints/test_organization_metrics_data.py @@ -657,8 +657,8 @@ def test_max_and_min_on_distributions(self): { "by": {}, "totals": { - "max(transaction.measurements.lcp)": 3 * 123.4, - "min(transaction.measurements.lcp)": 1 * 123.4, + "max(transaction.measurements.lcp)": pytest.approx(3 * 123.4), + "min(transaction.measurements.lcp)": 
pytest.approx(1 * 123.4), }, } ] diff --git a/tests/sentry/api/endpoints/test_organization_metrics_query.py b/tests/sentry/api/endpoints/test_organization_metrics_query.py new file mode 100644 index 00000000000000..078d106e315667 --- /dev/null +++ b/tests/sentry/api/endpoints/test_organization_metrics_query.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +import pytest + +from sentry.testutils.cases import MetricsAPIBaseTestCase +from sentry.testutils.helpers.datetime import freeze_time +from sentry.testutils.silo import region_silo_test + +pytestmark = [pytest.mark.sentry_metrics] + + +@region_silo_test +@freeze_time(MetricsAPIBaseTestCase.MOCK_DATETIME) +class OrganizationMetricsQueryTest(MetricsAPIBaseTestCase): + endpoint = "sentry-api-0-organization-metrics-query" + method = "post" + + def setUp(self): + super().setUp() + self.login_as(user=self.user) + + @property + def now(self): + return MetricsAPIBaseTestCase.MOCK_DATETIME + + def test_query_simple(self): + self.get_success_response( + self.project.organization.slug, + status_code=200, + queries="", + formulas="", + qs_params={ + "statsPeriod": "24h", + "interval": "1h", + "project": [self.project.id], + "environment": [], + }, + ) diff --git a/tests/sentry/sentry_metrics/querying/data_v2/__init__.py b/tests/sentry/sentry_metrics/querying/data_v2/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/sentry_metrics/querying/data_v2/test_api.py b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py new file mode 100644 index 00000000000000..d23165abadb0af --- /dev/null +++ b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py @@ -0,0 +1,70 @@ +from datetime import datetime, timedelta + +import pytest +from django.utils import timezone as django_timezone + +from sentry.sentry_metrics.use_case_id_registry import UseCaseID +from sentry.snuba.metrics.naming_layer import TransactionMRI +from sentry.testutils.cases import BaseMetricsTestCase, TestCase +from sentry.testutils.helpers.datetime import freeze_time + +pytestmark = pytest.mark.sentry_metrics + +MOCK_DATETIME = (django_timezone.now() - timedelta(days=1)).replace( + hour=10, minute=0, second=0, microsecond=0 +) + + +@freeze_time(MOCK_DATETIME) +class MetricsAPITestCase(TestCase, BaseMetricsTestCase): + def setUp(self): + super().setUp() + + release_1 = self.create_release( + project=self.project, version="1.0", date_added=MOCK_DATETIME + ) + release_2 = self.create_release( + project=self.project, version="2.0", date_added=MOCK_DATETIME + timedelta(minutes=5) + ) + + for value, transaction, platform, env, release, time in ( + (1, "/hello", "android", "prod", release_1.version, self.now()), + (6, "/hello", "ios", "dev", release_2.version, self.now()), + (5, "/world", "windows", "prod", release_1.version, self.now() + timedelta(minutes=30)), + (3, "/hello", "ios", "dev", release_2.version, self.now() + timedelta(hours=1)), + (2, "/hello", "android", "dev", release_1.version, self.now() + timedelta(hours=1)), + ( + 4, + "/world", + "windows", + "prod", + release_2.version, + self.now() + timedelta(hours=1, minutes=30), + ), + ): + self.store_metric( + self.project.organization.id, + self.project.id, + "distribution", + TransactionMRI.DURATION.value, + { + "transaction": transaction, + "platform": platform, + "environment": env, + "release": release, + }, + self.ts(time), + value, + UseCaseID.TRANSACTIONS, + ) + + self.prod_env = self.create_environment(name="prod", project=self.project) + self.dev_env = 
self.create_environment(name="dev", project=self.project) + + def now(self): + return MOCK_DATETIME + + def ts(self, dt: datetime) -> int: + return int(dt.timestamp()) + + # TODO: add tests once the code is implemented. From 867e41a366d24bcc9e49b9ed325a84a32367f304 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Thu, 8 Feb 2024 11:25:06 +0100 Subject: [PATCH 163/357] feat(ddm): Multiple focused series (#64855) - closes https://github.com/getsentry/sentry/issues/63599 --- static/app/utils/metrics/types.tsx | 10 +-- static/app/views/ddm/summaryTable.tsx | 18 ++++- static/app/views/ddm/utils/index.tsx | 24 +++++++ static/app/views/ddm/widget.tsx | 94 +++++++++++++++++++------- static/app/views/ddm/widgetDetails.tsx | 24 +++---- 5 files changed, 122 insertions(+), 48 deletions(-) create mode 100644 static/app/views/ddm/utils/index.tsx diff --git a/static/app/utils/metrics/types.tsx b/static/app/utils/metrics/types.tsx index 856f3a74739220..e25aab76a97ead 100644 --- a/static/app/utils/metrics/types.tsx +++ b/static/app/utils/metrics/types.tsx @@ -15,12 +15,14 @@ export type SortState = { order: 'asc' | 'desc'; }; +export interface FocusedMetricsSeries { + seriesName: string; + groupBy?: Record; +} + export interface MetricWidgetQueryParams extends MetricsQuerySubject { displayType: MetricDisplayType; - focusedSeries?: { - seriesName: string; - groupBy?: Record; - }; + focusedSeries?: FocusedMetricsSeries[]; highlightedSample?: string | null; powerUserMode?: boolean; showSummaryTable?: boolean; diff --git a/static/app/views/ddm/summaryTable.tsx b/static/app/views/ddm/summaryTable.tsx index 31d97837e65e39..8ad9eb00de0d02 100644 --- a/static/app/views/ddm/summaryTable.tsx +++ b/static/app/views/ddm/summaryTable.tsx @@ -14,7 +14,7 @@ import {trackAnalytics} from 'sentry/utils/analytics'; import {getUtcDateString} from 'sentry/utils/dates'; import {DEFAULT_SORT_STATE} from 'sentry/utils/metrics/constants'; import {formatMetricsUsingUnitAndOp} from 'sentry/utils/metrics/formatters'; -import type {MetricWidgetQueryParams, SortState} from 'sentry/utils/metrics/types'; +import type {FocusedMetricsSeries, SortState} from 'sentry/utils/metrics/types'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; import type {Series} from 'sentry/views/ddm/widget'; @@ -24,13 +24,15 @@ export const SummaryTable = memo(function SummaryTable({ series, operation, onRowClick, + onColorDotClick, onSortChange, sort = DEFAULT_SORT_STATE as SortState, setHoveredSeries, }: { - onRowClick: (series: MetricWidgetQueryParams['focusedSeries']) => void; + onRowClick: (series: FocusedMetricsSeries) => void; onSortChange: (sortState: SortState) => void; series: Series[]; + onColorDotClick?: (series: FocusedMetricsSeries) => void; operation?: string; setHoveredSeries?: (seriesName: string) => void; sort?: SortState; @@ -196,7 +198,17 @@ export const SummaryTable = memo(function SummaryTable({ } }} > - + { + event.stopPropagation(); + if (hasMultipleSeries) { + onColorDotClick?.({ + seriesName, + groupBy, + }); + } + }} + > ) { + return Object.entries(queryObject) + .map(([key, value]) => `${key}:"${value}"`) + .join(' '); +} + +export function getQueryWithFocusedSeries(widget: MetricWidgetQueryParams) { + const focusedSeriesQuery = widget.focusedSeries + ?.map(series => { + if (!series.groupBy) { + return ''; + } + return `(${constructQueryString(series.groupBy)})`; + }) + .filter(Boolean) + .join(` ${BooleanOperator.OR} `); + + return focusedSeriesQuery + ? 
`${widget.query} (${focusedSeriesQuery})`.trim() + : widget.query; +} diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 49594aeb0fda78..69c26252a9bc80 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -25,6 +25,7 @@ import { import {metricDisplayTypeOptions} from 'sentry/utils/metrics/constants'; import {parseMRI} from 'sentry/utils/metrics/mri'; import type { + FocusedMetricsSeries, MetricCorrelation, MetricWidgetQueryParams, } from 'sentry/utils/metrics/types'; @@ -37,6 +38,7 @@ import type {FocusAreaProps} from 'sentry/views/ddm/context'; import {createChartPalette} from 'sentry/views/ddm/metricsChartPalette'; import {QuerySymbol} from 'sentry/views/ddm/querySymbol'; import {SummaryTable} from 'sentry/views/ddm/summaryTable'; +import {getQueryWithFocusedSeries} from 'sentry/views/ddm/utils'; import {DDM_CHART_GROUP, MIN_WIDGET_WIDTH} from './constants'; @@ -64,12 +66,6 @@ export type Sample = { transactionSpanId: string; }; -const constructQueryString = (queryObject: Record) => { - return Object.entries(queryObject) - .map(([key, value]) => `${key}:"${value}"`) - .join(' '); -}; - export const MetricWidget = memo( ({ widget, @@ -130,11 +126,14 @@ export const MetricWidget = memo( onChange(index, {displayType: value}); }; + const queryWithFocusedSeries = useMemo( + () => getQueryWithFocusedSeries(widget), + [widget] + ); + const samplesQuery = useMetricSamples(metricsQuery.mri, { ...focusArea?.selection?.range, - query: widget?.focusedSeries?.groupBy - ? `${widget.query} ${constructQueryString(widget.focusedSeries.groupBy)}`.trim() - : widget?.query, + query: queryWithFocusedSeries, }); const samples = useMemo(() => { @@ -272,26 +271,68 @@ const MetricWidgetBody = memo( }); }, []); - const toggleSeriesVisibility = useCallback( - (series: MetricWidgetQueryParams['focusedSeries']) => { - setHoveredSeries(''); - onChange?.({ - focusedSeries: - focusedSeries?.seriesName === series?.seriesName ? undefined : series, - }); - }, - [focusedSeries, onChange, setHoveredSeries] - ); - const chartSeries = useMemo(() => { return timeseriesData ? 
getChartTimeseries(timeseriesData, { getChartPalette, mri, - focusedSeries: focusedSeries?.seriesName, + focusedSeries: + focusedSeries && new Set(focusedSeries?.map(s => s.seriesName)), }) : []; - }, [timeseriesData, getChartPalette, mri, focusedSeries?.seriesName]); + }, [timeseriesData, getChartPalette, mri, focusedSeries]); + + const toggleSeriesVisibility = useCallback( + (series: FocusedMetricsSeries) => { + setHoveredSeries(''); + + // The focused series array is not populated yet, so we can add all series except the one that was de-selected + if (!focusedSeries || focusedSeries.length === 0) { + onChange?.({ + focusedSeries: chartSeries + .filter(s => s.seriesName !== series.seriesName) + .map(s => ({ + seriesName: s.seriesName, + groupBy: s.groupBy, + })), + }); + return; + } + + const filteredSeries = focusedSeries.filter( + s => s.seriesName !== series.seriesName + ); + + if (filteredSeries.length === focusedSeries.length) { + // The series was not focused before so we can add it + filteredSeries.push(series); + } + + onChange?.({ + focusedSeries: filteredSeries, + }); + }, + [chartSeries, focusedSeries, onChange, setHoveredSeries] + ); + + const setSeriesVisibility = useCallback( + (series: FocusedMetricsSeries) => { + setHoveredSeries(''); + if ( + focusedSeries?.length === 1 && + focusedSeries[0].seriesName === series.seriesName + ) { + onChange?.({ + focusedSeries: [], + }); + return; + } + onChange?.({ + focusedSeries: [series], + }); + }, + [focusedSeries, onChange, setHoveredSeries] + ); const handleSortChange = useCallback( newSort => { @@ -345,8 +386,9 @@ const MetricWidgetBody = memo( onSortChange={handleSortChange} sort={sort} operation={metricsQuery.op} - onRowClick={toggleSeriesVisibility} - setHoveredSeries={focusedSeries ? 
undefined : setHoveredSeries} + onRowClick={setSeriesVisibility} + onColorDotClick={toggleSeriesVisibility} + setHoveredSeries={setHoveredSeries} /> )} @@ -363,7 +405,7 @@ export function getChartTimeseries( }: { getChartPalette: (seriesNames: string[]) => Record; mri: MRI; - focusedSeries?: string; + focusedSeries?: Set; } ) { // this assumes that all series have the same unit @@ -387,7 +429,7 @@ export function getChartTimeseries( groupBy: item.groupBy, unit, color: chartPalette[item.name], - hidden: focusedSeries && focusedSeries !== item.name, + hidden: focusedSeries && focusedSeries.size > 0 && !focusedSeries.has(item.name), data: item.values.map((value, index) => ({ name: moment(data.intervals[index]).valueOf(), value, diff --git a/static/app/views/ddm/widgetDetails.tsx b/static/app/views/ddm/widgetDetails.tsx index 7dbd9f0fb585f9..8844d273f39c33 100644 --- a/static/app/views/ddm/widgetDetails.tsx +++ b/static/app/views/ddm/widgetDetails.tsx @@ -1,4 +1,4 @@ -import {useCallback, useState} from 'react'; +import {useCallback, useMemo, useState} from 'react'; import styled from '@emotion/styled'; import {TabList, TabPanels, Tabs} from 'sentry/components/tabs'; @@ -12,24 +12,19 @@ import useOrganization from 'sentry/utils/useOrganization'; import {CodeLocations} from 'sentry/views/ddm/codeLocations'; import {useDDMContext} from 'sentry/views/ddm/context'; import {SampleTable} from 'sentry/views/ddm/sampleTable'; +import {getQueryWithFocusedSeries} from 'sentry/views/ddm/utils'; enum Tab { SAMPLES = 'samples', CODE_LOCATIONS = 'codeLocations', } -const constructQueryString = (queryObject: Record) => { - return Object.entries(queryObject) - .map(([key, value]) => `${key}:"${value}"`) - .join(' '); -}; - export function WidgetDetails() { const organization = useOrganization(); const {selectedWidgetIndex, widgets, focusArea, setHighlightedSampleId} = useDDMContext(); const [selectedTab, setSelectedTab] = useState(Tab.SAMPLES); - // the tray is minimized when the main content is maximized + const selectedWidget = widgets[selectedWidgetIndex] as | MetricWidgetQueryParams | undefined; @@ -40,6 +35,11 @@ export function WidgetDetails() { setSelectedTab(Tab.SAMPLES); } + const queryWithFocusedSeries = useMemo( + () => selectedWidget && getQueryWithFocusedSeries(selectedWidget), + [selectedWidget] + ); + const handleSampleRowHover = useCallback( (sampleId?: string) => { setHighlightedSampleId(sampleId); @@ -84,13 +84,7 @@ export function WidgetDetails() { From 79d4652bca021b7bbc0f9a6b05d6d2b59276cfd0 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Thu, 8 Feb 2024 11:46:25 +0100 Subject: [PATCH 164/357] fix(ddm): Clicking chart hides tooltip (#64859) - closes https://github.com/getsentry/sentry/issues/64451 --- static/app/views/ddm/chartUtils.spec.tsx | 33 +----------------------- static/app/views/ddm/chartUtils.tsx | 8 ------ static/app/views/ddm/focusArea.tsx | 23 ++++++++++------- 3 files changed, 15 insertions(+), 49 deletions(-) diff --git a/static/app/views/ddm/chartUtils.spec.tsx b/static/app/views/ddm/chartUtils.spec.tsx index 05134ddf0c1676..ac2709f1dda9c3 100644 --- a/static/app/views/ddm/chartUtils.spec.tsx +++ b/static/app/views/ddm/chartUtils.spec.tsx @@ -1,35 +1,4 @@ -import {fitToValueRect, isInRect} from 'sentry/views/ddm/chartUtils'; - -describe('isInRect', () => { - const rect = { - top: 0, - left: 0, - right: 10, - bottom: 10, - x: 0, - y: 0, - width: 10, - height: 10, - toJSON: () => {}, - }; - - it('should return false if rect is undefined', () => { - expect(isInRect(1, 2, 
undefined)).toBe(false); - }); - - it('should return true if point is within the rect', () => { - expect(isInRect(5, 5, rect)).toBe(true); - }); - - it('should return false if point is outside the rect', () => { - expect(isInRect(11, 11, rect)).toBe(false); - }); - - it('should return true if point is exactly on the border of the rect', () => { - expect(isInRect(0, 0, rect)).toBe(true); - expect(isInRect(10, 10, rect)).toBe(true); - }); -}); +import {fitToValueRect} from 'sentry/views/ddm/chartUtils'; describe('fitToValueRect', () => { it('should return original x and y if rect is undefined', () => { diff --git a/static/app/views/ddm/chartUtils.tsx b/static/app/views/ddm/chartUtils.tsx index 296663492c0e3c..4619d217b003ce 100644 --- a/static/app/views/ddm/chartUtils.tsx +++ b/static/app/views/ddm/chartUtils.tsx @@ -3,14 +3,6 @@ import moment from 'moment'; import type {ReactEchartsRef} from 'sentry/types/echarts'; -export function isInRect(x: number, y: number, rect: DOMRect | undefined) { - if (!rect) { - return false; - } - - return x >= rect.left && x <= rect.right && y >= rect.top && y <= rect.bottom; -} - export type ValueRect = { xMax: number; xMin: number; diff --git a/static/app/views/ddm/focusArea.tsx b/static/app/views/ddm/focusArea.tsx index c74c9d96d86d69..7311f19ee7c3dd 100644 --- a/static/app/views/ddm/focusArea.tsx +++ b/static/app/views/ddm/focusArea.tsx @@ -14,7 +14,7 @@ import {space} from 'sentry/styles/space'; import type {EChartBrushEndHandler, ReactEchartsRef} from 'sentry/types/echarts'; import type {SelectionRange} from 'sentry/utils/metrics/types'; import type {ValueRect} from 'sentry/views/ddm/chartUtils'; -import {getValueRect, isInRect} from 'sentry/views/ddm/chartUtils'; +import {getValueRect} from 'sentry/views/ddm/chartUtils'; import {CHART_HEIGHT} from 'sentry/views/ddm/constants'; import type {FocusAreaProps} from 'sentry/views/ddm/context'; @@ -76,22 +76,27 @@ export function useFocusArea({ brushType: 'rect', }, }); - isDrawingRef.current = true; }, [chartRef, hasFocusArea, isDisabled, onDraw]); useEffect(() => { - const handleMouseDown = event => { - const rect = chartRef.current?.ele.getBoundingClientRect(); + const chartElement = chartRef.current?.ele; + const handleMouseDown = () => { + isDrawingRef.current = true; + startBrush(); + }; - if (isInRect(event.clientX, event.clientY, rect)) { - startBrush(); - } + // Handle mouse up is called after onBrushEnd + // We can use it for a final reliable cleanup as onBrushEnd is not always called (e.g. 
when simply clicking the chart) + const handleMouseUp = () => { + isDrawingRef.current = false; }; - window.addEventListener('mousedown', handleMouseDown, {capture: true}); + chartElement?.addEventListener('mousedown', handleMouseDown, {capture: true}); + window.addEventListener('mouseup', handleMouseUp); return () => { - window.removeEventListener('mousedown', handleMouseDown, {capture: true}); + chartElement?.removeEventListener('mousedown', handleMouseDown, {capture: true}); + window.removeEventListener('mouseup', handleMouseUp); }; }, [chartRef, startBrush]); From 27a78bdc7fd8966dfc91cbd7400e4aff3ef440da Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Thu, 8 Feb 2024 11:53:27 +0100 Subject: [PATCH 165/357] fix(ddm): project param rename (#64860) --- static/app/utils/metrics/useMetricsMeta.tsx | 2 +- static/app/utils/metrics/useMetricsTags.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/utils/metrics/useMetricsMeta.tsx b/static/app/utils/metrics/useMetricsMeta.tsx index 5a635a868af312..08365222c3f7be 100644 --- a/static/app/utils/metrics/useMetricsMeta.tsx +++ b/static/app/utils/metrics/useMetricsMeta.tsx @@ -26,7 +26,7 @@ export function getMetricsMetaQueryKey( useCase: UseCase ): ApiQueryKey { const queryParams = projects?.length - ? {useCase, projects, ...getMetaDateTimeParams(datetime)} + ? {useCase, project: projects, ...getMetaDateTimeParams(datetime)} : {useCase, ...getMetaDateTimeParams(datetime)}; return [`/organizations/${orgSlug}/metrics/meta/`, {query: queryParams}]; } diff --git a/static/app/utils/metrics/useMetricsTags.tsx b/static/app/utils/metrics/useMetricsTags.tsx index 395818ebc2b9ad..0d7198c54d15c1 100644 --- a/static/app/utils/metrics/useMetricsTags.tsx +++ b/static/app/utils/metrics/useMetricsTags.tsx @@ -19,7 +19,7 @@ export function useMetricsTags( ? { metric: mri, useCase, - projects: pageFilters.projects, + project: pageFilters.projects, ...getMetaDateTimeParams(pageFilters.datetime), } : { From 344a46a97d7764be0c937b94a238ef88d4113ba3 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Thu, 8 Feb 2024 12:36:54 +0100 Subject: [PATCH 166/357] fix(ddm): Global loading state on zoom (#64861) --- static/app/utils/metrics/useMetricsMeta.tsx | 9 ++++-- static/app/views/ddm/context.tsx | 34 +++++++++++++++------ static/app/views/ddm/layout.tsx | 8 ++--- static/app/views/ddm/queryBuilder.tsx | 12 +++++--- 4 files changed, 40 insertions(+), 23 deletions(-) diff --git a/static/app/utils/metrics/useMetricsMeta.tsx b/static/app/utils/metrics/useMetricsMeta.tsx index 08365222c3f7be..cdbbd97522b8cc 100644 --- a/static/app/utils/metrics/useMetricsMeta.tsx +++ b/static/app/utils/metrics/useMetricsMeta.tsx @@ -8,6 +8,7 @@ import type {MetricMeta, MRI, UseCase} from '../../types/metrics'; import {getMetaDateTimeParams} from './index'; +const EMPTY_ARRAY: MetricMeta[] = []; const DEFAULT_USE_CASES = ['sessions', 'transactions', 'custom', 'spans']; export function getMetricsMetaQueryKeys( @@ -91,9 +92,11 @@ export function useMetricsMeta( } return { - data: data.filter(meta => { - return meta.blockingStatus?.every(({isBlocked}) => !isBlocked) ?? true; - }), + data: isLoading + ? EMPTY_ARRAY + : data.filter(meta => { + return meta.blockingStatus?.every(({isBlocked}) => !isBlocked) ?? 
true; + }), isLoading, }; } diff --git a/static/app/views/ddm/context.tsx b/static/app/views/ddm/context.tsx index e6b81213ba78ee..153edc85a8764a 100644 --- a/static/app/views/ddm/context.tsx +++ b/static/app/views/ddm/context.tsx @@ -17,10 +17,10 @@ import { } from 'sentry/utils/metrics'; import {DEFAULT_SORT_STATE, emptyWidget} from 'sentry/utils/metrics/constants'; import type {MetricWidgetQueryParams} from 'sentry/utils/metrics/types'; -import {useMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta'; import {decodeList} from 'sentry/utils/queryString'; import {useLocalStorageState} from 'sentry/utils/useLocalStorageState'; import usePageFilters from 'sentry/utils/usePageFilters'; +import useProjects from 'sentry/utils/useProjects'; import useRouter from 'sentry/utils/useRouter'; import type {FocusAreaSelection} from 'sentry/views/ddm/focusArea'; import {useStructuralSharing} from 'sentry/views/ddm/useStructuralSharing'; @@ -35,9 +35,8 @@ export type FocusAreaProps = { interface DDMContextValue { addWidget: () => void; duplicateWidget: (index: number) => void; + hasMetrics: boolean; isDefaultQuery: boolean; - isLoading: boolean; - metricsMeta: ReturnType['data']; removeWidget: (index: number) => void; selectedWidgetIndex: number; setDefaultQuery: (query: Record | null) => void; @@ -54,8 +53,7 @@ export const DDMContext = createContext({ addWidget: () => {}, duplicateWidget: () => {}, isDefaultQuery: false, - isLoading: false, - metricsMeta: [], + hasMetrics: false, removeWidget: () => {}, selectedWidgetIndex: 0, setDefaultQuery: () => {}, @@ -198,6 +196,15 @@ const useDefaultQuery = () => { ); }; +function useSelectedProjects() { + const {selection} = usePageFilters(); + const {projects} = useProjects(); + + return useMemo(() => { + return projects.filter(project => selection.projects.includes(Number(project.id))); + }, [selection.projects, projects]); +} + export function DDMContextProvider({children}: {children: React.ReactNode}) { const router = useRouter(); const updateQuery = useUpdateQuery(); @@ -210,7 +217,16 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { const [highlightedSampleId, setHighlightedSampleId] = useState(); const pageFilters = usePageFilters().selection; - const {data: metricsMeta, isLoading} = useMetricsMeta(pageFilters); + + const selectedProjects = useSelectedProjects(); + const hasMetrics = useMemo( + () => + selectedProjects.some( + project => + project.hasCustomMetrics || project.hasSessions || project.firstTransactionEvent + ), + [selectedProjects] + ); const focusAreaSelection = useMemo( () => router.location.query.focusArea && JSON.parse(router.location.query.focusArea), @@ -284,8 +300,7 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { removeWidget, duplicateWidget: handleDuplicate, widgets, - isLoading, - metricsMeta, + hasMetrics, focusArea, setDefaultQuery, isDefaultQuery, @@ -300,8 +315,7 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { handleUpdateWidget, removeWidget, handleDuplicate, - isLoading, - metricsMeta, + hasMetrics, focusArea, setDefaultQuery, isDefaultQuery, diff --git a/static/app/views/ddm/layout.tsx b/static/app/views/ddm/layout.tsx index 37a89ddcfc8a13..ea204f6d1c1610 100644 --- a/static/app/views/ddm/layout.tsx +++ b/static/app/views/ddm/layout.tsx @@ -8,7 +8,6 @@ import {Button} from 'sentry/components/button'; import FeatureBadge from 'sentry/components/featureBadge'; import FloatingFeedbackWidget from 
'sentry/components/feedback/widget/floatingFeedbackWidget'; import * as Layout from 'sentry/components/layouts/thirds'; -import LoadingIndicator from 'sentry/components/loadingIndicator'; import OnboardingPanel from 'sentry/components/onboardingPanel'; import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; @@ -29,8 +28,7 @@ import {WidgetDetails} from 'sentry/views/ddm/widgetDetails'; export const DDMLayout = memo(() => { const organization = useOrganization(); - const {metricsMeta, isLoading} = useDDMContext(); - const hasMetrics = !isLoading && metricsMeta.length > 0; + const {hasMetrics} = useDDMContext(); const {activateSidebar} = useMetricsOnboardingSidebar(); const addCustomMetric = useCallback( @@ -81,9 +79,7 @@ export const DDMLayout = memo(() => { - {isLoading ? ( - - ) : hasMetrics ? ( + {hasMetrics ? ( diff --git a/static/app/views/ddm/queryBuilder.tsx b/static/app/views/ddm/queryBuilder.tsx index d625c1245130d4..a908bb3d5e6caf 100644 --- a/static/app/views/ddm/queryBuilder.tsx +++ b/static/app/views/ddm/queryBuilder.tsx @@ -25,10 +25,11 @@ import type { } from 'sentry/utils/metrics/types'; import {useBreakpoints} from 'sentry/utils/metrics/useBreakpoints'; import {useIncrementQueryMetric} from 'sentry/utils/metrics/useIncrementQueryMetric'; +import {useMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta'; import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags'; import {middleEllipsis} from 'sentry/utils/middleEllipsis'; import useKeyPress from 'sentry/utils/useKeyPress'; -import {useDDMContext} from 'sentry/views/ddm/context'; +import usePageFilters from 'sentry/utils/usePageFilters'; import {MetricSearchBar} from 'sentry/views/ddm/metricSearchBar'; type QueryBuilderProps = { @@ -58,7 +59,8 @@ export const QueryBuilder = memo(function QueryBuilder({ powerUserMode, onChange, }: QueryBuilderProps) { - const {metricsMeta: meta} = useDDMContext(); + const pageFilters = usePageFilters(); + const {data: meta} = useMetricsMeta(pageFilters.selection); const mriModeKeyPressed = useKeyPress('`', undefined, true); const [mriMode, setMriMode] = useState(powerUserMode); // power user mode that shows raw MRI instead of metrics names const breakpoints = useBreakpoints(); @@ -70,7 +72,9 @@ export const QueryBuilder = memo(function QueryBuilder({ // eslint-disable-next-line react-hooks/exhaustive-deps }, [mriModeKeyPressed, powerUserMode]); - const {data: tags = []} = useMetricsTags(metricsQuery.mri, {projects}); + const {data: tags = [], isLoading: tagsIsLoading} = useMetricsTags(metricsQuery.mri, { + projects, + }); const displayedMetrics = useMemo(() => { if (mriMode) { @@ -194,7 +198,7 @@ export const QueryBuilder = memo(function QueryBuilder({ ), }))} - disabled={!metricsQuery.mri} + disabled={!metricsQuery.mri || tagsIsLoading} value={metricsQuery.groupBy} onChange={handleGroupByChange} /> From 3245fc4b6784969de41168ba37a5700324e1d20d Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Thu, 8 Feb 2024 08:55:47 -0500 Subject: [PATCH 167/357] ref: upgrade psycopg2-binary (#64809) this version is built for python 3.12 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 40eacc76e5ea15..7e5cda130327b2 100644 --- a/requirements-base.txt +++ 
b/requirements-base.txt @@ -42,7 +42,7 @@ phonenumberslite>=8.12.32 Pillow>=10.2.0 progressbar2>=3.41.0 python-rapidjson>=1.4 -psycopg2-binary>=2.9.7 +psycopg2-binary>=2.9.9 PyJWT>=2.4.0 pydantic>=1.10.9 python-dateutil>=2.8.2 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index a3b372c4e9813f..e87fe10fd40fbd 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -128,7 +128,7 @@ prompt-toolkit==3.0.41 proto-plus==1.23.0 protobuf==4.21.6 psutil==5.9.2 -psycopg2-binary==2.9.7 +psycopg2-binary==2.9.9 pyasn1==0.4.5 pyasn1-modules==0.2.4 pycodestyle==2.11.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 93c91e0369c8c9..8d45893d2829d9 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -86,7 +86,7 @@ progressbar2==3.41.0 prompt-toolkit==3.0.41 proto-plus==1.23.0 protobuf==4.21.6 -psycopg2-binary==2.9.7 +psycopg2-binary==2.9.9 pyasn1==0.4.5 pyasn1-modules==0.2.4 pycountry==17.5.14 From b8713c19fb4e5538ab38b7fa89ff32cd2dde2930 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:04:53 -0500 Subject: [PATCH 168/357] ref(similarity-embeddings): Remove unused organization level flag (#64815) Remove unused organization level flag since it was replaced with a project level flag --- src/sentry/conf/server.py | 2 -- src/sentry/features/__init__.py | 1 - 2 files changed, 3 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 38a024dad6201a..cf9df255e9a588 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1627,8 +1627,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:issue-search-use-cdc-secondary": False, # Enable issue stream performance improvements "organizations:issue-stream-performance": False, - # Enable issue similarity embeddings - "organizations:issues-similarity-embeddings": False, # Enable the trace timeline on issue details "organizations:issues-trace-timeline": False, # Enabled latest adopted release filter for issue alerts diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 1d191a7949fd7b..71c86383bcc1af 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -133,7 +133,6 @@ default_manager.add("organizations:issue-search-use-cdc-primary", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-use-cdc-secondary", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-stream-performance", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:issues-similarity-embeddings", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issues-trace-timeline", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:large-debug-files", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:latest-adopted-release-filter", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From 238cd71e977a85d62cb10c47dd90cd965c972dc0 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Thu, 8 Feb 2024 15:06:45 +0100 Subject: [PATCH 169/357] ref: Add option to throttle proguard processing (#64866) This is in response to INC-635. It introduces an option `"filestore.proguard-throttle"` that probabilistically short-circuits the function `fetch_difs` so it doesn't access filestore. 
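For intuition: the option's value is the probability that a given lookup is allowed through to filestore. A minimal sketch of the throttle decision (the helper name here is illustrative, not part of this patch):

```python
import random


def should_fetch(throttle_rate: float) -> bool:
    # random.random() is uniform on [0.0, 1.0), so this returns True with
    # probability equal to throttle_rate: 1.0 lets every lookup reach
    # filestore, 0.25 lets through roughly one in four.
    return random.random() < throttle_rate
```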
We only do this short-circuit for `proguard` cases (indicated by the `mapping` feature), but that should actually be all calls to this function. If this throttling happens, `JavaStacktraceProcessor.preprocess_step` will report a processing issue with a cause of `proguard_missing_mapping`, I hope that is acceptable. --- src/sentry/models/debugfile.py | 10 ++++++++++ src/sentry/options/defaults.py | 8 ++++++++ 2 files changed, 18 insertions(+) diff --git a/src/sentry/models/debugfile.py b/src/sentry/models/debugfile.py index d2dfe1bd5f4030..8098687414f2b1 100644 --- a/src/sentry/models/debugfile.py +++ b/src/sentry/models/debugfile.py @@ -6,6 +6,7 @@ import logging import os import os.path +import random import re import shutil import tempfile @@ -631,6 +632,15 @@ def fetch_difs( """Given some ids returns an id to path mapping for where the debug symbol files are on the FS. """ + + # If this call is for proguard purposes, we probabilistically cut this function short + # right here so we don't overload filestore. + if features is not None: + if "mapping" in features and random.random() >= options.get( + "filestore.proguard-throttle" + ): + return {} + debug_ids = [str(debug_id).lower() for debug_id in debug_ids] difs = ProjectDebugFile.objects.find_by_debug_ids(project, debug_ids, features) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 1761e706713d7c..3251de9e1dfce3 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -293,6 +293,14 @@ register("filestore.control.backend", default="", flags=FLAG_NOSTORE) register("filestore.control.options", default={}, flags=FLAG_NOSTORE) +# Throttle filestore access in proguard processing. This is in response to +# INC-635. +register( + "filestore.proguard-throttle", + default=1.0, + flags=FLAG_AUTOMATOR_MODIFIABLE | FLAG_MODIFIABLE_RATE, +) + # Whether to use a redis lock on fileblob uploads and deletes register("fileblob.upload.use_lock", default=True, flags=FLAG_AUTOMATOR_MODIFIABLE) # Whether to use redis to cache `FileBlob.id` lookups From d7b40d07e362f5a3257416dfd2b12be041849b46 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Thu, 8 Feb 2024 15:07:35 +0100 Subject: [PATCH 170/357] fix(ddm): Focused series backwards compatibility (#64868) Make the `focusedSeries` query param backwards compatible. - fixes JAVASCRIPT-2RKS - fixes JAVASCRIPT-2RKT --- static/app/views/ddm/context.tsx | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/static/app/views/ddm/context.tsx b/static/app/views/ddm/context.tsx index 153edc85a8764a..63e299f171cfb7 100644 --- a/static/app/views/ddm/context.tsx +++ b/static/app/views/ddm/context.tsx @@ -88,7 +88,13 @@ export function useMetricWidgets() { groupBy: decodeList(widget.groupBy), displayType: widget.displayType ?? getDefaultMetricDisplayType(widget.mri, widget.op), - focusedSeries: widget.focusedSeries, + focusedSeries: + widget.focusedSeries && + // Switch existing focused series to array (it was once a string) + // TODO: remove this after some time (added 08.02.2024) + (Array.isArray(widget.focusedSeries) + ? widget.focusedSeries + : [widget.focusedSeries]), showSummaryTable: widget.showSummaryTable ?? true, // temporary default powerUserMode: widget.powerUserMode, sort: widget.sort ?? 
DEFAULT_SORT_STATE, From ffd6b4fc171a944c59a474f8355940064e8cb6a7 Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Thu, 8 Feb 2024 15:58:39 +0100 Subject: [PATCH 171/357] Sprinkle some tracing around more `save_event` functions (#64869) --- src/sentry/event_manager.py | 1 + src/sentry/quotas/redis.py | 1 + src/sentry/tasks/store.py | 1 + 3 files changed, 3 insertions(+) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 67267bbb9dff81..0ad38db4f301d8 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -2005,6 +2005,7 @@ def _get_severity_score(event: Event) -> tuple[float, str]: Attachment = CachedAttachment +@sentry_sdk.tracing.trace def discard_event(job: Job, attachments: Sequence[Attachment]) -> None: """ Refunds consumed quotas for an event and its attachments. diff --git a/src/sentry/quotas/redis.py b/src/sentry/quotas/redis.py index ada9cc731405c9..45c95722f8495b 100644 --- a/src/sentry/quotas/redis.py +++ b/src/sentry/quotas/redis.py @@ -160,6 +160,7 @@ def get_value_for_result(result, refund_result): def get_refunded_quota_key(self, key): return f"r:{key}" + @sentry_sdk.tracing.trace def refund(self, project, key=None, timestamp=None, category=None, quantity=None): if timestamp is None: timestamp = time() diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index 0e56e493a42828..70f911903c0d1d 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -536,6 +536,7 @@ def process_event_from_reprocessing( ) +@sentry_sdk.tracing.trace def delete_raw_event(project_id: int, event_id: str | None, allow_hint_clear: bool = False) -> None: set_current_event_project(project_id) From 087e99c487529e1f794d06046be993753d807272 Mon Sep 17 00:00:00 2001 From: Colton Allen Date: Thu, 8 Feb 2024 09:13:13 -0600 Subject: [PATCH 172/357] fix(replays): Move code requiring exception-prone variables into try block (#64658) Fixes INC-630. Variables are not defined if an exception is raised causing an unhandled crashloop. --- .../replays/consumers/recording_buffered.py | 35 ++++++++-------- .../replays/consumers/test_recording.py | 41 +++++++++++++++++++ 2 files changed, 59 insertions(+), 17 deletions(-) diff --git a/src/sentry/replays/consumers/recording_buffered.py b/src/sentry/replays/consumers/recording_buffered.py index 04432251df7467..3fa2f6045b47d9 100644 --- a/src/sentry/replays/consumers/recording_buffered.py +++ b/src/sentry/replays/consumers/recording_buffered.py @@ -35,6 +35,7 @@ than if throughput is low. The number of messages being operated on in parallel is material only insofar as it impacts the throughput of the consumer. """ + from __future__ import annotations import logging @@ -238,6 +239,23 @@ def process_message(buffer: RecordingBuffer, message: bytes) -> None: decoded_message["retention_days"], parsed_recording_data, ) + + if replay_actions is not None: + buffer.replay_action_events.append(replay_actions) + + # Useful for computing the average cost of a replay. + metrics.distribution( + "replays.usecases.ingest.size_compressed", + len(recording_data), + unit="byte", + ) + + # Useful for computing the compression ratio. 
+ metrics.distribution( + "replays.usecases.ingest.size_uncompressed", + len(decompressed_segment), + unit="byte", + ) except Exception: logging.exception( "Failed to parse recording org=%s, project=%s, replay=%s, segment=%s", @@ -247,23 +265,6 @@ def process_message(buffer: RecordingBuffer, message: bytes) -> None: headers["segment_id"], ) - if replay_actions is not None: - buffer.replay_action_events.append(replay_actions) - - # Useful for computing the average cost of a replay. - metrics.distribution( - "replays.usecases.ingest.size_compressed", - len(recording_data), - unit="byte", - ) - - # Useful for computing the compression ratio. - metrics.distribution( - "replays.usecases.ingest.size_uncompressed", - len(decompressed_segment), - unit="byte", - ) - # Commit. diff --git a/tests/sentry/replays/consumers/test_recording.py b/tests/sentry/replays/consumers/test_recording.py index 6e6edd5ee174cc..3f4d9095bee14f 100644 --- a/tests/sentry/replays/consumers/test_recording.py +++ b/tests/sentry/replays/consumers/test_recording.py @@ -172,6 +172,47 @@ def test_uncompressed_segment_ingestion(self, mock_record, mock_onboarding_task) user_id=self.organization.default_owner_id, ) + @patch("sentry.models.OrganizationOnboardingTask.objects.record") + @patch("sentry.analytics.record") + @patch("sentry.replays.usecases.ingest.dom_index.emit_replay_actions") + def test_invalid_json(self, emit_replay_actions, mock_record, mock_onboarding_task): + """Assert invalid JSON does not break ingestion. + + In production, we'll never received invalid JSON. Its validated in Relay. However, we + may still encounter issues when deserializing JSON that are not encountered in Relay + (e.g. max depth). These issues should not break ingestion. + """ + segment_id = 0 + self.submit( + self.nonchunked_messages(segment_id=segment_id, compressed=True, message=b"[{]") + ) + + # Data was persisted even though an error was encountered. + bytes = self.get_recording_data(segment_id) + assert bytes == zlib.compress(b"[{]") + + # Onboarding and billing tasks were called. + self.project.refresh_from_db() + assert self.project.flags.has_replays + + mock_onboarding_task.assert_called_with( + organization_id=self.project.organization_id, + task=OnboardingTask.SESSION_REPLAY, + status=OnboardingTaskStatus.COMPLETE, + date_completed=ANY, + ) + + mock_record.assert_called_with( + "first_replay.sent", + organization_id=self.organization.id, + project_id=self.project.id, + platform=self.project.platform, + user_id=self.organization.default_owner_id, + ) + + # No replay actions were emitted because JSON deserialization failed. 
+ assert not emit_replay_actions.called + class ThreadedRecordingTestCase(RecordingTestCase): force_synchronous = False From 922707e3537d9b0f1656ae95ced53a2a3db5223c Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Thu, 8 Feb 2024 10:43:53 -0500 Subject: [PATCH 173/357] ref: upgrade hiredis (#64789) this gets us to a version which is supported on python 3.12 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 7e5cda130327b2..34fcbd15897c1e 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -91,7 +91,7 @@ cryptography>=38.0.3 grpcio>=1.59.0 # not directly used, but provides a speedup for redis -hiredis>=0.3.1 +hiredis>=2.3.2 # sentry-plugins specific dependencies phabricator>=0.7.0 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index e87fe10fd40fbd..9197f0b396802d 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -72,7 +72,7 @@ grpc-google-iam-v1==0.12.4 grpcio==1.60.1 grpcio-status==1.60.1 h11==0.13.0 -hiredis==0.3.1 +hiredis==2.3.2 honcho==1.1.0 httpcore==1.0.2 httpx==0.25.2 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 8d45893d2829d9..0ebbac703bd0f8 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -58,7 +58,7 @@ grpc-google-iam-v1==0.12.4 grpcio==1.60.1 grpcio-status==1.60.1 h11==0.14.0 -hiredis==0.3.1 +hiredis==2.3.2 httpcore==1.0.2 httpx==0.25.2 idna==2.10 From 0ba6d479ca87acf541b1f89552103e956feda149 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Thu, 8 Feb 2024 17:03:51 +0100 Subject: [PATCH 174/357] fix(ddm): Handle all and my projects (#64877) --- static/app/views/ddm/context.tsx | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/static/app/views/ddm/context.tsx b/static/app/views/ddm/context.tsx index 63e299f171cfb7..00fa094de7b781 100644 --- a/static/app/views/ddm/context.tsx +++ b/static/app/views/ddm/context.tsx @@ -207,6 +207,12 @@ function useSelectedProjects() { const {projects} = useProjects(); return useMemo(() => { + if (selection.projects.length === 0) { + return projects.filter(project => project.isMember); + } + if (selection.projects.includes(-1)) { + return projects; + } return projects.filter(project => selection.projects.includes(Number(project.id))); }, [selection.projects, projects]); } From 657a2d38d49a175fd4f90463ddb64ad96c3df373 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Thu, 8 Feb 2024 08:21:09 -0800 Subject: [PATCH 175/357] fix(replays): check before attempting to load in onboarding doc (#64842) the problem: the user can get to the replay onboarding sidebar from the "quick start" pendo, which we weren't checking for before. they can get to the sidebar from any platform, even those that aren't supported by replay, so we should do a check first that the platform is even supported before we attempt to load in the onboarding module. the solution: if the platform isn't supported, return null docs --> this will result in our classic "fiddlesticks" message. 
fixes https://sentry.sentry.io/issues/4750881789/events/f673d336bc354cabb5ff95073c085a06/ --- .../useLoadOnboardingDoc.tsx | 38 ++++++++++++++----- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/static/app/components/replaysOnboarding/useLoadOnboardingDoc.tsx b/static/app/components/replaysOnboarding/useLoadOnboardingDoc.tsx index ff3cc68bb24e93..d1eb714e92c451 100644 --- a/static/app/components/replaysOnboarding/useLoadOnboardingDoc.tsx +++ b/static/app/components/replaysOnboarding/useLoadOnboardingDoc.tsx @@ -1,6 +1,8 @@ import {useEffect, useState} from 'react'; +import * as Sentry from '@sentry/react'; import type {Docs} from 'sentry/components/onboarding/gettingStartedDoc/types'; +import {replayPlatforms} from 'sentry/data/platformCategories'; import type {Organization, PlatformIntegration, ProjectKey} from 'sentry/types'; import {useApiQuery} from 'sentry/utils/queryClient'; @@ -13,9 +15,13 @@ function useLoadOnboardingDoc({ platform: PlatformIntegration; projectSlug: string; }) { - const [module, setModule] = useState; - }>(null); + const [module, setModule] = useState< + | null + | { + default: Docs; + } + | 'none' + >(null); const platformPath = platform?.type === 'framework' @@ -34,17 +40,31 @@ function useLoadOnboardingDoc({ useEffect(() => { async function getGettingStartedDoc() { - const mod = await import( - /* webpackExclude: /.spec/ */ - `sentry/gettingStartedDocs/${platformPath}` - ); - setModule(mod); + if (!replayPlatforms.includes(platform.id)) { + setModule('none'); + return; + } + try { + const mod = await import( + /* webpackExclude: /.spec/ */ + `sentry/gettingStartedDocs/${platformPath}` + ); + setModule(mod); + } catch (err) { + Sentry.captureException(err); + } } getGettingStartedDoc(); return () => { setModule(null); }; - }, [platformPath]); + }, [platformPath, platform.id]); + + if (module === 'none') { + return { + docs: null, + }; + } if (!module || projectKeysIsLoading) { return { From 84668fe77fb59232e11b882936666a94da7e0ab5 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Thu, 8 Feb 2024 08:21:18 -0800 Subject: [PATCH 176/357] ref(replay): remove 'new' badge from selector widgets & tab (#64848) not new anymore! SCR-20240207-ozja --- .../replays/deadRageClick/deadRageSelectorCards.tsx | 3 --- static/app/views/replays/tabs.tsx | 9 ++------- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/static/app/views/replays/deadRageClick/deadRageSelectorCards.tsx b/static/app/views/replays/deadRageClick/deadRageSelectorCards.tsx index b1bad5a69e73a7..4b6aa1425653ff 100644 --- a/static/app/views/replays/deadRageClick/deadRageSelectorCards.tsx +++ b/static/app/views/replays/deadRageClick/deadRageSelectorCards.tsx @@ -5,7 +5,6 @@ import styled from '@emotion/styled'; import Accordion from 'sentry/components/accordion/accordion'; import {LinkButton} from 'sentry/components/button'; import EmptyStateWarning from 'sentry/components/emptyStateWarning'; -import FeatureBadge from 'sentry/components/featureBadge'; import Placeholder from 'sentry/components/placeholder'; import {Flex} from 'sentry/components/profiling/flex'; import QuestionTooltip from 'sentry/components/questionTooltip'; @@ -51,7 +50,6 @@ function DeadRageSelectorCards() { isHoverable /> - {t('Suggested replays to watch')}
    @@ -74,7 +72,6 @@ function DeadRageSelectorCards() { isHoverable /> - {t('Suggested replays to watch')}
    diff --git a/static/app/views/replays/tabs.tsx b/static/app/views/replays/tabs.tsx index f1ed90873419e5..a3f3c080fc4b10 100644 --- a/static/app/views/replays/tabs.tsx +++ b/static/app/views/replays/tabs.tsx @@ -1,6 +1,5 @@ -import {Fragment, useMemo} from 'react'; +import {useMemo} from 'react'; -import FeatureBadge from 'sentry/components/featureBadge'; import {TabList, Tabs} from 'sentry/components/tabs'; import {t} from 'sentry/locale'; import {useLocation} from 'sentry/utils/useLocation'; @@ -25,11 +24,7 @@ export default function ReplayTabs({selected}: Props) { }, { key: 'selectors', - label: ( - - {t('Selectors')} - - ), + label: t('Selectors'), pathname: normalizeUrl(`/organizations/${organization.slug}/replays/selectors/`), query: {...location.query, sort: '-count_dead_clicks'}, }, From d7ef6ef062be6370116baf256fa0419ce5da054b Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:10:05 -0800 Subject: [PATCH 177/357] feat(integrations): Autofill project in selector after project creation modal (#64791) this pr adds in autofilling the project into the selector after a user creates one using the project creation modal. https://github.com/getsentry/sentry/assets/46740234/d809e6be-66c8-44a0-a766-5e693299c8ae --- .../components/forms/fields/projectMapperField.tsx | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/static/app/components/forms/fields/projectMapperField.tsx b/static/app/components/forms/fields/projectMapperField.tsx index c550c4b67fa8ce..6f2cb7644f318a 100644 --- a/static/app/components/forms/fields/projectMapperField.tsx +++ b/static/app/components/forms/fields/projectMapperField.tsx @@ -1,6 +1,7 @@ import {Component, Fragment} from 'react'; import {components} from 'react-select'; import styled from '@emotion/styled'; +import difference from 'lodash/difference'; import {openProjectCreationModal} from 'sentry/actionCreators/modal'; import {Button} from 'sentry/components/button'; @@ -59,6 +60,18 @@ export class RenderField extends Component { selectedMappedValue: null, }; + componentDidUpdate(prevProps: RenderProps) { + const projectIds = this.props.sentryProjects.map(project => project.id); + const prevProjectIds = prevProps.sentryProjects.map(project => project.id); + const newProjects = difference(projectIds, prevProjectIds); + + if (newProjects.length === 1) { + this.setState({ + selectedSentryProjectId: newProjects[0], + }); + } + } + render() { const { onChange, From 6013463f06951155432d77d15a8460b851c0eefc Mon Sep 17 00:00:00 2001 From: Alexander Tarasov Date: Thu, 8 Feb 2024 18:11:51 +0100 Subject: [PATCH 178/357] fix(integrations): phabricator host validation (#64882) --- src/sentry_plugins/phabricator/plugin.py | 8 ++++++++ tests/sentry_plugins/phabricator/test_plugin.py | 14 ++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/src/sentry_plugins/phabricator/plugin.py b/src/sentry_plugins/phabricator/plugin.py index 9d7f9c8f0bb609..514111308e68b4 100644 --- a/src/sentry_plugins/phabricator/plugin.py +++ b/src/sentry_plugins/phabricator/plugin.py @@ -8,6 +8,7 @@ from sentry.exceptions import PluginError from sentry.integrations import FeatureDescription, IntegrationFeatures +from sentry.net.socket import is_valid_url from sentry.plugins.bases.issue2 import IssueGroupActionEndpoint, IssuePlugin2 from sentry.utils import json from sentry.utils.http import absolute_uri @@ -33,6 +34,12 @@ def query_to_result(field, result): return result["fields"]["name"] +def 
+    if not value.startswith(("http://", "https://")) or not is_valid_url(value):
+        raise PluginError("Not a valid URL.")
+    return value
+
+
 class PhabricatorPlugin(CorePluginMixin, IssuePlugin2):
     description = DESCRIPTION
@@ -78,6 +85,7 @@ def get_configure_plugin_fields(self, request: Request, project, **kwargs):
                 "type": "text",
                 "placeholder": "e.g. http://secure.phabricator.org",
                 "required": True,
+                "validators": [validate_host],
             },
             secret_field,
             {
diff --git a/tests/sentry_plugins/phabricator/test_plugin.py b/tests/sentry_plugins/phabricator/test_plugin.py
index a5c3196f539b40..25a0b97e111c47 100644
--- a/tests/sentry_plugins/phabricator/test_plugin.py
+++ b/tests/sentry_plugins/phabricator/test_plugin.py
@@ -2,8 +2,11 @@
 import responses
 from django.test import RequestFactory
+from pytest import raises

+from sentry.exceptions import PluginError
 from sentry.testutils.cases import PluginTestCase
+from sentry.testutils.helpers import override_blocklist
 from sentry_plugins.phabricator.plugin import PhabricatorPlugin
@@ -43,3 +46,14 @@ def test_is_configured(self):
         assert self.plugin.is_configured(None, self.project) is False
         self.plugin.set_option("certificate", "a-certificate", self.project)
         assert self.plugin.is_configured(None, self.project) is True
+
+    @override_blocklist("127.0.0.1")
+    def test_invalid_url(self):
+        with raises(PluginError):
+            self.plugin.validate_config_field(
+                project=self.project, name="host", value="ftp://example.com"
+            )
+        with raises(PluginError):
+            self.plugin.validate_config_field(
+                project=self.project, name="host", value="http://127.0.0.1"
+            )

From 270094f47c840b6f7c084f5e569d3d61d9fda0e2 Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Thu, 8 Feb 2024 12:12:30 -0500
Subject: [PATCH 179/357] ref(sdk): Remove excessive json.loads spans (#64883)

### Summary
Many of our transactions use up their span limit immediately because json.loads creates spans for every field call etc.

![Screenshot 2024-02-08 at 11 25 04 AM](https://github.com/getsentry/sentry/assets/6111995/cc5181cf-845a-4546-9703-8989c68d0a52)

I ran the devserver and tried a couple of endpoints and removed spans for anything I saw spamming. Feel free to re-add a span if you find a Python profile for your transaction that shows json.loads taking up a significant amount of time; I'm just trying to get most of the ones that concern me at the moment without opening a bunch of PRs one by one.

In the future we should probably consider moving these over to a metric, as the span offers no additional information, so we can avoid this altogether. At the moment json can be called at startup, before config is loaded, so metrics can't be used here; that would need to be resolved first.
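For context on the mechanism: `skip_trace=True` is a flag on Sentry's internal `sentry.utils.json` wrapper, whose real implementation isn't part of this patch. Conceptually it just bypasses the span that normally wraps deserialization, roughly like this sketch (the `op` string is illustrative):

```python
import json as stdlib_json

import sentry_sdk


def loads(value, skip_trace=False, **kwargs):
    # Sketch only: with skip_trace set, deserialize directly instead of
    # wrapping the call in an SDK span, so hot paths stop burning through
    # the per-transaction span limit.
    if skip_trace:
        return stdlib_json.loads(value, **kwargs)
    with sentry_sdk.start_span(op="sentry.utils.json.loads"):
        return stdlib_json.loads(value, **kwargs)
```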
---
 src/sentry/db/models/fields/jsonfield.py | 6 +++---
 src/sentry/db/models/fields/picklefield.py | 2 +-
 src/sentry/ingest/billing_metrics_consumer.py | 4 +++-
 src/sentry/ingest/consumer/processors.py | 2 +-
 src/sentry/utils/codecs.py | 2 +-
 src/sentry/utils/snuba.py | 2 +-
 6 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/src/sentry/db/models/fields/jsonfield.py b/src/sentry/db/models/fields/jsonfield.py
index d0dea9b26f0579..eb02a2145f8f66 100644
--- a/src/sentry/db/models/fields/jsonfield.py
+++ b/src/sentry/db/models/fields/jsonfield.py
@@ -83,8 +83,8 @@ def get_default(self):
         if callable(default):
             default = default()
         if isinstance(default, str):
-            return json.loads(default)
-        return json.loads(json.dumps(default))
+            return json.loads(default, skip_trace=True)
+        return json.loads(json.dumps(default), skip_trace=True)
     return super().get_default()

 def get_internal_type(self):
@@ -101,7 +101,7 @@ def to_python(self, value):
         if self.blank:
             return ""
         try:
-            value = json.loads(value)
+            value = json.loads(value, skip_trace=True)
         except ValueError:
             msg = self.error_messages["invalid"] % value
             raise ValidationError(msg)
diff --git a/src/sentry/db/models/fields/picklefield.py b/src/sentry/db/models/fields/picklefield.py
index c2bee6bb6aea44..ea939b49f87da0 100644
--- a/src/sentry/db/models/fields/picklefield.py
+++ b/src/sentry/db/models/fields/picklefield.py
@@ -26,6 +26,6 @@ def to_python(self, value):
         if value is None:
             return None
         try:
-            return json.loads(value)
+            return json.loads(value, skip_trace=True)
         except (ValueError, TypeError):
             return super().to_python(value)
diff --git a/src/sentry/ingest/billing_metrics_consumer.py b/src/sentry/ingest/billing_metrics_consumer.py
index acd9704731dfaf..3c9fd71deb0599 100644
--- a/src/sentry/ingest/billing_metrics_consumer.py
+++ b/src/sentry/ingest/billing_metrics_consumer.py
@@ -85,7 +85,9 @@ def submit(self, message: Message[KafkaPayload]) -> None:
         self.__next_step.submit(message)

     def _get_payload(self, message: Message[KafkaPayload]) -> GenericMetric:
-        payload = json.loads(message.payload.value.decode("utf-8"), use_rapid_json=True)
+        payload = json.loads(
+            message.payload.value.decode("utf-8"), use_rapid_json=True, skip_trace=True
+        )
         return cast(GenericMetric, payload)

     def _count_processed_items(self, generic_metric: GenericMetric) -> Mapping[DataCategory, int]:
diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py
index 733d379e5a188c..13ae8f232b5f69 100644
--- a/src/sentry/ingest/consumer/processors.py
+++ b/src/sentry/ingest/consumer/processors.py
@@ -106,7 +106,7 @@ def process_event(message: IngestMessage, project: Project) -> None:
     # serializing it again.
     # XXX: Do not use CanonicalKeyDict here. This may break preprocess_event
     # which assumes that data passed in is a raw dictionary.
- data = json.loads(payload, use_rapid_json=True) + data = json.loads(payload, use_rapid_json=True, skip_trace=True) if project_id == settings.SENTRY_PROJECT: metrics.incr( "internal.captured.ingest_consumer.parsed", diff --git a/src/sentry/utils/codecs.py b/src/sentry/utils/codecs.py index 45813a02473516..c5b46edab5993a 100644 --- a/src/sentry/utils/codecs.py +++ b/src/sentry/utils/codecs.py @@ -75,7 +75,7 @@ def encode(self, value: JSONData) -> str: return str(json.dumps(value)) def decode(self, value: str) -> JSONData: - return json.loads(value) + return json.loads(value, skip_trace=True) class ZlibCodec(Codec[bytes, bytes]): diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 1a57e851e04973..5ebeec42213e17 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -958,7 +958,7 @@ def _bulk_snuba_query( for index, item in enumerate(query_results): response, _, reverse = item try: - body = json.loads(response.data) + body = json.loads(response.data, skip_trace=True) if SNUBA_INFO: if "sql" in body: print( # NOQA: only prints when an env variable is set From acb068acde14004e93fc0b91414b84c330cf572f Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Thu, 8 Feb 2024 09:18:32 -0800 Subject: [PATCH 180/357] feat(issue-details): Show static replay when error is not within the replay (#64827) This covers an edge case when the error event is not within the bounds of the replay. --- .../events/eventReplay/replayClipPreview.tsx | 14 +++ .../events/eventReplay/replayPreview.spec.tsx | 23 +--- .../events/eventReplay/replayPreview.tsx | 82 ++----------- .../eventReplay/staticReplayPreview.tsx | 108 ++++++++++++++++++ 4 files changed, 139 insertions(+), 88 deletions(-) create mode 100644 static/app/components/events/eventReplay/staticReplayPreview.tsx diff --git a/static/app/components/events/eventReplay/replayClipPreview.tsx b/static/app/components/events/eventReplay/replayClipPreview.tsx index c402c033b866f4..860718dd6381bf 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.tsx @@ -6,6 +6,7 @@ import {Alert} from 'sentry/components/alert'; import {LinkButton} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import ErrorBoundary from 'sentry/components/errorBoundary'; +import {StaticReplayPreview} from 'sentry/components/events/eventReplay/staticReplayPreview'; import Panel from 'sentry/components/panels/panel'; import Placeholder from 'sentry/components/placeholder'; import {Flex} from 'sentry/components/profiling/flex'; @@ -191,6 +192,19 @@ function ReplayClipPreview({ ); } + if (replay.getDurationMs() <= 0) { + return ( + + ); + } + return ( { }); const render: typeof baseRender = children => { - const {router, routerContext} = initializeOrg({ + const {routerContext} = initializeOrg({ router: { routes: [ {path: '/'}, @@ -79,21 +77,10 @@ const render: typeof baseRender = children => { }, }); - return baseRender( - - - {children} - - , - {context: routerContext} - ); + return baseRender(children, { + context: routerContext, + organization: OrganizationFixture({slug: mockOrgSlug}), + }); }; const defaultProps = { diff --git a/static/app/components/events/eventReplay/replayPreview.tsx b/static/app/components/events/eventReplay/replayPreview.tsx index 85b7d27b964e2c..afbfbd2435966c 100644 --- a/static/app/components/events/eventReplay/replayPreview.tsx +++ b/static/app/components/events/eventReplay/replayPreview.tsx @@ -1,26 +1,20 @@ import 
type {ComponentProps} from 'react'; -import {Fragment, useMemo} from 'react'; +import {useMemo} from 'react'; import styled from '@emotion/styled'; import {Alert} from 'sentry/components/alert'; -import {LinkButton} from 'sentry/components/button'; +import type {LinkButton} from 'sentry/components/button'; +import {StaticReplayPreview} from 'sentry/components/events/eventReplay/staticReplayPreview'; import Placeholder from 'sentry/components/placeholder'; import {Flex} from 'sentry/components/profiling/flex'; import MissingReplayAlert from 'sentry/components/replays/alerts/missingReplayAlert'; -import {Provider as ReplayContextProvider} from 'sentry/components/replays/replayContext'; -import ReplayPlayer from 'sentry/components/replays/replayPlayer'; -import ReplayProcessingError from 'sentry/components/replays/replayProcessingError'; -import {IconDelete, IconPlay} from 'sentry/icons'; +import {IconDelete} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import getRouteStringFromRoutes from 'sentry/utils/getRouteStringFromRoutes'; -import {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; +import type {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader'; import type RequestError from 'sentry/utils/requestError/requestError'; import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; -import {useRoutes} from 'sentry/utils/useRoutes'; -import {normalizeUrl} from 'sentry/utils/withDomainRequired'; -import FluidHeight from 'sentry/views/replays/detail/layout/fluidHeight'; import type {ReplayRecord} from 'sentry/views/replays/types'; type Props = { @@ -62,7 +56,6 @@ function ReplayPreview({ orgSlug, replaySlug, }: Props) { - const routes = useRoutes(); const {fetching, replay, replayRecord, fetchError, replayId} = useReplayReader({ orgSlug, replaySlug, @@ -106,70 +99,19 @@ function ReplayPreview({ ); } - const fullReplayUrl = { - pathname: normalizeUrl(`/organizations/${orgSlug}/replays/${replayId}/`), - query: { - referrer: getRouteStringFromRoutes(routes), - t_main: focusTab ?? TabKey.ERRORS, - t: initialTimeOffsetMs / 1000, - }, - }; - return ( - - - {replay?.hasProcessingErrors() ? 
( - - ) : ( - - - - - - - } - priority="primary" - to={fullReplayUrl} - > - {t('Open Replay')} - - - - )} - - + replay={replay} + replayId={replayId} + fullReplayButtonProps={fullReplayButtonProps} + initialTimeOffsetMs={initialTimeOffsetMs} + /> ); } -const PlayerContainer = styled(FluidHeight)` - position: relative; - background: ${p => p.theme.background}; - gap: ${space(1)}; - max-height: 448px; -`; - -const StaticPanel = styled(FluidHeight)` - border: 1px solid ${p => p.theme.border}; - border-radius: ${p => p.theme.borderRadius}; -`; - -const CTAOverlay = styled('div')` - position: absolute; - width: 100%; - height: 100%; - display: flex; - justify-content: center; - align-items: center; - background: rgba(255, 255, 255, 0.5); -`; - const StyledPlaceholder = styled(Placeholder)` margin-bottom: ${space(2)}; `; diff --git a/static/app/components/events/eventReplay/staticReplayPreview.tsx b/static/app/components/events/eventReplay/staticReplayPreview.tsx new file mode 100644 index 00000000000000..99ed70a6022352 --- /dev/null +++ b/static/app/components/events/eventReplay/staticReplayPreview.tsx @@ -0,0 +1,108 @@ +import {type ComponentProps, Fragment, useMemo} from 'react'; +import styled from '@emotion/styled'; + +import {LinkButton} from 'sentry/components/button'; +import {Provider as ReplayContextProvider} from 'sentry/components/replays/replayContext'; +import ReplayPlayer from 'sentry/components/replays/replayPlayer'; +import ReplayProcessingError from 'sentry/components/replays/replayProcessingError'; +import {IconPlay} from 'sentry/icons'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import getRouteStringFromRoutes from 'sentry/utils/getRouteStringFromRoutes'; +import {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; +import type ReplayReader from 'sentry/utils/replays/replayReader'; +import useOrganization from 'sentry/utils/useOrganization'; +import {useRoutes} from 'sentry/utils/useRoutes'; +import FluidHeight from 'sentry/views/replays/detail/layout/fluidHeight'; + +type StaticReplayPreviewProps = { + analyticsContext: string; + initialTimeOffsetMs: number; + isFetching: boolean; + replay: ReplayReader | null; + replayId: string; + focusTab?: TabKey; + fullReplayButtonProps?: Partial>; +}; + +export function StaticReplayPreview({ + analyticsContext, + initialTimeOffsetMs, + isFetching, + focusTab, + replayId, + fullReplayButtonProps, + replay, +}: StaticReplayPreviewProps) { + const organization = useOrganization(); + const routes = useRoutes(); + const fullReplayUrl = { + pathname: `/organizations/${organization.slug}/replays/${replayId}/`, + query: { + referrer: getRouteStringFromRoutes(routes), + t_main: focusTab ?? TabKey.ERRORS, + t: initialTimeOffsetMs / 1000, + }, + }; + + const offset = useMemo( + () => ({ + offsetMs: initialTimeOffsetMs, + }), + [initialTimeOffsetMs] + ); + + return ( + + + {replay?.hasProcessingErrors() ? 
( + + ) : ( + + + + + + + } + priority="primary" + to={fullReplayUrl} + > + {t('Open Replay')} + + + + )} + + + ); +} + +const PlayerContainer = styled(FluidHeight)` + position: relative; + background: ${p => p.theme.background}; + gap: ${space(1)}; + max-height: 448px; +`; + +const StaticPanel = styled(FluidHeight)` + border: 1px solid ${p => p.theme.border}; + border-radius: ${p => p.theme.borderRadius}; +`; + +const CTAOverlay = styled('div')` + position: absolute; + width: 100%; + height: 100%; + display: flex; + justify-content: center; + align-items: center; + background: rgba(255, 255, 255, 0.5); +`; From 782b0f4466c8eb843319d46c2862ffc4bfd5fbd7 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 8 Feb 2024 17:27:00 +0000 Subject: [PATCH 181/357] Revert "ref: upgrade hiredis (#64789)" This reverts commit 922707e3537d9b0f1656ae95ced53a2a3db5223c. Co-authored-by: asottile-sentry <103459774+asottile-sentry@users.noreply.github.com> --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 34fcbd15897c1e..7e5cda130327b2 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -91,7 +91,7 @@ cryptography>=38.0.3 grpcio>=1.59.0 # not directly used, but provides a speedup for redis -hiredis>=2.3.2 +hiredis>=0.3.1 # sentry-plugins specific dependencies phabricator>=0.7.0 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 9197f0b396802d..e87fe10fd40fbd 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -72,7 +72,7 @@ grpc-google-iam-v1==0.12.4 grpcio==1.60.1 grpcio-status==1.60.1 h11==0.13.0 -hiredis==2.3.2 +hiredis==0.3.1 honcho==1.1.0 httpcore==1.0.2 httpx==0.25.2 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 0ebbac703bd0f8..8d45893d2829d9 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -58,7 +58,7 @@ grpc-google-iam-v1==0.12.4 grpcio==1.60.1 grpcio-status==1.60.1 h11==0.14.0 -hiredis==2.3.2 +hiredis==0.3.1 httpcore==1.0.2 httpx==0.25.2 idna==2.10 From 692becbca0eb3b66a210b5187dce6b67a6277c53 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Thu, 8 Feb 2024 09:29:20 -0800 Subject: [PATCH 182/357] feat(issue-priority): Add priority column to issue stream (#64820) - Add priority column - Adjust assignee column to have take up less space (tested this even with 3 suggested assignees, still looks good) - Adjust responsive elements to hide columns and actions sooner because of the extra required space. 
(I decided to make these changes without the feature flag because it works fine without it and it was already a bit crowded at smaller screen sizes) --- fixtures/page_objects/issue_list.py | 19 ++++++++++-- static/app/components/issues/groupList.tsx | 8 ++++- .../app/components/issues/groupListHeader.tsx | 21 +++++++++++-- static/app/components/stream/group.spec.tsx | 31 +++++++++++++++++-- static/app/components/stream/group.tsx | 27 +++++++++++++--- .../views/alerts/rules/issue/previewTable.tsx | 1 + .../app/views/issueDetails/groupPriority.tsx | 3 +- .../app/views/issueList/actions/actionSet.tsx | 4 +-- .../app/views/issueList/actions/headers.tsx | 30 +++++++++++++++--- static/app/views/issueList/actions/index.tsx | 2 +- 10 files changed, 123 insertions(+), 23 deletions(-) diff --git a/fixtures/page_objects/issue_list.py b/fixtures/page_objects/issue_list.py index a5e27c0ec76be4..eebf5b38c626d5 100644 --- a/fixtures/page_objects/issue_list.py +++ b/fixtures/page_objects/issue_list.py @@ -47,8 +47,21 @@ def delete_issues(self): self.browser.click('[data-test-id="confirm-button"]') def merge_issues(self): - self.browser.click('[aria-label="Merge Selected Issues"]') - self.browser.click('[data-test-id="confirm-button"]') + # Merge button gets put into an overflow menu for small viewports + if self.browser.element_exists('[aria-label="Merge Selected Issues"]'): + self.browser.click('[aria-label="Merge Selected Issues"]') + self.browser.click('[data-test-id="confirm-button"]') + else: + self.browser.click('[aria-label="More issue actions"]') + self.browser.wait_until('[data-test-id="merge"]') + self.browser.click('[data-test-id="merge"]') + self.browser.click('[data-test-id="confirm-button"]') def mark_reviewed_issues(self): - self.browser.click('[aria-label="Mark Reviewed"]') + # Marked reviewed button gets put into an overflow menu for small viewports + if self.browser.element_exists('[aria-label="Mark Reviewed"]'): + self.browser.click('[aria-label="Mark Reviewed"]') + else: + self.browser.click('[aria-label="More issue actions"]') + self.browser.wait_until('[data-test-id="mark-reviewed"]') + self.browser.click('[data-test-id="mark-reviewed"]') diff --git a/static/app/components/issues/groupList.tsx b/static/app/components/issues/groupList.tsx index d6e39ce23d0dde..3855990ecfb799 100644 --- a/static/app/components/issues/groupList.tsx +++ b/static/app/components/issues/groupList.tsx @@ -41,7 +41,13 @@ const defaultProps = { withColumns: ['graph', 'event', 'users', 'assignee'] satisfies GroupListColumn[], }; -export type GroupListColumn = 'graph' | 'event' | 'users' | 'assignee' | 'lastTriggered'; +export type GroupListColumn = + | 'graph' + | 'event' + | 'users' + | 'priority' + | 'assignee' + | 'lastTriggered'; type Props = WithRouterProps & { api: Client; diff --git a/static/app/components/issues/groupListHeader.tsx b/static/app/components/issues/groupListHeader.tsx index b382c8bf3ddc77..ce330233dfd1e1 100644 --- a/static/app/components/issues/groupListHeader.tsx +++ b/static/app/components/issues/groupListHeader.tsx @@ -3,6 +3,7 @@ import styled from '@emotion/styled'; import PanelHeader from 'sentry/components/panels/panelHeader'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import useOrganization from 'sentry/utils/useOrganization'; import type {GroupListColumn} from './groupList'; @@ -17,6 +18,8 @@ function GroupListHeader({ narrowGroups = false, withColumns = ['graph', 'event', 'users', 'assignee', 'lastTriggered'], }: Props) { + const organization = 
useOrganization(); + return ( {t('Issue')} @@ -27,6 +30,10 @@ function GroupListHeader({ {t('events')} )} {withColumns.includes('users') && {t('users')}} + {withColumns.includes('priority') && + organization.features.includes('issue-priority-ui') && ( + {t('Priority')} + )} {withColumns.includes('assignee') && ( {t('Assignee')} )} @@ -70,14 +77,24 @@ const ChartWrapper = styled(Heading)<{narrowGroups: boolean}>` width: 160px; @media (max-width: ${p => - p.narrowGroups ? p.theme.breakpoints.xlarge : p.theme.breakpoints.large}) { + p.narrowGroups ? p.theme.breakpoints.xxlarge : p.theme.breakpoints.xlarge}) { + display: none; + } +`; + +const PriorityWrapper = styled(Heading)<{narrowGroups: boolean}>` + justify-content: flex-end; + width: 85px; + + @media (max-width: ${p => + p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) { display: none; } `; const AssigneeWrapper = styled(Heading)<{narrowGroups: boolean}>` justify-content: flex-end; - width: 80px; + width: 60px; @media (max-width: ${p => p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) { diff --git a/static/app/components/stream/group.spec.tsx b/static/app/components/stream/group.spec.tsx index 2db37022534a74..dd78a5d9831dea 100644 --- a/static/app/components/stream/group.spec.tsx +++ b/static/app/components/stream/group.spec.tsx @@ -1,14 +1,15 @@ import {GroupFixture} from 'sentry-fixture/group'; +import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {initializeOrg} from 'sentry-test/initializeOrg'; -import {act, render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import {act, render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary'; import StreamGroup from 'sentry/components/stream/group'; import GroupStore from 'sentry/stores/groupStore'; import GuideStore from 'sentry/stores/guideStore'; import type {GroupStatusResolution, MarkReviewed} from 'sentry/types'; -import {EventOrGroupType, GroupStatus} from 'sentry/types'; +import {EventOrGroupType, GroupStatus, PriorityLevel} from 'sentry/types'; import {trackAnalytics} from 'sentry/utils/analytics'; jest.mock('sentry/utils/analytics'); @@ -89,6 +90,32 @@ describe('StreamGroup', function () { expect(screen.getByTestId('resolved-issue')).toBeInTheDocument(); }); + it('can change priority', async function () { + const mockModifyGroup = MockApiClient.addMockResponse({ + url: '/projects/org-slug/foo-project/issues/', + method: 'PUT', + body: {priority: PriorityLevel.HIGH}, + }); + + render(, { + organization: OrganizationFixture({features: ['issue-priority-ui']}), + }); + + const priorityDropdown = screen.getByRole('button', {name: 'Modify issue priority'}); + expect(within(priorityDropdown).getByText('Medium')).toBeInTheDocument(); + await userEvent.click(priorityDropdown); + await userEvent.click(screen.getByRole('menuitemradio', {name: 'High'})); + expect(within(priorityDropdown).getByText('High')).toBeInTheDocument(); + expect(mockModifyGroup).toHaveBeenCalledWith( + '/projects/org-slug/foo-project/issues/', + expect.objectContaining({ + data: expect.objectContaining({ + priority: 'high', + }), + }) + ); + }); + it('tracks clicks from issues stream', async function () { const {routerContext, organization} = initializeOrg(); render( diff --git a/static/app/components/stream/group.tsx b/static/app/components/stream/group.tsx index 368f63012f69e4..a95def1db5180f 100644 --- a/static/app/components/stream/group.tsx +++ 
b/static/app/components/stream/group.tsx @@ -44,6 +44,7 @@ import {getConfigForIssueType} from 'sentry/utils/issueTypeConfig'; import usePageFilters from 'sentry/utils/usePageFilters'; import withOrganization from 'sentry/utils/withOrganization'; import type {TimePeriodType} from 'sentry/views/alerts/rules/metric/details/constants'; +import GroupPriority from 'sentry/views/issueDetails/groupPriority'; import { DISCOVER_EXCLUSION_FIELDS, getTabs, @@ -87,7 +88,7 @@ function BaseGroupRow({ statsPeriod = DEFAULT_STREAM_GROUP_STATS_PERIOD, canSelect = true, withChart = true, - withColumns = ['graph', 'event', 'users', 'assignee', 'lastTriggered'], + withColumns = ['graph', 'event', 'users', 'priority', 'assignee', 'lastTriggered'], useFilteredStats = false, useTintRow = true, narrowGroups = false, @@ -454,6 +455,12 @@ function BaseGroupRow({ {withColumns.includes('users') && issueTypeConfig.stats.enabled && ( {groupUsersCount} )} + {organization.features.includes('issue-priority-ui') && + withColumns.includes('priority') ? ( + + {group.priority ? : null} + + ) : null} {withColumns.includes('assignee') && ( ` width: 200px; align-self: center; - @media (max-width: ${p => - p.narrowGroups ? p.theme.breakpoints.xlarge : p.theme.breakpoints.large}) { + @media (max-width: ${p => (p.narrowGroups ? '1600px' : p.theme.breakpoints.xlarge)}) { display: none; } `; @@ -603,8 +609,21 @@ const EventCountsWrapper = styled('div')` } `; +const PriorityWrapper = styled('div')<{narrowGroups: boolean}>` + width: 85px; + margin: 0 ${space(2)}; + align-self: center; + display: flex; + justify-content: flex-end; + + @media (max-width: ${p => + p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) { + display: none; + } +`; + const AssigneeWrapper = styled('div')<{narrowGroups: boolean}>` - width: 80px; + width: 60px; margin: 0 ${space(2)}; align-self: center; diff --git a/static/app/views/alerts/rules/issue/previewTable.tsx b/static/app/views/alerts/rules/issue/previewTable.tsx index 3b1f86a7f70563..8dfd39545c2f18 100644 --- a/static/app/views/alerts/rules/issue/previewTable.tsx +++ b/static/app/views/alerts/rules/issue/previewTable.tsx @@ -70,6 +70,7 @@ function PreviewTable({ withChart={false} canSelect={false} showLastTriggered + withColumns={['assignee', 'event', 'lastTriggered', 'users']} /> ); }); diff --git a/static/app/views/issueDetails/groupPriority.tsx b/static/app/views/issueDetails/groupPriority.tsx index a4cc950038528a..4242620c252221 100644 --- a/static/app/views/issueDetails/groupPriority.tsx +++ b/static/app/views/issueDetails/groupPriority.tsx @@ -30,6 +30,7 @@ function GroupPriority({group}: GroupDetailsPriorityProps) { }); addLoadingMessage(t('Saving changes\u2026')); + IssueListCacheStore.reset(); bulkUpdate( api, @@ -41,8 +42,6 @@ function GroupPriority({group}: GroupDetailsPriorityProps) { }, {complete: clearIndicators} ); - - IssueListCacheStore.reset(); }; return ( diff --git a/static/app/views/issueList/actions/actionSet.tsx b/static/app/views/issueList/actions/actionSet.tsx index cef61f2e395739..9279369f7b6f61 100644 --- a/static/app/views/issueList/actions/actionSet.tsx +++ b/static/app/views/issueList/actions/actionSet.tsx @@ -1,5 +1,4 @@ import {Fragment} from 'react'; -import {useTheme} from '@emotion/react'; import ActionLink from 'sentry/components/actions/actionLink'; import ArchiveActions from 'sentry/components/actions/archive'; @@ -103,8 +102,7 @@ function ActionSet({ // Determine whether to nest "Merge" and "Mark as Reviewed" buttons inside // the dropdown menu 
based on the current screen size
-  const theme = useTheme();
-  const nestMergeAndReview = useMedia(`(max-width: ${theme.breakpoints.xlarge})`);
+  const nestMergeAndReview = useMedia(`(max-width: 1700px)`);

   const menuItems: MenuItemProps[] = [
     {
diff --git a/static/app/views/issueList/actions/headers.tsx b/static/app/views/issueList/actions/headers.tsx
index bcf972a2fe7ee5..1f33b112296e31 100644
--- a/static/app/views/issueList/actions/headers.tsx
+++ b/static/app/views/issueList/actions/headers.tsx
@@ -5,6 +5,7 @@ import ToolbarHeader from 'sentry/components/toolbarHeader';
 import {t} from 'sentry/locale';
 import {space} from 'sentry/styles/space';
 import type {PageFilters} from 'sentry/types';
+import useOrganization from 'sentry/utils/useOrganization';

 type Props = {
   isReprocessingQuery: boolean;
@@ -21,6 +22,8 @@ function Headers({
   isReprocessingQuery,
   isSavedSearchesOpen,
 }: Props) {
+  const organization = useOrganization();
+
   return (
     {isReprocessingQuery ? (
@@ -52,9 +55,14 @@
           {t('Events')}
           {t('Users')}
-
+          {organization.features.includes('issue-priority-ui') && (
+            {t('Priority')}
+          )}
+
           {t('Assignee')}
-
+
         )}
@@ -70,7 +78,7 @@ const GraphHeaderWrapper = styled('div')<{isSavedSearchesOpen?: boolean}>`
   animation: 0.25s FadeIn linear forwards;

   @media (max-width: ${p =>
-    p.isSavedSearchesOpen ? '1600px' : p.theme.breakpoints.xlarge}) {
     display: none;
   }
@@ -117,10 +125,22 @@ const EventsOrUsersLabel = styled(ToolbarHeader)`
   }
 `;

-const AssigneesLabel = styled('div')<{isSavedSearchesOpen?: boolean}>`
+const PriorityLabel = styled('div')<{isSavedSearchesOpen?: boolean}>`
   justify-content: flex-end;
   text-align: right;
-  width: 80px;
+  width: 85px;
+  margin: 0 ${space(2)};
+
+  @media (max-width: ${p =>
+    p.isSavedSearchesOpen ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) {
+    display: none;
+  }
+`;
+
+const AssigneeLabel = styled('div')<{isSavedSearchesOpen?: boolean}>`
+  justify-content: flex-end;
+  text-align: right;
+  width: 60px;
   margin-left: ${space(2)};
   margin-right: ${space(2)};
diff --git a/static/app/views/issueList/actions/index.tsx b/static/app/views/issueList/actions/index.tsx
index 2c6b2bf32b260c..e5ba64c83cfcfd 100644
--- a/static/app/views/issueList/actions/index.tsx
+++ b/static/app/views/issueList/actions/index.tsx
@@ -75,7 +75,7 @@
   const disableActions = useMedia(
     `(max-width: ${
-      isSavedSearchesOpen ? theme.breakpoints.large : theme.breakpoints.small
+      isSavedSearchesOpen ? theme.breakpoints.xlarge : theme.breakpoints.medium
     })`
   );

From d38869aed4c8f87aa3849b6e170c378368ecb63d Mon Sep 17 00:00:00 2001
From: Sebastian Zivota
Date: Thu, 8 Feb 2024 18:33:10 +0100
Subject: [PATCH 183/357] ref(api): Make processing team endpoints private
 (#63939)

This makes private the endpoints that the processing team doesn't think
should be public.
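For reference, a minimal sketch of the convention the diff below applies: publish status is declared per HTTP method on the endpoint class, and `PRIVATE` keeps that method out of the published API schema. The imports mirror those visible in the diff; the endpoint class itself is hypothetical:

```python
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.project import ProjectEndpoint


@region_silo_endpoint
class ExampleProcessingEndpoint(ProjectEndpoint):  # hypothetical endpoint
    owner = ApiOwner.OWNERS_PROCESSING
    # One entry per supported HTTP method; PRIVATE (rather than UNKNOWN)
    # explicitly excludes the method from the public API documentation.
    publish_status = {
        "GET": ApiPublishStatus.PRIVATE,
    }
```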
--- src/sentry/api/endpoints/chunk.py | 4 ++-- src/sentry/api/endpoints/debug_files.py | 6 +++--- src/sentry/api/endpoints/event_apple_crash_report.py | 2 +- src/sentry/api/endpoints/event_reprocessable.py | 2 +- .../endpoints/project_app_store_connect_credentials.py | 10 +++++----- src/sentry/api/endpoints/project_reprocessing.py | 2 +- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/sentry/api/endpoints/chunk.py b/src/sentry/api/endpoints/chunk.py index d2472a81a6aa63..fefc9541b01495 100644 --- a/src/sentry/api/endpoints/chunk.py +++ b/src/sentry/api/endpoints/chunk.py @@ -48,8 +48,8 @@ def __init__(self, file): @region_silo_endpoint class ChunkUploadEndpoint(OrganizationEndpoint): publish_status = { - "GET": ApiPublishStatus.UNKNOWN, - "POST": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, + "POST": ApiPublishStatus.PRIVATE, } owner = ApiOwner.OWNERS_NATIVE permission_classes = (OrganizationReleasePermission,) diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py index 2eb63a9ad07ca2..e818ef85d60bb3 100644 --- a/src/sentry/api/endpoints/debug_files.py +++ b/src/sentry/api/endpoints/debug_files.py @@ -360,7 +360,7 @@ def post(self, request: Request, project) -> Response: class UnknownDebugFilesEndpoint(ProjectEndpoint): owner = ApiOwner.OWNERS_NATIVE publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } permission_classes = (ProjectReleasePermission,) @@ -374,7 +374,7 @@ def get(self, request: Request, project) -> Response: class AssociateDSymFilesEndpoint(ProjectEndpoint): owner = ApiOwner.OWNERS_NATIVE publish_status = { - "POST": ApiPublishStatus.UNKNOWN, + "POST": ApiPublishStatus.PRIVATE, } permission_classes = (ProjectReleasePermission,) @@ -387,7 +387,7 @@ def post(self, request: Request, project) -> Response: class DifAssembleEndpoint(ProjectEndpoint): owner = ApiOwner.OWNERS_NATIVE publish_status = { - "POST": ApiPublishStatus.UNKNOWN, + "POST": ApiPublishStatus.PRIVATE, } permission_classes = (ProjectReleasePermission,) diff --git a/src/sentry/api/endpoints/event_apple_crash_report.py b/src/sentry/api/endpoints/event_apple_crash_report.py index 99266b44f40f4a..c26a7789ae5e2e 100644 --- a/src/sentry/api/endpoints/event_apple_crash_report.py +++ b/src/sentry/api/endpoints/event_apple_crash_report.py @@ -16,7 +16,7 @@ class EventAppleCrashReportEndpoint(ProjectEndpoint): owner = ApiOwner.OWNERS_NATIVE publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } def get(self, request: Request, project, event_id) -> HttpResponseBase: diff --git a/src/sentry/api/endpoints/event_reprocessable.py b/src/sentry/api/endpoints/event_reprocessable.py index 91f6d86ad8e400..f64873a0f2139f 100644 --- a/src/sentry/api/endpoints/event_reprocessable.py +++ b/src/sentry/api/endpoints/event_reprocessable.py @@ -14,7 +14,7 @@ class EventReprocessableEndpoint(ProjectEndpoint): owner = ApiOwner.OWNERS_PROCESSING publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } def get(self, request: Request, project, event_id) -> Response: diff --git a/src/sentry/api/endpoints/project_app_store_connect_credentials.py b/src/sentry/api/endpoints/project_app_store_connect_credentials.py index 6446542b5fee7d..281638c6a9cc58 100644 --- a/src/sentry/api/endpoints/project_app_store_connect_credentials.py +++ b/src/sentry/api/endpoints/project_app_store_connect_credentials.py @@ -85,7 +85,7 @@ class 
AppStoreConnectCredentialsSerializer(serializers.Serializer): @region_silo_endpoint class AppStoreConnectAppsEndpoint(ProjectEndpoint): publish_status = { - "POST": ApiPublishStatus.UNKNOWN, + "POST": ApiPublishStatus.PRIVATE, } """Retrieves available applications with provided credentials. @@ -204,7 +204,7 @@ class AppStoreCreateCredentialsSerializer(serializers.Serializer): @region_silo_endpoint class AppStoreConnectCreateCredentialsEndpoint(ProjectEndpoint): publish_status = { - "POST": ApiPublishStatus.UNKNOWN, + "POST": ApiPublishStatus.PRIVATE, } """Returns all the App Store Connect symbol source settings ready to be saved. @@ -282,7 +282,7 @@ def validate_appconnectPrivateKey( @region_silo_endpoint class AppStoreConnectUpdateCredentialsEndpoint(ProjectEndpoint): publish_status = { - "POST": ApiPublishStatus.UNKNOWN, + "POST": ApiPublishStatus.PRIVATE, } """Updates a subset of the existing credentials. @@ -348,7 +348,7 @@ def post(self, request: Request, project: Project, credentials_id: str) -> Respo @region_silo_endpoint class AppStoreConnectRefreshEndpoint(ProjectEndpoint): publish_status = { - "POST": ApiPublishStatus.UNKNOWN, + "POST": ApiPublishStatus.PRIVATE, } """Triggers an immediate check for new App Store Connect builds. @@ -402,7 +402,7 @@ def post(self, request: Request, project: Project, credentials_id: str) -> Respo @region_silo_endpoint class AppStoreConnectStatusEndpoint(ProjectEndpoint): publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } """Returns a summary of the project's App Store Connect configuration and builds. diff --git a/src/sentry/api/endpoints/project_reprocessing.py b/src/sentry/api/endpoints/project_reprocessing.py index 5c5edcb46170aa..45738c68e9ba4c 100644 --- a/src/sentry/api/endpoints/project_reprocessing.py +++ b/src/sentry/api/endpoints/project_reprocessing.py @@ -12,7 +12,7 @@ class ProjectReprocessingEndpoint(ProjectEndpoint): owner = ApiOwner.OWNERS_PROCESSING publish_status = { - "POST": ApiPublishStatus.UNKNOWN, + "POST": ApiPublishStatus.PRIVATE, } permission_classes = (ProjectReleasePermission,) From ff4c7ec45fee6edf119139a8ec8d05048a05b9a5 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 8 Feb 2024 17:53:49 +0000 Subject: [PATCH 184/357] release: 24.1.2 --- CHANGES | 32 ++++++++++++++++++++++++++++++++ setup.cfg | 2 +- src/sentry/conf/server.py | 2 +- 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/CHANGES b/CHANGES index 97f9661c1c04d5..5f37397a84f2e3 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,35 @@ +24.1.2 +------ + +### Various fixes & improvements + +- ref(api): Make processing team endpoints private (#63939) by @loewenheim +- feat(issue-priority): Add priority column to issue stream (#64820) by @malwilley +- Revert "ref: upgrade hiredis (#64789)" (782b0f44) by @getsentry-bot +- feat(issue-details): Show static replay when error is not within the replay (#64827) by @malwilley +- ref(sdk): Remove excessive json.loads spans (#64883) by @k-fish +- fix(integrations): phabricator host validation (#64882) by @oioki +- feat(integrations): Autofill project in selector after project creation modal (#64791) by @roggenkemper +- ref(replay): remove 'new' badge from selector widgets & tab (#64848) by @michellewzhang +- fix(replays): check before attempting to load in onboarding doc (#64842) by @michellewzhang +- fix(ddm): Handle all and my projects (#64877) by @ArthurKnaus +- ref: upgrade hiredis (#64789) by @asottile-sentry +- fix(replays): Move code requiring exception-prone 
variables into try block (#64658) by @cmanallen +- Sprinkle some tracing around more `save_event` functions (#64869) by @Swatinem +- fix(ddm): Focused series backwards compatibility (#64868) by @ArthurKnaus +- ref: Add option to throttle proguard processing (#64866) by @loewenheim +- ref(similarity-embeddings): Remove unused organization level flag (#64815) by @jangjodi +- ref: upgrade psycopg2-binary (#64809) by @asottile-sentry +- fix(ddm): Global loading state on zoom (#64861) by @ArthurKnaus +- fix(ddm): project param rename (#64860) by @obostjancic +- fix(ddm): Clicking chart hides tooltip (#64859) by @ArthurKnaus +- feat(ddm): Multiple focused series (#64855) by @ArthurKnaus +- feat(ddm): Add new metrics/query endpoint base code (#64785) by @iambriccardo +- feat(stats): metrics stats (#64790) by @obostjancic +- ref(feedback): add analytics for mark as spam buttons (#64850) by @michellewzhang + +_Plus 606 more_ + 24.1.1 ------ diff --git a/setup.cfg b/setup.cfg index fa0daa98fe099e..2b7d43e423dab4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = sentry -version = 24.2.0.dev0 +version = 24.1.2 description = A realtime logging and aggregation server. long_description = file: README.md long_description_content_type = text/markdown diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index cf9df255e9a588..6afd13339c0665 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3054,7 +3054,7 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] SENTRY_SELF_HOSTED = True # only referenced in getsentry to provide the stable beacon version # updated with scripts/bump-version.sh -SELF_HOSTED_STABLE_VERSION = "24.1.1" +SELF_HOSTED_STABLE_VERSION = "24.1.2" # Whether we should look at X-Forwarded-For header or not # when checking REMOTE_ADDR ip addresses From 4b7723e3bd5b21c8ed3996457a8e7a38ea5432dd Mon Sep 17 00:00:00 2001 From: Ryan Skonnord Date: Thu, 8 Feb 2024 09:59:27 -0800 Subject: [PATCH 185/357] test(hc): Check for inheritance in silo-decorated test classes (#64733) Fix failure modes when inheriting from a test class with a silo mode decorator on it. Enforce that subclasses must have their own silo mode decorators. Add new decorators to test case subclasses where required. Delete the unused `_validate_that_no_ancestor_is_silo_decorated` method. It's unlikely to be used again, as the thing it was prohibiting is now mandatory. 
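To illustrate the new requirement (hypothetical test classes; the decorator import path matches the test diffs below), a subclass of a silo-decorated test case must now carry its own decorator:

```python
from sentry.testutils.cases import APITestCase
from sentry.testutils.silo import region_silo_test


@region_silo_test
class CreateThingTest(APITestCase):  # hypothetical base test case
    def test_create(self):
        ...


# The decorator must be repeated on the subclass; relying on the inherited
# one now raises SubclassNotSiloDecoratedException during test setup.
@region_silo_test
class CreateThingVariantTest(CreateThingTest):
    def test_variant(self):
        ...
```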
--- src/sentry/testutils/silo.py | 71 ++++++++++--------- .../test_organization_member_team_details.py | 1 + .../api/endpoints/test_userroles_details.py | 3 + .../sentry/integrations/github/test_client.py | 3 + .../github_enterprise/test_search.py | 3 + .../jira/test_sentry_installation.py | 2 + .../integrations/vercel/test_webhook.py | 1 + .../endpoints/test_organization_events_mep.py | 2 + .../test_organization_events_stats_mep.py | 1 + ..._organization_events_stats_span_metrics.py | 1 + .../test_organization_events_trace.py | 1 + 11 files changed, 54 insertions(+), 35 deletions(-) diff --git a/src/sentry/testutils/silo.py b/src/sentry/testutils/silo.py index 167cd96f0c7390..85b4e73147df7c 100644 --- a/src/sentry/testutils/silo.py +++ b/src/sentry/testutils/silo.py @@ -111,7 +111,7 @@ def _model_silo_limit(t: type[Model]) -> ModelSiloLimit: return silo_limit -class AncestorAlreadySiloDecoratedException(Exception): +class SubclassNotSiloDecoratedException(Exception): pass @@ -192,20 +192,47 @@ def test_config(self, silo_mode: SiloMode): def _create_overriding_test_class( self, test_class: type[TestCase], silo_mode: SiloMode, name_suffix: str = "" ) -> type[TestCase]: - def override_method(method_name: str) -> Callable[..., Any]: - context = self.test_config(silo_mode) - method: Callable[..., Any] = getattr(test_class, method_name) - return context(method) + silo_mode_attr = "__silo_mode_override" + + @contextmanager + def create_context(obj: TestCase) -> Generator[None, None, None]: + tagged_class, tagged_mode = getattr(obj, silo_mode_attr) + + if type(obj) is not tagged_class: + # This condition indicates that the test case inherits the silo mode + # attribute from a superclass. Although we could just test in that + # mode, doing so would silently skip other modes if the superclass is + # supposed to be tested in more than one mode. So, enforce a general + # rule that test case subclasses must have decorators of their own. + sup = tagged_class.__name__ + sub = type(obj).__name__ + raise SubclassNotSiloDecoratedException( + f"A test class ({sub}) extends a silo-decorated test class ({sup}) " + f"without a silo decorator of its own. Add a decorator to {sub}. " + f"(You probably want to copy and paste the decorator from {sup}. " + f"If you don't want to run {sub} in a silo mode at all, use " + f"`@no_silo_test`.)" + ) + + with self.test_config(tagged_mode): + yield # Unfortunately, due to the way DjangoTestCase setup and app manipulation works, `override_settings` in a # run method produces unusual, broken results. We're forced to wrap the hidden methods that invoke setup # test method in order to use override_settings correctly in django test cases. 
- new_methods = { - method_name: override_method(method_name) - for method_name in ("_callSetUp", "_callTestMethod") - } + + def _callSetUp(obj: TestCase) -> Any: + with create_context(obj): + return TestCase._callSetUp(obj) # type: ignore[attr-defined] + + def _callTestMethod(obj: TestCase, method: Any) -> Any: + with create_context(obj): + return TestCase._callTestMethod(obj, method) # type: ignore[attr-defined] + + new_methods = {"_callSetUp": _callSetUp, "_callTestMethod": _callTestMethod} name = test_class.__name__ + name_suffix new_class = type(name, (test_class,), new_methods) + setattr(new_class, silo_mode_attr, (new_class, silo_mode)) return cast(type[TestCase], new_class) def _arrange_silo_modes(self) -> tuple[SiloMode, Collection[SiloMode]]: @@ -274,32 +301,6 @@ def apply(self, decorated_obj: Any) -> Any: return self._mark_parameterized_by_silo_mode(decorated_obj) - def _validate_that_no_ancestor_is_silo_decorated(self, object_to_validate: Any): - # Deprecated? Silo decorators at multiple places in the inheritance tree may - # be necessary if a base class needs to be run in a non-default mode, - # especially when the default is no longer region mode. The previous - # rationale may have been limited to problems around swapping the local - # region, which may now be resolved. - # - # TODO(RyanSkonnord): Delete this after ascertaining that it's safe to have - # silo decorators on test case class ancestors - - class_queue = [object_to_validate] - - # Do a breadth-first traversal of all base classes to ensure that the - # object does not inherit from a class which has already been decorated, - # even in multi-inheritance scenarios. - while len(class_queue) > 0: - current_class = class_queue.pop(0) - if getattr(current_class, "_silo_modes", None): - raise AncestorAlreadySiloDecoratedException( - f"Cannot decorate class '{object_to_validate.__name__}', " - f"which inherits from a silo decorated class ({current_class.__name__})" - ) - class_queue.extend(current_class.__bases__) - - object_to_validate._silo_modes = self.silo_modes - all_silo_test = SiloModeTestDecorator(*SiloMode) """ diff --git a/tests/sentry/api/endpoints/test_organization_member_team_details.py b/tests/sentry/api/endpoints/test_organization_member_team_details.py index 72cb93d4c6d1ac..8b6b6d62fbd9ec 100644 --- a/tests/sentry/api/endpoints/test_organization_member_team_details.py +++ b/tests/sentry/api/endpoints/test_organization_member_team_details.py @@ -299,6 +299,7 @@ def test_team_admin_can_add_member(self): ).exists() +@region_silo_test class CreateWithClosedMembershipTest(CreateOrganizationMemberTeamTest): @cached_property def org(self): diff --git a/tests/sentry/api/endpoints/test_userroles_details.py b/tests/sentry/api/endpoints/test_userroles_details.py index 9b910ca2e8a20a..508617d36785bc 100644 --- a/tests/sentry/api/endpoints/test_userroles_details.py +++ b/tests/sentry/api/endpoints/test_userroles_details.py @@ -31,6 +31,7 @@ def test_fails_without_users_admin_permission(self): assert resp.status_code == 403 +@control_silo_test class UserRolesDetailsGetTest(UserRolesDetailsTest): def test_simple(self): self.create_user_role(name="test-role") @@ -40,6 +41,7 @@ def test_simple(self): assert resp.data["name"] == "test-role" +@control_silo_test class UserRolesDetailsPutTest(UserRolesDetailsTest): method = "PUT" @@ -55,6 +57,7 @@ def test_simple(self): assert role2.permissions == ["users.edit"] +@control_silo_test class UserRolesDetailsDeleteTest(UserRolesDetailsTest): method = "DELETE" diff --git 
a/tests/sentry/integrations/github/test_client.py b/tests/sentry/integrations/github/test_client.py index 2fd0902e36b36c..f613fa5632dce1 100644 --- a/tests/sentry/integrations/github/test_client.py +++ b/tests/sentry/integrations/github/test_client.py @@ -867,6 +867,7 @@ def setUp(self, get_jwt): ) +@region_silo_test class GitHubClientFileBlameQueryBuilderTest(GitHubClientFileBlameBase): """ Tests that get_blame_for_files builds the correct GraphQL query @@ -1209,6 +1210,7 @@ def test_trim_file_path_for_query(self, get_jwt): assert json.loads(responses.calls[1].request.body)["query"] == query +@region_silo_test class GitHubClientFileBlameResponseTest(GitHubClientFileBlameBase): """ Tests that get_blame_for_files handles the GraphQL response correctly @@ -1623,6 +1625,7 @@ def test_get_blame_for_files_invalid_commit(self, get_jwt, mock_logger_error): ) +@region_silo_test class GitHubClientFileBlameRateLimitTest(GitHubClientFileBlameBase): """ Tests that rate limits are handled correctly diff --git a/tests/sentry/integrations/github_enterprise/test_search.py b/tests/sentry/integrations/github_enterprise/test_search.py index 253c7c82a88146..09fe6957db99c7 100644 --- a/tests/sentry/integrations/github_enterprise/test_search.py +++ b/tests/sentry/integrations/github_enterprise/test_search.py @@ -1,8 +1,11 @@ from datetime import datetime, timedelta +from sentry.testutils.silo import control_silo_test + from ..github import test_search +@control_silo_test class GithubEnterpriseSearchTest(test_search.GithubSearchTest): # Inherit test methods/scenarios from GithubSearchTest # and fill out the slots that customize it to use github:enterprise diff --git a/tests/sentry/integrations/jira/test_sentry_installation.py b/tests/sentry/integrations/jira/test_sentry_installation.py index 37d447b0a2f339..9d900b234a0800 100644 --- a/tests/sentry/integrations/jira/test_sentry_installation.py +++ b/tests/sentry/integrations/jira/test_sentry_installation.py @@ -24,6 +24,7 @@ def setUp(self): self.integration = self.create_provider_integration(provider="jira", name="Example Jira") +@control_silo_test class JiraSentryInstallationViewErrorsTest(JiraSentryInstallationViewTestCase): @patch( "sentry.integrations.jira.views.sentry_installation.get_integration_from_request", @@ -44,6 +45,7 @@ def test_expired_invalid_installation_error(self, mock_get_integration_from_requ assert UNABLE_TO_VERIFY_INSTALLATION.encode() in response.content +@control_silo_test class JiraSentryInstallationViewTest(JiraSentryInstallationViewTestCase): def setUp(self): super().setUp() diff --git a/tests/sentry/integrations/vercel/test_webhook.py b/tests/sentry/integrations/vercel/test_webhook.py index 40bf5a4261a7ad..db89a009185410 100644 --- a/tests/sentry/integrations/vercel/test_webhook.py +++ b/tests/sentry/integrations/vercel/test_webhook.py @@ -304,6 +304,7 @@ def test_missing_repository(self): assert "Could not determine repository" == response.data["detail"] +@control_silo_test class VercelReleasesNewTest(VercelReleasesTest): webhook_url = "/extensions/vercel/delete/" diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index e2965d7f79f0df..f73ddfd89cc55c 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -3126,6 +3126,7 @@ def test_on_demand_with_mep(self): assert not meta["isMetricsData"] +@region_silo_test class 
OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithOnDemandMetrics( MetricsEnhancedPerformanceTestCase ): @@ -3205,6 +3206,7 @@ def test_transaction_user_misery(self) -> None: } +@region_silo_test class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithMetricLayer( OrganizationEventsMetricsEnhancedPerformanceEndpointTest ): diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py index 1856b19e443757..f50cff2bbc31d8 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py @@ -766,6 +766,7 @@ def test_top_events_with_project(self): assert data["order"] == 0 +@region_silo_test class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTestWithMetricLayer( OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTest ): diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py b/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py index 9b55a802ebb9fc..538e741a796658 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py @@ -273,6 +273,7 @@ def test_resource_transfer_size(self): assert data[1][1][0]["count"] == 4.0 +@region_silo_test class OrganizationEventsStatsSpansMetricsEndpointTestWithMetricLayer( OrganizationEventsStatsSpansMetricsEndpointTest ): diff --git a/tests/snuba/api/endpoints/test_organization_events_trace.py b/tests/snuba/api/endpoints/test_organization_events_trace.py index 0a29319a902fd7..d2566535992c20 100644 --- a/tests/snuba/api/endpoints/test_organization_events_trace.py +++ b/tests/snuba/api/endpoints/test_organization_events_trace.py @@ -1497,6 +1497,7 @@ def test_pruning_event(self): assert len(gen1["children"]) == 0 +@region_silo_test class OrganizationEventsTraceEndpointTestUsingSpans(OrganizationEventsTraceEndpointTest): def client_get(self, data, url=None): data["useSpans"] = 1 From 052df366647e51634ff9997ec4116819cb04ba1e Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 8 Feb 2024 18:24:10 +0000 Subject: [PATCH 186/357] meta: Bump new development version --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 2b7d43e423dab4..fa0daa98fe099e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = sentry -version = 24.1.2 +version = 24.2.0.dev0 description = A realtime logging and aggregation server. long_description = file: README.md long_description_content_type = text/markdown From 8ec3015a3223a13f7c915824c14e8be2d2363f05 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Thu, 8 Feb 2024 13:33:46 -0500 Subject: [PATCH 187/357] feat(metrics-extraction): Only return default spec version (#64863) ### Summary This returns only the default spec version (which is what controls state), since later versions may have spec limits or cardinality limits imposed as they can be entirely experimental. 
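As a simplified sketch of the idea (module paths assumed from the imports in the diff below): several `DashboardWidgetQueryOnDemand` rows can exist per widget query, one per spec version, so serialization pins to the default version:

```python
from sentry.models.dashboard_widget import DashboardWidgetQueryOnDemand
from sentry.snuba.metrics.extraction import OnDemandMetricSpecVersioning


def on_demand_rows(query_ids):
    """Return on-demand rows for the given widget query ids, restricted
    to the default (state-controlling) spec version."""
    default_version = OnDemandMetricSpecVersioning.get_default_spec_version().version
    return DashboardWidgetQueryOnDemand.objects.filter(
        dashboard_widget_query_id__in=query_ids,
        spec_version=default_version,  # later versions may be experimental
    )
```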
--- src/sentry/api/serializers/models/dashboard.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/sentry/api/serializers/models/dashboard.py b/src/sentry/api/serializers/models/dashboard.py index f65e067d7c2781..86f06db37926e9 100644 --- a/src/sentry/api/serializers/models/dashboard.py +++ b/src/sentry/api/serializers/models/dashboard.py @@ -11,6 +11,7 @@ DashboardWidgetTypes, ) from sentry.services.hybrid_cloud.user.service import user_service +from sentry.snuba.metrics.extraction import OnDemandMetricSpecVersioning from sentry.utils import json from sentry.utils.dates import outside_retention_with_modified_start, parse_timestamp @@ -68,10 +69,14 @@ class DashboardWidgetQuerySerializer(Serializer): def get_attrs(self, item_list, user): result = {} + stateful_extraction_version = ( + OnDemandMetricSpecVersioning.get_default_spec_version().version + ) data_sources = serialize( list( DashboardWidgetQueryOnDemand.objects.filter( - dashboard_widget_query_id__in=[i.id for i in item_list] + dashboard_widget_query_id__in=[i.id for i in item_list], + spec_version=stateful_extraction_version, ) ) ) From 8fc69896d5a571360e11848ebce67393b647bba8 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Thu, 8 Feb 2024 14:07:53 -0800 Subject: [PATCH 188/357] fix(api): Create workaround for null union with multiple types (#64834) See comment in code for detail --- src/sentry/apidocs/spectacular_ports.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/sentry/apidocs/spectacular_ports.py b/src/sentry/apidocs/spectacular_ports.py index b2de000ff2574e..9c13a55ab7db4b 100644 --- a/src/sentry/apidocs/spectacular_ports.py +++ b/src/sentry/apidocs/spectacular_ports.py @@ -146,7 +146,18 @@ def resolve_type_hint(hint) -> Any: else: schema = resolve_type_hint(type_args[0]) if type(None) in args: - schema["nullable"] = True + # There's an issue where if 3 or more types are OR'd together and one of + # them is None, validating the schema will fail because "nullable: true" + # with "anyOf" raises an error because there is no "type" key on the + # schema. This works around it by including a proxy null object in + # the "anyOf". + # See: + # - https://github.com/tfranzel/drf-spectacular/issues/925 + # - https://github.com/OAI/OpenAPI-Specification/issues/1368. + if len(args) > 2: + schema["oneOf"].append({"type": "object", "nullable": True}) + else: + schema["nullable"] = True return schema elif origin is collections.abc.Iterable: return build_array_type(resolve_type_hint(args[0])) From 450560571e5fc86933bef9a00f71b22bb94845f5 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Thu, 8 Feb 2024 16:15:39 -0600 Subject: [PATCH 189/357] chore(profiling): Remove some flamegraph related feature flags (#64905) These feature flags on the flamegraph are GA'ed, so it's safe to remove. 
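Stepping back to the OpenAPI workaround in #64834 above, a sketch (placeholder component names) of the schema shape the appended proxy object produces for a three-way union with `None`:

```python
# Effective schema for Optional[Union[A, B, C]] under the workaround: a
# proxy nullable object is appended to the union, because a bare
# "nullable": True alongside "oneOf"/"anyOf" fails validation (the schema
# has no top-level "type" key).
schema = {
    "oneOf": [
        {"$ref": "#/components/schemas/A"},  # A, B, C are placeholders
        {"$ref": "#/components/schemas/B"},
        {"$ref": "#/components/schemas/C"},
        {"type": "object", "nullable": True},  # proxy standing in for None
    ]
}
```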
--- .../profiling/flamegraph/flamegraph.tsx | 30 +++++-------------- .../groupEventDetailsContent.tsx | 27 +++++++---------- 2 files changed, 19 insertions(+), 38 deletions(-) diff --git a/static/app/components/profiling/flamegraph/flamegraph.tsx b/static/app/components/profiling/flamegraph/flamegraph.tsx index b617ac69038c48..5c5fa50de80485 100644 --- a/static/app/components/profiling/flamegraph/flamegraph.tsx +++ b/static/app/components/profiling/flamegraph/flamegraph.tsx @@ -62,7 +62,6 @@ import type {ProfilingFormatterUnit} from 'sentry/utils/profiling/units/units'; import {formatTo, fromNanoJoulesToWatts} from 'sentry/utils/profiling/units/units'; import {useDevicePixelRatio} from 'sentry/utils/useDevicePixelRatio'; import {useMemoWithPrevious} from 'sentry/utils/useMemoWithPrevious'; -import useOrganization from 'sentry/utils/useOrganization'; import {useProfileGroup} from 'sentry/views/profiling/profileGroupProvider'; import { useProfileTransaction, @@ -194,7 +193,6 @@ const LOADING_OR_FALLBACK_MEMORY_CHART = FlamegraphChartModel.Empty; const noopFormatDuration = () => ''; function Flamegraph(): ReactElement { - const organization = useOrganization(); const devicePixelRatio = useDevicePixelRatio(); const profiledTransaction = useProfileTransaction(); const dispatch = useDispatchFlamegraphState(); @@ -237,35 +235,23 @@ function Flamegraph(): ReactElement { const hasUIFrames = useMemo(() => { const platform = profileGroup.metadata.platform; - return ( - (platform === 'cocoa' || platform === 'android') && - organization.features.includes('profiling-ui-frames') - ); - }, [organization.features, profileGroup.metadata.platform]); + return platform === 'cocoa' || platform === 'android'; + }, [profileGroup.metadata.platform]); const hasBatteryChart = useMemo(() => { const platform = profileGroup.metadata.platform; - return ( - platform === 'cocoa' && - organization.features.includes('profiling-battery-usage-chart') - ); - }, [profileGroup.metadata.platform, organization.features]); + return platform === 'cocoa'; + }, [profileGroup.metadata.platform]); const hasCPUChart = useMemo(() => { const platform = profileGroup.metadata.platform; - return ( - (platform === 'cocoa' || platform === 'android' || platform === 'node') && - organization.features.includes('profiling-cpu-chart') - ); - }, [profileGroup.metadata.platform, organization.features]); + return platform === 'cocoa' || platform === 'android' || platform === 'node'; + }, [profileGroup.metadata.platform]); const hasMemoryChart = useMemo(() => { const platform = profileGroup.metadata.platform; - return ( - (platform === 'cocoa' || platform === 'android' || platform === 'node') && - organization.features.includes('profiling-memory-chart') - ); - }, [profileGroup.metadata.platform, organization.features]); + return platform === 'cocoa' || platform === 'android' || platform === 'node'; + }, [profileGroup.metadata.platform]); const profile = useMemo(() => { return profileGroup.profiles.find(p => p.threadId === threadId); diff --git a/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx b/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx index a15f1d5b8274a4..e8db4434ebdf3d 100644 --- a/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx +++ b/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx @@ -1,7 +1,6 @@ import {Fragment} from 'react'; import styled from '@emotion/styled'; -import Feature from 'sentry/components/acl/feature'; import 
{CommitRow} from 'sentry/components/commitRow'; import ErrorBoundary from 'sentry/components/errorBoundary'; import {EventContexts} from 'sentry/components/events/contexts'; @@ -216,8 +215,6 @@ function ProfilingDurationRegressionIssueDetailsContent({ event, project, }: Required) { - const organization = useOrganization(); - return ( @@ -230,20 +227,18 @@ function ProfilingDurationRegressionIssueDetailsContent({ - - - - {t('Largest Changes in Call Stack Frequency')} -

    - {t(`See which functions changed the most before and after the regression. The - frame with the largest increase in call stack population likely - contributed to the cause for the duration regression.`)} -

    + + + {t('Largest Changes in Call Stack Frequency')} +

    + {t(`See which functions changed the most before and after the regression. The + frame with the largest increase in call stack population likely + contributed to the cause for the duration regression.`)} +

    - -
    -
    -
+ + +

From d387e613b2226e6a6981ff6ec28ff491c6a7a008 Mon Sep 17 00:00:00 2001
From: Tony Xiao
Date: Thu, 8 Feb 2024 16:16:19 -0600
Subject: [PATCH 190/357] chore(profiling): Remove unused stacktrace links
 endpoint (#64909)

This multi stacktrace links endpoint was never used, as it did not cover
some edge cases. It's also no longer needed, so we can remove it.

---------

Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com>
---
 pyproject.toml                                |   1 -
 .../api/endpoints/project_stacktrace_links.py | 177 ----------
 src/sentry/api/urls.py                        |   6 -
 .../api_ownership_allowlist_dont_modify.py    |   1 -
 ...pi_publish_status_allowlist_dont_modify.py |   1 -
 src/sentry/conf/server.py                     |   2 -
 src/sentry/features/__init__.py               |   1 -
 .../test_project_stacktrace_links.py          | 307 ------------------
 8 files changed, 496 deletions(-)
 delete mode 100644 src/sentry/api/endpoints/project_stacktrace_links.py
 delete mode 100644 tests/sentry/api/endpoints/test_project_stacktrace_links.py

diff --git a/pyproject.toml b/pyproject.toml
index dce7d516c2d5d1..07c0102a5d518e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -202,7 +202,6 @@ module = [
     "sentry.api.endpoints.project_rule_preview",
     "sentry.api.endpoints.project_rules_configuration",
     "sentry.api.endpoints.project_servicehook_stats",
-    "sentry.api.endpoints.project_stacktrace_links",
     "sentry.api.endpoints.project_transaction_names",
     "sentry.api.endpoints.rule_snooze",
     "sentry.api.endpoints.source_map_debug",
diff --git a/src/sentry/api/endpoints/project_stacktrace_links.py b/src/sentry/api/endpoints/project_stacktrace_links.py
deleted file mode 100644
index 8fa35b77a1bc42..00000000000000
--- a/src/sentry/api/endpoints/project_stacktrace_links.py
+++ /dev/null
@@ -1,177 +0,0 @@
-from __future__ import annotations
-
-from rest_framework import serializers
-from rest_framework.request import Request
-from rest_framework.response import Response
-
-from sentry import features
-from sentry.api.api_owners import ApiOwner
-from sentry.api.api_publish_status import ApiPublishStatus
-from sentry.api.base import region_silo_endpoint
-from sentry.api.bases.project import ProjectEndpoint
-from sentry.integrations.base import IntegrationInstallation
-from sentry.integrations.mixins import RepositoryMixin
-from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs
-from sentry.models.integrations.repository_project_path_config import RepositoryProjectPathConfig
-from sentry.models.project import Project
-from sentry.services.hybrid_cloud.integration import integration_service
-from sentry.shared_integrations.exceptions import ApiError
-from sentry.utils.sdk import set_measurement
-
-MAX_CODE_MAPPINGS_USED = 3
-
-
-class StacktraceLinksSerializer(serializers.Serializer):
-    file = serializers.ListField(child=serializers.CharField())
-
-    # falls back to the default branch
-    ref = serializers.CharField(required=False)
-
-
-@region_silo_endpoint
-class ProjectStacktraceLinksEndpoint(ProjectEndpoint):
-    owner = ApiOwner.ISSUES
-    publish_status = {
-        "GET": ApiPublishStatus.UNKNOWN,
-    }
-    """
-    Returns valid links for source code providers so that
-    users can go from files in the stack trace to the
-    provider of their choice.
-
-    Similar to `ProjectStacktraceLinkEndpoint` but allows
-    for bulk resolution.
- - `file`: The file paths from the stack trace - `ref` (optional): The commit_id for the last commit of the - release associated to the stack trace's event - """ - - def get(self, request: Request, project: Project) -> Response: - if not features.has( - "organizations:profiling-stacktrace-links", project.organization, actor=request.user - ): - return Response(status=404) - - serializer = StacktraceLinksSerializer(data=request.GET) - - if not serializer.is_valid(): - return Response(serializer.errors, status=400) - - data = serializer.validated_data - - result = {"files": [{"file": file} for file in data["file"]]} - - mappings_used = 0 - mappings_attempted = 0 - - configs = get_sorted_code_mapping_configs(project) - - default_error = "stack_root_mismatch" if configs else "no_code_mappings" - - for config in configs: - # find all the files that match the current code mapping's stack_root - # and have not already been resolved by another code mapping - # - # if the's an error from a previous code mapping attempted, but this - # current code mapping can be used, we should try again - files = [ - file - for file in result["files"] - if file.get("sourceUrl") is None and file["file"].startswith(config.stack_root) - ] - if not files: - continue - - mappings_attempted += 1 - - # safety to limit the maximum number of mappings used - # to avoid reaching API rate limits - if mappings_used >= MAX_CODE_MAPPINGS_USED: - for file in files: - if not file.get("error") and file.get("sourceUrl") is None: - file["error"] = "max_code_mappings_applied" - continue - - mappings_used += 1 - - install = get_installation(config) - - # should always be overwritten - error: str | None = "file_not_checked" - - # since the same code mapping stack root matches all these files, we only check the - # first file and we will assume the other matching files will resolve the same way - ref = data.get("ref") - if ref: - error = check_file(install, config, files[0]["file"], ref) - if not ref or error: - ref = config.default_branch - error = check_file(install, config, files[0]["file"], ref) - - for file in files: - formatted_path = file["file"].replace(config.stack_root, config.source_root, 1) - url = install.format_source_url(config.repository, formatted_path, ref) - if error: - file["error"] = error - file["attemptedUrl"] = url - else: - file["sourceUrl"] = url - - # there may be an error from an previous code mapping, clear it - if "error" in file: - del file["error"] - if "attemptedUrl" in file: - del file["attemptedUrl"] - - # number of available code mappings - set_measurement("mappings.found", len(configs)) - - # number of code mappings that matched a stack root - set_measurement("mappings.attempted", mappings_attempted) - - # number of code mappings that was attempted - set_measurement("mappings.used", mappings_used) - - for file in result["files"]: - if not file.get("error") and file.get("sourceUrl") is None: - file["error"] = default_error - - return Response(result, status=200) - - -def get_installation(config: RepositoryProjectPathConfig) -> IntegrationInstallation: - integration = integration_service.get_integration( - organization_integration_id=config.organization_integration_id - ) - return integration.get_installation(organization_id=config.project.organization_id) - - -def check_file( - install: IntegrationInstallation, - config: RepositoryProjectPathConfig, - filepath: str, - ref: str, -) -> str | None: - """ - Checks to see if the given filepath exists using the given code mapping + ref. 
- - Returns a string indicating the error if it doesn't exist, and `None` otherwise. - """ - - formatted_path = filepath.replace(config.stack_root, config.source_root, 1) - - link = None - try: - if isinstance(install, RepositoryMixin): - # the logic to fall back to the default branch is handled from the caller - link = install.get_stacktrace_link(config.repository, formatted_path, ref, "") - except ApiError as e: - if e.code != 403: - raise - return "integration_link_forbidden" - - if not link: - return "file_not_found" - - return None diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 33149e53acb44b..a28c71db3a6660 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -531,7 +531,6 @@ from .endpoints.project_servicehook_stats import ProjectServiceHookStatsEndpoint from .endpoints.project_servicehooks import ProjectServiceHooksEndpoint from .endpoints.project_stacktrace_link import ProjectStacktraceLinkEndpoint -from .endpoints.project_stacktrace_links import ProjectStacktraceLinksEndpoint from .endpoints.project_stats import ProjectStatsEndpoint from .endpoints.project_symbol_sources import ProjectSymbolSourcesEndpoint from .endpoints.project_tagkey_details import ProjectTagKeyDetailsEndpoint @@ -2591,11 +2590,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ProjectStacktraceLinkEndpoint.as_view(), name="sentry-api-0-project-stacktrace-link", ), - re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/stacktrace-links/$", - ProjectStacktraceLinksEndpoint.as_view(), - name="sentry-api-0-project-stacktrace-links", - ), re_path( r"^(?P[^\/]+)/(?P[^\/]+)/repo-path-parsing/$", ProjectRepoPathParsingEndpoint.as_view(), diff --git a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py index 48cab9953af53f..c2d02d56c7c4fc 100644 --- a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py @@ -240,7 +240,6 @@ "/api/0/projects/{organization_slug}/{project_slug}/{var}/", "/api/0/users/{user_id}/roles/", "/api/0/sentry-apps/{sentry_app_slug}/avatar/", - "/api/0/projects/{organization_slug}/{project_slug}/stacktrace-links/", "/api/0/organizations/{organization_slug}/config/repos/", "/api/0/organizations/{organization_slug}/api-keys/", "/api/0/organizations/{organization_slug}/releases/{version}/commits/", diff --git a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py index 28f94785fac016..1b55bfb9013992 100644 --- a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py @@ -578,7 +578,6 @@ "/api/0/projects/{organization_slug}/{project_slug}/tombstones/": {"GET"}, "/api/0/projects/{organization_slug}/{project_slug}/tombstones/{tombstone_id}/": {"DELETE"}, "/api/0/projects/{organization_slug}/{project_slug}/stacktrace-link/": {"GET"}, - "/api/0/projects/{organization_slug}/{project_slug}/stacktrace-links/": {"GET"}, "/api/0/projects/{organization_slug}/{project_slug}/grouping-configs/": {"GET"}, "/api/0/projects/{organization_slug}/{project_slug}/appstoreconnect/": {"POST"}, "/api/0/projects/{organization_slug}/{project_slug}/appstoreconnect/apps/": {"POST"}, diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 6afd13339c0665..fb004043a7f8c5 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1788,8 +1788,6 @@ def 
custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:profiling-global-suspect-functions": False, # Enable profiling Memory chart "organizations:profiling-memory-chart": False, - # Enable stacktrace linking of multiple frames in profiles - "organizations:profiling-stacktrace-links": False, # Enable profiling statistical detectors breakpoint detection "organizations:profiling-statistical-detectors-breakpoint": False, # Enable profiling statistical detectors ema detection diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 71c86383bcc1af..2c09be24c614f6 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -219,7 +219,6 @@ default_manager.add("organizations:profiling-differential-flamegraph", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-global-suspect-functions", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-memory-chart", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:profiling-stacktrace-links", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-statistical-detectors-breakpoint", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:profiling-statistical-detectors-ema", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:profiling-summary-redesign", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/tests/sentry/api/endpoints/test_project_stacktrace_links.py b/tests/sentry/api/endpoints/test_project_stacktrace_links.py deleted file mode 100644 index 000655208c3e2b..00000000000000 --- a/tests/sentry/api/endpoints/test_project_stacktrace_links.py +++ /dev/null @@ -1,307 +0,0 @@ -from unittest.mock import patch - -from rest_framework.exceptions import ErrorDetail - -from sentry.integrations.example.integration import ExampleIntegration -from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs -from sentry.shared_integrations.exceptions import ApiError -from sentry.testutils.cases import APITestCase -from sentry.testutils.silo import region_silo_test - - -@region_silo_test -class ProjectStacktraceLinksTest(APITestCase): - endpoint = "sentry-api-0-project-stacktrace-links" - filepath = "foo/bar/baz.py" - url = "https://example.com/example/foo/blob/master/src/foo/bar/baz.py" - - def setUp(self): - self.integration, self.oi = self.create_provider_integration_for( - self.organization, self.user, provider="example", name="Example" - ) - - self.repo = self.create_repo( - project=self.project, - name="example/foo", - ) - self.repo.integration_id = self.integration.id - self.repo.provider = "example" - self.repo.save() - - self.login_as(self.user) - - def setup_code_mapping(self, stack_root, source_root): - return self.create_code_mapping( - organization_integration=self.oi, - project=self.project, - repo=self.repo, - stack_root=stack_root, - source_root=source_root, - ) - - def test_no_feature(self): - self.get_error_response(self.organization.slug, self.project.slug, status_code=404) - - def test_no_files(self): - """The file query search is missing""" - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_error_response( - self.organization.slug, self.project.slug, status_code=400 - ) - assert response.data == { - "file": [ErrorDetail(string="This field is required.", code="required")] 
- } - - def test_no_configs(self): - """No code mappings have been set for this project""" - # new project that has no configurations set up for it - project = self.create_project( - name="foo", - organization=self.organization, - teams=[self.create_team(organization=self.organization)], - ) - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, project.slug, qs_params={"file": self.filepath} - ) - assert response.data == { - "files": [ - { - "error": "no_code_mappings", - "file": self.filepath, - }, - ], - } - - def test_file_not_found_error(self): - self.setup_code_mapping("foo", "src/foo") - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, self.project.slug, qs_params={"file": self.filepath} - ) - assert response.data == { - "files": [ - { - "attemptedUrl": self.url, - "error": "file_not_found", - "file": self.filepath, - }, - ], - } - - def test_integration_link_forbidden(self): - with patch.object( - ExampleIntegration, "get_stacktrace_link", side_effect=ApiError("error", code=403) - ): - self.setup_code_mapping("foo", "src/foo") - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, self.project.slug, qs_params={"file": self.filepath} - ) - assert response.data == { - "files": [ - { - "attemptedUrl": self.url, - "error": "integration_link_forbidden", - "file": self.filepath, - }, - ], - } - - def test_stack_root_mismatch_error(self): - self.setup_code_mapping("baz", "src/foo") - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, self.project.slug, qs_params={"file": self.filepath} - ) - assert response.data == { - "files": [ - { - "error": "stack_root_mismatch", - "file": self.filepath, - }, - ], - } - - def test_config_and_source_url(self): - with patch.object( - ExampleIntegration, "get_stacktrace_link", return_value="https://sourceurl.com" - ): - self.setup_code_mapping("foo", "src/foo") - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, self.project.slug, qs_params={"file": self.filepath} - ) - assert response.data == { - "files": [ - { - "file": self.filepath, - "sourceUrl": self.url, - }, - ], - } - - def test_config_and_source_url_with_ref(self): - with patch.object( - ExampleIntegration, "get_stacktrace_link", return_value="https://sourceurl.com" - ): - self.setup_code_mapping("foo", "src/foo") - - qs = { - "ref": "3c2e87573d3bd16f61cf08fece0638cc47a4fc22", - "file": self.filepath, - } - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, self.project.slug, qs_params=qs - ) - assert response.data == { - "files": [ - { - "file": self.filepath, - "sourceUrl": "https://example.com/example/foo/blob/3c2e87573d3bd16f61cf08fece0638cc47a4fc22/src/foo/bar/baz.py", - }, - ], - } - - def test_fallback_to_default_branch(self): - with patch.object( - ExampleIntegration, "get_stacktrace_link", side_effect=[None, "https://sourceurl.com"] - ): - self.setup_code_mapping("foo", "src/foo") - - qs = { - "ref": "3c2e87573d3bd16f61cf08fece0638cc47a4fc22", - "file": self.filepath, - } - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, 
self.project.slug, qs_params=qs - ) - assert response.data == { - "files": [ - { - "file": self.filepath, - "sourceUrl": self.url, - }, - ], - } - - def test_second_config_works(self): - code_mapping1 = self.setup_code_mapping("foo", "src/foo") - code_mapping2 = self.setup_code_mapping("foo/bar", "bar") - - # this is the code mapping order that will be tried - assert get_sorted_code_mapping_configs(self.project) == [code_mapping2, code_mapping1] - - with patch.object( - ExampleIntegration, "get_stacktrace_link", side_effect=[None, "https://sourceurl.com"] - ): - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, self.project.slug, qs_params={"file": self.filepath} - ) - assert response.data == { - "files": [ - { - "file": self.filepath, - "sourceUrl": self.url, - }, - ], - } - - def test_multiple_configs_and_files(self): - files = [ - "foo0/bar.py", - "foo0/baz.py", - "foo1/bar.py", - "foo1/baz.py", - "foo2/bar.py", - "foo2/baz.py", - "foo3/bar.py", - "foo3/baz.py", - "foo4/bar.py", - "foo4/baz.py", - ] - - expected = [ - { - "error": "max_code_mappings_applied", - "file": "foo0/bar.py", - }, - { - "error": "max_code_mappings_applied", - "file": "foo0/baz.py", - }, - { - "file": "foo1/bar.py", - "sourceUrl": "https://example.com/example/foo/blob/master/src/foo1/bar.py", - }, - { - "file": "foo1/baz.py", - "sourceUrl": "https://example.com/example/foo/blob/master/src/foo1/baz.py", - }, - { - "attemptedUrl": "https://example.com/example/foo/blob/master/src/foo2/bar.py", - "error": "file_not_found", - "file": "foo2/bar.py", - }, - { - "attemptedUrl": "https://example.com/example/foo/blob/master/src/foo2/baz.py", - "error": "file_not_found", - "file": "foo2/baz.py", - }, - { - "file": "foo3/bar.py", - "sourceUrl": "https://example.com/example/foo/blob/master/src/foo3/bar.py", - }, - { - "file": "foo3/baz.py", - "sourceUrl": "https://example.com/example/foo/blob/master/src/foo3/baz.py", - }, - { - "error": "stack_root_mismatch", - "file": "foo4/bar.py", - }, - { - "error": "stack_root_mismatch", - "file": "foo4/baz.py", - }, - ] - - code_mapping1 = self.setup_code_mapping("bar", "") - code_mapping2 = self.setup_code_mapping("foo0", "src/foo0") - code_mapping3 = self.setup_code_mapping("foo1", "src/foo1") - code_mapping4 = self.setup_code_mapping("foo2", "src/foo2") - code_mapping5 = self.setup_code_mapping("foo3", "src/foo3") - code_mapping6 = self.setup_code_mapping("baz", "") - - # this is the code mapping order that will be tried - assert get_sorted_code_mapping_configs(self.project) == [ - code_mapping6, - code_mapping5, - code_mapping4, - code_mapping3, - code_mapping2, - code_mapping1, - ] - - with patch.object( - ExampleIntegration, - "get_stacktrace_link", - side_effect=["https://sourceurl.com", None, "https://sourceurl.com", None], - ): - qs = {"file": files} - - with self.feature(["organizations:profiling-stacktrace-links"]): - response = self.get_success_response( - self.organization.slug, self.project.slug, qs_params=qs - ) - assert response.data == {"files": expected} From 742e9c94bea74c98afcadfbc6dece3563e7230dc Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Thu, 8 Feb 2024 14:29:55 -0800 Subject: [PATCH 191/357] chore(staff): Let staff access doc integrations endpoints (#64738) After this PR, the doc integrations _admin tab should be complete :) --- ### Summary The following applies to: 1. `DocIntegrationsEndpoint` - GET, POST 2. 
`DocIntegrationDetailsEndpoint` - GET, PUT, DELETE
3. `DocIntegrationAvatarEndpoint` - GET, PUT

GET - allows everyone with scopes, superuser can fetch unpublished doc
integration(s)
PUT, POST, DEL - Limited to superuser. Because these are only used in _admin,
we will eventually prevent superuser entirely and allow only staff once the
staff feature flag is rolled out.

---

### Video Using Staff

https://github.com/getsentry/sentry/assets/67301797/bc7adab2-46b2-4678-a34d-4d49d83fa1ab

---
 src/sentry/api/bases/doc_integrations.py      | 31 ++++++-
 .../integrations/doc_integrations/index.py    |  5 +-
 .../endpoints/test_doc_integration_avatar.py  | 48 +++++++++--
 .../endpoints/test_doc_integration_details.py | 85 ++++++++++++++++---
 .../api/endpoints/test_doc_integrations.py    | 60 +++++++++++--
 5 files changed, 198 insertions(+), 31 deletions(-)

diff --git a/src/sentry/api/bases/doc_integrations.py b/src/sentry/api/bases/doc_integrations.py
index 7a3d734a237f8c..091d10ec7f5980 100644
--- a/src/sentry/api/bases/doc_integrations.py
+++ b/src/sentry/api/bases/doc_integrations.py
@@ -5,7 +5,7 @@
 from sentry.api.base import Endpoint
 from sentry.api.bases.integration import PARANOID_GET
-from sentry.api.permissions import SentryPermission
+from sentry.api.permissions import SentryPermission, StaffPermissionMixin
 from sentry.api.validators.doc_integration import METADATA_PROPERTIES
 from sentry.auth.superuser import is_active_superuser
 from sentry.models.integrations.doc_integration import DocIntegration
@@ -14,12 +14,23 @@
 class DocIntegrationsPermission(SentryPermission):
+    """
+    Allows all org members to access GET as long as they have the necessary
+    scopes. For item endpoints, the doc integration must be published.
+
+    # TODO(schew2381): Remove superuser language once staff feature flag is rolled out
+    Superusers can access unpublished doc integrations (GET) and also use the
+    PUT + DEL endpoints, which are all accessible through _admin.
+    """
+
     scope_map = {"GET": PARANOID_GET}
 
     def has_permission(self, request: Request, view: object) -> bool:
         if not super().has_permission(request, view):
             return False
 
+        # TODO(schew2381): Remove superuser check once staff feature flag is rolled out.
+        # We want to allow staff through the StaffPermissionMixin instead of mixing logic here.
         if is_active_superuser(request) or request.method == "GET":
             return True
 
@@ -31,6 +42,8 @@ def has_object_permission(
         if not hasattr(request, "user") or not request.user:
             return False
 
+        # TODO(schew2381): Remove superuser check once staff feature flag is rolled out.
+        # We want to allow staff through the StaffPermissionMixin instead of mixing logic here.
         if is_active_superuser(request):
             return True
 
@@ -40,14 +53,28 @@ def has_object_permission(
         return False
 
 
+class DocIntegrationsAndStaffPermission(StaffPermissionMixin, DocIntegrationsPermission):
+    """Allows staff to access all doc integration endpoints"""
+
+    pass
+
+
 class DocIntegrationsBaseEndpoint(Endpoint):
-    permission_classes = (DocIntegrationsPermission,)
+    """
+    Base endpoint used for doc integration collection endpoints.
+    """
+
+    permission_classes = (DocIntegrationsAndStaffPermission,)
 
     def generate_incoming_metadata(self, request: Request) -> JSONData:
         return {k: v for k, v in request.json_body.items() if k in METADATA_PROPERTIES}
 
 
 class DocIntegrationBaseEndpoint(DocIntegrationsBaseEndpoint):
+    """
+    Base endpoint used for doc integration item endpoints.
+ """ + def convert_args(self, request: Request, doc_integration_slug: str, *args, **kwargs): try: doc_integration = DocIntegration.objects.get(slug=doc_integration_slug) diff --git a/src/sentry/api/endpoints/integrations/doc_integrations/index.py b/src/sentry/api/endpoints/integrations/doc_integrations/index.py index 66055e4bae7606..3ef7beb6c3b23b 100644 --- a/src/sentry/api/endpoints/integrations/doc_integrations/index.py +++ b/src/sentry/api/endpoints/integrations/doc_integrations/index.py @@ -11,7 +11,7 @@ from sentry.api.paginator import OffsetPaginator from sentry.api.serializers import serialize from sentry.api.serializers.rest_framework import DocIntegrationSerializer -from sentry.auth.superuser import is_active_superuser +from sentry.auth.elevated_mode import has_elevated_mode from sentry.models.integrations.doc_integration import DocIntegration logger = logging.getLogger(__name__) @@ -26,7 +26,8 @@ class DocIntegrationsEndpoint(DocIntegrationsBaseEndpoint): } def get(self, request: Request): - if is_active_superuser(request): + # TODO(schew2381): Change to is_active_staff once the feature flag is rolled out. + if has_elevated_mode(request): queryset = DocIntegration.objects.all() else: queryset = DocIntegration.objects.filter(is_draft=False) diff --git a/tests/sentry/api/endpoints/test_doc_integration_avatar.py b/tests/sentry/api/endpoints/test_doc_integration_avatar.py index b9f8d35d8a2ee2..8be73de75e0561 100644 --- a/tests/sentry/api/endpoints/test_doc_integration_avatar.py +++ b/tests/sentry/api/endpoints/test_doc_integration_avatar.py @@ -19,6 +19,7 @@ class DocIntegrationAvatarTest(APITestCase): def setUp(self): self.user = self.create_user(email="peter@marvel.com", is_superuser=True) self.superuser = self.create_user(email="gwen@marvel.com", is_superuser=True) + self.staff_user = self.create_user(is_staff=True) self.draft_doc = self.create_doc_integration( name="spiderman", is_draft=True, has_avatar=True ) @@ -35,7 +36,7 @@ def setUp(self): class GetDocIntegrationAvatarTest(DocIntegrationAvatarTest): method = "GET" - def test_view_avatar_for_user(self): + def test_user_view_avatar(self): """ Tests that regular users can see only published doc integration avatars """ @@ -49,7 +50,8 @@ def test_view_avatar_for_user(self): self.draft_doc.slug, status_code=status.HTTP_403_FORBIDDEN ) - def test_view_avatar_for_superuser(self): + # TODO(schew2381): Change test to check that superusers can only see published doc integration avatars + def test_superuser_view_avatar(self): """ Tests that superusers can see all doc integration avatars """ @@ -59,12 +61,22 @@ def test_view_avatar_for_superuser(self): assert serialize(doc) == response.data assert serialize(doc.avatar.get()) == response.data["avatar"] + def test_staff_view_avatar(self): + """ + Tests that staff can see all doc integration avatars + """ + self.login_as(user=self.staff_user, staff=True) + for doc in [self.published_doc, self.draft_doc]: + response = self.get_success_response(doc.slug, status_code=status.HTTP_200_OK) + assert serialize(doc) == response.data + assert serialize(doc.avatar.get()) == response.data["avatar"] + @control_silo_test class PutDocIntegrationAvatarTest(DocIntegrationAvatarTest): method = "PUT" - def test_upload_avatar_for_user(self): + def test_user_upload_avatar(self): """ Tests that regular users cannot upload doc integration avatars """ @@ -72,7 +84,8 @@ def test_upload_avatar_for_user(self): self.get_error_response(self.published_doc.slug, status_code=status.HTTP_403_FORBIDDEN) 
        self.get_error_response(self.draft_doc.slug, status_code=status.HTTP_403_FORBIDDEN)
 
-    def test_upload_avatar_for_superuser(self):
+    # TODO(schew2381): Change test to check that superusers cannot upload doc integration avatars
+    def test_superuser_upload_avatar(self):
         """
         Tests that superusers can upload avatars
         """
@@ -97,11 +110,36 @@ def test_upload_avatar_for_superuser(self):
         assert serialize(prev_avatar) != response.data["avatar"]
         assert prev_avatar.control_file_id != doc.avatar.get().control_file_id
 
+    def test_staff_upload_avatar(self):
+        """
+        Tests that staff can upload avatars
+        """
+        with self.options(
+            {
+                "filestore.control.backend": options_store.get("filestore.backend"),
+                "filestore.control.options": options_store.get("filestore.options"),
+            }
+        ):
+            self.login_as(user=self.staff_user, staff=True)
+
+            with assume_test_silo_mode(SiloMode.CONTROL), override_settings(
+                SILO_MODE=SiloMode.CONTROL
+            ):
+                for doc in [self.published_doc, self.draft_doc]:
+                    prev_avatar = doc.avatar.get()
+                    response = self.get_success_response(
+                        doc.slug, status_code=status.HTTP_200_OK, **self.avatar_payload
+                    )
+                    assert serialize(doc) == response.data
+                    assert serialize(doc.avatar.get()) == response.data["avatar"]
+                    assert serialize(prev_avatar) != response.data["avatar"]
+                    assert prev_avatar.control_file_id != doc.avatar.get().control_file_id
+
     def test_upload_avatar_payload_structure(self):
         """
         Tests that errors are thrown on malformed upload payloads
         """
-        self.login_as(user=self.superuser, superuser=True)
+        self.login_as(user=self.staff_user, staff=True)
         # Structured as 'error-description' : (malformed-payload, erroring-fields)
         invalid_payloads: dict[str, tuple[dict[str, Any], list[str]]] = {
             "empty_payload": ({}, ["avatar_photo", "avatar_type"]),
diff --git a/tests/sentry/api/endpoints/test_doc_integration_details.py b/tests/sentry/api/endpoints/test_doc_integration_details.py
index 5457ba41a3bd49..ab03220871e192 100644
--- a/tests/sentry/api/endpoints/test_doc_integration_details.py
+++ b/tests/sentry/api/endpoints/test_doc_integration_details.py
@@ -18,6 +18,7 @@ class DocIntegrationDetailsTest(APITestCase):
     def setUp(self):
         self.user = self.create_user(email="jinx@lol.com")
         self.superuser = self.create_user(email="vi@lol.com", is_superuser=True)
+        self.staff_user = self.create_user(is_staff=True)
         self.doc_1 = self.create_doc_integration(name="test_1", is_draft=True, has_avatar=False)
         self.doc_2 = self.create_doc_integration(
             name="test_2",
@@ -35,7 +36,28 @@ def setUp(self):
 class GetDocIntegrationDetailsTest(DocIntegrationDetailsTest):
     method = "GET"
 
-    def test_read_doc_for_superuser(self):
+    def test_staff_read_doc(self):
+        """
+        Tests that any DocIntegration is visible (with all the expected data)
+        for those with staff permissions
+        """
+        self.login_as(user=self.staff_user, staff=True)
+        # Non-draft DocIntegration, with features and an avatar
+        response = self.get_success_response(self.doc_2.slug, status_code=status.HTTP_200_OK)
+        assert serialize(self.doc_2) == response.data
+        features = IntegrationFeature.objects.filter(
+            target_id=self.doc_2.id, target_type=IntegrationTypes.DOC_INTEGRATION.value
+        )
+        for feature in features:
+            assert serialize(feature) in response.data["features"]
+        assert serialize(self.doc_2.avatar.get()) == response.data["avatar"]
+        # Draft DocIntegration, without features or an avatar
+        response = self.get_success_response(self.doc_1.slug, status_code=status.HTTP_200_OK)
+        assert serialize(self.doc_1) == response.data
+        assert not response.data["avatar"]
+
+    # TODO(schew2381): Change test to check that superusers can only fetch non-draft DocIntegrations
+    def test_superuser_read_doc(self):
         """
         Tests that any DocIntegration is visible (with all the expected data)
         for those with superuser permissions
@@ -55,7 +77,7 @@ def test_read_doc_for_superuser(self):
         assert serialize(self.doc_1) == response.data
         assert not response.data["avatar"]
 
-    def test_read_doc_for_public(self):
+    def test_public_read_doc(self):
         """
         Tests that only non-draft DocIntegrations (with all the expected data)
         are visible for those without superuser permissions
@@ -89,7 +111,32 @@ class PutDocIntegrationDetailsTest(DocIntegrationDetailsTest):
     }
     ignored_keys = ["metadata"]
 
-    def test_update_doc_for_superuser(self):
+    def setUp(self):
+        super().setUp()
+        self.login_as(user=self.staff_user, staff=True)
+
+    def test_staff_update_doc(self):
+        """
+        Tests that a DocIntegration can be updated by staff requests
+        """
+        response = self.get_success_response(
+            self.doc_2.slug, status_code=status.HTTP_200_OK, **self.payload
+        )
+        self.doc_2.refresh_from_db()
+        assert serialize(self.doc_2) == response.data
+        features = IntegrationFeature.objects.filter(
+            target_id=self.doc_2.id, target_type=IntegrationTypes.DOC_INTEGRATION.value
+        )
+        assert features.exists()
+        assert len(features) == 3
+        for feature in features:
+            # Ensure payload features are in the database
+            assert feature.feature in self.payload["features"]
+            # Ensure they are also serialized in the response
+            assert serialize(feature) in response.data["features"]
+
+    # TODO(schew2381): Change test to check that superusers cannot update DocIntegrations
+    def test_superuser_update_doc(self):
         """
         Tests that a DocIntegration can be updated by superuser requests
         """
@@ -125,7 +172,6 @@ def test_update_removes_unused_features(self):
         Tests that DocIntegration updates remove any unused
         and no longer necessary features from the database
         """
-        self.login_as(user=self.superuser, superuser=True)
         self.get_success_response(self.doc_2.slug, status_code=status.HTTP_200_OK, **self.payload)
         unused_features = IntegrationFeature.objects.filter(
             target_id=self.doc_2.id,
@@ -138,7 +184,6 @@ def test_update_retains_carryover_features(self):
         Tests that DocIntegration updates retain any existing features
         if applicable to avoid pointless database transactions
         """
-        self.login_as(user=self.superuser, superuser=True)
         unaffected_feature = IntegrationFeature.objects.get(
             target_id=self.doc_2.id, target_type=IntegrationTypes.DOC_INTEGRATION.value, feature=4
         )
@@ -152,7 +197,6 @@ def test_update_duplicate_features(self):
         Tests that providing duplicate keys do not result in a
         server error; instead, the excess are ignored.
""" - self.login_as(user=self.superuser, superuser=True) payload = {**self.payload, "features": [0, 0, 0, 0, 1, 1, 1, 2]} self.get_success_response(self.doc_2.slug, status_code=status.HTTP_200_OK, **payload) features = IntegrationFeature.objects.filter( @@ -167,7 +211,6 @@ def test_update_does_not_change_slug(self): effect on the slug of the DocIntegration """ previous_slug = self.doc_2.slug - self.login_as(user=self.superuser, superuser=True) self.get_success_response(self.doc_2.slug, status_code=status.HTTP_200_OK, **self.payload) self.doc_2.refresh_from_db() assert self.doc_2.slug == previous_slug @@ -176,7 +219,6 @@ def test_update_invalid_metadata(self): """ Tests that incorrectly structured metadata throws an error """ - self.login_as(user=self.superuser, superuser=True) invalid_resources = { "not_an_array": {}, "extra_keys": [{**self.payload["resources"][0], "extra": "key"}], @@ -195,7 +237,6 @@ def test_update_empty_metadata(self): metadata contained on the record """ previous_metadata = self.doc_2.metadata - self.login_as(user=self.superuser, superuser=True) payload = {**self.payload} del payload["resources"] response = self.get_success_response( @@ -210,7 +251,6 @@ def test_update_ignore_keys(self): Tests that certain reserved keys cannot be overridden by the request payload. They must be created by the API. """ - self.login_as(user=self.superuser, superuser=True) payload = {**self.payload, "metadata": {"should": "not override"}} self.get_success_response(self.doc_2.slug, status_code=status.HTTP_200_OK, **payload) # Ensure the DocIntegration was not created with the ignored keys' values @@ -222,7 +262,6 @@ def test_update_simple_without_avatar(self): Tests that the DocIntegration can be edited without an associated DocIntegrationAvatar. """ - self.login_as(user=self.superuser, superuser=True) payload = {**self.payload, "is_draft": True} response = self.get_success_response( self.doc_1.slug, status_code=status.HTTP_200_OK, **payload @@ -235,7 +274,6 @@ def test_update_publish_without_avatar(self): Tests that the DocIntegration cannot be published without an associated DocIntegrationAvatar. 
""" - self.login_as(user=self.superuser, superuser=True) response = self.get_error_response( self.doc_1.slug, status_code=status.HTTP_400_BAD_REQUEST, **self.payload ) @@ -253,7 +291,26 @@ def test_update_publish_without_avatar(self): class DeleteDocIntegrationDetailsTest(DocIntegrationDetailsTest): method = "DELETE" - def test_delete_valid_for_superuser(self): + def test_staff_delete_valid(self): + """ + Tests that the delete method works for those with superuser + permissions, deleting the DocIntegration and associated + IntegrationFeatures and DocIntegrationAvatar + """ + self.login_as(user=self.staff_user, staff=True) + features = IntegrationFeature.objects.filter( + target_id=self.doc_delete.id, target_type=IntegrationTypes.DOC_INTEGRATION.value + ) + assert features.exists() + assert self.doc_delete.avatar.exists() + self.get_success_response(self.doc_delete.slug, status_code=status.HTTP_204_NO_CONTENT) + with pytest.raises(DocIntegration.DoesNotExist): + DocIntegration.objects.get(id=self.doc_delete.id) + assert not features.exists() + assert not self.doc_delete.avatar.exists() + + # TODO(schew2381): Change test to check that superusers cannot delete DocIntegrations + def test_superuser_delete_valid(self): """ Tests that the delete method works for those with superuser permissions, deleting the DocIntegration and associated @@ -271,7 +328,7 @@ def test_delete_valid_for_superuser(self): assert not features.exists() assert not self.doc_delete.avatar.exists() - def test_delete_invalid_for_public(self): + def test_public_delete_invalid(self): """ Tests that the delete method is not accessible by those with regular member permissions, and no changes occur in the database. diff --git a/tests/sentry/api/endpoints/test_doc_integrations.py b/tests/sentry/api/endpoints/test_doc_integrations.py index 00742122f13056..e07899d00042ef 100644 --- a/tests/sentry/api/endpoints/test_doc_integrations.py +++ b/tests/sentry/api/endpoints/test_doc_integrations.py @@ -9,6 +9,7 @@ from sentry.models.integrations.doc_integration import DocIntegration from sentry.models.integrations.integration_feature import IntegrationFeature, IntegrationTypes from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers import with_feature from sentry.testutils.silo import control_silo_test from sentry.utils.json import JSONData @@ -19,6 +20,7 @@ class DocIntegrationsTest(APITestCase): def setUp(self): self.user = self.create_user(email="jinx@lol.com") self.superuser = self.create_user(email="vi@lol.com", is_superuser=True) + self.staff_user = self.create_user(is_staff=True) self.doc_1 = self.create_doc_integration(name="test_1", is_draft=False, has_avatar=True) self.doc_2 = self.create_doc_integration(name="test_2", is_draft=True, has_avatar=True) self.doc_3 = self.create_doc_integration( @@ -36,7 +38,30 @@ def get_avatars(self, response: Response) -> list[JSONData]: class GetDocIntegrationsTest(DocIntegrationsTest): method = "GET" - def test_read_docs_for_superuser(self): + @with_feature("auth:enterprise-staff-cookie") + def test_staff_read_docs(self): + """ + Tests that all DocIntegrations are returned for staff users, + along with serialized versions of their avatars and IntegrationFeatures + """ + self.login_as(user=self.staff_user, staff=True) + response = self.get_success_response(status_code=status.HTTP_200_OK) + assert len(response.data) == 3 + for doc in [self.doc_1, self.doc_2, self.doc_3]: + assert serialize(doc) in response.data + # Check that DocIntegrationAvatars were serialized + for doc in 
+            assert doc.avatar.exists()
+            assert serialize(doc.avatar.get()) in self.get_avatars(response)
+        # Check that IntegrationFeatures were also serialized
+        features = IntegrationFeature.objects.filter(
+            target_id=self.doc_3.id, target_type=IntegrationTypes.DOC_INTEGRATION.value
+        )
+        for feature in features:
+            assert serialize(feature) in serialize(self.doc_3)["features"]
+
+    # TODO(schew2381): Change test to check superuser can only fetch non-draft DocIntegrations
+    def test_superuser_read_docs(self):
         """
         Tests that all DocIntegrations are returned for super users,
         along with serialized versions of their avatars and IntegrationFeatures
@@ -93,7 +118,32 @@ class PostDocIntegrationsTest(DocIntegrationsTest):
     }
     ignored_keys = ["is_draft", "metadata"]
 
-    def test_create_doc_for_superuser(self):
+    def setUp(self):
+        super().setUp()
+        self.login_as(user=self.staff_user, staff=True)
+
+    def test_staff_create_doc(self):
+        """
+        Tests that a draft DocIntegration is created for staff requests along
+        with all the appropriate IntegrationFeatures
+        """
+        response = self.get_success_response(status_code=status.HTTP_201_CREATED, **self.payload)
+        doc = DocIntegration.objects.get(name=self.payload["name"], author=self.payload["author"])
+        assert serialize(doc) == response.data
+        assert doc.is_draft
+        features = IntegrationFeature.objects.filter(
+            target_id=doc.id, target_type=IntegrationTypes.DOC_INTEGRATION.value
+        )
+        assert features.exists()
+        assert len(features) == 3
+        for feature in features:
+            # Ensure payload features are in the database
+            assert feature.feature in self.payload["features"]
+            # Ensure they are also serialized in the response
+            assert serialize(feature) in response.data["features"]
+
+    # TODO(schew2381): Change test to check superuser can't access POST
+    def test_superuser_create_doc(self):
         """
         Tests that a draft DocIntegration is created for superuser requests along
         with all the appropriate IntegrationFeatures
@@ -125,7 +175,6 @@ def test_create_repeated_slug(self):
         """
         Tests that repeated names throw errors when generating slugs
         """
-        self.login_as(user=self.superuser, superuser=True)
         payload = {**self.payload, "name": self.doc_1.name}
         response = self.get_error_response(status_code=status.HTTP_400_BAD_REQUEST, **payload)
         assert "name" in response.data.keys()
@@ -134,7 +183,6 @@ def test_generated_slug_not_entirely_numeric(self):
         """
         Tests that generated slug based on name is not entirely numeric
         """
-        self.login_as(user=self.superuser, superuser=True)
         payload = {**self.payload, "name": "1234"}
         response = self.get_success_response(status_code=status.HTTP_201_CREATED, **payload)
 
@@ -146,7 +194,6 @@ def test_create_invalid_metadata(self):
         """
         Tests that incorrectly structured metadata throws an error
         """
-        self.login_as(user=self.superuser, superuser=True)
         invalid_resources = {
             "not_an_array": {},
             "extra_keys": [{**self.payload["resources"][0], "extra": "key"}],
@@ -162,7 +209,6 @@ def test_create_empty_metadata(self):
         Tests that sending no metadata keys does not trigger
         any server/database errors
         """
-        self.login_as(user=self.superuser, superuser=True)
         payload = {**self.payload}
         del payload["resources"]
         response = self.get_success_response(status_code=status.HTTP_201_CREATED, **payload)
@@ -173,7 +219,6 @@ def test_create_ignore_keys(self):
         Tests that certain reserved keys cannot be overridden
         by the request payload. They must be created by the API.
""" - self.login_as(user=self.superuser, superuser=True) payload = {**self.payload, "is_draft": False, "metadata": {"should": "not override"}} self.get_success_response(status_code=status.HTTP_201_CREATED, **payload) doc = DocIntegration.objects.get(name=self.payload["name"], author=self.payload["author"]) @@ -186,7 +231,6 @@ def test_create_duplicate_features(self): Tests that providing duplicate keys do not result in a server error; instead, the excess are ignored. """ - self.login_as(user=self.superuser, superuser=True) payload = {**self.payload, "features": [0, 0, 0, 0, 1, 1, 1, 2]} self.get_success_response(status_code=status.HTTP_201_CREATED, **payload) doc = DocIntegration.objects.get(name=self.payload["name"], author=self.payload["author"]) From 4750c0db29763771b5231a3f428e3555b1fa8202 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Thu, 8 Feb 2024 15:26:06 -0800 Subject: [PATCH 192/357] fix(issues): Add timeline menu title, fix rerendering (#64898) --- .../traceTimeline/traceTimelineEvents.tsx | 8 ++++++-- .../traceTimeline/traceTimelineTooltip.tsx | 11 ++++++++++- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index 3b2986745ab677..ebd52556163227 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -61,9 +61,10 @@ export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { column - 1, durationMs / totalColumns ); + const hasCurrentEvent = colEvents.some(e => e.id === event.id); return ( @@ -156,7 +157,10 @@ function NodeGroup({ {Array.from(eventsByColumn.entries()).map(([column, groupEvents]) => { const isCurrentNode = groupEvents.some(e => e.id === currentEventId); return ( - + {isCurrentNode && ( diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx index 3861103fa92b2e..c742e64c94b9c5 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx @@ -40,6 +40,7 @@ export function TraceTimelineTooltip({event, timelineEvents}: TraceTimelineToolt {displayYouAreHere && {t('You are here')}} + {t('Around the same time')} {filteredTimelineEvents.slice(0, 3).map(timelineEvent => { const project = projects.find(p => p.slug === timelineEvent.project); return ( @@ -115,7 +116,15 @@ const UnstyledUnorderedList = styled('div')` const EventItemsWrapper = styled('div')` display: flex; flex-direction: column; - padding: ${space(0.5)}; + padding: ${space(1)} ${space(0.5)} ${space(0.5)} ${space(0.5)}; +`; + +const EventItemsTitle = styled('div')` + padding-left: ${space(1)}; + text-transform: uppercase; + font-size: ${p => p.theme.fontSizeExtraSmall}; + font-weight: 600; + color: ${p => p.theme.subText}; `; const YouAreHere = styled('div')` From 78f4a831284a1e9106450ec138ef1878990b90c5 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Thu, 8 Feb 2024 15:44:58 -0800 Subject: [PATCH 193/357] ref(replays): update missing replay banner (#64897) Closes https://github.com/getsentry/sentry/issues/53082 The banner is collapsed by default https://github.com/getsentry/sentry/assets/56095982/604af852-74b1-4a4a-bf48-f879627b6f90 --- .../replays/alerts/missingReplayAlert.tsx | 50 
+++++++++---------- 1 file changed, 24 insertions(+), 26 deletions(-) diff --git a/static/app/components/replays/alerts/missingReplayAlert.tsx b/static/app/components/replays/alerts/missingReplayAlert.tsx index f31dd9f3b9ceab..9270dac4908b16 100644 --- a/static/app/components/replays/alerts/missingReplayAlert.tsx +++ b/static/app/components/replays/alerts/missingReplayAlert.tsx @@ -1,5 +1,7 @@ +import {Fragment} from 'react'; +import styled from '@emotion/styled'; + import {Alert} from 'sentry/components/alert'; -import {LinkButton} from 'sentry/components/button'; import ExternalLink from 'sentry/components/links/externalLink'; import Link from 'sentry/components/links/link'; import List from 'sentry/components/list'; @@ -12,7 +14,7 @@ interface Props { export default function MissingReplayAlert({orgSlug}: Props) { const reasons = [ - t('The replay is still processing'), + t('The replay is still processing.'), tct( 'The replay was rate-limited and could not be accepted. [link:View the stats page] for more information.', { @@ -21,38 +23,34 @@ export default function MissingReplayAlert({orgSlug}: Props) { ), t('The replay has been deleted by a member in your organization.'), t('There were network errors and the replay was not saved.'), - tct('[link:Read the docs] to understand why.', { - link: ( - - ), - }), ]; - return ( - {t('Read Docs')} - + expand={ + + {t('Other reasons may include:')} + + {reasons.map((reason, i) => ( + {reason} + ))} + + } > -

-      {t(
-        'The replay for this event cannot be found. This could be due to these reasons:'
-      )}
-      {reasons.map((reason, i) => (
-        {reason}
-      ))}
+      {tct(
+        "The replay associated with this event cannot be found. In most cases, the replay wasn't accepted because your replay quota was exceeded at the time. To learn more, [link:read our docs].",
+        {
+          link: (
+          ),
+        }
+      )}
    ); } + +const ListIntro = styled('div')` +line-height: 2em;`; From 8db28eb47a9c7b5b6b12f093f6087ba9f733bcef Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Thu, 8 Feb 2024 15:45:43 -0800 Subject: [PATCH 194/357] fix(replays): improve a11y of replay details (#64923) Closes https://github.com/getsentry/sentry/issues/57285 Contributes to https://github.com/getsentry/sentry/issues/64383 (not fully closed) Several updates to the replay details to improve a11y and general consistency: ## 1. Breadcrumbs tab has errors highlighted in red, to be consistent with other tabs https://github.com/getsentry/sentry/assets/56095982/1597dd45-de4f-4e34-ae2f-b28d20992c8c ## 2. Console tab text is black but background highlight & icon color still persists SCR-20240208-nkcb We also do not show a light grey color for events in the future; all text is the same color. ## 3. Network tab: selected item is highlighted purple https://github.com/getsentry/sentry/assets/56095982/06ede027-6d91-421c-98cd-2fa4e30b413f Same as console tab, we also do not show a light grey color for events in the future; all text is the same color. ## 4. A11y tab has background highlights. No more syntax highlighting or scrollbar issues in the row itself because we're using a tooltip on overflow now. https://github.com/getsentry/sentry/assets/56095982/c98aab20-5b5e-4f53-9e4c-3dcc8af53ca7 ## 5. Everything looks good in dark mode https://github.com/getsentry/sentry/assets/56095982/c66bb2f8-0d82-464b-aa0c-a502e9d0b01d --- .../replays/breadcrumbs/breadcrumbItem.tsx | 5 +- .../replays/virtualizedGrid/bodyCell.tsx | 15 +--- .../accessibility/accessibilityTableCell.tsx | 68 ++++++++++++------- .../replays/detail/console/consoleLogRow.tsx | 13 ++-- 4 files changed, 55 insertions(+), 46 deletions(-) diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx index e507e65be95ce2..4ba181025b271d 100644 --- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx +++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx @@ -72,6 +72,7 @@ function BreadcrumbItem({ return ( onClick?.(frame, e)} onMouseEnter={e => onMouseEnter(frame, e)} @@ -250,7 +251,7 @@ const Description = styled(Tooltip)` color: ${p => p.theme.subText}; `; -const CrumbItem = styled(PanelItem)` +const CrumbItem = styled(PanelItem)<{isErrorFrame?: boolean}>` display: grid; grid-template-columns: max-content auto; align-items: flex-start; @@ -258,7 +259,7 @@ const CrumbItem = styled(PanelItem)` width: 100%; font-size: ${p => p.theme.fontSizeMedium}; - background: transparent; + background: ${p => (p.isErrorFrame ? 
`${p.theme.red100}` : `transparent`)}; padding: ${space(1)}; text-align: left; border: none; diff --git a/static/app/components/replays/virtualizedGrid/bodyCell.tsx b/static/app/components/replays/virtualizedGrid/bodyCell.tsx index 9deb92768b4e11..f5ddd78e2f1a10 100644 --- a/static/app/components/replays/virtualizedGrid/bodyCell.tsx +++ b/static/app/components/replays/virtualizedGrid/bodyCell.tsx @@ -6,7 +6,7 @@ import {space} from 'sentry/styles/space'; const cellBackground = (p: CellProps & {theme: Theme}) => { if (p.isSelected) { - return `background-color: ${p.theme.black};`; + return `background-color: ${p.theme.purple300};`; } if (p.isStatusError) { return `background-color: ${p.theme.red100};`; @@ -19,20 +19,11 @@ const cellBackground = (p: CellProps & {theme: Theme}) => { const cellColor = (p: CellProps & {theme: Theme}) => { if (p.isSelected) { - const color = p.isStatusError - ? p.theme.red300 - : p.isStatusWarning - ? p.theme.yellow300 - : p.theme.white; + const color = p.theme.white; return `color: ${color};`; } - const colors = p.isStatusError - ? [p.theme.red300, p.theme.red400] - : p.isStatusWarning - ? [p.theme.textColor, p.theme.subText] - : ['inherit', p.theme.subText]; - return `color: ${p.hasOccurred !== false ? colors[0] : colors[1]};`; + return `color: inherit`; }; type CellProps = { diff --git a/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx b/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx index 04709a3f694c90..fb67858748ace9 100644 --- a/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx +++ b/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx @@ -1,29 +1,20 @@ -import type {ComponentProps, CSSProperties} from 'react'; +import type {ComponentProps, CSSProperties, ReactNode} from 'react'; import {forwardRef} from 'react'; +import styled from '@emotion/styled'; import classNames from 'classnames'; -import { - Cell, - CodeHighlightCell, - Text, -} from 'sentry/components/replays/virtualizedGrid/bodyCell'; +import {Cell, Text} from 'sentry/components/replays/virtualizedGrid/bodyCell'; +import TextOverflow from 'sentry/components/textOverflow'; import {Tooltip} from 'sentry/components/tooltip'; import {IconFire, IconInfo, IconWarning} from 'sentry/icons'; +import {space} from 'sentry/styles/space'; import type useCrumbHandlers from 'sentry/utils/replays/hooks/useCrumbHandlers'; import type {HydratedA11yFrame} from 'sentry/utils/replays/hydrateA11yFrame'; -import type {Color} from 'sentry/utils/theme'; import useUrlParams from 'sentry/utils/useUrlParams'; import type useSortAccessibility from 'sentry/views/replays/detail/accessibility/useSortAccessibility'; const EMPTY_CELL = '--'; -const IMPACT_ICON_MAPPING: Record = { - minor: , - moderate: , - serious: , - critical: , -}; - interface Props extends ReturnType { a11yIssue: HydratedA11yFrame; columnIndex: number; @@ -57,6 +48,13 @@ const AccessibilityTableCell = forwardRef( const {getParamValue} = useUrlParams('a_detail_row', ''); const isSelected = getParamValue() === String(dataIndex); + const IMPACT_ICON_MAPPING: Record = { + minor: , + moderate: , + serious: , + critical: , + }; + const hasOccurred = currentTime >= a11yIssue.offsetMs; const isBeforeHover = currentHoverTime === undefined || currentHoverTime >= a11yIssue.offsetMs; @@ -99,7 +97,7 @@ const AccessibilityTableCell = forwardRef( const renderFns = [ () => ( - + {a11yIssue.impact ? 
( @@ -109,19 +107,26 @@ const AccessibilityTableCell = forwardRef( EMPTY_CELL )} - + ), () => ( - + {a11yIssue.id ?? EMPTY_CELL} - + ), () => ( - - - {a11yIssue.element.element ?? EMPTY_CELL} - - + + + + {a11yIssue.element.element ?? EMPTY_CELL} + + + ), ]; @@ -130,3 +135,20 @@ const AccessibilityTableCell = forwardRef( ); export default AccessibilityTableCell; + +const StyledTextOverflow = styled(TextOverflow)` +padding-right: ${space(1)};`; + +const StyledCell = styled(Cell)<{ + impact: HydratedA11yFrame['impact']; + isRowSelected: boolean; +}>` +background: ${p => + p.isSelected + ? p.theme.purple300 + : p.impact === 'serious' + ? p.theme.yellow100 + : p.impact === 'critical' + ? p.theme.red100 + : 'transparent'} +`; diff --git a/static/app/views/replays/detail/console/consoleLogRow.tsx b/static/app/views/replays/detail/console/consoleLogRow.tsx index 98c6255d6685a0..02f90f80d806df 100644 --- a/static/app/views/replays/detail/console/consoleLogRow.tsx +++ b/static/app/views/replays/detail/console/consoleLogRow.tsx @@ -104,12 +104,7 @@ const ConsoleLog = styled('div')<{ border-top: 1px solid transparent; border-bottom: 1px solid transparent; - color: ${p => - ['warning', 'error'].includes(String(p.level)) - ? p.theme.alert[String(p.level)].iconColor - : p.hasOccurred - ? 'inherit' - : p.theme.gray300}; + color: ${p => p.theme.gray400}; /* Show the timestamp button "Play" icon when we hover the row. @@ -124,17 +119,17 @@ const ConsoleLog = styled('div')<{ const ICONS = { [BreadcrumbLevelType.ERROR]: ( - + ), [BreadcrumbLevelType.WARNING]: ( - + ), [BreadcrumbLevelType.INFO]: ( - + ), }; From 52b4d0706f77e43552269532b93059d35cfa3537 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Thu, 8 Feb 2024 16:09:46 -0800 Subject: [PATCH 195/357] fix(hc): Makes base user endpoint test region by default compatible (#64718) --- tests/sentry/api/bases/test_user.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/sentry/api/bases/test_user.py b/tests/sentry/api/bases/test_user.py index ee349483068cc0..ed26e6d42b8283 100644 --- a/tests/sentry/api/bases/test_user.py +++ b/tests/sentry/api/bases/test_user.py @@ -15,7 +15,7 @@ from sentry.auth.staff import is_active_staff from sentry.testutils.cases import DRFPermissionTestCase from sentry.testutils.helpers.features import with_feature -from sentry.testutils.silo import all_silo_test, control_silo_test, region_silo_test +from sentry.testutils.silo import all_silo_test, control_silo_test, no_silo_test, region_silo_test @all_silo_test @@ -107,7 +107,7 @@ def test_allows_active_staff(self, mock_is_active_staff): class BaseUserEndpointTest(DRFPermissionTestCase): - endpoint: RegionSiloUserEndpoint | UserEndpoint = UserEndpoint() + endpoint: RegionSiloUserEndpoint | UserEndpoint = RegionSiloUserEndpoint() def test_retrieves_me_anonymous(self): with pytest.raises(ResourceDoesNotExist): @@ -124,11 +124,17 @@ def test_retrieves_user_id(self): assert kwargs["user"].id == user.id +@no_silo_test +class MonolithUserEndpoint(BaseUserEndpointTest): + endpoint = UserEndpoint() + + @control_silo_test -class UserEndpointTest(BaseUserEndpointTest): +class ControlUserEndpointTest(BaseUserEndpointTest): endpoint = UserEndpoint() +# TODO(HC): Delete this once region silo by default changes land @region_silo_test class RegionSiloUserEndpointTest(BaseUserEndpointTest): endpoint = RegionSiloUserEndpoint() From 3ccd269563e784fdcd52b020f7fde6da0f549821 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: 
Thu, 8 Feb 2024 16:44:01 -0800 Subject: [PATCH 196/357] fix(api): Use anyOf instead of oneOf (#64927) --- src/sentry/apidocs/spectacular_ports.py | 9 +++++-- tests/sentry/apidocs/test_extensions.py | 33 ++++++++++++++++++++++--- 2 files changed, 36 insertions(+), 6 deletions(-) diff --git a/src/sentry/apidocs/spectacular_ports.py b/src/sentry/apidocs/spectacular_ports.py index 9c13a55ab7db4b..34abd0e4e417e7 100644 --- a/src/sentry/apidocs/spectacular_ports.py +++ b/src/sentry/apidocs/spectacular_ports.py @@ -142,7 +142,12 @@ def resolve_type_hint(hint) -> Any: elif origin is Union or origin is UnionType: type_args = [arg for arg in args if arg is not type(None)] if len(type_args) > 1: - schema = {"oneOf": [resolve_type_hint(arg) for arg in type_args]} + # We use anyOf instead of oneOf (which DRF uses) b/c there's cases + # where you can have int | float | long, where a valid value can be + # multiple types but errors with oneOf. + # TODO(schew2381): Create issue in drf-spectacular to see if this + # fix makes sense + schema = {"anyOf": [resolve_type_hint(arg) for arg in type_args]} else: schema = resolve_type_hint(type_args[0]) if type(None) in args: @@ -155,7 +160,7 @@ def resolve_type_hint(hint) -> Any: # - https://github.com/tfranzel/drf-spectacular/issues/925 # - https://github.com/OAI/OpenAPI-Specification/issues/1368. if len(args) > 2: - schema["oneOf"].append({"type": "object", "nullable": True}) + schema["anyOf"].append({"type": "object", "nullable": True}) else: schema["nullable"] = True return schema diff --git a/tests/sentry/apidocs/test_extensions.py b/tests/sentry/apidocs/test_extensions.py index b02e03bcfd799b..3101bd17e89312 100644 --- a/tests/sentry/apidocs/test_extensions.py +++ b/tests/sentry/apidocs/test_extensions.py @@ -32,6 +32,7 @@ class BasicSerializerResponse(BasicSerializerOptional): f: Literal[3] g: str | bool h: str | None + i: int | float | None excluded: str @@ -59,10 +60,22 @@ def test_sentry_response_serializer_extension(): "d": {"type": "array", "items": {"type": "integer"}}, "e": {"type": "object", "properties": {"zz": {"type": "string"}}, "required": ["zz"]}, "f": {"enum": [3], "type": "integer"}, - "g": {"oneOf": [{"type": "string"}, {"type": "boolean"}]}, + # Test that a Union generates an anyOf + "g": {"anyOf": [{"type": "string"}, {"type": "boolean"}]}, + # Test that including None with a 2 type Union adds nullable: True + # but does not create an anyOf "h": {"type": "string", "nullable": True}, + # Test that including None with a >2 type Union does not add nullable: True + # but includes {type: "object", nullable: True} in the anyOf + "i": { + "anyOf": [ + {"type": "integer"}, + {"format": "double", "type": "number"}, + {"type": "object", "nullable": True}, + ] + }, }, - "required": ["b", "c", "d", "e", "f", "g", "h"], + "required": ["b", "c", "d", "e", "f", "g", "h", "i"], } @@ -88,10 +101,22 @@ def test_sentry_inline_response_serializer_extension(): "required": ["zz"], }, "f": {"enum": [3], "type": "integer"}, - "g": {"oneOf": [{"type": "string"}, {"type": "boolean"}]}, + # Test that a Union generates an anyOf + "g": {"anyOf": [{"type": "string"}, {"type": "boolean"}]}, + # Test that including None with a 2 type Union adds nullable: True + # but does not create an anyOf "h": {"type": "string", "nullable": True}, + # Test that including None with a >2 type Union does not add nullable: True + # but includes {type: "object", nullable: True} in the anyOf + "i": { + "anyOf": [ + {"type": "integer"}, + {"format": "double", "type": "number"}, + {"type": 
"object", "nullable": True}, + ] + }, }, - "required": ["b", "c", "d", "e", "f", "g", "h"], + "required": ["b", "c", "d", "e", "f", "g", "h", "i"], }, } From 3cd0a9ccd8c7721d6b67f405593520bd6b3cce4f Mon Sep 17 00:00:00 2001 From: Alex Zaslavsky Date: Thu, 8 Feb 2024 17:02:48 -0800 Subject: [PATCH 197/357] docs(backup): Add clarity about public key kind (#64901) --- src/sentry/runner/commands/backup.py | 35 ++++++++++++++-------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/src/sentry/runner/commands/backup.py b/src/sentry/runner/commands/backup.py index a5dc5a9e45ea5f..87dc9b402a36ad 100644 --- a/src/sentry/runner/commands/backup.py +++ b/src/sentry/runner/commands/backup.py @@ -29,9 +29,9 @@ DECRYPT_WITH_HELP = """A path to a file containing a private key with which to decrypt a tarball previously encrypted using an `export ... --encrypt_with=` command. - The private key provided via this flag should be the complement of the public - key used to encrypt the tarball (this public key is included in the tarball - itself). + The private key provided via this flag should be the complement of the 2048-bit + public RSA key used to encrypt the tarball (this public key is included in the + tarball itself). This flag is mutually exclusive with the `--decrypt-with-gcp-kms` flag.""" @@ -43,8 +43,9 @@ This flag should point to a JSON file containing a single top-level object storing the `project-id`, `location`, `keyring`, `key`, and `version` of the desired asymmetric private key that pairs with the - public key included in the tarball being imported (for more information - on these resource identifiers and how to set up KMS to use the, see: + 2048-bit public RSA key in text (PEM) format included in the tarball + being imported (for more information on these resource identifiers and + how to set up KMS to use the, see: https://cloud.google.com/kms/docs/getting-resource-ids). An example version of this file might look like: @@ -57,18 +58,18 @@ } ``` - Property names must be spelled exactly as above, and the `version` - field in particular must be a string, not an integer.""" - -ENCRYPT_WITH_HELP = """A path to the a public key with which to encrypt this export. If this flag is - enabled and points to a valid key, the output file will be a tarball - containing 3 constituent files: 1. An encrypted JSON file called - `export.json`, which is encrypted using 2. An asymmetrically encrypted data - encryption key (DEK) called `data.key`, which is itself encrypted by 3. The - public key contained in the file supplied to this flag, called `key.pub`. To - decrypt the exported JSON data, decryptors should use the private key paired - with `key.pub` to decrypt the DEK, which can then be used to decrypt the - export data in `export.json`.""" + Property names must be spelled exactly as above, and the `version` field + in particular must be a string, not an integer.""" + +ENCRYPT_WITH_HELP = """A path to the 2048-bit public RSA key in text (PEM) format with which to + encrypt this export. If this flag is enabled and points to a valid key, the + output file will be a tarball containing 3 constituent files: 1. An encrypted + JSON file called `export.json`, which is encrypted using 2. An asymmetrically + encrypted data encryption key (DEK) called `data.key`, which is itself + encrypted by 3. The public key contained in the file supplied to this flag, + called `key.pub`. 
To decrypt the exported JSON data, decryptors should use + the private key paired with `key.pub` to decrypt the DEK, which can then be + used to decrypt the export data in `export.json`.""" ENCRYPT_WITH_GCP_KMS_HELP = """For users that want to avoid storing their own public keys, this flag can be used in lieu of `--encrypt-with` to retrieve those keys from From 291fabef4507db8e7fa237b3d357566a30db793c Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 8 Feb 2024 20:16:52 -0500 Subject: [PATCH 198/357] build: move several linting rules to biome (#64911) --- .eslintrc.js | 30 ++++++++++++++++++++++++++++++ biome.json | 15 +++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/.eslintrc.js b/.eslintrc.js index f80c3cdcf4126e..cc3fcd04bede16 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -34,6 +34,21 @@ module.exports = { // TODO(@anonrig): Remove these rules from eslint-sentry-config. 'import/no-nodejs-modules': 'off', + semi: 'off', + 'use-isnan': 'off', + curly: 'off', + eqeqeq: 'off', + 'no-extra-semi': 'off', + 'no-eq-null': 'off', + 'comma-dangle': 'off', + 'react/jsx-no-target-blank': 'off', + 'react/jsx-no-duplicate-props': 'off', + 'react-hooks/rules-of-hooks': 'off', + 'no-duplicate-case': 'off', + 'no-dupe-keys': 'off', + 'no-redeclare': 'off', + 'no-debugger': 'off', + 'no-unreachable': 'off', }, // JSON file formatting is handled by Biome. ESLint should not be linting // and formatting these files. @@ -45,6 +60,21 @@ module.exports = { rules: { // TODO(@anonrig): Remove these rules from eslint-sentry-config. 'import/no-nodejs-modules': 'off', + semi: 'off', + 'use-isnan': 'off', + curly: 'off', + eqeqeq: 'off', + 'no-extra-semi': 'off', + 'no-eq-null': 'off', + 'comma-dangle': 'off', + 'react/jsx-no-target-blank': 'off', + 'react/jsx-no-duplicate-props': 'off', + 'react-hooks/rules-of-hooks': 'off', + 'no-duplicate-case': 'off', + 'no-dupe-keys': 'off', + 'no-redeclare': 'off', + 'no-debugger': 'off', + 'no-unreachable': 'off', }, }, { diff --git a/biome.json b/biome.json index a50109a6152ccf..c95af0f8d39367 100644 --- a/biome.json +++ b/biome.json @@ -13,6 +13,14 @@ "enabled": true, "rules": { "recommended": false, + "a11y": { + "noBlankTarget": "error" + }, + "correctness": { + "noUnreachable": "error", + "useHookAtTopLevel": "error", + "useIsNan": "error" + }, "complexity": { "useFlatMap": "error" }, @@ -23,6 +31,13 @@ "useImportType": "error" }, "suspicious": { + "noDebugger": "error", + "noDoubleEquals": "error", + "noDuplicateJsxProps": "error", + "noDuplicateObjectKeys": "error", + "noDuplicateParameters": "error", + "noDuplicateCase": "error", + "noRedeclare": "error", "useIsArray": "error" } } From f01a8af0625a03a12363ddd8f70623bd5432b770 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Fri, 9 Feb 2024 09:20:27 +0100 Subject: [PATCH 199/357] fix(stats): hide metrics behind feature flag (#64934) --- static/app/views/organizationStats/index.tsx | 31 ++++++++++++-------- 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/static/app/views/organizationStats/index.tsx b/static/app/views/organizationStats/index.tsx index cded91b80f9c3f..bf530703a8bef6 100644 --- a/static/app/views/organizationStats/index.tsx +++ b/static/app/views/organizationStats/index.tsx @@ -35,6 +35,7 @@ import type { PageFilters, Project, } from 'sentry/types'; +import {hasDDMFeature} from 'sentry/utils/metrics/features'; import withOrganization from 'sentry/utils/withOrganization'; import withPageFilters from 'sentry/utils/withPageFilters'; 
import HeaderTabs from 'sentry/views/organizationStats/header'; @@ -258,12 +259,15 @@ export class OrganizationStats extends Component { return null; } - const hasReplay = organization.features.includes('session-replay'); - const options = hasReplay - ? CHART_OPTIONS_DATACATEGORY - : CHART_OPTIONS_DATACATEGORY.filter( - opt => opt.value !== DATA_CATEGORY_INFO.replay.plural - ); + const options = CHART_OPTIONS_DATACATEGORY.filter(opt => { + if (opt.value === DATA_CATEGORY_INFO.replay.plural) { + return organization.features.includes('session-replay'); + } + if (opt.value === DATA_CATEGORY_INFO.metric_bucket.plural) { + return hasDDMFeature(organization); + } + return true; + }); return ( @@ -313,12 +317,15 @@ export class OrganizationStats extends Component { const {start, end, period, utc} = this.dataDatetime; - const hasReplay = organization.features.includes('session-replay'); - const options = hasReplay - ? CHART_OPTIONS_DATACATEGORY - : CHART_OPTIONS_DATACATEGORY.filter( - opt => opt.value !== DATA_CATEGORY_INFO.replay.plural - ); + const options = CHART_OPTIONS_DATACATEGORY.filter(opt => { + if (opt.value === DATA_CATEGORY_INFO.replay.plural) { + return organization.features.includes('session-replay'); + } + if (opt.value === DATA_CATEGORY_INFO.metric_bucket.plural) { + return hasDDMFeature(organization); + } + return true; + }); return ( From d1a1eab8c8775a3b3f5d96fc3a47619e23cb2bc2 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Fri, 9 Feb 2024 09:21:15 +0100 Subject: [PATCH 200/357] feat(ddm): dashboard feature guard (#64875) --- static/app/views/ddm/contextMenu.tsx | 25 ++++++++++++-- static/app/views/ddm/layout.tsx | 17 +++++++--- static/app/views/ddm/pageHeaderActions.tsx | 38 +++++++++++++++++++--- static/app/views/ddm/queries.tsx | 37 +++++++++++++-------- 4 files changed, 91 insertions(+), 26 deletions(-) diff --git a/static/app/views/ddm/contextMenu.tsx b/static/app/views/ddm/contextMenu.tsx index dde82ff6988c61..8562806df3f1ea 100644 --- a/static/app/views/ddm/contextMenu.tsx +++ b/static/app/views/ddm/contextMenu.tsx @@ -1,10 +1,12 @@ -import {useMemo} from 'react'; +import {Fragment, useMemo} from 'react'; import * as Sentry from '@sentry/react'; import {openAddToDashboardModal, openModal} from 'sentry/actionCreators/modal'; import {navigateTo} from 'sentry/actionCreators/navigation'; +import FeatureDisabled from 'sentry/components/acl/featureDisabled'; import type {MenuItemProps} from 'sentry/components/dropdownMenu'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import {Hovercard} from 'sentry/components/hovercard'; import { IconClose, IconCopy, @@ -33,6 +35,7 @@ import {OrganizationContext} from 'sentry/views/organizationContext'; type ContextMenuProps = { displayType: MetricDisplayType; + hasDashboardFeature: boolean; metricsQuery: MetricsQuery; widgetIndex: number; }; @@ -41,6 +44,7 @@ export function MetricQueryContextMenu({ metricsQuery, displayType, widgetIndex, + hasDashboardFeature, }: ContextMenuProps) { const organization = useOrganization(); const router = useRouter(); @@ -88,8 +92,22 @@ export function MetricQueryContextMenu({ { leadingItems: [], key: 'add-dashoard', - label: t('Add to Dashboard'), - disabled: !createDashboardWidget, + label: hasDashboardFeature ? 
( + {t('Add to Dashboard')} + ) : ( + + } + > + {t('Add to Dashboard')} + + ), + disabled: !hasDashboardFeature || !createDashboardWidget, onAction: () => { trackAnalytics('ddm.add-to-dashboard', { organization, @@ -129,6 +147,7 @@ export function MetricQueryContextMenu({ }, ], [ + hasDashboardFeature, createAlert, createDashboardWidget, metricsQuery.mri, diff --git a/static/app/views/ddm/layout.tsx b/static/app/views/ddm/layout.tsx index ea204f6d1c1610..16f4e9a73f4636 100644 --- a/static/app/views/ddm/layout.tsx +++ b/static/app/views/ddm/layout.tsx @@ -4,6 +4,7 @@ import * as Sentry from '@sentry/react'; import emptyStateImg from 'sentry-images/spot/custom-metrics-empty-state.svg'; +import Feature from 'sentry/components/acl/feature'; import {Button} from 'sentry/components/button'; import FeatureBadge from 'sentry/components/featureBadge'; import FloatingFeedbackWidget from 'sentry/components/feedback/widget/floatingFeedbackWidget'; @@ -63,10 +64,18 @@ export const DDMLayout = memo(() => { - addCustomMetric('header')} - /> + + {({hasFeature}) => ( + addCustomMetric('header')} + hasDashboardFeature={hasFeature} + /> + )} + diff --git a/static/app/views/ddm/pageHeaderActions.tsx b/static/app/views/ddm/pageHeaderActions.tsx index 6d554ec57d0986..7e478cb4d7ae5e 100644 --- a/static/app/views/ddm/pageHeaderActions.tsx +++ b/static/app/views/ddm/pageHeaderActions.tsx @@ -1,10 +1,12 @@ -import {useCallback, useMemo} from 'react'; +import {Fragment, useCallback, useMemo} from 'react'; import * as Sentry from '@sentry/react'; import {navigateTo} from 'sentry/actionCreators/navigation'; +import FeatureDisabled from 'sentry/components/acl/featureDisabled'; import {Button} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import {Hovercard} from 'sentry/components/hovercard'; import { IconAdd, IconBookmark, @@ -28,10 +30,15 @@ import {useCreateDashboard} from 'sentry/views/ddm/useCreateDashboard'; interface Props { addCustomMetric: () => void; + hasDashboardFeature: boolean; showCustomMetricButton: boolean; } -export function PageHeaderActions({showCustomMetricButton, addCustomMetric}: Props) { +export function PageHeaderActions({ + showCustomMetricButton, + addCustomMetric, + hasDashboardFeature, +}: Props) { const router = useRouter(); const organization = useOrganization(); const {selection} = usePageFilters(); @@ -62,7 +69,6 @@ export function PageHeaderActions({showCustomMetricButton, addCustomMetric}: Pro setDefaultQuery(router.location.query); } }, [isDefaultQuery, organization, router.location.query, setDefaultQuery]); - const items = useMemo( () => [ { @@ -81,7 +87,22 @@ export function PageHeaderActions({showCustomMetricButton, addCustomMetric}: Pro { leadingItems: [], key: 'add-dashboard', - label: t('Add to Dashboard'), + label: hasDashboardFeature ? 
( + {t('Add to Dashboard')} + ) : ( + + } + > + {t('Add to Dashboard')} + + ), + disabled: !hasDashboardFeature, onAction: () => { trackAnalytics('ddm.add-to-dashboard', { organization, @@ -97,7 +118,14 @@ export function PageHeaderActions({showCustomMetricButton, addCustomMetric}: Pro onAction: () => navigateTo(`/settings/projects/:projectId/metrics/`, router), }, ], - [addWidget, createDashboard, hasEmptyWidget, organization, router] + [ + addWidget, + createDashboard, + hasEmptyWidget, + organization, + router, + hasDashboardFeature, + ] ); const alertItems = useMemo( diff --git a/static/app/views/ddm/queries.tsx b/static/app/views/ddm/queries.tsx index e00132925003e2..11f1f85667f37d 100644 --- a/static/app/views/ddm/queries.tsx +++ b/static/app/views/ddm/queries.tsx @@ -2,6 +2,7 @@ import {useCallback, useLayoutEffect} from 'react'; import styled from '@emotion/styled'; import * as echarts from 'echarts/core'; +import Feature from 'sentry/components/acl/feature'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {MetricWidgetQueryParams} from 'sentry/utils/metrics/types'; @@ -60,20 +61,28 @@ export function Queries() { isEdit projects={selection.projects} /> - + + {({hasFeature}) => ( + + )} + ))} From 6644c78c269b546c29537a09b3896b0782fb93f4 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Fri, 9 Feb 2024 09:24:45 +0100 Subject: [PATCH 201/357] chore(api): telemetry experience team private apis (#64933) --- .../api/endpoints/organization_metrics_estimation_stats.py | 2 +- src/sentry/api/endpoints/organization_sdk_updates.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sentry/api/endpoints/organization_metrics_estimation_stats.py b/src/sentry/api/endpoints/organization_metrics_estimation_stats.py index 852ed84714cc5a..a711d08a6e7b12 100644 --- a/src/sentry/api/endpoints/organization_metrics_estimation_stats.py +++ b/src/sentry/api/endpoints/organization_metrics_estimation_stats.py @@ -53,7 +53,7 @@ class StatsQualityEstimation(Enum): @region_silo_endpoint class OrganizationMetricsEstimationStatsEndpoint(OrganizationEventsV2EndpointBase): publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } owner = ApiOwner.TELEMETRY_EXPERIENCE """Gets the estimated volume of an organization's metric events.""" diff --git a/src/sentry/api/endpoints/organization_sdk_updates.py b/src/sentry/api/endpoints/organization_sdk_updates.py index b59b28e47d8641..2165aa7d892203 100644 --- a/src/sentry/api/endpoints/organization_sdk_updates.py +++ b/src/sentry/api/endpoints/organization_sdk_updates.py @@ -74,7 +74,7 @@ class OrganizationSdkUpdatesEndpoint(OrganizationEndpoint): owner = ApiOwner.TELEMETRY_EXPERIENCE publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } def get(self, request: Request, organization) -> Response: From 33c040450f5200b98301b4d86f7475c30042ef82 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Fri, 9 Feb 2024 10:32:54 +0100 Subject: [PATCH 202/357] fix(dashboards): widget modal tooltip (#64936) --- static/app/views/dashboards/widgetCard/chart.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/static/app/views/dashboards/widgetCard/chart.tsx b/static/app/views/dashboards/widgetCard/chart.tsx index 56d48187a3e977..757882deeba66a 100644 --- a/static/app/views/dashboards/widgetCard/chart.tsx +++ b/static/app/views/dashboards/widgetCard/chart.tsx @@ 
-421,9 +421,13 @@ class WidgetCardChart extends Component {
 tooltip: {
 trigger: 'axis',
 formatter: (params, asyncTicket) => {
+ const {chartGroup} = this.props;
+ const isInGroup =
+ chartGroup && chartGroup === this.chartRef?.getEchartsInstance().group;
+
 // tooltip is triggered whenever any chart in the group is hovered,
 // so we need to check if the mouse is actually over this chart
- if (!isChartHovered(this.chartRef)) {
+ if (isInGroup && !isChartHovered(this.chartRef)) {
 return '';
 }

From 5e26ab0d011a188695e2f6979830197ca1c97fc1 Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 9 Feb 2024 11:34:53 +0100
Subject: [PATCH 203/357] feat(dashboards): ddm chart intervals (#64939)

---
 static/app/types/metrics.tsx | 12 ++--
 static/app/utils/metrics/index.spec.tsx | 4 +-
 static/app/utils/metrics/index.tsx | 66 ++++++++++---------
 .../dashboards/datasetConfig/metrics.tsx | 2 +-
 .../widgetCard/metricWidgetCard/index.tsx | 2 +-
 static/app/views/ddm/widget.tsx | 2 +-
 6 files changed, 48 insertions(+), 40 deletions(-)

diff --git a/static/app/types/metrics.tsx b/static/app/types/metrics.tsx
index 0d710b098f6e75..136003efca53b7 100644
--- a/static/app/types/metrics.tsx
+++ b/static/app/types/metrics.tsx
@@ -31,7 +31,7 @@ export type MetricsApiRequestMetric = {
 query?: string;
 };

-export type MetricsApiRequestQuery = MetricsApiRequestMetric & {
+export interface MetricsApiRequestQuery extends MetricsApiRequestMetric {
 interval: string;
 end?: DateString;
 environment?: string[];
@@ -41,11 +41,13 @@ export type MetricsApiRequestQuery = MetricsApiRequestMetric & {
 project?: number[];
 start?: DateString;
 statsPeriod?: string;
-};
+}

-export type MetricsApiRequestQueryOptions = Partial<MetricsApiRequestQuery> & {
- fidelity?: 'high' | 'low';
-};
+export type MetricsDataIntervalLadder = 'ddm' | 'bar' | 'dashboard';
+
+export interface MetricsApiRequestQueryOptions extends Partial<MetricsApiRequestQuery> {
+ intervalLadder?: MetricsDataIntervalLadder;
+}

 export type MetricsApiResponse = {
 end: string;
diff --git a/static/app/utils/metrics/index.spec.tsx b/static/app/utils/metrics/index.spec.tsx
index 92a7aed171a192..d20998d127573b 100644
--- a/static/app/utils/metrics/index.spec.tsx
+++ b/static/app/utils/metrics/index.spec.tsx
@@ -183,7 +183,7 @@ describe('getMetricsApiRequestQuery', () => {
 environments: ['production'],
 datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true},
 };
- const overrides: MetricsApiRequestQueryOptions = {fidelity: 'high'};
+ const overrides: MetricsApiRequestQueryOptions = {intervalLadder: 'ddm'};

 const result = getMetricsApiRequestQuery(metric, filters, overrides);

@@ -219,7 +219,7 @@ describe('getDDMInterval', () => {
 };
 const useCase = 'custom';

- const result = getDDMInterval(dateTimeObj, useCase, 'high');
+ const result = getDDMInterval(dateTimeObj, useCase);

 expect(result).toBe('10s');
 });
diff --git a/static/app/utils/metrics/index.tsx b/static/app/utils/metrics/index.tsx
index 56f60a969fc9b1..f904aec6599ccb 100644
--- a/static/app/utils/metrics/index.tsx
+++ b/static/app/utils/metrics/index.tsx
@@ -3,7 +3,7 @@ import type {InjectedRouter} from 'react-router';
 import moment from 'moment';
 import * as qs from 'query-string';

-import type {DateTimeObject, Fidelity} from 'sentry/components/charts/utils';
+import type {DateTimeObject} from 'sentry/components/charts/utils';
 import {
 getDiffInMinutes,
 GranularityLadder,
@@ -26,6 +26,7 @@ import type {
 MetricsApiRequestMetric,
 MetricsApiRequestQuery,
 MetricsApiRequestQueryOptions,
+ MetricsDataIntervalLadder,
 MetricsGroup,
 MetricsOperation,
 MRI,
@@ -108,11 +109,11 @@ export function getDdmUrl(
 export function getMetricsApiRequestQuery(
 {field, query, groupBy, orderBy}: MetricsApiRequestMetric,
 {projects, environments, datetime}: PageFilters,
- {fidelity, ...overrides}: Partial<MetricsApiRequestQueryOptions> = {}
+ {intervalLadder, ...overrides}: Partial<MetricsApiRequestQueryOptions> = {}
 ): MetricsApiRequestQuery {
 const {mri: mri} = parseField(field) ?? {};
 const useCase = getUseCaseFromMRI(mri) ?? 'custom';
- const interval = getDDMInterval(datetime, useCase, fidelity);
+ const interval = getDDMInterval(datetime, useCase, intervalLadder);

 const hasGroupBy = groupBy && groupBy.length > 0;

@@ -136,44 +137,49 @@ function sanitizeQuery(query?: string) {
 return query?.trim();
 }

-const ddmHighFidelityLadder = new GranularityLadder([
- [SIXTY_DAYS, '1d'],
- [THIRTY_DAYS, '2h'],
- [TWO_WEEKS, '1h'],
- [ONE_WEEK, '30m'],
- [TWENTY_FOUR_HOURS, '5m'],
- [ONE_HOUR, '1m'],
- [0, '5m'],
-]);
-
-const ddmLowFidelityLadder = new GranularityLadder([
- [SIXTY_DAYS, '1d'],
- [THIRTY_DAYS, '12h'],
- [TWO_WEEKS, '4h'],
- [ONE_WEEK, '2h'],
- [TWENTY_FOUR_HOURS, '1h'],
- [SIX_HOURS, '30m'],
- [ONE_HOUR, '5m'],
- [0, '1m'],
-]);
+const intervalLadders: Record<MetricsDataIntervalLadder, GranularityLadder> = {
+ ddm: new GranularityLadder([
+ [SIXTY_DAYS, '1d'],
+ [THIRTY_DAYS, '2h'],
+ [TWO_WEEKS, '1h'],
+ [ONE_WEEK, '30m'],
+ [TWENTY_FOUR_HOURS, '5m'],
+ [ONE_HOUR, '1m'],
+ [0, '1m'],
+ ]),
+ bar: new GranularityLadder([
+ [SIXTY_DAYS, '1d'],
+ [THIRTY_DAYS, '12h'],
+ [TWO_WEEKS, '4h'],
+ [ONE_WEEK, '2h'],
+ [TWENTY_FOUR_HOURS, '1h'],
+ [SIX_HOURS, '30m'],
+ [ONE_HOUR, '5m'],
+ [0, '1m'],
+ ]),
+ dashboard: new GranularityLadder([
+ [SIXTY_DAYS, '1d'],
+ [THIRTY_DAYS, '1h'],
+ [TWO_WEEKS, '30m'],
+ [ONE_WEEK, '30m'],
+ [TWENTY_FOUR_HOURS, '5m'],
+ [0, '5m'],
+ ]),
+};

 // Wraps getInterval since other users of this function, and other metric use cases do not have support for 10s granularity
 export function getDDMInterval(
 datetimeObj: DateTimeObject,
 useCase: UseCase,
- fidelity: Fidelity = 'high'
+ ladder: MetricsDataIntervalLadder = 'ddm'
 ) {
 const diffInMinutes = getDiffInMinutes(datetimeObj);

- if (diffInMinutes <= ONE_HOUR && useCase === 'custom' && fidelity === 'high') {
+ if (diffInMinutes <= ONE_HOUR && useCase === 'custom' && ladder === 'ddm') {
 return '10s';
 }

- if (fidelity === 'low') {
- return ddmLowFidelityLadder.getInterval(diffInMinutes);
- }
-
- return ddmHighFidelityLadder.getInterval(diffInMinutes);
+ return intervalLadders[ladder].getInterval(diffInMinutes);
 }

 export function getDateTimeParams({start, end, period}: PageFilters['datetime']) {
diff --git a/static/app/views/dashboards/datasetConfig/metrics.tsx b/static/app/views/dashboards/datasetConfig/metrics.tsx
index 50b7d89bc8c66a..6301845a25ae62 100644
--- a/static/app/views/dashboards/datasetConfig/metrics.tsx
+++ b/static/app/views/dashboards/datasetConfig/metrics.tsx
@@ -401,7 +401,7 @@ function getMetricRequest(
 pageFilters,
 {
 limit: limit || undefined,
- fidelity: displayType === DisplayType.BAR ? 'low' : 'high',
+ intervalLadder: displayType === DisplayType.BAR ? 'bar' : 'dashboard',
 }
 );

diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx
index 969f7a64a8843f..350f680de38ceb 100644
--- a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx
+++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx
@@ -232,7 +232,7 @@ export function MetricWidgetChartContainer({
 environments,
 datetime,
 },
- {fidelity: displayType === MetricDisplayType.BAR ? 'low' : 'high'}
+ {intervalLadder: displayType === MetricDisplayType.BAR ? 'bar' : 'dashboard'}
 );

 const chartRef = useRef(null);
diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx
index 69c26252a9bc80..3f55f0b3ebedaa 100644
--- a/static/app/views/ddm/widget.tsx
+++ b/static/app/views/ddm/widget.tsx
@@ -255,7 +255,7 @@ const MetricWidgetBody = memo(
 environments,
 datetime,
 },
- {fidelity: displayType === MetricDisplayType.BAR ? 'low' : 'high'}
+ {intervalLadder: displayType === MetricDisplayType.BAR ? 'bar' : 'ddm'}
 );

 const chartRef = useRef(null);

From 70898e11c51c76ae0461f00d76e2e81073cc3ef4 Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Fri, 9 Feb 2024 06:52:50 -0500
Subject: [PATCH 204/357] fix(metrics-extraction): Fix n+1 due to filter
 (#64902)

### Summary
Filter apparently ignores the prefetched `...ondemand_set`, so, unfortunately,
we need to do this in memory; otherwise hundreds of short queries end up being
run.

---------

Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com>
---
 src/sentry/relay/config/metric_extraction.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py
index 74932361cf15af..c90095ca92225d 100644
--- a/src/sentry/relay/config/metric_extraction.py
+++ b/src/sentry/relay/config/metric_extraction.py
@@ -467,9 +467,11 @@ def _can_widget_query_use_stateful_extraction(
 default_version_specs = specs_per_version.get(stateful_extraction_version, [])
 spec_hashes = [hashed_spec[0] for hashed_spec in default_version_specs]

- on_demand_entries = widget_query.dashboardwidgetqueryondemand_set.filter(
- spec_version=stateful_extraction_version
- )
+ on_demand_entries = [
+ entry
+ for entry in widget_query.dashboardwidgetqueryondemand_set.all()
+ if entry.spec_version == stateful_extraction_version
+ ]

 if len(on_demand_entries) == 0:
 # 0 on-demand entries is expected, and happens when the on-demand task hasn't caught up yet for newly created widgets or widgets recently modified to have on-demand state.

From 6eb78ab7fbaba3a84f6f55561127d16478209700 Mon Sep 17 00:00:00 2001
From: Joris Bayer 
Date: Fri, 9 Feb 2024 14:33:01 +0100
Subject: [PATCH 205/357] fix(sdk): Discard generic redirects (#64945)

Discard generic redirects in `before_send_transaction` until
https://github.com/getsentry/team-sdks/issues/48 is done.

---
 src/sentry/utils/sdk.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index 6f02c598f3faef..6a5c666d425cd4 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -221,6 +221,14 @@ def traces_sampler(sampling_context):


 def before_send_transaction(event, _):
+ # Discard generic redirects.
+ # This condition can be removed once https://github.com/getsentry/team-sdks/issues/48 is fixed.
+ if ( + event.get("tags", {}).get("http.status_code") == "301" + and event.get("transaction_info", {}).get("source") == "url" + ): + return None + # Occasionally the span limit is hit and we drop spans from transactions, this helps find transactions where this occurs. num_of_spans = len(event["spans"]) event["tags"]["spans_over_limit"] = num_of_spans >= 1000 From f4e0e01ba7efb921f8e9c2e1ce0fe48b45e6abe6 Mon Sep 17 00:00:00 2001 From: Colton Allen Date: Fri, 9 Feb 2024 08:04:24 -0600 Subject: [PATCH 206/357] docs(replays): Add project-replay-clicks-index API documentation (#64879) Related: https://github.com/getsentry/team-replay/issues/353 --- .../apidocs/examples/replay_examples.py | 9 +++++ .../endpoints/project_replay_clicks_index.py | 38 ++++++++++++++++++- 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/src/sentry/apidocs/examples/replay_examples.py b/src/sentry/apidocs/examples/replay_examples.py index ae8238dcb67e38..82c5af7ebbe582 100644 --- a/src/sentry/apidocs/examples/replay_examples.py +++ b/src/sentry/apidocs/examples/replay_examples.py @@ -50,6 +50,15 @@ class ReplayExamples: ) ] + GET_REPLAY_CLIKS = [ + OpenApiExample( + "Retrieve a collection of RRWeb DOM node-ids and the timestamp they were clicked.", + value={"data": [{"node_id": 1, "timestamp": "2024-02-08T15:52:25+00:00"}]}, + status_codes=["200"], + response_only=True, + ) + ] + GET_REPLAY_DETAILS = [ OpenApiExample( "Get single replay details", diff --git a/src/sentry/replays/endpoints/project_replay_clicks_index.py b/src/sentry/replays/endpoints/project_replay_clicks_index.py index 186d31e3385e48..ed0659060b66be 100644 --- a/src/sentry/replays/endpoints/project_replay_clicks_index.py +++ b/src/sentry/replays/endpoints/project_replay_clicks_index.py @@ -2,7 +2,9 @@ import datetime import uuid +from typing import TypedDict +from drf_spectacular.utils import extend_schema from rest_framework.exceptions import ParseError from rest_framework.response import Response from snuba_sdk import ( @@ -28,6 +30,10 @@ from sentry.api.bases.project import ProjectEndpoint from sentry.api.event_search import ParenExpression, SearchFilter, parse_search_query from sentry.api.paginator import GenericOffsetPaginator +from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND +from sentry.apidocs.examples.replay_examples import ReplayExamples +from sentry.apidocs.parameters import CursorQueryParam, GlobalParams, ReplayParams, VisibilityParams +from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.exceptions import InvalidSearchQuery from sentry.models.project import Project from sentry.replays.lib.new_query.errors import CouldNotParseValue, OperatorNotSupported @@ -41,14 +47,44 @@ REFERRER = "replays.query.query_replay_clicks_dataset" +class ReplayClickResponseData(TypedDict): + node_id: int + timestamp: datetime.datetime + + +class ReplayClickResponse(TypedDict): + data: list[ReplayClickResponseData] + + @region_silo_endpoint +@extend_schema(tags=["Replays"]) class ProjectReplayClicksIndexEndpoint(ProjectEndpoint): owner = ApiOwner.REPLAY publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PUBLIC, } + @extend_schema( + operation_id="List Clicked Nodes", + parameters=[ + CursorQueryParam, + GlobalParams.ORG_SLUG, + GlobalParams.PROJECT_SLUG, + GlobalParams.ENVIRONMENT, + ReplayParams.REPLAY_ID, + VisibilityParams.PER_PAGE, + VisibilityParams.QUERY, + ], + responses={ + 200: inline_sentry_response_serializer("ListReplayClicks", 
ReplayClickResponse), + 400: RESPONSE_BAD_REQUEST, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=ReplayExamples.GET_REPLAY_CLIKS, + ) def get(self, request: Request, project: Project, replay_id: str) -> Response: + """Retrieve a collection of RRWeb DOM node-ids and the timestamp they were clicked.""" if not features.has( "organizations:session-replay", project.organization, actor=request.user ): From 9d9556ae26055057eb0981215b85be8f887e1b61 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Fri, 9 Feb 2024 09:09:49 -0500 Subject: [PATCH 207/357] ref: upgrade django to 5.0.2 (#64915) this addresses a this CVE https://github.com/advisories/GHSA-xxj9-f6rv-m3x4 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 7e5cda130327b2..68efd7f49ea300 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -13,7 +13,7 @@ datadog>=0.44 django-crispy-forms>=1.14.0 django-csp>=3.7 django-pg-zero-downtime-migrations>=0.13 -Django>=5,<6 +Django>=5.0.2,<6 djangorestframework>=3.14.0 drf-spectacular>=0.26.3 email-reply-parser>=0.5.12 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index e87fe10fd40fbd..b411e208ca3daf 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -39,7 +39,7 @@ cssutils==2.9.0 datadog==0.44.0 distlib==0.3.4 distro==1.8.0 -django==5.0.1 +django==5.0.2 django-crispy-forms==1.14.0 django-csp==3.7 django-pg-zero-downtime-migrations==0.13 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 8d45893d2829d9..48f39e668ac1f7 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -32,7 +32,7 @@ cssselect==1.0.3 cssutils==2.9.0 datadog==0.44.0 distro==1.8.0 -django==5.0.1 +django==5.0.2 django-crispy-forms==1.14.0 django-csp==3.7 django-pg-zero-downtime-migrations==0.13 From ef7a9fc2696c70aef9418f98790261b10d71b85a Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Fri, 9 Feb 2024 14:26:19 +0000 Subject: [PATCH 208/357] Revert "ref: projectconfig_cache stores binary in redis so use decode_responses=False (#64816)" This reverts commit a81bf4525a5ee2830022ad8533697f158f7f4f69. 
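For context on why the decoding mode matters here: the project config cache stores
binary payloads in Redis, and redis-py's `decode_responses` flag controls whether
replies come back as `str` or `bytes`. A minimal sketch with plain redis-py against
a local Redis (the client setup is illustrative, not Sentry's wrapper code):

```python
import redis

# decode_responses=True makes redis-py decode every reply as UTF-8 text,
# which fails with UnicodeDecodeError for binary values such as
# zstd-compressed blobs.
text_client = redis.Redis(decode_responses=True)
raw_client = redis.Redis(decode_responses=False)

raw_client.set("cfg", b"\x28\xb5\x2f\xfd")  # zstd magic bytes, not valid UTF-8
assert isinstance(raw_client.get("cfg"), bytes)  # bytes come back intact
text_client.get("cfg")  # raises UnicodeDecodeError on the same value
```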
Co-authored-by: asottile-sentry <103459774+asottile-sentry@users.noreply.github.com> --- src/sentry/relay/projectconfig_cache/redis.py | 4 +-- src/sentry/utils/redis.py | 34 ++++++++----------- 2 files changed, 16 insertions(+), 22 deletions(-) diff --git a/src/sentry/relay/projectconfig_cache/redis.py b/src/sentry/relay/projectconfig_cache/redis.py index b3ae6237096afb..f540fd2d0a20d5 100644 --- a/src/sentry/relay/projectconfig_cache/redis.py +++ b/src/sentry/relay/projectconfig_cache/redis.py @@ -15,10 +15,10 @@ class RedisProjectConfigCache(ProjectConfigCache): def __init__(self, **options): cluster_key = options.get("cluster", "default") - self.cluster = redis.redis_clusters.get(cluster_key, decode_responses=False) + self.cluster = redis.redis_clusters.get(cluster_key) read_cluster_key = options.get("read_cluster", cluster_key) - self.cluster_read = redis.redis_clusters.get(read_cluster_key, decode_responses=False) + self.cluster_read = redis.redis_clusters.get(read_cluster_key) super().__init__(**options) diff --git a/src/sentry/utils/redis.py b/src/sentry/utils/redis.py index f9f1984af865d2..16ae7f1dda1967 100644 --- a/src/sentry/utils/redis.py +++ b/src/sentry/utils/redis.py @@ -60,9 +60,7 @@ class _RBCluster: def supports(self, config): return not config.get("is_redis_cluster", False) - def factory(self, *, decode_responses: bool, **config): - if not decode_responses: - raise NotImplementedError("decode_responses=False mode is not implemented for `rb`") + def factory(self, **config): # rb expects a dict of { host, port } dicts where the key is the host # ID. Coerce the configuration into the correct format if necessary. hosts = config["hosts"] @@ -109,7 +107,7 @@ def supports(self, config): # in non-cluster mode. return config.get("is_redis_cluster", False) or len(config.get("hosts")) == 1 - def factory(self, *, decode_responses: bool, **config): + def factory(self, **config): # StrictRedisCluster expects a list of { host, port } dicts. Coerce the # configuration into the correct format if necessary. hosts = config.get("hosts") @@ -135,7 +133,7 @@ def cluster_factory(): # # https://github.com/Grokzen/redis-py-cluster/blob/73f27edf7ceb4a408b3008ef7d82dac570ab9c6a/rediscluster/nodemanager.py#L385 startup_nodes=deepcopy(hosts), - decode_responses=decode_responses, + decode_responses=True, skip_full_coverage_check=True, max_connections=16, max_connections_per_node=True, @@ -144,7 +142,7 @@ def cluster_factory(): ) else: host = hosts[0].copy() - host["decode_responses"] = decode_responses + host["decode_responses"] = True return ( import_string(config["client_class"]) if "client_class" in config @@ -172,19 +170,17 @@ def __init__( ... def __init__(self, options_manager, cluster_type=_RBCluster): - self.__clusters: dict[tuple[str, bool], TCluster] = {} + self.__clusters = {} self.__options_manager = options_manager self.__cluster_type = cluster_type() - def get(self, key: str, *, decode_responses: bool = True) -> TCluster: - cache_key = (key, decode_responses) - try: - return self.__clusters[cache_key] - except KeyError: - # Do not access attributes of the `cluster` object to prevent - # setup/init of lazy objects. The _RedisCluster type will try to - # connect to the cluster during initialization. + def get(self, key) -> TCluster: + cluster = self.__clusters.get(key) + # Do not access attributes of the `cluster` object to prevent + # setup/init of lazy objects. The _RedisCluster type will try to + # connect to the cluster during initialization. 
+ if cluster is None: # TODO: This would probably be safer with a lock, but I'm not sure # that it's necessary. configuration = self.__options_manager.get("redis.clusters").get(key) @@ -194,11 +190,9 @@ def get(self, key: str, *, decode_responses: bool = True) -> TCluster: if not self.__cluster_type.supports(configuration): raise KeyError(f"Invalid cluster type, expected: {self.__cluster_type}") - ret = self.__clusters[cache_key] = self.__cluster_type.factory( - **configuration, - decode_responses=decode_responses, - ) - return ret + cluster = self.__clusters[key] = self.__cluster_type.factory(**configuration) + + return cluster # TODO(epurkhiser): When migration of all rb cluster to true redis clusters has From 2f5ca664c000e203b329f5e9230837954cef5428 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Fri, 9 Feb 2024 09:39:35 -0500 Subject: [PATCH 209/357] feat(app-start): Remove loading style on widgets (#64871) The data manipulation of the responses accounts for empty data and the charts have loading states baked in which behave better when using CSS grid here --- .../screenSummary/deviceClassBreakdownBarChart.tsx | 9 ++------- .../appStartup/screenSummary/startDurationWidget.tsx | 5 ----- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/static/app/views/starfish/views/appStartup/screenSummary/deviceClassBreakdownBarChart.tsx b/static/app/views/starfish/views/appStartup/screenSummary/deviceClassBreakdownBarChart.tsx index 2e66b22ee7128d..ebb4b757579d0f 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/deviceClassBreakdownBarChart.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/deviceClassBreakdownBarChart.tsx @@ -1,7 +1,6 @@ import {BarChart} from 'sentry/components/charts/barChart'; import ErrorPanel from 'sentry/components/charts/errorPanel'; import TransitionChart from 'sentry/components/charts/transitionChart'; -import LoadingContainer from 'sentry/components/loading/loadingContainer'; import {IconWarning} from 'sentry/icons'; import {t} from 'sentry/locale'; import type {Series} from 'sentry/types/echarts'; @@ -41,10 +40,6 @@ function DeviceClassBreakdownBarChart({ isLoading: isReleasesLoading, } = useReleaseSelection(); - if (isReleasesLoading || isLoading) { - return ; - } - return ( diff --git a/static/app/views/starfish/views/appStartup/screenSummary/startDurationWidget.tsx b/static/app/views/starfish/views/appStartup/screenSummary/startDurationWidget.tsx index ed04da255d0686..92378ad8862509 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/startDurationWidget.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/startDurationWidget.tsx @@ -1,5 +1,4 @@ import {getInterval} from 'sentry/components/charts/utils'; -import LoadingContainer from 'sentry/components/loading/loadingContainer'; import {t} from 'sentry/locale'; import type {MultiSeriesEventsStats} from 'sentry/types'; import type {Series, SeriesDataUnit} from 'sentry/types/echarts'; @@ -90,10 +89,6 @@ function StartDurationWidget({additionalFilters, chartHeight, type}: Props) { initialData: {}, }); - if (isSeriesLoading) { - return ; - } - // The expected response is a multi series response, but if there is no data // then we get an object representing a single series with all empty values // (i.e without being grouped by release) From ad000bb96c0cba0b0b9c548c73f6d18ede370937 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Fri, 9 Feb 2024 09:39:56 -0500 Subject: [PATCH 210/357] fix(mobile-starfish): Fix release selection order when 
>1 release (#64872) In the case where we had two releases, the primary release was set to be the newest one and the secondary release was the older one. This made the regression view wonky because we couldn't easily swap them --- static/app/views/starfish/queries/useReleases.tsx | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/static/app/views/starfish/queries/useReleases.tsx b/static/app/views/starfish/queries/useReleases.tsx index 28a0c93285de66..8a989205f460f1 100644 --- a/static/app/views/starfish/queries/useReleases.tsx +++ b/static/app/views/starfish/queries/useReleases.tsx @@ -121,12 +121,16 @@ export function useReleaseSelection(): { const location = useLocation(); const {data: releases, isLoading} = useReleases(); + + // If there are more than 1 release, the first one should be the older one const primaryRelease = - decodeScalar(location.query.primaryRelease) ?? releases?.[0]?.version ?? undefined; + decodeScalar(location.query.primaryRelease) ?? + (releases && releases.length > 1 ? releases?.[1]?.version : releases?.[0]?.version); + // If there are more than 1 release, the second one should be the newest one const secondaryRelease = decodeScalar(location.query.secondaryRelease) ?? - (releases && releases.length > 1 ? releases?.[1]?.version : undefined); + (releases && releases.length > 1 ? releases?.[0]?.version : undefined); return {primaryRelease, secondaryRelease, isLoading}; } From 1e7985fda8a047ac28f2a0020d57052a0f053f3f Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Fri, 9 Feb 2024 08:59:47 -0600 Subject: [PATCH 211/357] feat(metrics): Add feature flag for metrics samples list (#64895) This adds the feature flag to switch to the new metrics samples list experience. --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index fb004043a7f8c5..4d0dc6ab2898ac 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1647,6 +1647,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:metrics-api-new-metrics-layer": False, # Enables the ability to block metrics. 
"organizations:metrics-blocking": False, + # Enables the new samples list experience + "organizations:metrics-samples-list": False, # Enable Session Stats down to a minute resolution "organizations:minute-resolution-sessions": True, # Adds the ttid & ttfd vitals to the frontend diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 2c09be24c614f6..fa2639398a419d 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -146,6 +146,7 @@ default_manager.add("organizations:metrics-api-new-metrics-layer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:metrics-blocking", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:metrics-extraction", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("organizations:metrics-samples-list", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:minute-resolution-sessions", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:mobile-cpu-memory-in-transactions", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:mobile-ttid-ttfd-contribution", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From cb53812ce4c01bbd2c123a2d2e01968d29249c58 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Fri, 9 Feb 2024 09:00:08 -0600 Subject: [PATCH 212/357] chore(profiling): Clean upsome flamegraph related feature flags (#64906) These features flags will be unused after #64905, so let's remove them. --- src/sentry/conf/server.py | 10 ---------- src/sentry/features/__init__.py | 5 ----- 2 files changed, 15 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 4d0dc6ab2898ac..301d0c339e0bc0 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1774,30 +1774,20 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:performance-vitals-inp": False, # Enable profiling "organizations:profiling": False, - # Enable profiling battery usage chart - "organizations:profiling-battery-usage-chart": False, # Enabled for those orgs who participated in the profiling Beta program "organizations:profiling-beta": False, # Enables production profiling in sentry browser application "organizations:profiling-browser": False, - # Enable profiling CPU chart - "organizations:profiling-cpu-chart": False, - # Enables differential flamegraph in profiling - "organizations:profiling-differential-flamegraph": False, # Enables separate differential flamegraph page "organizations:profiling-differential-flamegraph-page": False, # Enable global suspect functions in profiling "organizations:profiling-global-suspect-functions": False, - # Enable profiling Memory chart - "organizations:profiling-memory-chart": False, # Enable profiling statistical detectors breakpoint detection "organizations:profiling-statistical-detectors-breakpoint": False, # Enable profiling statistical detectors ema detection "organizations:profiling-statistical-detectors-ema": False, # Enable profiling summary redesign view "organizations:profiling-summary-redesign": False, - # Enable ui frames in flamecharts - "organizations:profiling-ui-frames": False, # Enable the transactions backed profiling views "organizations:profiling-using-transactions": False, # Enable profiling view diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index fa2639398a419d..b33494d7989574 100644 --- 
a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -212,18 +212,13 @@ default_manager.add("organizations:performance-trendsv2-dev-only", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-use-metrics", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-vitals-inp", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:profiling-battery-usage-chart", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-beta", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:profiling-browser", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:profiling-cpu-chart", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-differential-flamegraph-page", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:profiling-differential-flamegraph", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-global-suspect-functions", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:profiling-memory-chart", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-statistical-detectors-breakpoint", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:profiling-statistical-detectors-ema", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:profiling-summary-redesign", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:profiling-ui-frames", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-using-transactions", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:project-create-replay-feedback", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From ffdeaa00bb81f0bededb038103ac4d6a06938efd Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Fri, 9 Feb 2024 10:02:14 -0500 Subject: [PATCH 213/357] ref(similarity-embedding): Update seer API parameters (#64829) Add 50 frame limit to stacktrace string used in seer API parameters Add project_id to seer API parameters --- .../group_similar_issues_embeddings.py | 48 +++++--- src/sentry/seer/utils.py | 1 + .../test_group_similar_issues_embeddings.py | 106 ++++++++++++++++++ tests/sentry/seer/test_utils.py | 2 + 4 files changed, 140 insertions(+), 17 deletions(-) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index 78b89ff970c84c..75c7d2ee923da5 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -23,6 +23,7 @@ from sentry.web.helpers import render_to_string logger = logging.getLogger(__name__) +MAX_FRAME_COUNT = 50 def get_stacktrace_string(exception: Mapping[Any, Any], event: GroupEvent) -> str: @@ -30,31 +31,43 @@ def get_stacktrace_string(exception: Mapping[Any, Any], event: GroupEvent) -> st if not exception["values"]: return "" + frame_count = 0 output = [] for exc in exception["values"]: if not exc: continue - 
output.append(f'{exc["type"]}: {exc["value"]}') if exc["stacktrace"] and exc["stacktrace"].get("frames"): + # If the total number of frames exceeds 50, keep only the last in-app 50 frames + in_app_frames = [frame for frame in exc["stacktrace"]["frames"] if frame["in_app"]] + num_frames = len(in_app_frames) + if frame_count + num_frames > MAX_FRAME_COUNT: + remaining_frame_count = MAX_FRAME_COUNT - frame_count + in_app_frames = in_app_frames[-remaining_frame_count:] + frame_count += remaining_frame_count + num_frames = remaining_frame_count + frame_count += num_frames + + if in_app_frames: + output.append(f'{exc["type"]}: {exc["value"]}') + choices = [event.platform, "default"] if event.platform else ["default"] templates = [f"sentry/partial/frames/{choice}.txt" for choice in choices] - for frame in exc["stacktrace"]["frames"]: - if frame["in_app"]: - output.append( - render_to_string( - templates, - { - "abs_path": frame.get("abs_path"), - "filename": frame.get("filename"), - "function": frame.get("function"), - "module": frame.get("module"), - "lineno": frame.get("lineno"), - "colno": frame.get("colno"), - "context_line": frame.get("context_line"), - }, - ).strip("\n") - ) + for frame in in_app_frames: + output.append( + render_to_string( + templates, + { + "abs_path": frame.get("abs_path"), + "filename": frame.get("filename"), + "function": frame.get("function"), + "module": frame.get("module"), + "lineno": frame.get("lineno"), + "colno": frame.get("colno"), + "context_line": frame.get("context_line"), + }, + ).strip("\n") + ) return "\n".join(output) @@ -111,6 +124,7 @@ def get(self, request: Request, group) -> Response: similar_issues_params: SimilarIssuesEmbeddingsRequest = { "group_id": group.id, + "project_id": group.project.id, "stacktrace": stacktrace_string, "message": group.message, } diff --git a/src/sentry/seer/utils.py b/src/sentry/seer/utils.py index 66743818b887b7..a17152805d0334 100644 --- a/src/sentry/seer/utils.py +++ b/src/sentry/seer/utils.py @@ -53,6 +53,7 @@ class SimilarIssuesEmbeddingsRequestNotRequired(TypedDict, total=False): class SimilarIssuesEmbeddingsRequest(SimilarIssuesEmbeddingsRequestNotRequired): group_id: int + project_id: int stacktrace: str message: str diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 772552070db65e..57d5ec4ecff515 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -67,6 +67,38 @@ def setUp(self): self.path = f"/api/0/issues/{self.group.id}/similar-issues-embeddings/" self.similar_group = self.create_group(project=self.project) + def create_exception_values( + self, + num_values: int, + num_frames_per_value: int, + starting_frame_number: int = 1, + in_app: bool = True, + ) -> list[dict[str, Any]]: + """ + Return an exception value dictionary, where the line number corresponds to the total frame + number + """ + exception_values = [] + frame_count = starting_frame_number + for _ in range(num_values): + value: dict[str, Any] = {"type": "Error", "value": "this is an error"} + frames = [] + for _ in range(num_frames_per_value): + frame = { + "function": "function", + "module": "__main__", + "filename": "python_onboarding.py", + "abs_path": "/Users/jodi/python_onboarding/python_onboarding.py", + "lineno": frame_count, + "context_line": "function()", + "in_app": in_app, + } + frames.append(frame) + frame_count += 1 + 
value.update({"stacktrace": {"frames": frames}}) + exception_values.append(value) + return exception_values + def get_expected_response( self, group_ids: Sequence[int], @@ -99,6 +131,76 @@ def test_get_stacktrace_string_no_values(self): stacktrace_string = get_stacktrace_string({"values": []}, self.event) assert stacktrace_string == "" + def test_get_stacktrace_string_50_frames(self): + """Test that when there are 50 frames, all frames are included""" + + # Exception value where the line number corresponds to the total frame number + exception_values = self.create_exception_values(num_values=5, num_frames_per_value=10) + large_error_trace = { + "exception": {"values": exception_values}, + "platform": "python", + } + event = self.store_event(data=large_error_trace, project_id=self.project) + stacktrace_string = get_stacktrace_string(large_error_trace["exception"], event) # type: ignore + + # Assert that we take all exception frames + for line_no in range(1, 51): + assert str(line_no) in stacktrace_string + + def test_get_stacktrace_string_over_50_frames(self): + """Test that when there are 60 frames, frames 1-45, and frames 56-60 are included""" + + # Exception value where the line number corresponds to the total frame number + exception_values = self.create_exception_values(num_values=4, num_frames_per_value=15) + large_error_trace = { + "exception": {"values": exception_values}, + "platform": "python", + } + event = self.store_event(data=large_error_trace, project_id=self.project) + stacktrace_string = get_stacktrace_string(large_error_trace["exception"], event) # type: ignore + + # Assert that we take only the last 5 frames of the last exception + for line_no in range(46, 56): + assert str(line_no) not in stacktrace_string + for line_no in range(56, 61): + assert str(line_no) in stacktrace_string + + def test_get_stacktrace_string_non_in_app_frames(self): + """Test that only in-app frames are included and count towards the frame count limit of 50""" + # Make 20 in-app frames + exception_values_in_app_start = self.create_exception_values( + num_values=2, num_frames_per_value=10 + ) + # Make 10 non in-app frames + exception_values_non_in_app = self.create_exception_values( + num_values=1, num_frames_per_value=10, starting_frame_number=21, in_app=False + ) + # Make 30 in-app frames + exception_values_in_app_end = self.create_exception_values( + num_values=3, num_frames_per_value=10, starting_frame_number=31 + ) + + exception_values = ( + exception_values_in_app_start + + exception_values_non_in_app + + exception_values_in_app_end + ) + + large_error_trace = { + "exception": {"values": exception_values}, + "platform": "python", + } + event = self.store_event(data=large_error_trace, project_id=self.project) + stacktrace_string = get_stacktrace_string(large_error_trace["exception"], event) # type: ignore + + # Assert that the only the in-app frames are taken + for line_no in range(1, 21): + assert str(line_no) in stacktrace_string + for line_no in range(21, 31): + assert str(line_no) not in stacktrace_string + for line_no in range(31, 61): + assert str(line_no) in stacktrace_string + def test_get_formatted_results(self): new_group = self.create_group(project=self.project) response_1: SimilarIssuesEmbeddingsData = { @@ -156,6 +258,7 @@ def test_simple(self, mock_seer_request): body=json.dumps( { "group_id": self.group.id, + "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, "k": 1, @@ -297,6 +400,7 @@ def test_no_optional_params(self, 
From 80bfa7baec7f1bbf9c3aa2c143f105cd6f550ced Mon Sep 17 00:00:00 2001
From: Colton Allen
Date: Fri, 9 Feb 2024 09:21:50 -0600
Subject: [PATCH 214/357] docs(replays): Publish recording segment endpoints
 (#64874)

Related: https://github.com/getsentry/team-replay/issues/353
---
 .../apidocs/examples/replay_examples.py       | 78 +++++++++++++++++++
 src/sentry/apidocs/parameters.py              |  8 ++
 ...roject_replay_recording_segment_details.py | 36 ++++++++-
 .../project_replay_recording_segment_index.py | 29 ++++++-
 src/sentry/replays/types.py                   |  1 +
 5 files changed, 147 insertions(+), 5 deletions(-)
 create mode 100644 src/sentry/replays/types.py

diff --git a/src/sentry/apidocs/examples/replay_examples.py b/src/sentry/apidocs/examples/replay_examples.py
index 82c5af7ebbe582..e59f32542b5654 100644
--- a/src/sentry/apidocs/examples/replay_examples.py
+++ b/src/sentry/apidocs/examples/replay_examples.py
@@ -108,3 +108,81 @@ class ReplayExamples:
             response_only=True,
         )
     ]
+
+    GET_REPLAY_SEGMENTS = [
+        OpenApiExample(
+            "Retrieve a collection of replay segments",
+            value=[
+                [
+                    {
+                        "type": 5,
+                        "timestamp": 1658770772.902,
+                        "data": {
+                            "tag": "performanceSpan",
+                            "payload": {
+                                "op": "memory",
+                                "description": "",
+                                "startTimestamp": 1658770772.902,
+                                "endTimestamp": 1658770772.902,
+                                "data": {
+                                    "memory": {
+                                        "jsHeapSizeLimit": 4294705152,
+                                        "totalJSHeapSize": 10204109,
+                                        "usedJSHeapSize": 9131621,
+                                    }
+                                },
+                            },
+                        },
+                    }
+                ],
+                [
+                    {
+                        "type": 5,
+                        "timestamp": 1665063926.125,
+                        "data": {
+                            "tag": "performanceSpan",
+                            "payload": {
+                                "op": "navigation.navigate",
+                                "description": "https://sentry.io",
+                                "startTimestamp": 1665063926.125,
+                                "endTimestamp": 1665063926.833,
+                                "data": {"size": 9538, "duration": 710},
+                            },
+                        },
+                    }
+                ],
+            ],
+            status_codes=["200"],
+            response_only=True,
+        )
+    ]
+
+    GET_REPLAY_SEGMENT = [
+        OpenApiExample(
+            "Retrieve a replay segment",
+            value=[
+                {
+                    "type": 5,
+                    "timestamp": 1658770772.902,
+                    "data": {
+                        "tag": "performanceSpan",
+                        "payload": {
+                            "op": "memory",
+                            "description": "",
+                            "startTimestamp": 1658770772.902,
+                            "endTimestamp": 1658770772.902,
+                            "data": {
+                                "memory": {
+                                    "jsHeapSizeLimit": 4294705152,
+                                    "totalJSHeapSize": 10204109,
+                                    "usedJSHeapSize": 9131621,
+                                }
+                            },
+                        },
+                    },
+                }
+            ],
+            status_codes=["200"],
+            response_only=True,
+        )
+    ]
diff --git a/src/sentry/apidocs/parameters.py b/src/sentry/apidocs/parameters.py
index 3f51b1e1c687f1..cc9557060248e0 100644
--- a/src/sentry/apidocs/parameters.py
+++ b/src/sentry/apidocs/parameters.py
@@ -317,6 +317,14 @@ class ReplayParams:
         description="""The ID of the replay you'd like to retrieve.""",
     )
 
+    SEGMENT_ID = OpenApiParameter(
+        name="segment_id",
+        location="path",
+        required=True,
+        type=OpenApiTypes.INT,
+        description="""The ID of the segment you'd like to retrieve.""",
+    )
+
 
 class NotificationParams:
     TRIGGER_TYPE = OpenApiParameter(
diff --git a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
index 034bc001141bdf..bbb990d621e97b 100644
--- a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
+++ b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
@@ -5,6 +5,7 @@
 import sentry_sdk
 from django.http import StreamingHttpResponse
 from django.http.response import HttpResponseBase
+from drf_spectacular.utils import extend_schema
 from rest_framework.request import Request
 
 from sentry import features
@@ -12,18 +13,43 @@
 from sentry.api.api_publish_status import ApiPublishStatus
 from sentry.api.base import region_silo_endpoint
 from sentry.api.bases.project import ProjectEndpoint
+from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND
+from sentry.apidocs.examples.replay_examples import ReplayExamples
+from sentry.apidocs.parameters import GlobalParams, ReplayParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
 from sentry.replays.lib.storage import RecordingSegmentStorageMeta, make_filename
+from sentry.replays.types import ReplayRecordingSegment
 from sentry.replays.usecases.reader import download_segment, fetch_segment_metadata
 
 
 @region_silo_endpoint
+@extend_schema(tags=["Replays"])
 class ProjectReplayRecordingSegmentDetailsEndpoint(ProjectEndpoint):
     owner = ApiOwner.REPLAY
     publish_status = {
-        "GET": ApiPublishStatus.UNKNOWN,
+        "GET": ApiPublishStatus.PUBLIC,
     }
 
+    @extend_schema(
+        operation_id="Fetch Recording Segment",
+        parameters=[
+            GlobalParams.ORG_SLUG,
+            GlobalParams.PROJECT_SLUG,
+            ReplayParams.REPLAY_ID,
+            ReplayParams.SEGMENT_ID,
+        ],
+        responses={
+            200: inline_sentry_response_serializer(
+                "GetReplayRecordingSegment", ReplayRecordingSegment
+            ),
+            400: RESPONSE_BAD_REQUEST,
+            403: RESPONSE_FORBIDDEN,
+            404: RESPONSE_NOT_FOUND,
+        },
+        examples=ReplayExamples.GET_REPLAY_SEGMENT,
+    )
     def get(self, request: Request, project, replay_id, segment_id) -> HttpResponseBase:
+        """Return a replay recording segment."""
         if not features.has(
             "organizations:session-replay", project.organization, actor=request.user
         ):
@@ -42,9 +68,11 @@ def get(self, request: Request, project, replay_id, segment_id) -> HttpResponseB
                     "replayId": segment.replay_id,
                     "segmentId": segment.segment_id,
                     "projectId": str(segment.project_id),
-                    "dateAdded": segment.date_added.replace(microsecond=0).isoformat()
-                    if segment.date_added
-                    else None,
+                    "dateAdded": (
+                        segment.date_added.replace(microsecond=0).isoformat()
+                        if segment.date_added
+                        else None
+                    ),
                 }
             }
         )
diff --git a/src/sentry/replays/endpoints/project_replay_recording_segment_index.py b/src/sentry/replays/endpoints/project_replay_recording_segment_index.py
index 1f174a30692e48..7e412cd92146da 100644
--- a/src/sentry/replays/endpoints/project_replay_recording_segment_index.py
+++ b/src/sentry/replays/endpoints/project_replay_recording_segment_index.py
@@ -1,6 +1,7 @@
 import functools
 
 from django.http import StreamingHttpResponse
+from drf_spectacular.utils import extend_schema
 from rest_framework.request import Request
 from rest_framework.response import Response
 
@@ -10,22 +11,48 @@
 from sentry.api.base import region_silo_endpoint
 from sentry.api.bases.project import ProjectEndpoint
 from sentry.api.paginator import GenericOffsetPaginator
+from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND
+from sentry.apidocs.examples.replay_examples import ReplayExamples
+from sentry.apidocs.parameters import CursorQueryParam, GlobalParams, ReplayParams, VisibilityParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
 from sentry.replays.lib.storage import storage
+from sentry.replays.types import ReplayRecordingSegment
 from sentry.replays.usecases.reader import download_segments, fetch_segments_metadata
 
 
 @region_silo_endpoint
+@extend_schema(tags=["Replays"])
 class ProjectReplayRecordingSegmentIndexEndpoint(ProjectEndpoint):
     owner = ApiOwner.REPLAY
     publish_status = {
-        "GET": ApiPublishStatus.UNKNOWN,
+        "GET": ApiPublishStatus.PUBLIC,
    }
 
     def __init__(self, **options) -> None:
         storage.initialize_client()
         super().__init__(**options)
 
+    @extend_schema(
+        operation_id="List Recording Segments",
+        parameters=[
+            CursorQueryParam,
+            GlobalParams.ORG_SLUG,
+            GlobalParams.PROJECT_SLUG,
+            ReplayParams.REPLAY_ID,
+            VisibilityParams.PER_PAGE,
+        ],
+        responses={
+            200: inline_sentry_response_serializer(
+                "ListReplayRecordingSegments", list[ReplayRecordingSegment]
+            ),
+            400: RESPONSE_BAD_REQUEST,
+            403: RESPONSE_FORBIDDEN,
+            404: RESPONSE_NOT_FOUND,
+        },
+        examples=ReplayExamples.GET_REPLAY_SEGMENTS,
+    )
     def get(self, request: Request, project, replay_id: str) -> Response:
+        """Return a collection of replay recording segments."""
         if not features.has(
             "organizations:session-replay", project.organization, actor=request.user
         ):
diff --git a/src/sentry/replays/types.py b/src/sentry/replays/types.py
new file mode 100644
index 00000000000000..f7829f1ce16423
--- /dev/null
+++ b/src/sentry/replays/types.py
@@ -0,0 +1 @@
+ReplayRecordingSegment = list[dict]
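A sketch of calling the newly published endpoint. The URL layout is an assumption pieced together from the parameter names above (org slug, project slug, replay ID, segment ID), not something this patch states, so confirm it against the generated API docs; token and identifiers are placeholders.

    import requests

    # Assumed path shape based on GlobalParams/ReplayParams above.
    url = (
        "https://sentry.io/api/0/projects/acme/frontend"
        "/replays/2af2fc3a.../recording-segments/0/"
    )
    resp = requests.get(url, headers={"Authorization": "Bearer <auth-token>"})
    segment = resp.json()  # per types.py above, a list[dict] of recording events
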
From e7eca1bad618c0819145ef1273c9412b314432aa Mon Sep 17 00:00:00 2001
From: Leander Rodrigues
Date: Fri, 9 Feb 2024 07:25:28 -0800
Subject: [PATCH 215/357] fix(hybrid-cloud): Correct proxy header for Opsgenie
 (#64913)

Resolves [SENTRY-2JDF](https://sentry.sentry.io/issues/4939521218/)

This PR resolves the above bug and also ensures opsgenie clients are
always created using `get_keyring_client` to hopefully avoid it in the
future.
---
 .../endpoints/internal/integration_proxy.py |  6 ++---
 src/sentry/incidents/logic.py               | 25 +++++++++----------
 .../integrations/opsgenie/actions/form.py   | 14 +++++------
 .../integrations/opsgenie/integration.py    |  2 +-
 src/sentry/integrations/opsgenie/utils.py   | 17 ++++++-------
 5 files changed, 29 insertions(+), 35 deletions(-)

diff --git a/src/sentry/api/endpoints/internal/integration_proxy.py b/src/sentry/api/endpoints/internal/integration_proxy.py
index ab0482dc658643..01f6c6aa21181b 100644
--- a/src/sentry/api/endpoints/internal/integration_proxy.py
+++ b/src/sentry/api/endpoints/internal/integration_proxy.py
@@ -81,11 +81,11 @@ def _validate_request(self, request: HttpRequest) -> bool:
         from sentry.shared_integrations.client.proxy import IntegrationProxyClient
 
         # Get the organization integration
-        org_id_header = request.headers.get(PROXY_OI_HEADER)
-        if org_id_header is None or not org_id_header.isnumeric():
+        org_integration_id_header = request.headers.get(PROXY_OI_HEADER)
+        if org_integration_id_header is None or not org_integration_id_header.isnumeric():
             logger.info("integration_proxy.missing_org_integration", extra=self.log_extra)
             return False
-        org_integration_id = int(org_id_header)
+        org_integration_id = int(org_integration_id_header)
 
         self.log_extra["org_integration_id"] = org_integration_id
         self.org_integration = (
diff --git a/src/sentry/incidents/logic.py b/src/sentry/incidents/logic.py
index c74e083249e5a0..3875247071ae3c 100644
--- a/src/sentry/incidents/logic.py
+++ b/src/sentry/incidents/logic.py
@@ -5,7 +5,7 @@
 from copy import deepcopy
 from dataclasses import replace
 from datetime import datetime, timedelta, timezone
-from typing import Any
+from typing import Any, cast
 from uuid import uuid4
 
 from django.db import router, transaction
@@ -1400,26 +1400,25 @@ def get_alert_rule_trigger_action_opsgenie_team(
     input_channel_id=None,
    integrations=None,
 ) -> tuple[str, str]:
-    from sentry.integrations.opsgenie.client import OpsgenieClient
+    from sentry.integrations.opsgenie.integration import OpsgenieIntegration
     from sentry.integrations.opsgenie.utils import get_team
 
-    oi = integration_service.get_organization_integration(
-        integration_id=integration_id, organization_id=organization.id
+    integration, oi = integration_service.get_organization_context(
+        organization_id=organization.id, integration_id=integration_id
     )
+    if integration is None or oi is None:
+        raise InvalidTriggerActionError("Opsgenie integration not found.")
+
     team = get_team(target_value, oi)
     if not team:
         raise InvalidTriggerActionError("No Opsgenie team found.")
 
-    integration_key = team["integration_key"]
-    integration = integration_service.get_integration(integration_id=integration_id)
-    if integration is None:
-        raise InvalidTriggerActionError("Opsgenie integration not found.")
-    client = OpsgenieClient(
-        integration=integration,
-        integration_key=integration_key,
-        org_integration_id=oi.id,
-        keyid=team["id"],
+    install = cast(
+        "OpsgenieIntegration",
+        integration.get_installation(organization_id=organization.id),
     )
+    client = install.get_keyring_client(keyid=team["id"])
+
     try:
         client.authorize_integration(type="sentry")
     except ApiError as e:
diff --git a/src/sentry/integrations/opsgenie/actions/form.py b/src/sentry/integrations/opsgenie/actions/form.py
index 74865821370678..1cb6cae0d0a139 100644
--- a/src/sentry/integrations/opsgenie/actions/form.py
+++ b/src/sentry/integrations/opsgenie/actions/form.py
@@ -1,12 +1,12 @@
 from __future__ import annotations
 
 from collections.abc import Mapping
-from typing import Any
+from typing import Any, cast
 
 from django import forms
 from django.utils.translation import gettext_lazy as _
 
-from sentry.integrations.opsgenie.client import OpsgenieClient
+from sentry.integrations.opsgenie.integration import OpsgenieIntegration
 from sentry.integrations.opsgenie.utils import get_team
 from sentry.services.hybrid_cloud.integration import integration_service
 from sentry.services.hybrid_cloud.integration.model import (
@@ -66,13 +66,11 @@ def _get_team_status(
     if not team or not team_id:
         return INVALID_TEAM
 
-    integration_key = team["integration_key"]
-    client = OpsgenieClient(
-        integration=integration,
-        integration_key=integration_key,
-        org_integration_id=org_integration.id,
-        keyid=team_id,
+    install = cast(
+        "OpsgenieIntegration",
+        integration.get_installation(organization_id=org_integration.organization_id),
     )
+    client = install.get_keyring_client(keyid=team_id)
     # the integration should be of type "sentry"
     # there's no way to authenticate that a key is an integration key
     # without specifying the type... even though the type is arbitrary
diff --git a/src/sentry/integrations/opsgenie/integration.py b/src/sentry/integrations/opsgenie/integration.py
index b37f63407ce804..c6efaa344c3133 100644
--- a/src/sentry/integrations/opsgenie/integration.py
+++ b/src/sentry/integrations/opsgenie/integration.py
@@ -116,7 +116,7 @@ class OpsgenieIntegration(IntegrationInstallation):
     def get_keyring_client(self, keyid: str) -> OpsgenieClient:
         org_integration = self.org_integration
         assert org_integration, "OrganizationIntegration is required"
-        team = get_team(keyid, org_integration)
+        team = get_team(team_id=keyid, org_integration=org_integration)
         assert team, "Cannot get client for unknown team"
 
         return OpsgenieClient(
diff --git a/src/sentry/integrations/opsgenie/utils.py b/src/sentry/integrations/opsgenie/utils.py
index 6ce52658532ec2..189ba1cf309fae 100644
--- a/src/sentry/integrations/opsgenie/utils.py
+++ b/src/sentry/integrations/opsgenie/utils.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import logging
-from typing import Any
+from typing import Any, cast
 
 from sentry.constants import ObjectStatus
 from sentry.incidents.models import AlertRuleTriggerAction, Incident, IncidentStatus
@@ -11,7 +11,6 @@
 from sentry.shared_integrations.exceptions import ApiError
 
 logger = logging.getLogger("sentry.integrations.opsgenie")
-from .client import OpsgenieClient
 
 
 def build_incident_attachment(
@@ -63,6 +62,8 @@ def send_incident_alert_notification(
     new_status: IncidentStatus,
     notification_uuid: str | None = None,
 ) -> bool:
+    from sentry.integrations.opsgenie.integration import OpsgenieIntegration
+
     integration, org_integration = integration_service.get_organization_context(
         organization_id=incident.organization_id, integration_id=action.integration_id
     )
@@ -76,15 +77,11 @@ def send_incident_alert_notification(
         logger.info("Opsgenie team removed, but the rule is still active.")
         return False
 
-    integration_key = team["integration_key"]
-
-    # TODO(hybridcloud) Use integration.get_keyring_client instead.
-    client = OpsgenieClient(
-        integration=integration,
-        integration_key=integration_key,
-        org_integration_id=incident.organization_id,
-        keyid=team["id"],
+    install = cast(
+        "OpsgenieIntegration",
+        integration.get_installation(organization_id=org_integration.organization_id),
    )
+    client = install.get_keyring_client(keyid=team["id"])
     attachment = build_incident_attachment(incident, new_status, metric_value, notification_uuid)
     try:
         resp = client.send_notification(attachment)
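The refactor above repeats one pattern across three call sites; a condensed sketch of it, with names taken from the diffs and the surrounding integration/team lookups assumed:

    # Resolve the installation once, then let it build a key-scoped client.
    # get_keyring_client wraps the get_team() lookup plus the OpsgenieClient(...)
    # construction that the old call sites inlined (and mis-parameterized once).
    install = cast(
        "OpsgenieIntegration",
        integration.get_installation(organization_id=org_integration.organization_id),
    )
    client = install.get_keyring_client(keyid=team["id"])
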
From 3cc20b9fae920c52cb1e447e6e382395f60ca919 Mon Sep 17 00:00:00 2001
From: William Mak
Date: Fri, 9 Feb 2024 11:13:27 -0500
Subject: [PATCH 216/357] fix(metrics_extraction): Handle order-by (#64723)

- ondemand wasn't passing orderby which meant table and top event
  queries would return the wrong results

---------

Co-authored-by: Kev
---
 src/sentry/search/events/builder/discover.py |   1 +
 src/sentry/search/events/builder/metrics.py  |  66 +++++-
 src/sentry/snuba/metrics/datasource.py       |  14 +-
 src/sentry/snuba/metrics/query.py            |   5 +-
 src/sentry/snuba/metrics/query_builder.py    |   2 +-
 .../test_organization_events_stats_mep.py    | 217 ++++++++++++++++++
 6 files changed, 291 insertions(+), 14 deletions(-)

diff --git a/src/sentry/search/events/builder/discover.py b/src/sentry/search/events/builder/discover.py
index 3b0acfedc51373..c1f1ec9fe2a5f3 100644
--- a/src/sentry/search/events/builder/discover.py
+++ b/src/sentry/search/events/builder/discover.py
@@ -209,6 +209,7 @@ def __init__(
             org_id if org_id is not None and isinstance(org_id, int) else None
         )
         self.raw_equations = equations
+        self.raw_orderby = orderby
         self.query = query
         self.selected_columns = selected_columns
         self.groupby_columns = groupby_columns
diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py
index 934dcc2d0f83b1..f8cb3df6be5eaf 100644
--- a/src/sentry/search/events/builder/metrics.py
+++ b/src/sentry/search/events/builder/metrics.py
@@ -61,7 +61,12 @@
     should_use_on_demand_metrics,
 )
 from sentry.snuba.metrics.fields import histogram as metrics_histogram
-from sentry.snuba.metrics.query import MetricField, MetricGroupByField, MetricsQuery
+from sentry.snuba.metrics.query import (
+    MetricField,
+    MetricGroupByField,
+    MetricOrderByField,
+    MetricsQuery,
+)
 from sentry.snuba.metrics.utils import get_num_intervals
 from sentry.utils.dates import to_timestamp
 from sentry.utils.snuba import DATASETS, bulk_snql_query, raw_snql_query
@@ -203,11 +208,21 @@ def _on_demand_metric_spec_map(self) -> dict[str, OnDemandMetricSpec] | None:
         }
         return map
 
+    def convert_spec_to_metric_field(self, spec: OnDemandMetricSpec) -> MetricField:
+        if isinstance(self, (TopMetricsQueryBuilder, TimeseriesMetricQueryBuilder)):
+            alias = get_function_alias(spec.field) or "count"
+        elif isinstance(self, AlertMetricsQueryBuilder):
+            alias = spec.mri
+        else:
+            alias = get_function_alias(spec.field) or spec.mri
+        return MetricField(spec.op, spec.mri, alias=alias)
+
     def _get_metrics_query_from_on_demand_spec(
         self,
         spec: OnDemandMetricSpec,
         require_time_range: bool = True,
         groupby: Sequence[MetricGroupByField] | None = None,
+        orderby: Sequence[MetricOrderByField] | None = None,
         # Where normally isn't accepted for on-demand since it should only encoded into the metric
         # but in the case of top events, etc. there is need for another where condition dynamically for top N groups.
         additional_where: Sequence[Condition] | None = None,
@@ -234,22 +249,18 @@ def _get_metrics_query_from_on_demand_spec(
             if intervals_len > 0:
                 limit = Limit(int(limit.limit / intervals_len))
             max_limit = 10_000
-            alias = get_function_alias(spec.field) or "count"
             include_series = True
             interval = self.interval
         elif isinstance(self, TimeseriesMetricQueryBuilder):
             limit = Limit(1)
-            alias = get_function_alias(spec.field) or "count"
             include_series = True
             interval = self.interval
         elif isinstance(self, AlertMetricsQueryBuilder):
             limit = self.limit or Limit(1)
-            alias = spec.mri
             include_series = False
             interval = None
         else:
             limit = self.limit or Limit(1)
-            alias = get_function_alias(spec.field) or spec.mri
             include_series = False
             interval = None
@@ -282,7 +293,7 @@ def _get_metrics_query_from_on_demand_spec(
             where.extend(additional_where)
 
         return MetricsQuery(
-            select=[MetricField(spec.op, spec.mri, alias=alias)],
+            select=[self.convert_spec_to_metric_field(spec)],
             where=where,
             limit=limit,
             max_limit=max_limit,
@@ -293,9 +304,11 @@ def _get_metrics_query_from_on_demand_spec(
             org_id=self.params.organization.id,
             project_ids=[p.id for p in self.params.projects],
             include_series=include_series,
+            orderby=orderby,
             groupby=groupby,
             start=start,
             end=end,
+            skip_orderby_validation=True,
         )
 
     def validate_aggregate_arguments(self) -> None:
@@ -965,6 +978,46 @@ def use_case_id_from_metrics_query(self, metrics_query: MetricsQuery) -> UseCase
 
         return use_case_ids.pop()
 
+    def resolve_ondemand_orderby(self) -> Any:
+        """Ondemand needs to resolve their orderby separately than how any other QB system does it
+
+        - Functions are resolved in self._on_demand_metric_spec_map so we need to get those back and throw 'em into
+          the orderby.
+        - This is problematic though, because for historical reasons (ie. we used to do it and we've kept it
+          instead of introducing additional risk by removing it) orderbys in the QB and MetricLayer both verify
+          that the orderby is in the selected fields
+        - This is why we pass skip_orderby_validation to the MetricsQuery
+        """
+        result = []
+        raw_orderby = self.raw_orderby
+
+        if not raw_orderby:
+            return []
+
+        if isinstance(self.raw_orderby, str):
+            raw_orderby = [self.raw_orderby]
+        # While technically feasible to order by multiple fields, we would need to know which table each orderby is
+        # going to. Leaving that out for now to keep this simple since we don't allow more than one in the UI anyways
+        if len(raw_orderby) > 1:
+            raise IncompatibleMetricsQuery("Can't orderby more than one field")
+
+        for orderby in raw_orderby:
+            direction = Direction.DESC if orderby.startswith("-") else Direction.ASC
+            bare_orderby = orderby.lstrip("-")
+            if bare_orderby in self._on_demand_metric_spec_map:
+                spec = self._on_demand_metric_spec_map[bare_orderby]
+                result.append(
+                    MetricOrderByField(
+                        field=self.convert_spec_to_metric_field(spec),
+                        direction=direction,
+                    )
+                )
+            else:
+                raise IncompatibleMetricsQuery(
+                    f"Cannot orderby {bare_orderby}, likely because its a tag"
+                )
+        return result
+
     def run_query(self, referrer: str, use_cache: bool = False) -> Any:
         groupbys = self.groupby
         if not groupbys and self.use_on_demand:
@@ -1050,6 +1103,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
                         spec=spec,
                         require_time_range=True,
                         groupby=[MetricGroupByField(field=c) for c in group_bys],
+                        orderby=self.resolve_ondemand_orderby(),
                     )
                 )
             else:
diff --git a/src/sentry/snuba/metrics/datasource.py b/src/sentry/snuba/metrics/datasource.py
index 4646d2447d6b7e..ea37c58af5ac62 100644
--- a/src/sentry/snuba/metrics/datasource.py
+++ b/src/sentry/snuba/metrics/datasource.py
@@ -947,12 +947,14 @@ def get_series(
     # This logic is in place because we don't want to put the project_id in the select, as it would require
     # a DerivedOp, therefore
-    orderby_fields = []
-    for select_field in metrics_query.select:
-        for orderby in metrics_query.orderby:
-            if select_field == orderby.field:
-                orderby_fields.append(select_field)
-    metrics_query = replace(metrics_query, select=orderby_fields)
+    # Because ondemand queries skip validation this next block will result in no fields in the select
+    if not metrics_query.skip_orderby_validation:
+        orderby_fields = []
+        for select_field in metrics_query.select:
+            for orderby in metrics_query.orderby:
+                if select_field == orderby.field:
+                    orderby_fields.append(select_field)
+        metrics_query = replace(metrics_query, select=orderby_fields)
 
     snuba_queries, _ = SnubaQueryBuilder(
         projects, metrics_query, use_case_id
diff --git a/src/sentry/snuba/metrics/query.py b/src/sentry/snuba/metrics/query.py
index 332e8c8b48a40b..95920a8b9374f0 100644
--- a/src/sentry/snuba/metrics/query.py
+++ b/src/sentry/snuba/metrics/query.py
@@ -166,6 +166,9 @@ class MetricsQuery(MetricsQueryValidationRunner):
     # doesn't take into account time bounds as the alerts service uses subscriptable queries that react in real time
     # to dataset changes.
     is_alerts_query: bool = False
+    # Need to skip the orderby validation for ondemand queries, this is because ondemand fields are based on a spec
+    # instead of being direct fields
+    skip_orderby_validation: bool = False
 
     @cached_property
     def projects(self) -> list[Project]:
@@ -231,7 +234,7 @@ def validate_where(self) -> None:
             )
 
     def validate_orderby(self) -> None:
-        if not self.orderby:
+        if not self.orderby or self.skip_orderby_validation:
             return
 
         for metric_order_by_field in self.orderby:
diff --git a/src/sentry/snuba/metrics/query_builder.py b/src/sentry/snuba/metrics/query_builder.py
index fc31a41373876a..463035b1d8461a 100644
--- a/src/sentry/snuba/metrics/query_builder.py
+++ b/src/sentry/snuba/metrics/query_builder.py
@@ -879,7 +879,7 @@ def generate_snql_for_action_by_fields(
         action_by_name = "order by"
 
     raise NotImplementedError(
-        f"Unsupported {action_by_name} field: {metric_action_by_field.field}"
+        f"Unsupported {action_by_name} field: {metric_action_by_field.field} needs to be either a MetricField or a string"
     )
 
 
 def _build_where(self) -> list[BooleanCondition | Condition]:
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py
index f50cff2bbc31d8..338d00fd24af2c 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py
@@ -1470,3 +1470,220 @@ def test_group_by_transaction(self):
             [{"count": 5.0}],
             [{"count": 10.0}],
         ]
+
+    def _setup_orderby_tests(self, query):
+        count_spec = OnDemandMetricSpec(
+            field="count()",
+            groupbys=["networkId"],
+            query=query,
+            spec_type=MetricSpecType.DYNAMIC_QUERY,
+        )
+        p95_spec = OnDemandMetricSpec(
+            field="p95(transaction.duration)",
+            groupbys=["networkId"],
+            query=query,
+            spec_type=MetricSpecType.DYNAMIC_QUERY,
+        )
+
+        for hour in range(0, 5):
+            self.store_on_demand_metric(
+                1,
+                spec=count_spec,
+                additional_tags={"networkId": "1234"},
+                timestamp=self.day_ago + timedelta(hours=hour),
+            )
+            self.store_on_demand_metric(
+                100,
+                spec=p95_spec,
+                additional_tags={"networkId": "1234"},
+                timestamp=self.day_ago + timedelta(hours=hour),
+            )
+            self.store_on_demand_metric(
+                200,
+                spec=p95_spec,
+                additional_tags={"networkId": "5678"},
+                timestamp=self.day_ago + timedelta(hours=hour),
+            )
+            # Store twice as many 5678 so orderby puts it later
+            self.store_on_demand_metric(
+                2,
+                spec=count_spec,
+                additional_tags={"networkId": "5678"},
+                timestamp=self.day_ago + timedelta(hours=hour),
+            )
+
+    def test_order_by_aggregate_top_events_desc(self):
+        url = "https://sentry.io"
+        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
+        self._setup_orderby_tests(query)
+        response = self.do_request(
+            data={
+                "dataset": "metricsEnhanced",
+                "field": ["networkId", "count()"],
+                "start": iso_format(self.day_ago),
+                "end": iso_format(self.day_ago + timedelta(hours=5)),
+                "onDemandType": "dynamic_query",
+                "orderby": "-count()",
+                "interval": "1d",
+                "partial": 1,
+                "query": query,
+                "referrer": "api.dashboards.widget.bar-chart",
+                "project": self.project.id,
+                "topEvents": 2,
+                "useOnDemandMetrics": "true",
+                "yAxis": "count()",
+            },
+        )
+
+        assert response.status_code == 200, response.content
+        assert len(response.data) == 3
+        data1 = response.data["5678"]
+        assert data1["order"] == 0
+        assert data1["data"][0][1][0]["count"] == 10
+        data2 = response.data["1234"]
+        assert data2["order"] == 1
+        assert data2["data"][0][1][0]["count"] == 5
+        for datum in response.data.values():
+            assert datum["meta"] == {
+                "dataset": "metricsEnhanced",
+                "datasetReason": "unchanged",
+                "fields": {},
+                "isMetricsData": False,
+                "isMetricsExtractedData": True,
+                "tips": {},
+                "units": {},
+            }
+
+    def test_order_by_aggregate_top_events_asc(self):
+        url = "https://sentry.io"
+        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
+        self._setup_orderby_tests(query)
+        response = self.do_request(
+            data={
+                "dataset": "metricsEnhanced",
+                "field": ["networkId", "count()"],
+                "start": iso_format(self.day_ago),
+                "end": iso_format(self.day_ago + timedelta(hours=5)),
+                "onDemandType": "dynamic_query",
+                "orderby": "count()",
+                "interval": "1d",
+                "partial": 1,
+                "query": query,
+                "referrer": "api.dashboards.widget.bar-chart",
+                "project": self.project.id,
+                "topEvents": 2,
+                "useOnDemandMetrics": "true",
+                "yAxis": "count()",
+            },
+        )
+
+        assert response.status_code == 200, response.content
+        assert len(response.data) == 3
+        data1 = response.data["1234"]
+        assert data1["order"] == 0
+        assert data1["data"][0][1][0]["count"] == 5
+        data2 = response.data["5678"]
+        assert data2["order"] == 1
+        assert data2["data"][0][1][0]["count"] == 10
+        for datum in response.data.values():
+            assert datum["meta"] == {
+                "dataset": "metricsEnhanced",
+                "datasetReason": "unchanged",
+                "fields": {},
+                "isMetricsData": False,
+                "isMetricsExtractedData": True,
+                "tips": {},
+                "units": {},
+            }
+
+    def test_order_by_aggregate_top_events_graph_different_aggregate(self):
+        url = "https://sentry.io"
+        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
+        self._setup_orderby_tests(query)
+        response = self.do_request(
+            data={
+                "dataset": "metricsEnhanced",
+                "field": ["networkId", "count()"],
+                "start": iso_format(self.day_ago),
+                "end": iso_format(self.day_ago + timedelta(hours=5)),
+                "onDemandType": "dynamic_query",
+                "orderby": "count()",
+                "interval": "1d",
+                "partial": 1,
+                "query": query,
+                "referrer": "api.dashboards.widget.bar-chart",
+                "project": self.project.id,
+                "topEvents": 2,
+                "useOnDemandMetrics": "true",
+                "yAxis": "p95(transaction.duration)",
+            },
+        )
+
+        assert response.status_code == 200, response.content
+        assert len(response.data) == 3
+        data1 = response.data["1234"]
+        assert data1["order"] == 0
+        assert data1["data"][0][1][0]["count"] == 100
+        data2 = response.data["5678"]
+        assert data2["order"] == 1
+        assert data2["data"][0][1][0]["count"] == 200
+        for datum in response.data.values():
+            assert datum["meta"] == {
+                "dataset": "metricsEnhanced",
+                "datasetReason": "unchanged",
+                "fields": {},
+                "isMetricsData": False,
+                "isMetricsExtractedData": True,
+                "tips": {},
+                "units": {},
+            }
+
+    def test_cannot_order_by_tag(self):
+        url = "https://sentry.io"
+        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
+        self._setup_orderby_tests(query)
+        response = self.do_request(
+            data={
+                "dataset": "metrics",
+                "field": ["networkId", "count()"],
+                "start": iso_format(self.day_ago),
+                "end": iso_format(self.day_ago + timedelta(hours=5)),
+                "onDemandType": "dynamic_query",
+                "orderby": "-networkId",
+                "interval": "1d",
+                "partial": 1,
+                "query": query,
+                "referrer": "api.dashboards.widget.bar-chart",
+                "project": self.project.id,
+                "topEvents": 2,
+                "useOnDemandMetrics": "true",
+                "yAxis": "count()",
+            },
+        )
+
+        assert response.status_code == 400, response.content
+
+    def test_order_by_two_aggregates(self):
+        url = "https://sentry.io"
+        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
+        self._setup_orderby_tests(query)
+        response = self.do_request(
+            data={
+                "dataset": "metrics",
+                "field": ["networkId", "count()", "p95(transaction.duration)"],
+                "start": iso_format(self.day_ago),
+                "end": iso_format(self.day_ago + timedelta(hours=5)),
+                "onDemandType": "dynamic_query",
+                "orderby": ["count()", "p95(transaction.duration)"],
+                "interval": "1d",
+                "partial": 1,
+                "query": query,
+                "referrer": "api.dashboards.widget.bar-chart",
+                "project": self.project.id,
+                "topEvents": 2,
+                "useOnDemandMetrics": "true",
+                "yAxis": "p95(transaction.duration)",
+            },
+        )
+
+        assert response.status_code == 400, response.content
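To make the new flow concrete: a sketch of what `resolve_ondemand_orderby` produces for a typical dashboard request, assuming a builder whose on-demand spec map already contains the aggregate; names come from the diff above.

    # "-count()" means: order by the on-demand count metric, descending.
    orderby = "-count()"
    direction = Direction.DESC if orderby.startswith("-") else Direction.ASC
    spec = builder._on_demand_metric_spec_map[orderby.lstrip("-")]
    order_by = MetricOrderByField(
        field=builder.convert_spec_to_metric_field(spec),  # same field the select uses
        direction=direction,
    )
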
From 7c1b3351e69237818ab25ef99388224edeaf6f11 Mon Sep 17 00:00:00 2001
From: Jodi Jang <116035587+jangjodi@users.noreply.github.com>
Date: Fri, 9 Feb 2024 11:14:18 -0500
Subject: [PATCH 217/357] ref(similarity-embedding): Register option for
 feature (#64896)

Register option to allow project feature flag to be controlled by
options
---
 src/sentry/options/defaults.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 3251de9e1dfce3..047ed3c93ae14b 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -789,6 +789,12 @@
     flags=FLAG_MODIFIABLE_BOOL | FLAG_AUTOMATOR_MODIFIABLE,
 )
 
+register(
+    "issues.similarity-embeddings.projects-allowlist",
+    type=Sequence,
+    default=[],
+    flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
+)
 
 # ## sentry.killswitches
 #

From 2c6127c073cef1a0fcc5267a225f25945263711c Mon Sep 17 00:00:00 2001
From: Mark Story
Date: Fri, 9 Feb 2024 11:17:51 -0500
Subject: [PATCH 218/357] fix(hybridcloud) Add retries to integration proxy
 requests (#64684)

We've seen an increasing number of 503 errors when proxying to control.
Try using a retry strategy as that helped with other RPC pod traffic.
---
 src/sentry/net/http.py                         | 18 +++++++++++++-----
 src/sentry/shared_integrations/client/base.py  | 13 ++++++++++++-
 src/sentry/shared_integrations/client/proxy.py | 12 +++++++++++-
 .../shared_integrations/exceptions/__init__.py | 10 ++++++++++
 4 files changed, 46 insertions(+), 7 deletions(-)

diff --git a/src/sentry/net/http.py b/src/sentry/net/http.py
index 6d32992fe5fded..783d505340f1e3 100644
--- a/src/sentry/net/http.py
+++ b/src/sentry/net/http.py
@@ -8,7 +8,7 @@
 from typing import Optional
 
 from requests import Session as _Session
-from requests.adapters import DEFAULT_POOLBLOCK, HTTPAdapter
+from requests.adapters import DEFAULT_POOLBLOCK, DEFAULT_RETRIES, HTTPAdapter, Retry
 from urllib3.connection import HTTPConnection, HTTPSConnection
 from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool
 from urllib3.connectionpool import connection_from_url as _connection_from_url
@@ -146,11 +146,15 @@ class BlacklistAdapter(HTTPAdapter):
 
     is_ipaddress_permitted: IsIpAddressPermitted = None
 
-    def __init__(self, is_ipaddress_permitted: IsIpAddressPermitted = None) -> None:
+    def __init__(
+        self,
+        is_ipaddress_permitted: IsIpAddressPermitted = None,
+        max_retries: Retry | int = DEFAULT_RETRIES,
+    ) -> None:
         # If is_ipaddress_permitted is defined, then we pass it as an additional parameter to freshly created
         # `urllib3.connectionpool.ConnectionPool` instances managed by `SafePoolManager`.
         self.is_ipaddress_permitted = is_ipaddress_permitted
-        super().__init__()
+        super().__init__(max_retries=max_retries)
 
     def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
         self._pool_connections = connections
@@ -196,10 +200,14 @@ def request(self, *args, **kwargs):
 
 
 class SafeSession(Session):
-    def __init__(self, is_ipaddress_permitted: IsIpAddressPermitted = None) -> None:
+    def __init__(
+        self, is_ipaddress_permitted: IsIpAddressPermitted = None, max_retries: Retry | None = None
+    ) -> None:
         Session.__init__(self)
         self.headers.update({"User-Agent": USER_AGENT})
-        adapter = BlacklistAdapter(is_ipaddress_permitted=is_ipaddress_permitted)
+        adapter = BlacklistAdapter(
+            is_ipaddress_permitted=is_ipaddress_permitted, max_retries=max_retries
+        )
         self.mount("https://", adapter)
         self.mount("http://", adapter)
 
diff --git a/src/sentry/shared_integrations/client/base.py b/src/sentry/shared_integrations/client/base.py
index 1fdf31349552b4..e25531652d50a8 100644
--- a/src/sentry/shared_integrations/client/base.py
+++ b/src/sentry/shared_integrations/client/base.py
@@ -8,6 +8,7 @@
 import sentry_sdk
 from django.core.cache import cache
 from requests import PreparedRequest, Request, Response
+from requests.adapters import RetryError
 from requests.exceptions import ConnectionError, HTTPError, Timeout
 
 from sentry import audit_log, features
@@ -24,7 +25,13 @@
 from sentry.utils.audit import create_system_audit_entry
 from sentry.utils.hashlib import md5_text
 
-from ..exceptions import ApiConnectionResetError, ApiError, ApiHostError, ApiTimeoutError
+from ..exceptions import (
+    ApiConnectionResetError,
+    ApiError,
+    ApiHostError,
+    ApiRetryError,
+    ApiTimeoutError,
+)
 from ..response.base import BaseApiResponse
 from ..track_response import TrackResponseMixin
 
@@ -293,6 +300,10 @@ def _request(
             self.track_response_data("timeout", span, e, extra=extra)
             self.record_error(e)
             raise ApiTimeoutError.from_exception(e) from e
+        except RetryError as e:
+            self.track_response_data("max_retries", span, e, extra=extra)
+            self.record_error(e)
+            raise ApiRetryError.from_exception(e) from e
         except HTTPError as e:
             error_resp = e.response
             if error_resp is None:
diff --git a/src/sentry/shared_integrations/client/proxy.py b/src/sentry/shared_integrations/client/proxy.py
index 0d59ac89455e96..fb2d5aff32eba4 100644
--- a/src/sentry/shared_integrations/client/proxy.py
+++ b/src/sentry/shared_integrations/client/proxy.py
@@ -13,6 +13,7 @@
 from django.conf import settings
 from django.utils.encoding import force_str
 from requests import PreparedRequest
+from requests.adapters import Retry
 
 from sentry.db.postgres.transactions import in_test_hide_transaction_boundary
 from sentry.http import build_session
@@ -144,11 +145,20 @@ def build_session(self) -> SafeSession:
         """
         Generates a safe Requests session for the API client to use.
         This injects a custom is_ipaddress_permitted function to allow only connections to the IP address of the Control Silo.
+
        We only validate the IP address from within the Region Silo. For all other silo modes,
        we use the default is_ipaddress_permitted function, which tests against SENTRY_DISALLOWED_IPS.
        """
        if SiloMode.get_current_mode() == SiloMode.REGION:
-            return build_session(is_ipaddress_permitted=is_control_silo_ip_address)
+            return build_session(
+                is_ipaddress_permitted=is_control_silo_ip_address,
+                max_retries=Retry(
+                    total=5,
+                    backoff_factor=0.1,
+                    status_forcelist=[503],
+                    allowed_methods=["PATCH", "HEAD", "PUT", "GET", "DELETE", "POST"],
+                ),
+            )
        return build_session()
 
     @staticmethod
diff --git a/src/sentry/shared_integrations/exceptions/__init__.py b/src/sentry/shared_integrations/exceptions/__init__.py
index 03b0f713ff3205..5007dd57ff88c5 100644
--- a/src/sentry/shared_integrations/exceptions/__init__.py
+++ b/src/sentry/shared_integrations/exceptions/__init__.py
@@ -7,6 +7,7 @@
 
 from bs4 import BeautifulSoup
 from requests import Response
+from requests.adapters import RetryError
 from requests.exceptions import RequestException
 
 from sentry.utils import json
@@ -109,6 +110,15 @@ def from_request(cls, request: _RequestHasUrl) -> ApiHostError:
         return cls(f"Unable to reach host: {host}", url=request.url)
 
 
+class ApiRetryError(ApiError):
+    code = 503
+
+    @classmethod
+    def from_exception(cls, exception: RetryError) -> ApiRetryError:
+        msg = str(exception)
+        return cls(msg)
+
+
 class ApiTimeoutError(ApiError):
     code = 504
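A sketch of the retry behavior these pieces compose, using the `SafeSession` and `Retry` wiring from the diffs above; the standalone session here is illustrative, since in the patch `build_session` does this internally:

    from requests.adapters import Retry

    retries = Retry(
        total=5,
        backoff_factor=0.1,  # sleeps roughly 0.1s, 0.2s, 0.4s, ... between attempts
        status_forcelist=[503],
        allowed_methods=["PATCH", "HEAD", "PUT", "GET", "DELETE", "POST"],
    )
    session = SafeSession(max_retries=retries)
    # After the retry budget is exhausted urllib3 raises RetryError, which
    # base.py now translates into ApiRetryError (code 503) for callers.
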
""" if SiloMode.get_current_mode() == SiloMode.REGION: - return build_session(is_ipaddress_permitted=is_control_silo_ip_address) + return build_session( + is_ipaddress_permitted=is_control_silo_ip_address, + max_retries=Retry( + total=5, + backoff_factor=0.1, + status_forcelist=[503], + allowed_methods=["PATCH", "HEAD", "PUT", "GET", "DELETE", "POST"], + ), + ) return build_session() @staticmethod diff --git a/src/sentry/shared_integrations/exceptions/__init__.py b/src/sentry/shared_integrations/exceptions/__init__.py index 03b0f713ff3205..5007dd57ff88c5 100644 --- a/src/sentry/shared_integrations/exceptions/__init__.py +++ b/src/sentry/shared_integrations/exceptions/__init__.py @@ -7,6 +7,7 @@ from bs4 import BeautifulSoup from requests import Response +from requests.adapters import RetryError from requests.exceptions import RequestException from sentry.utils import json @@ -109,6 +110,15 @@ def from_request(cls, request: _RequestHasUrl) -> ApiHostError: return cls(f"Unable to reach host: {host}", url=request.url) +class ApiRetryError(ApiError): + code = 503 + + @classmethod + def from_exception(cls, exception: RetryError) -> ApiRetryError: + msg = str(exception) + return cls(msg) + + class ApiTimeoutError(ApiError): code = 504 From 0fa9d1a80ec4880c6896e515adc50a04e7aff86b Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 9 Feb 2024 11:23:31 -0500 Subject: [PATCH 219/357] fix(hybridcloud) Add an index to apitokenreplica.token (#64795) We frequently query by this column and having an index should improve query plans where we look up tokens. --- migrations_lockfile.txt | 2 +- .../0011_add_hybridcloudapitoken_index.py | 30 +++++++++++++++++++ .../hybridcloud/models/apitokenreplica.py | 1 + 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 src/sentry/hybridcloud/migrations/0011_add_hybridcloudapitoken_index.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 2ffd501f641099..a1c1e509c99e21 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -6,7 +6,7 @@ To resolve this, rebase against latest master and regenerate your migration. Thi will then be regenerated, and you should be able to merge without conflicts. feedback: 0004_index_together -hybridcloud: 0010_add_webhook_payload +hybridcloud: 0011_add_hybridcloudapitoken_index nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together sentry: 0643_add_date_modified_col_dashboard_widget_query diff --git a/src/sentry/hybridcloud/migrations/0011_add_hybridcloudapitoken_index.py b/src/sentry/hybridcloud/migrations/0011_add_hybridcloudapitoken_index.py new file mode 100644 index 00000000000000..9c0556c159622b --- /dev/null +++ b/src/sentry/hybridcloud/migrations/0011_add_hybridcloudapitoken_index.py @@ -0,0 +1,30 @@ +# Generated by Django 5.0.1 on 2024-02-07 17:23 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. 
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. + is_dangerous = False + + dependencies = [ + ("hybridcloud", "0010_add_webhook_payload"), + ] + + operations = [ + migrations.AddIndex( + model_name="apitokenreplica", + index=models.Index(fields=["token"], name="hybridcloud_token_1b7b55_idx"), + ), + ] diff --git a/src/sentry/hybridcloud/models/apitokenreplica.py b/src/sentry/hybridcloud/models/apitokenreplica.py index 5a5bda916a7d31..a18ed4ee1f855f 100644 --- a/src/sentry/hybridcloud/models/apitokenreplica.py +++ b/src/sentry/hybridcloud/models/apitokenreplica.py @@ -25,6 +25,7 @@ class ApiTokenReplica(Model, HasApiScopes): class Meta: app_label = "hybridcloud" db_table = "hybridcloud_apitokenreplica" + indexes = (models.Index(fields=["token"]),) __repr__ = sane_repr("user_id", "token", "application_id") From 4d3b93bd6efe3ab27b842a5993223f06dd2b560e Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Fri, 9 Feb 2024 10:40:09 -0600 Subject: [PATCH 220/357] chore(stats-detectors): Cleanup stats detectors feature flags (#64912) Stats detectors are GA'ed, so let's remove these feature flags. --- src/sentry/conf/server.py | 10 --- src/sentry/features/__init__.py | 5 -- src/sentry/tasks/statistical_detectors.py | 4 - .../tasks/test_statistical_detectors.py | 78 ++++--------------- 4 files changed, 15 insertions(+), 82 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 301d0c339e0bc0..16713c6ff5feba 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1748,12 +1748,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:performance-slow-db-issue": False, # Enable histogram view in span details "organizations:performance-span-histogram-view": False, - # Enable performance statistical detectors breakpoint detection - "organizations:performance-statistical-detectors-breakpoint": False, - # Enable performance statistical detectors ema detection - "organizations:performance-statistical-detectors-ema": False, - # Enable performance statistical detectors breakpoint lifecycles - "organizations:performance-statistical-detectors-lifecycles": False, # Enable trace details page with embedded spans "organizations:performance-trace-details": False, # Enable FE/BE for tracing without performance @@ -1782,10 +1776,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:profiling-differential-flamegraph-page": False, # Enable global suspect functions in profiling "organizations:profiling-global-suspect-functions": False, - # Enable profiling statistical detectors breakpoint detection - "organizations:profiling-statistical-detectors-breakpoint": False, - # Enable profiling statistical detectors ema detection - "organizations:profiling-statistical-detectors-ema": False, # Enable profiling summary redesign view "organizations:profiling-summary-redesign": False, # Enable the transactions backed profiling views diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index b33494d7989574..f4e87bc0e873f6 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -200,9 +200,6 @@ default_manager.add("organizations:performance-screens-platform-selector", OrganizationFeature, FeatureHandlerStrategy.REMOTE) 
default_manager.add("organizations:performance-slow-db-issue", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-span-histogram-view", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:performance-statistical-detectors-breakpoint", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:performance-statistical-detectors-ema", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:performance-statistical-detectors-lifecyles", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-trace-details", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-tracing-without-performance", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-transaction-name-only-search-indexed", OrganizationFeature, FeatureHandlerStrategy.REMOTE) @@ -216,8 +213,6 @@ default_manager.add("organizations:profiling-browser", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:profiling-differential-flamegraph-page", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-global-suspect-functions", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:profiling-statistical-detectors-breakpoint", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:profiling-statistical-detectors-ema", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:profiling-summary-redesign", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling-using-transactions", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:profiling", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py index 30743790b88cff..1716bebc59bae1 100644 --- a/src/sentry/tasks/statistical_detectors.py +++ b/src/sentry/tasks/statistical_detectors.py @@ -323,7 +323,6 @@ def detect_transaction_trends( projects = get_detector_enabled_projects( project_ids, - feature_name="organizations:performance-statistical-detectors-ema", project_option=InternalProjectOptions.TRANSACTION_DURATION_REGRESSION, ) @@ -372,7 +371,6 @@ def _detect_transaction_change_points( project.id: project for project in get_detector_enabled_projects( [project_id for project_id, _ in transactions], - feature_name="organizations:performance-statistical-detectors-breakpoint", ) } @@ -412,7 +410,6 @@ def detect_function_trends(project_ids: list[int], start: datetime, *args, **kwa projects = get_detector_enabled_projects( project_ids, - feature_name="organizations:profiling-statistical-detectors-ema", ) trends = FunctionRegressionDetector.detect_trends(projects, start) @@ -460,7 +457,6 @@ def _detect_function_change_points( project.id: project for project in get_detector_enabled_projects( [project_id for project_id, _ in functions_list], - feature_name="organizations:profiling-statistical-detectors-breakpoint", ) } diff --git a/tests/sentry/tasks/test_statistical_detectors.py b/tests/sentry/tasks/test_statistical_detectors.py index cfca65a7c9047a..dec894401d5968 100644 --- a/tests/sentry/tasks/test_statistical_detectors.py +++ b/tests/sentry/tasks/test_statistical_detectors.py 
@@ -44,7 +44,7 @@ ) from sentry.testutils.cases import MetricsAPIBaseTestCase, ProfilesSnubaTestCase from sentry.testutils.factories import Factories -from sentry.testutils.helpers import Feature, override_options +from sentry.testutils.helpers import override_options from sentry.testutils.helpers.datetime import before_now, freeze_time from sentry.testutils.pytest.fixtures import django_db_all from sentry.testutils.silo import region_silo_test @@ -163,12 +163,7 @@ def test_run_detection_options_multiple_batches( "statistical_detectors.enable": True, } - features = { - "organizations:performance-statistical-detectors-ema": [organization.slug], - "organizations:profiling-statistical-detectors-ema": [organization.slug], - } - - with freeze_time(timestamp), override_options(options), Feature(features): + with freeze_time(timestamp), override_options(options): run_detection() # total of 9 projects, broken into batches of 5 means batch sizes of 5 + 4 @@ -227,11 +222,7 @@ def test_detect_transaction_trends_options( "statistical_detectors.enable": task_enabled, } - features = { - "organizations:performance-statistical-detectors-ema": [project.organization.slug], - } - - with override_options(options), Feature(features): + with override_options(options): detect_transaction_trends([project.organization_id], [project.id], timestamp) assert query_transactions.called == (task_enabled and option_enabled) @@ -255,11 +246,7 @@ def test_detect_function_trends_options( "statistical_detectors.enable": enabled, } - features = { - "organizations:profiling-statistical-detectors-ema": [project.organization.slug], - } - - with override_options(options), Feature(features): + with override_options(options): detect_function_trends([project.id], timestamp) assert query_functions.called == enabled @@ -271,11 +258,7 @@ def test_detect_function_trends_query_timerange(functions_query, timestamp, proj "statistical_detectors.enable": True, } - features = { - "organizations:profiling-statistical-detectors-ema": [project.organization.slug], - } - - with override_options(options), Feature(features): + with override_options(options): detect_function_trends([project.id], timestamp) assert functions_query.called @@ -314,11 +297,7 @@ def test_detect_transaction_trends( "statistical_detectors.enable": True, } - features = { - "organizations:performance-statistical-detectors-ema": [project.organization.slug], - } - - with override_options(options), Feature(features): + with override_options(options): for ts in timestamps: detect_transaction_trends([project.organization.id], [project.id], ts) assert detect_transaction_change_points.apply_async.called @@ -357,17 +336,13 @@ def test_detect_transaction_trends_auto_resolution( "statistical_detectors.enable": True, } - features = { - "organizations:performance-statistical-detectors-ema": [project.organization.slug], - } - - with override_options(options), Feature(features): + with override_options(options): for ts in timestamps[:50]: detect_transaction_trends([project.organization.id], [project.id], ts) assert detect_transaction_change_points.apply_async.called - with override_options(options), Feature(features): + with override_options(options): RegressionGroup.objects.create( type=RegressionType.ENDPOINT.value, date_regressed=timestamps[10], @@ -446,11 +421,7 @@ def test_detect_transaction_trends_ratelimit( "statistical_detectors.ratelimit.ema": ratelimit, } - features = { - "organizations:performance-statistical-detectors-ema": [project.organization.slug], - } - - with 
From b6d595dbc4a2fb2412cab342d9a5ae6bea424842 Mon Sep 17 00:00:00 2001
From: Evan Hicks
Date: Fri, 9 Feb 2024 11:40:56 -0500
Subject: [PATCH 221/357] feat(mql) Add bulk_run_query function to the metrics
 layer (#64884)

This new function accepts a list of requests, and uses the threaded
executor in the underlying snuba code to run the queries in parallel and
then return a list of matching results for each request.
---
 src/sentry/snuba/metrics_layer/query.py | 114 ++++++++++++++----------
 tests/snuba/test_metrics_layer.py       |  59 +++++++++++-
 2 files changed, 127 insertions(+), 46 deletions(-)

diff --git a/src/sentry/snuba/metrics_layer/query.py b/src/sentry/snuba/metrics_layer/query.py
index 318b35082bb09b..cc0a0ddde70b0d 100644
--- a/src/sentry/snuba/metrics_layer/query.py
+++ b/src/sentry/snuba/metrics_layer/query.py
@@ -77,18 +77,73 @@ def __init__(self) -> None:
         self.reverse_mappings: dict[int, str] = dict()
 
 
+def bulk_run_query(requests: list[Request]) -> list[Mapping[str, Any]]:
+    """
+    Entrypoint for executing a list of metrics queries in Snuba.
+
+    This function is used to execute multiple metrics queries in a single request.
+    """
+    queries = []
+    for request in requests:
+        request, start, end = _setup_metrics_query(request)
+        queries.append([request, start, end])
+
+    logging_tags = {"referrer": request.tenant_ids["referrer"] or "unknown", "lang": "mql"}
+
+    for q in queries:
+        q[0], reverse_mappings, mappings = _resolve_metrics_query(q[0], logging_tags)
+        q.extend([reverse_mappings, mappings])
+
+    try:
+        snuba_results = bulk_snuba_queries(
+            [q[0] for q in queries],
+            queries[0][0].tenant_ids["referrer"],
+            use_cache=True,
+        )
+    except Exception:
+        metrics.incr(
+            "metrics_layer.query",
+            tags={**logging_tags, "status": "query_error"},
+        )
+        raise
+
+    for idx, snuba_result in enumerate(snuba_results):
+        request, start, end, reverse_mappings, mappings = queries[idx]
+        metrics_query = request.query
+
+        snuba_result = convert_snuba_result(
+            snuba_result,
+            reverse_mappings,
+            request.dataset,
+            metrics_query.scope.use_case_id,
+            metrics_query.scope.org_ids[0],
+        )
+
+        # If we normalized the start/end, return those values in the response so the caller is aware
+        results = {
+            **snuba_result,
+            "modified_start": start,
+            "modified_end": end,
+            "indexer_mappings": mappings,
+        }
+
+        snuba_results[idx] = results
+
+    metrics.incr(
+        "metrics_layer.query",
+        tags={**logging_tags, "status": "success"},
+    )
+    return snuba_results
+
+
 def run_query(request: Request) -> Mapping[str, Any]:
     """
     Entrypoint for executing a metrics query in Snuba.
-
-    First iteration:
-    The purpose of this function is to eventually replace datasource.py::get_series().
-    As a first iteration, this function will only support single timeseries metric queries.
-    This means that for now, other queries such as total, formula, or meta queries
-    will not be supported. Additionally, the first iteration will only support
-    querying raw metrics (no derived). This means that each call to this function will only
-    resolve into a single request (and single entity) to the Snuba API.
     """
+    return bulk_run_query([request])[0]
+
+
+def _setup_metrics_query(request: Request) -> tuple[Request, datetime, datetime]:
     metrics_query = request.query
     assert isinstance(metrics_query, MetricsQuery)
 
@@ -108,7 +163,7 @@ def run_query(request: Request) -> Mapping[str, Any]:
     start = metrics_query.start
     end = metrics_query.end
     if metrics_query.rollup.interval:
-        start, end, _num_intervals = to_intervals(
+        start, end, _ = to_intervals(
             metrics_query.start, metrics_query.end, metrics_query.rollup.interval
         )
         metrics_query = metrics_query.set_start(start).set_end(end)
@@ -121,7 +176,7 @@ def run_query(request: Request) -> Mapping[str, Any]:
     )
     request.query = metrics_query
 
-    return mql_query(request, start, end)
+    return request, start, end
 
 
 def _resolve_aggregate_aliases(exp: Timeseries | Formula) -> MetricsQuery:
@@ -182,9 +237,10 @@ def _resolve_granularity(start: datetime, end: datetime, interval: int | None) -
     return min(found_granularities)
 
 
-def mql_query(request: Request, start: datetime, end: datetime) -> Mapping[str, Any]:
+def _resolve_metrics_query(
+    request: Request, logging_tags: dict[str, str]
+) -> tuple[Request, ReverseMappings, dict[str, str | int]]:
     metrics_query = request.query
-    logging_tags = {"referrer": request.tenant_ids["referrer"] or "unknown", "lang": "mql"}
 
     try:
         # There are two kinds of resolving: lookup up in the indexer, and resolving things like
@@ -208,39 +264,7 @@ def mql_query(request: Request, start: datetime, end: datetime) -> Mapping[str,
         )
         raise
 
-    try:
-        snuba_result = bulk_snuba_queries(
-            [request],
-            request.tenant_ids["referrer"],
-            use_cache=True,
-        )[0]
-    except Exception:
-        metrics.incr(
-            "metrics_layer.query",
-            tags={**logging_tags, "status": "query_error"},
-        )
-        raise
-
-    snuba_result = convert_snuba_result(
-        snuba_result,
-        reverse_mappings,
-        request.dataset,
-        metrics_query.scope.use_case_id,
-        metrics_query.scope.org_ids[0],
-    )
-
-    # If we normalized the start/end, return those values in the response so the caller is aware
-    results = {
-        **snuba_result,
-        "modified_start": start,
-        "modified_end": end,
-        "indexer_mappings": mappings,
-    }
-    metrics.incr(
-        "metrics_layer.query",
-        tags={**logging_tags, "status": "success"},
-    )
-    return results
+    return request, reverse_mappings, mappings
 
 
 def _resolve_query_metadata(
diff --git a/tests/snuba/test_metrics_layer.py b/tests/snuba/test_metrics_layer.py
index bcd49217163188..f9255f2fcf5555 100644
--- a/tests/snuba/test_metrics_layer.py
+++ b/tests/snuba/test_metrics_layer.py
@@ -24,7 +24,7 @@
 from sentry.sentry_metrics.use_case_id_registry import UseCaseID
 from sentry.snuba.metrics.naming_layer import SessionMRI, TransactionMRI
 from sentry.snuba.metrics.naming_layer.public import TransactionStatusTagValue, TransactionTagsKey
-from sentry.snuba.metrics_layer.query import run_query
+from sentry.snuba.metrics_layer.query import bulk_run_query, run_query
 from sentry.testutils.cases import BaseMetricsTestCase, TestCase
 
 pytestmark = pytest.mark.sentry_metrics
@@ -133,6 +133,63 @@ def test_basic_generic_metrics(self) -> None:
                 ).isoformat()
             )
 
+    def test_basic_bulk_generic_metrics(self) -> None:
+        query = MetricsQuery(
+            query=None,
+            start=self.hour_ago,
+            end=self.now,
+            rollup=Rollup(interval=60, granularity=60),
+            scope=MetricsScope(
+                org_ids=[self.org_id],
+                project_ids=[self.project.id],
+                use_case_id=UseCaseID.TRANSACTIONS.value,
+            ),
+        )
+
+        query1 = query.set_query(
+            Timeseries(
+                metric=Metric(
+                    "transaction.duration",
+                    TransactionMRI.DURATION.value,
+                ),
+                aggregate="max",
+            )
+        )
+        query2 = query.set_query(
+            Timeseries(
+                metric=Metric(
+                    public_name=None,
+                    mri=TransactionMRI.USER.value,
+                ),
+                aggregate="uniq",
+            )
+        )
+        request1 = Request(
+            dataset="generic_metrics",
+            app_id="tests",
+            query=query1,
+            tenant_ids={"referrer": "metrics.testing.test", "organization_id": self.org_id},
+        )
+        request2 = Request(
+            dataset="generic_metrics",
+            app_id="tests",
+            query=query2,
+            tenant_ids={"referrer": "metrics.testing.test", "organization_id": self.org_id},
+        )
+        results = bulk_run_query([request1, request2])
+        assert len(results) == 2
+
+        result = results[0]  # Distribution
+        rows = result["data"]
+        for i in range(10):
+            assert rows[i]["aggregate_value"] == i
+            assert (
+                rows[i]["time"]
+                == (
+                    self.hour_ago.replace(second=0, microsecond=0) + timedelta(minutes=1 * i)
+                ).isoformat()
+            )
+
     def test_groupby_generic_metrics(self) -> None:
         query = MetricsQuery(
             query=Timeseries(
query2 = query.set_query( + Timeseries( + metric=Metric( + public_name=None, + mri=TransactionMRI.USER.value, + ), + aggregate="uniq", + ) + ) + request1 = Request( + dataset="generic_metrics", + app_id="tests", + query=query1, + tenant_ids={"referrer": "metrics.testing.test", "organization_id": self.org_id}, + ) + request2 = Request( + dataset="generic_metrics", + app_id="tests", + query=query2, + tenant_ids={"referrer": "metrics.testing.test", "organization_id": self.org_id}, + ) + results = bulk_run_query([request1, request2]) + assert len(results) == 2 + + result = results[0] # Distribution + rows = result["data"] + for i in range(10): + assert rows[i]["aggregate_value"] == i + assert ( + rows[i]["time"] + == ( + self.hour_ago.replace(second=0, microsecond=0) + timedelta(minutes=1 * i) + ).isoformat() + ) + def test_groupby_generic_metrics(self) -> None: query = MetricsQuery( query=Timeseries( From 180882754e22f55f08dc67c32710a4bc8580b41e Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Fri, 9 Feb 2024 08:44:07 -0800 Subject: [PATCH 222/357] chore(issues): Remove usage of streamline-targeting-context flag from the frontend (#64819) From what I can tell, this flag is GA'd, so we can remove it and allow self-hosted to access this view by default. Also removing components/tests that were dependent on not having this flag (`RuleBuilder`, and `CodeOwnersPanel`) --- static/app/views/alerts/create.spec.tsx | 6 +- .../alerts/rules/issue/ruleNode.spec.tsx | 8 +- .../app/views/alerts/rules/issue/ruleNode.tsx | 9 +- .../views/alerts/rules/issue/ruleNodeList.tsx | 21 +- .../project/navigationConfiguration.tsx | 4 +- .../project/projectOwnership/codeowners.tsx | 105 -------- .../projectOwnership/editRulesModal.spec.tsx | 14 -- .../projectOwnership/editRulesModal.tsx | 78 ++---- .../project/projectOwnership/index.spec.tsx | 2 +- .../project/projectOwnership/index.tsx | 100 ++------ .../project/projectOwnership/modal.spec.tsx | 23 -- .../project/projectOwnership/modal.tsx | 34 +-- .../projectOwnership/ownerInput.spec.tsx | 14 +- .../project/projectOwnership/ownerInput.tsx | 19 +- .../projectOwnership/ownershipRulesTable.tsx | 2 +- .../projectOwnership/ruleBuilder.spec.tsx | 169 ------------- .../project/projectOwnership/ruleBuilder.tsx | 236 ------------------ tests/acceptance/test_project_ownership.py | 6 +- tests/js/test-balancer/jest-balance.json | 1 - 19 files changed, 85 insertions(+), 766 deletions(-) delete mode 100644 static/app/views/settings/project/projectOwnership/codeowners.tsx delete mode 100644 static/app/views/settings/project/projectOwnership/ruleBuilder.spec.tsx delete mode 100644 static/app/views/settings/project/projectOwnership/ruleBuilder.tsx diff --git a/static/app/views/alerts/create.spec.tsx b/static/app/views/alerts/create.spec.tsx index 750d652eddf389..5d93e7afca1f11 100644 --- a/static/app/views/alerts/create.spec.tsx +++ b/static/app/views/alerts/create.spec.tsx @@ -458,7 +458,7 @@ describe('ProjectAlertsCreate', function () { // Add a new action await selectEvent.select(screen.getByText('Add action...'), [ - 'Issue Owners, Team, or Member', + 'Suggested Assignees, Team, or Member', ]); // Update action interval @@ -663,7 +663,7 @@ describe('ProjectAlertsCreate', function () { await userEvent.click((await screen.findAllByLabelText('Delete Node'))[0]); await selectEvent.select(screen.getByText('Add action...'), [ - 'Issue Owners, Team, or Member', + 'Suggested Assignees, Team, or Member', ]); expect( @@ -703,7 +703,7 @@ describe('ProjectAlertsCreate', function () { 
).not.toBeInTheDocument(); await selectEvent.select(screen.getByText('Add action...'), [ - 'Issue Owners, Team, or Member', + 'Suggested Assignees, Team, or Member', ]); expect( diff --git a/static/app/views/alerts/rules/issue/ruleNode.spec.tsx b/static/app/views/alerts/rules/issue/ruleNode.spec.tsx index 120d1eda838e57..320f3c0f8892ac 100644 --- a/static/app/views/alerts/rules/issue/ruleNode.spec.tsx +++ b/static/app/views/alerts/rules/issue/ruleNode.spec.tsx @@ -244,7 +244,7 @@ describe('RuleNode', () => { renderRuleNode(formNode(label), {targetType: 'IssueOwners'}); expect(screen.getByText('Send a notification to')).toBeInTheDocument(); - await selectEvent.select(screen.getByText('Issue Owners'), 'Team'); + await selectEvent.select(screen.getByText('Suggested Assignees'), 'Team'); expect(onPropertyChange).toHaveBeenCalledTimes(2); expect(onPropertyChange).toHaveBeenCalledWith(index, 'targetType', 'Team'); expect(onPropertyChange).toHaveBeenCalledWith(index, 'targetIdentifier', ''); @@ -253,11 +253,7 @@ describe('RuleNode', () => { it('renders mail action field with suggested assignees', async () => { const fieldName = 'exampleMailActionField'; const label = `Send a notification to {${fieldName}}`; - const organizationWithFeat = { - ...organization, - features: ['streamline-targeting-context'], - }; - renderRuleNode(formNode(label), {targetType: 'IssueOwners'}, organizationWithFeat); + renderRuleNode(formNode(label), {targetType: 'IssueOwners'}, organization); expect(screen.getByText('Send a notification to')).toBeInTheDocument(); await selectEvent.select(screen.getByText('Suggested Assignees'), 'Team'); diff --git a/static/app/views/alerts/rules/issue/ruleNode.tsx b/static/app/views/alerts/rules/issue/ruleNode.tsx index e95ff53a444288..4e89a389e92593 100644 --- a/static/app/views/alerts/rules/issue/ruleNode.tsx +++ b/static/app/views/alerts/rules/issue/ruleNode.tsx @@ -31,10 +31,6 @@ import SentryAppRuleModal from 'sentry/views/alerts/rules/issue/sentryAppRuleMod import TicketRuleModal from 'sentry/views/alerts/rules/issue/ticketRuleModal'; import type {SchemaFormConfig} from 'sentry/views/settings/organizationIntegrations/sentryAppExternalForm'; -export function hasStreamlineTargeting(organization: Organization): boolean { - return organization.features.includes('streamline-targeting-context'); -} - interface FieldProps { data: Props['data']; disabled: boolean; @@ -119,10 +115,7 @@ function MailActionFields({ onMemberTeamChange, }: FieldProps) { const isInitialized = data.targetType !== undefined && `${data.targetType}`.length > 0; - let issueOwnersLabel = t('Issue Owners'); - if (hasStreamlineTargeting(organization)) { - issueOwnersLabel = t('Suggested Assignees'); - } + const issueOwnersLabel = t('Suggested Assignees'); return ( => { return actions.map(node => { if (node.id === IssueAlertActionType.NOTIFY_EMAIL) { - let label = t('Issue Owners, Team, or Member'); - if (hasStreamlineTargeting(organization)) { - label = t('Suggested Assignees, Team, or Member'); - } + const label = t('Suggested Assignees, Team, or Member'); return { value: node, label, @@ -99,10 +95,7 @@ const groupLabels = { /** * Group options by category */ -const groupSelectOptions = ( - actions: IssueAlertRuleActionTemplate[], - organization: Organization -) => { +const groupSelectOptions = (actions: IssueAlertRuleActionTemplate[]) => { const grouped = actions.reduce< Record< keyof typeof groupLabels, @@ -142,7 +135,7 @@ const groupSelectOptions = ( .map(([key, values]) => { return { label: groupLabels[key], - 
options: createSelectOptions(values, organization), + options: createSelectOptions(values), }; }); }; @@ -264,8 +257,8 @@ class RuleNodeList extends Component { const options = selectType === 'grouped' - ? groupSelectOptions(enabledNodes, organization) - : createSelectOptions(enabledNodes, organization); + ? groupSelectOptions(enabledNodes) + : createSelectOptions(enabledNodes); return ( diff --git a/static/app/views/settings/project/navigationConfiguration.tsx b/static/app/views/settings/project/navigationConfiguration.tsx index 6156a1a3f1bfe4..d005a5bf5efdf9 100644 --- a/static/app/views/settings/project/navigationConfiguration.tsx +++ b/static/app/views/settings/project/navigationConfiguration.tsx @@ -48,9 +48,7 @@ export default function getConfiguration({ }, { path: `${pathPrefix}/ownership/`, - title: organization?.features?.includes('streamline-targeting-context') - ? t('Ownership Rules') - : t('Issue Owners'), + title: t('Ownership Rules'), description: t('Manage ownership rules for a project'), }, { diff --git a/static/app/views/settings/project/projectOwnership/codeowners.tsx b/static/app/views/settings/project/projectOwnership/codeowners.tsx deleted file mode 100644 index e70d395fa80d8c..00000000000000 --- a/static/app/views/settings/project/projectOwnership/codeowners.tsx +++ /dev/null @@ -1,105 +0,0 @@ -import {Component, Fragment} from 'react'; - -import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator'; -import type {Client} from 'sentry/api'; -import {Button} from 'sentry/components/button'; -import Confirm from 'sentry/components/confirm'; -import {IconDelete, IconSync} from 'sentry/icons'; -import {t} from 'sentry/locale'; -import type {CodeOwner, CodeownersFile, Organization, Project} from 'sentry/types'; -import withApi from 'sentry/utils/withApi'; -import RulesPanel from 'sentry/views/settings/project/projectOwnership/rulesPanel'; - -type Props = { - api: Client; - codeowners: CodeOwner[]; - disabled: boolean; - onDelete: (data: CodeOwner) => void; - onUpdate: (data: CodeOwner) => void; - organization: Organization; - project: Project; -}; - -class CodeOwnersPanel extends Component { - handleDelete = async (codeowner: CodeOwner) => { - const {api, organization, project, onDelete} = this.props; - const endpoint = `/api/0/projects/${organization.slug}/${project.slug}/codeowners/${codeowner.id}/`; - try { - await api.requestPromise(endpoint, { - method: 'DELETE', - }); - onDelete(codeowner); - addSuccessMessage(t('Deletion successful')); - } catch { - // no 4xx errors should happen on delete - addErrorMessage(t('An error occurred')); - } - }; - - handleSync = async (codeowner: CodeOwner) => { - const {api, organization, project, onUpdate} = this.props; - try { - const codeownerFile: CodeownersFile = await api.requestPromise( - `/organizations/${organization.slug}/code-mappings/${codeowner.codeMappingId}/codeowners/`, - { - method: 'GET', - } - ); - - const data = await api.requestPromise( - `/projects/${organization.slug}/${project.slug}/codeowners/${codeowner.id}/`, - { - method: 'PUT', - data: {raw: codeownerFile.raw}, - } - ); - onUpdate({...codeowner, ...data}); - addSuccessMessage(t('CODEOWNERS file sync successful.')); - } catch (_err) { - addErrorMessage(t('An error occurred trying to sync CODEOWNERS file.')); - } - }; - render() { - const {codeowners, disabled} = this.props; - return codeowners.map(codeowner => { - const {dateUpdated, provider, codeMapping, ownershipSyntax} = codeowner; - return ( - - } - size="xs" - onClick={() => 
this.handleSync(codeowner)} - disabled={disabled} - aria-label={t('Sync')} - />, - this.handleDelete(codeowner)} - message={t('Are you sure you want to remove this CODEOWNERS file?')} - key="confirm-delete" - disabled={disabled} - > - - )} + } /> @@ -194,7 +183,7 @@ tags.sku_class:enterprise #enterprise`; projectSlug={project.slug} codeowners={codeowners ?? []} /> - {hasStreamlineTargetingContext && ownership && ( + {ownership && ( )} - {!hasStreamlineTargetingContext && ownership && ( - - openEditOwnershipRules({ - organization, - project, - ownership, - onSave: this.handleOwnershipSave, - }) - } - disabled={editOwnershipRulesDisabled} - > - {t('Edit')} - , - ]} + + {hasCodeowners && ( + )} - - {hasCodeowners && - (hasStreamlineTargetingContext ? ( - - ) : ( - - ))} {ownership && (
    { /> ); - // Rule builder - expect(screen.getByLabelText('Rule pattern')).toBeInTheDocument(); - - expect(screen.getByText(/Match against Issue Data/)).toBeInTheDocument(); - // First in-app (default reverse order) frame is suggested - expect(screen.getByText('raven/base.py')).toBeInTheDocument(); - expect(screen.getByText('https://example.com/path')).toBeInTheDocument(); - }); - - it('renders streamline-targeting-context suggestions', () => { - render( - {}} - /> - ); - // Description expect(screen.getByText(/Assign issues based on custom rules/)).toBeInTheDocument(); @@ -107,9 +87,6 @@ describe('Project Ownership', () => { expect( screen.getByText(`url:*/path ${user.email}`, {exact: false}) ).toBeInTheDocument(); - - // Rule builder hidden TODO: remove when streamline-targeting-context is GA - expect(screen.queryByLabelText('Rule pattern')).not.toBeInTheDocument(); }); it('can cancel', async () => { diff --git a/static/app/views/settings/project/projectOwnership/modal.tsx b/static/app/views/settings/project/projectOwnership/modal.tsx index 6130f8eb1707b9..5cd775621a2e1e 100644 --- a/static/app/views/settings/project/projectOwnership/modal.tsx +++ b/static/app/views/settings/project/projectOwnership/modal.tsx @@ -138,31 +138,23 @@ class ProjectOwnershipModal extends DeprecatedAsyncComponent { .map(i => i.value) .slice(0, 5) : []; - - const hasStreamlineTargetingFeature = organization.features.includes( - 'streamline-targeting-context' - ); const paths = getFrameSuggestions(eventData); return ( - {hasStreamlineTargetingFeature ? ( - - - {tct( - 'Assign issues based on custom rules. To learn more, [docs:read the docs].', - { - docs: ( - - ), - } - )} - - - - ) : ( -
-   {t('Match against Issue Data: (globbing syntax *, ? supported)')}
    - )} + + + {tct( + 'Assign issues based on custom rules. To learn more, [docs:read the docs].', + { + docs: ( + + ), + } + )} + + + ); - // Set a path, as path is selected bu default. - await userEvent.type(screen.getByRole('textbox', {name: 'Rule pattern'}), 'file.js'); - - // Select the user. - await selectEvent.select( - screen.getByRole('textbox', {name: 'Rule owner'}), - 'Foo Bar' + await userEvent.type( + screen.getByRole('textbox', {name: 'Ownership Rules'}), + '\npath:file.js bob@example.com' ); - // Add the new rule. - await userEvent.click(screen.getByRole('button', {name: 'Add rule'})); + await userEvent.click(screen.getByRole('button', {name: 'Save'})); expect(put).toHaveBeenCalledWith( `/projects/${organization.slug}/${project.slug}/ownership/`, diff --git a/static/app/views/settings/project/projectOwnership/ownerInput.tsx b/static/app/views/settings/project/projectOwnership/ownerInput.tsx index 838385b16fb200..54185d4d43a693 100644 --- a/static/app/views/settings/project/projectOwnership/ownerInput.tsx +++ b/static/app/views/settings/project/projectOwnership/ownerInput.tsx @@ -17,8 +17,6 @@ import type {Organization, Project, Team} from 'sentry/types'; import {defined} from 'sentry/utils'; import {trackIntegrationAnalytics} from 'sentry/utils/integrationUtil'; -import RuleBuilder from './ruleBuilder'; - const defaultProps = { urls: [] as string[], paths: [] as string[], @@ -167,26 +165,11 @@ class OwnerInput extends Component { }; render() { - const {project, organization, disabled, urls, paths, initialText, dateUpdated} = - this.props; + const {disabled, initialText, dateUpdated} = this.props; const {hasChanges, text, error} = this.state; - const hasStreamlineTargetingFeature = organization.features.includes( - 'streamline-targeting-context' - ); - return ( - {!hasStreamlineTargetingFeature && ( - - )}
    { diff --git a/static/app/views/settings/project/projectOwnership/ownershipRulesTable.tsx b/static/app/views/settings/project/projectOwnership/ownershipRulesTable.tsx index 0063d91c2cf118..4544e755e274a9 100644 --- a/static/app/views/settings/project/projectOwnership/ownershipRulesTable.tsx +++ b/static/app/views/settings/project/projectOwnership/ownershipRulesTable.tsx @@ -136,7 +136,7 @@ export function OwnershipRulesTable({ }; return ( - + [TEAM_1, TEAM_2]); - - handleAdd = jest.fn(); - - project = ProjectFixture({ - // Teams in project - teams: [TEAM_1], - }); - ProjectsStore.loadInitialData([project]); - jest.spyOn(ProjectsStore, 'getBySlug').mockImplementation(() => project); - MockApiClient.clearMockResponses(); - MockApiClient.addMockResponse({ - url: '/organizations/org-slug/members/', - body: [ - {...USER_1, user: USER_1}, - {...USER_2, user: USER_2}, - ], - }); - }); - - it('renders', async function () { - render( - - ); - - const addButton = screen.getByRole('button', {name: 'Add rule'}); - - await userEvent.click(addButton); - expect(handleAdd).not.toHaveBeenCalled(); - - await userEvent.type( - screen.getByRole('textbox', {name: 'Rule pattern'}), - 'some/path/*' - ); - - expect(addButton).toBeDisabled(); - - await selectEvent.select( - screen.getByRole('textbox', {name: 'Rule owner'}), - 'Jane Bloggs' - ); - - expect(addButton).toBeEnabled(); - await userEvent.click(addButton); - expect(handleAdd).toHaveBeenCalled(); - }); - - it('renders with suggestions', async function () { - render( - - ); - - // Open the menu so we can do some assertions. - const ownerInput = screen.getByRole('textbox', {name: 'Rule owner'}); - selectEvent.openMenu(ownerInput); - - await waitForElementToBeRemoved(() => screen.queryByText('Loading...')); - - expect(screen.getByText('Jane Bloggs')).toBeInTheDocument(); - expect(screen.getByText('John Smith')).toBeInTheDocument(); - expect(screen.getByText('#cool-team')).toBeInTheDocument(); - expect(screen.getByText('#team-not-in-project')).toBeInTheDocument(); - - // TODO Check that the last two are disabled - - // Enter to select Jane Bloggs - await selectEvent.select(ownerInput, 'Jane Bloggs'); - - const candidates = screen.getAllByRole('button', {name: 'Path rule candidate'}); - await userEvent.click(candidates[0]); - - expect(screen.getByRole('textbox', {name: 'Rule pattern'})).toHaveValue('a/bar'); - - const addButton = screen.getByRole('button', {name: 'Add rule'}); - await waitFor(() => expect(addButton).toBeEnabled()); - - await userEvent.click(addButton); - expect(handleAdd).toHaveBeenCalled(); - }); - - it('builds a tag rule', async function () { - render( - - ); - - await selectEvent.select(screen.getByText('Path'), 'Tag'); - await userEvent.type(screen.getByPlaceholderText('tag-name'), 'mytag'); - await userEvent.type(screen.getByPlaceholderText('tag-value'), 'value'); - await selectEvent.select( - screen.getByRole('textbox', {name: 'Rule owner'}), - 'Jane Bloggs' - ); - await userEvent.click(screen.getByRole('button', {name: 'Add rule'})); - - expect(handleAdd).toHaveBeenCalledWith('tags.mytag:value janebloggs@example.com'); - }); -}); diff --git a/static/app/views/settings/project/projectOwnership/ruleBuilder.tsx b/static/app/views/settings/project/projectOwnership/ruleBuilder.tsx deleted file mode 100644 index 8aa568d3f9aaf5..00000000000000 --- a/static/app/views/settings/project/projectOwnership/ruleBuilder.tsx +++ /dev/null @@ -1,236 +0,0 @@ -import {Component, Fragment} from 'react'; -import styled from '@emotion/styled'; - -import 
{addErrorMessage} from 'sentry/actionCreators/indicator'; -import {Button} from 'sentry/components/button'; -import SelectControl from 'sentry/components/forms/controls/selectControl'; -import Input from 'sentry/components/input'; -import Tag from 'sentry/components/tag'; -import TextOverflow from 'sentry/components/textOverflow'; -import {IconAdd, IconChevron} from 'sentry/icons'; -import {t} from 'sentry/locale'; -import MemberListStore from 'sentry/stores/memberListStore'; -import {space} from 'sentry/styles/space'; -import type {Organization, Project} from 'sentry/types'; -import type {Owner} from 'sentry/views/settings/project/projectOwnership/selectOwners'; -import SelectOwners from 'sentry/views/settings/project/projectOwnership/selectOwners'; - -const initialState = { - text: '', - tagName: '', - type: 'path', - owners: [], - isValid: false, -}; - -function getMatchPlaceholder(type: string): string { - switch (type) { - case 'path': - return 'src/example/*'; - case 'module': - return 'com.module.name.example'; - case 'url': - return 'https://example.com/settings/*'; - case 'tag': - return 'tag-value'; - default: - return ''; - } -} - -type Props = { - disabled: boolean; - onAddRule: (rule: string) => void; - organization: Organization; - paths: string[]; - project: Project; - urls: string[]; -}; - -type State = { - isValid: boolean; - owners: Owner[]; - tagName: string; - text: string; - type: string; -}; - -class RuleBuilder extends Component { - state: State = initialState; - - checkIsValid = () => { - this.setState(state => ({ - isValid: !!state.text && state.owners && !!state.owners.length, - })); - }; - - handleTypeChange = (option: {label: string; value: string}) => { - this.setState({type: option.value}); - this.checkIsValid(); - }; - - handleTagNameChangeValue = (e: React.ChangeEvent) => { - this.setState({tagName: e.target.value}, this.checkIsValid); - }; - - handleChangeValue = (e: React.ChangeEvent) => { - this.setState({text: e.target.value}); - this.checkIsValid(); - }; - - handleChangeOwners = (owners: Owner[]) => { - this.setState({owners}); - this.checkIsValid(); - }; - - handleAddRule = () => { - const {type, text, tagName, owners, isValid} = this.state; - - if (!isValid) { - addErrorMessage('A rule needs a type, a value, and one or more issue owners.'); - return; - } - - const ownerText = owners - .map(owner => - owner.actor.type === 'team' - ? `#${owner.actor.name}` - : MemberListStore.getById(owner.actor.id)?.email - ) - .join(' '); - - const quotedText = text.match(/\s/) ? `"${text}"` : text; - - const rule = `${ - type === 'tag' ? `tags.${tagName}` : type - }:${quotedText} ${ownerText}`; - this.props.onAddRule(rule); - this.setState(initialState); - }; - - handleSelectCandidate = (text: string, type: string) => { - this.setState({text, type}); - this.checkIsValid(); - }; - - render() { - const {urls, paths, disabled, project, organization} = this.props; - const {type, text, tagName, owners, isValid} = this.state; - - const hasCandidates = paths || urls; - - return ( - - {hasCandidates && ( - - {paths.map(v => ( - this.handleSelectCandidate(v, 'path')} - > - - {v} - {t('Path')} - - ))} - {urls.map(v => ( - this.handleSelectCandidate(v, 'url')} - > - - {v} - {t('URL')} - - ))} - - )} - - - {type === 'tag' && ( - - )} - - - - + ) : null} +
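// ---------------------------------------------------------------------------
// Editor's sketch (illustrative, not part of this patch): the plain-text rule
// grammar that replaces the removed RuleBuilder UI. Each line of the
// "Ownership Rules" textarea is `matcher:pattern owner [owner...]`; this
// mirrors the string the deleted handleAddRule assembled above.
function buildOwnershipRule(
  type: 'path' | 'module' | 'url' | 'tag',
  pattern: string,
  owners: string[], // e.g. ['#team-name', 'jane@example.com']
  tagName?: string // required when type === 'tag'
): string {
  const matcher = type === 'tag' ? `tags.${tagName}` : type;
  // Quote patterns containing whitespace, as the old builder did.
  const quoted = /\s/.test(pattern) ? `"${pattern}"` : pattern;
  return `${matcher}:${quoted} ${owners.join(' ')}`;
}
// buildOwnershipRule('tag', 'value', ['janebloggs@example.com'], 'mytag')
// === 'tags.mytag:value janebloggs@example.com'
// ---------------------------------------------------------------------------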
    + ); + } + + if (isSpanNode(props.node)) { + return ( +
    +
    + + {props.node.children.length > 0 ? ( + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.children.length}{' '} + + ) : null} +
    + {props.node.value.op ?? ''} + + + {props.node.value.description ?? ''} + + {props.node.canFetchData ? ( + + ) : null} +
    + ); + } + + if (isMissingInstrumentationNode(props.node)) { + return ( +
    +
    + +
    + {t('Missing instrumentation')} +
    + ); + } + + if ('orphan_errors' in props.node.value) { + return ( +
    +
    + + {props.node.children.length > 0 ? ( + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.children.length}{' '} + + ) : null} +
    + + {t('Trace')} + + {props.trace_id} +
    + ); + } + + if (isTraceErrorNode(props.node)) { +
    +
    + + {props.node.children.length > 0 ? ( + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.children.length}{' '} + + ) : null} +
    + + {t('Error')} + + {props.node.value.title} +
    ; + } + + return null; +} + +function Connectors(props: {node: TraceTreeNode}) { + const showVerticalConnector = + ((props.node.expanded || props.node.zoomedIn) && props.node.children.length > 0) || + (props.node.value && 'autogrouped_by' in props.node.value); + + // If the tail node of the collapsed node has no children, + // we don't want to render the vertical connector as no children + // are being rendered as the chain is entirely collapsed + const hideVerticalConnector = + showVerticalConnector && + props.node.value && + props.node instanceof ParentAutogroupNode && + !props.node.tail.children.length; + + return ( + + {/* + @TODO count of rendered connectors could be % 3 as we can + have up to 3 connectors per node, 1 div, 1 before and 1 after + */} + {props.node.connectors.map((c, i) => { + return ( +
    + ); + })} + {showVerticalConnector && !hideVerticalConnector ? ( +
    + ) : null} + {props.node.isLastChild ? ( +
    + ) : ( +
    + )} + + ); +} + +function ProjectBadge(props: {project: Project}) { + return ; +} + +function ChildrenCountButton(props: { + children: React.ReactNode; + expanded: boolean; + onClick: () => void; +}) { + return ( + + ); +} + +/** + * This is a wrapper around the Trace component to apply styles + * to the trace tree. It exists because we _do not_ want to trigger + * emotion's css parsing logic as it is very slow and will cause + * the scrolling to flicker. + */ +const TraceStylingWrapper = styled('div')` + .TraceRow { + display: flex; + align-items: center; + position: absolute; + width: 100%; + font-size: ${p => p.theme.fontSizeSmall}; + + &:hover { + background-color: ${p => p.theme.backgroundSecondary}; + } + + &.Autogrouped { + color: ${p => p.theme.blue300}; + .TraceDescription { + font-weight: bold; + } + .TraceChildrenCountWrapper { + button { + color: ${p => p.theme.white}; + background-color: ${p => p.theme.blue300}; + } + } + } + } + + .TraceChildrenCount { + height: 16px; + white-space: nowrap; + min-width: 30px; + display: flex; + align-items: center; + justify-content: center; + border-radius: 99px; + padding: 0px ${space(0.5)}; + transition: all 0.15s ease-in-out; + background: ${p => p.theme.background}; + border: 2px solid ${p => p.theme.border}; + line-height: 0; + z-index: 1; + font-size: 10px; + box-shadow: ${p => p.theme.dropShadowLight}; + margin-right: ${space(1)}; + + svg { + width: 7px; + transition: none; + } + } + + .TraceChildrenCountWrapper { + display: flex; + justify-content: flex-end; + align-items: center; + min-width: 46px; + height: 100%; + position: relative; + + button { + transition: none; + } + + &.Orphaned { + .TraceVerticalConnector, + .TraceVerticalLastChildConnector, + .TraceExpandedVerticalConnector { + border-left: 2px dashed ${p => p.theme.border}; + } + + &::before { + border-bottom: 2px dashed ${p => p.theme.border}; + } + } + + &.Root { + &:before, + .TraceVerticalLastChildConnector { + visibility: hidden; + } + } + + &::before { + content: ''; + display: block; + width: 60%; + height: 2px; + border-bottom: 2px solid ${p => p.theme.border}; + position: absolute; + left: 0; + top: 50%; + transform: translateY(-50%); + } + + &::after { + content: ""; + background-color: rgb(224, 220, 229); + border-radius: 50%; + height: 6px; + width: 6px; + position: absolute; + left: 60%; + top: 50%; + transform: translateY(-50%); + } + } + + .TraceVerticalConnector { + position: absolute; + left: 0; + top: 0; + bottom: 0; + height: 100%; + width: 2px; + border-left: 2px solid ${p => p.theme.border}; + + &.Orphaned { + border-left: 2px dashed ${p => p.theme.border}; + } + } + + .TraceVerticalLastChildConnector { + position: absolute; + left: 0; + top: 0; + bottom: 0; + height: 50%; + width: 2px; + border-left: 2px solid ${p => p.theme.border}; + border-bottom-left-radius: 4px; + } + + .TraceExpandedVerticalConnector { + position: absolute; + bottom: 0; + height: 50%; + left: 50%; + width: 2px; + border-left: 2px solid ${p => p.theme.border}; + } + + .TraceOperation { + margin-left: ${space(0.5)}; + text-overflow: ellipsis; + white-space: nowrap; + font-weight: bold; + } + + .TraceEmDash { + margin-left: ${space(0.5)}; + margin-right: ${space(0.5)}; + } + + .TraceDescription { + white-space: nowrap; + } +`; diff --git a/static/app/views/performance/newTraceDetails/traceTree.spec.tsx b/static/app/views/performance/newTraceDetails/traceTree.spec.tsx new file mode 100644 index 00000000000000..dfbd46bb6f1621 --- /dev/null +++ 
b/static/app/views/performance/newTraceDetails/traceTree.spec.tsx @@ -0,0 +1,1083 @@ +import {OrganizationFixture} from 'sentry-fixture/organization'; + +import {waitFor} from 'sentry-test/reactTestingLibrary'; + +import type {RawSpanType} from 'sentry/components/events/interfaces/spans/types'; +import {EntryType, type Event} from 'sentry/types'; +import type { + TraceFullDetailed, + TraceSplitResults, +} from 'sentry/utils/performance/quickTrace/types'; + +import { + isAutogroupedNode, + isMissingInstrumentationNode, + isSpanNode, + isTransactionNode, +} from './guards'; +import { + ParentAutogroupNode, + type SiblingAutogroupNode, + TraceTree, + TraceTreeNode, +} from './traceTree'; + +function makeTrace( + overrides: Partial> +): TraceSplitResults { + return { + transactions: [], + orphan_errors: [], + ...overrides, + } as TraceSplitResults; +} + +function makeTransaction(overrides: Partial = {}): TraceFullDetailed { + return { + children: [], + start_timestamp: 0, + timestamp: 1, + 'transaction.op': '', + 'transaction.status': '', + ...overrides, + } as TraceFullDetailed; +} + +function makeRawSpan(overrides: Partial = {}): RawSpanType { + return { + op: '', + description: '', + start_timestamp: 0, + timestamp: 1, + ...overrides, + } as RawSpanType; +} + +function makeTraceError( + overrides: Partial = {} +): TraceTree.TraceError { + return { + title: 'MaybeEncodingError: Error sending result', + level: 'error', + data: {}, + ...overrides, + } as TraceTree.TraceError; +} + +function makeEvent(overrides: Partial = {}, spans: RawSpanType[] = []): Event { + return { + entries: [{type: EntryType.SPANS, data: spans}], + ...overrides, + } as Event; +} + +function assertSpanNode( + node: TraceTreeNode +): asserts node is TraceTreeNode { + if (!isSpanNode(node)) { + throw new Error('node is not a span'); + } +} + +function assertTransactionNode( + node: TraceTreeNode | null +): asserts node is TraceTreeNode { + if (!node || !isTransactionNode(node)) { + throw new Error('node is not a transaction'); + } +} + +function assertMissingInstrumentationNode( + node: TraceTreeNode +): asserts node is TraceTreeNode { + if (!isMissingInstrumentationNode(node)) { + throw new Error('node is not a missing instrumentation node'); + } +} + +function assertAutogroupedNode( + node: TraceTreeNode +): asserts node is ParentAutogroupNode | SiblingAutogroupNode { + if (!isAutogroupedNode(node)) { + throw new Error('node is not a autogrouped node'); + } +} + +function assertParentAutogroupedNode( + node: TraceTreeNode +): asserts node is ParentAutogroupNode { + if (!(node instanceof ParentAutogroupNode)) { + throw new Error('node is not a parent autogrouped node'); + } +} + +// function _assertSiblingAutogroupedNode( +// node: TraceTreeNode +// ): asserts node is ParentAutogroupNode { +// if (!(node instanceof SiblingAutogroupNode)) { +// throw new Error('node is not a parent node'); +// } +// } + +describe('TraceTree', () => { + beforeEach(() => { + MockApiClient.clearMockResponses(); + }); + it('builds from transactions', () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + children: [], + }), + makeTransaction({ + children: [], + }), + ], + }) + ); + + expect(tree.list).toHaveLength(3); + }); + + it('builds orphan errors as well', () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + children: [], + }), + makeTransaction({ + children: [], + }), + ], + orphan_errors: [makeTraceError()], + }) + ); + + 
expect(tree.list).toHaveLength(4); + }); + + it('builds from spans when root is a transaction node', () => { + const root = new TraceTreeNode( + null, + makeTransaction({ + children: [], + }), + {project_slug: '', event_id: ''} + ); + + const node = TraceTree.FromSpans(root, [ + makeRawSpan({start_timestamp: 0, op: '1', span_id: '1'}), + makeRawSpan({start_timestamp: 1, op: '2', span_id: '2', parent_span_id: '1'}), + makeRawSpan({start_timestamp: 2, op: '3', span_id: '3', parent_span_id: '2'}), + makeRawSpan({start_timestamp: 3, op: '4', span_id: '4', parent_span_id: '1'}), + ]); + + if (!isSpanNode(node.children[0])) { + throw new Error('Child needs to be a span'); + } + expect(node.children[0].value.span_id).toBe('1'); + expect(node.children[0].value.start_timestamp).toBe(0); + expect(node.children.length).toBe(1); + + assertSpanNode(node.children[0].children[0]); + assertSpanNode(node.children[0].children[0].children[0]); + assertSpanNode(node.children[0].children[1]); + + expect(node.children[0].children[0].value.start_timestamp).toBe(1); + expect(node.children[0].children[0].children[0].value.start_timestamp).toBe(2); + expect(node.children[0].children[1].value.start_timestamp).toBe(3); + }); + + it('injects missing spans', () => { + const root = new TraceTreeNode( + null, + makeTransaction({ + children: [], + }), + {project_slug: '', event_id: ''} + ); + + const date = new Date().getTime(); + + const node = TraceTree.FromSpans(root, [ + makeRawSpan({ + start_timestamp: date, + timestamp: date + 100, + span_id: '1', + op: 'span 1', + }), + makeRawSpan({ + start_timestamp: date + 200, + timestamp: date + 400, + op: 'span 2', + span_id: '2', + }), + ]); + + assertSpanNode(node.children[0]); + assertMissingInstrumentationNode(node.children[1]); + assertSpanNode(node.children[2]); + + expect(node.children.length).toBe(3); + expect(node.children[0].value.op).toBe('span 1'); + expect(node.children[1].value.type).toBe('missing_instrumentation'); + expect(node.children[2].value.op).toBe('span 2'); + }); + + it('builds and preserves list order', async () => { + const organization = OrganizationFixture(); + const api = new MockApiClient(); + + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + transaction: 'txn 1', + start_timestamp: 0, + children: [makeTransaction({start_timestamp: 1, transaction: 'txn 2'})], + }), + ], + }) + ); + + tree.expand(tree.list[0], true); + const node = tree.list[1]; + + const request = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/undefined:undefined/', + method: 'GET', + body: makeEvent({startTimestamp: 0}, [ + makeRawSpan({start_timestamp: 1, op: 'span 1', span_id: '1'}), + makeRawSpan({ + start_timestamp: 2, + op: 'span 2', + span_id: '2', + parent_span_id: '1', + }), + makeRawSpan({start_timestamp: 3, op: 'span 3', parent_span_id: '2'}), + makeRawSpan({start_timestamp: 4, op: 'span 4', parent_span_id: '1'}), + ]), + }); + + // 0 + // 1 + // 2 + // 3 + // 4 + tree.zoomIn(node, true, {api, organization}); + await waitFor(() => { + expect(node.zoomedIn).toBe(true); + }); + expect(request).toHaveBeenCalled(); + + expect(tree.list.length).toBe(6); + + assertTransactionNode(tree.list[1]); + assertSpanNode(tree.list[2]); + assertSpanNode(tree.list[3]); + + expect(tree.list[1].value.start_timestamp).toBe(0); + expect(tree.list[2].value.start_timestamp).toBe(1); + expect(tree.list[3].value.start_timestamp).toBe(2); + }); + + it('preserves input order', () => { + const firstChild = makeTransaction({ + 
start_timestamp: 0, + timestamp: 1, + children: [], + }); + + const secondChild = makeTransaction({ + start_timestamp: 1, + timestamp: 2, + children: [], + }); + + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + start_timestamp: 0, + timestamp: 2, + children: [firstChild, secondChild], + }), + makeTransaction({ + start_timestamp: 2, + timestamp: 4, + }), + ], + }) + ); + + expect(tree.list).toHaveLength(3); + + tree.expand(tree.list[1], true); + expect(tree.list).toHaveLength(5); + expect(tree.list[2].value).toBe(firstChild); + expect(tree.list[3].value).toBe(secondChild); + }); + + it('creates children -> parent references', () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + start_timestamp: 0, + timestamp: 2, + children: [makeTransaction({start_timestamp: 1, timestamp: 2})], + }), + makeTransaction({ + start_timestamp: 2, + timestamp: 4, + }), + ], + }) + ); + + expect(tree.list).toHaveLength(3); + tree.expand(tree.list[1], true); + expect(tree.list[2].parent?.value).toBe(tree.list[1].value); + }); + + it('establishes parent-child relationships', () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + children: [makeTransaction()], + }), + ], + }) + ); + + expect(tree.root.children).toHaveLength(1); + expect(tree.root.children[0].children).toHaveLength(1); + }); + + it('isLastChild', () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + children: [makeTransaction(), makeTransaction()], + }), + makeTransaction(), + ], + orphan_errors: [], + }) + ); + + tree.expand(tree.list[1], true); + + expect(tree.list[0].isLastChild).toBe(true); + expect(tree.list[1].isLastChild).toBe(false); + expect(tree.list[2].isLastChild).toBe(false); + expect(tree.list[3].isLastChild).toBe(true); + expect(tree.list[4].isLastChild).toBe(true); + }); + + describe('connectors', () => { + it('computes transaction connectors', () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + transaction: 'sibling', + children: [ + makeTransaction({transaction: 'child'}), + makeTransaction({transaction: 'child'}), + ], + }), + makeTransaction({transaction: 'sibling'}), + ], + }) + ); + + // -1 root + // ------ list begins here + // 0 transaction + // 0 |- sibling + // -1, 2| | - child + // -1| | - child + // 0 |- sibling + + tree.expand(tree.list[1], true); + expect(tree.list.length).toBe(5); + + expect(tree.list[0].connectors.length).toBe(0); + + expect(tree.list[1].connectors.length).toBe(1); + expect(tree.list[1].connectors[0]).toBe(-1); + + expect(tree.list[2].connectors[0]).toBe(-1); + expect(tree.list[2].connectors[1]).toBe(2); + expect(tree.list[2].connectors.length).toBe(2); + + expect(tree.list[3].connectors[0]).toBe(-1); + expect(tree.list[3].connectors.length).toBe(1); + + expect(tree.list[4].connectors.length).toBe(0); + }); + + it('computes span connectors', async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + project_slug: 'project', + event_id: 'event_id', + transaction: 'transaction', + children: [], + }), + ], + }) + ); + + // root + // |- node1 [] + // |- node2 [] + + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent({}, [ + makeRawSpan({start_timestamp: 0, op: 'span', span_id: '1'}), + ]), + }); + + expect(tree.list.length).toBe(2); + + tree.zoomIn(tree.list[1], true, { + 
api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + expect(tree.list.length).toBe(3); + }); + + // root + // |- node1 [] + // |- node2 [] + // |- span1 [] + + const span = tree.list[tree.list.length - 1]; + expect(span.connectors.length).toBe(0); + }); + }); + + describe('expanding', () => { + it('expands a node and updates the list', () => { + const tree = TraceTree.FromTrace( + makeTrace({transactions: [makeTransaction({children: [makeTransaction()]})]}) + ); + + const node = tree.list[1]; + + expect(tree.list.length).toBe(2); + expect(node.expanded).toBe(false); + expect(tree.expand(node, true)).toBe(true); + expect(node.expanded).toBe(true); + // Assert that the list has been updated + expect(tree.list).toHaveLength(3); + expect(tree.list[2]).toBe(node.children[0]); + }); + + it('collapses a node and updates the list', () => { + const tree = TraceTree.FromTrace( + makeTrace({transactions: [makeTransaction({children: [makeTransaction()]})]}) + ); + + const node = tree.list[1]; + + tree.expand(node, true); + expect(tree.list.length).toBe(3); + expect(tree.expand(node, false)).toBe(true); + expect(node.expanded).toBe(false); + // Assert that the list has been updated + expect(tree.list).toHaveLength(2); + expect(tree.list[1]).toBe(node); + }); + + it('preserves children expanded state', () => { + const lastChildExpandedTxn = makeTransaction({start_timestamp: 1000}); + const lastTransaction = makeTransaction({start_timestamp: 5}); + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + children: [ + makeTransaction({children: [lastChildExpandedTxn]}), + lastTransaction, + ], + }), + ], + }) + ); + + expect(tree.expand(tree.list[1], true)).toBe(true); + expect(tree.expand(tree.list[2], true)).toBe(true); + // Assert that the list has been updated + expect(tree.list).toHaveLength(5); + + expect(tree.expand(tree.list[2], false)).toBe(true); + expect(tree.list.length).toBe(4); + expect(tree.expand(tree.list[2], true)).toBe(true); + expect(tree.list[tree.list.length - 1].value).toBe(lastTransaction); + }); + + it('expanding or collapsing a zoomed in node doesnt do anything', async () => { + const organization = OrganizationFixture(); + const api = new MockApiClient(); + + const tree = TraceTree.FromTrace( + makeTrace({transactions: [makeTransaction({children: [makeTransaction()]})]}) + ); + + const node = tree.list[0]; + + const request = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/undefined:undefined/', + method: 'GET', + body: makeEvent(), + }); + + tree.zoomIn(node, true, {api, organization}); + await waitFor(() => { + expect(node.zoomedIn).toBe(true); + }); + expect(request).toHaveBeenCalled(); + expect(tree.expand(node, true)).toBe(false); + }); + }); + + describe('zooming', () => { + it('marks node as zoomed in', async () => { + const organization = OrganizationFixture(); + const api = new MockApiClient(); + + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({project_slug: 'project', event_id: 'event_id'}), + ], + }) + ); + + const request = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent(), + }); + const node = tree.list[1]; + + expect(node.zoomedIn).toBe(false); + tree.zoomIn(node, true, {api, organization}); + + await waitFor(() => { + expect(node.zoomedIn).toBe(true); + }); + + expect(request).toHaveBeenCalled(); + }); + it('fetches spans for node when zooming in', 
async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + transaction: 'txn', + project_slug: 'project', + event_id: 'event_id', + }), + ], + }) + ); + + const request = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent({}, [makeRawSpan()]), + }); + + const node = tree.list[1]; + expect(node.children).toHaveLength(0); + tree.zoomIn(node, true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + expect(request).toHaveBeenCalled(); + await waitFor(() => { + expect(node.children).toHaveLength(1); + }); + // Assert that the children have been updated + assertTransactionNode(node.children[0].parent); + expect(node.children[0].parent.value.transaction).toBe('txn'); + expect(node.children[0].depth).toBe(node.depth + 1); + }); + it('zooms out', async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({project_slug: 'project', event_id: 'event_id'}), + ], + }) + ); + + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent({}, [makeRawSpan({span_id: 'span1', description: 'span1'})]), + }); + tree.zoomIn(tree.list[1], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + assertSpanNode(tree.list[1].children[0]); + expect(tree.list[1].children[0].value.description).toBe('span1'); + }); + + tree.zoomIn(tree.list[1], false, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + // Assert child no longer points to children + expect(tree.list[1].zoomedIn).toBe(false); + expect(tree.list[1].children[0]).toBe(undefined); + expect(tree.list[2]).toBe(undefined); + }); + }); + + it('zooms in and out', async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({project_slug: 'project', event_id: 'event_id'}), + ], + }) + ); + + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent({}, [makeRawSpan({span_id: 'span 1', description: 'span1'})]), + }); + // Zoom in + tree.zoomIn(tree.list[1], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + await waitFor(() => { + assertSpanNode(tree.list[1].children[0]); + expect(tree.list[1].children[0].value.description).toBe('span1'); + }); + // Zoom out + tree.zoomIn(tree.list[1], false, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + await waitFor(() => { + expect(tree.list[2]).toBe(undefined); + }); + // Zoom in + tree.zoomIn(tree.list[1], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + await waitFor(() => { + assertSpanNode(tree.list[1].children[0]); + expect(tree.list[1].children[0].value?.description).toBe('span1'); + }); + }); + it('zooms in and out preserving siblings', async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + project_slug: 'project', + event_id: 'event_id', + start_timestamp: 0, + children: [ + makeTransaction({ + start_timestamp: 1, + timestamp: 2, + project_slug: 'other_project', + event_id: 'event_id', + }), + makeTransaction({start_timestamp: 2, timestamp: 3}), + ], + }), + ], + }) + ); + + const request = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/other_project:event_id/', + method: 'GET', + body: 
makeEvent({}, [makeRawSpan({description: 'span1'})]), + }); + tree.expand(tree.list[1], true); + tree.zoomIn(tree.list[2], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + expect(request).toHaveBeenCalled(); + + // Zoom in + await waitFor(() => { + expect(tree.list.length).toBe(5); + }); + + // Zoom out + tree.zoomIn(tree.list[2], false, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + expect(tree.list.length).toBe(4); + }); + }); + it('preserves expanded state when zooming in and out', async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + project_slug: 'project', + event_id: 'event_id', + children: [ + makeTransaction({project_slug: 'other_project', event_id: 'event_id'}), + ], + }), + ], + }) + ); + + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent({}, [ + makeRawSpan({description: 'span1'}), + makeRawSpan({description: 'span2'}), + ]), + }); + + tree.expand(tree.list[1], true); + + expect(tree.list.length).toBe(3); + + tree.zoomIn(tree.list[1], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + expect(tree.list.length).toBe(4); + }); + + tree.zoomIn(tree.list[1], false, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + expect(tree.list.length).toBe(3); + }); + expect(tree.list[1].expanded).toBe(true); + }); + }); + + describe('autogrouping', () => { + it('auto groups sibling spans and preserves tail spans', () => { + const root = new TraceTreeNode(null, makeRawSpan({description: 'span1'}), { + project_slug: '', + event_id: '', + }); + + for (let i = 0; i < 5; i++) { + root.children.push( + new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'db'}), { + project_slug: '', + event_id: '', + }) + ); + } + + root.children.push( + new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'http'}), { + project_slug: '', + event_id: '', + }) + ); + + expect(root.children.length).toBe(6); + + TraceTree.AutogroupSiblingSpanNodes(root); + + expect(root.children.length).toBe(2); + }); + + it('autogroups when number of children is exactly 5', () => { + const root = new TraceTreeNode(null, makeRawSpan({description: 'span1'}), { + project_slug: '', + event_id: '', + }); + + for (let i = 0; i < 5; i++) { + root.children.push( + new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'db'}), { + project_slug: '', + event_id: '', + }) + ); + } + + expect(root.children.length).toBe(5); + + TraceTree.AutogroupSiblingSpanNodes(root); + + expect(root.children.length).toBe(1); + }); + + it('autogroups when number of children is > 5', () => { + const root = new TraceTreeNode(null, makeRawSpan({description: 'span1'}), { + project_slug: '', + event_id: '', + }); + + for (let i = 0; i < 7; i++) { + root.children.push( + new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'db'}), { + project_slug: '', + event_id: '', + }) + ); + } + + expect(root.children.length).toBe(7); + + TraceTree.AutogroupSiblingSpanNodes(root); + + expect(root.children.length).toBe(1); + }); + + it('autogroups children case', () => { + // span1 : db + // ---span2 : http + // ------ span3 : http + + // to + + // span1 : db + // ---autogrouped(span2) : http + // ------ span2 : http + // --------- span3 : http + + const root = new TraceTreeNode( + null, + makeRawSpan({description: 'span1', 
span_id: '1', op: 'db'}), + { + project_slug: '', + event_id: '', + } + ); + + const child = new TraceTreeNode( + root, + makeRawSpan({description: 'span2', span_id: '2', op: 'http'}), + { + project_slug: '', + event_id: '', + } + ); + root.children.push(child); + + const grandChild = new TraceTreeNode( + child, + makeRawSpan({description: 'span3', span_id: '3', op: 'http'}), + { + project_slug: '', + event_id: '', + } + ); + child.children.push(grandChild); + + expect(root.children.length).toBe(1); + expect(root.children[0].children.length).toBe(1); + + TraceTree.AutogroupDirectChildrenSpanNodes(root); + + expect(root.children.length).toBe(1); + + const autoGroupedNode = root.children[0]; + expect(autoGroupedNode.children.length).toBe(0); + + autoGroupedNode.expanded = true; + + expect((autoGroupedNode.children[0].value as RawSpanType).description).toBe( + 'span2' + ); + }); + + it('autogrouping direct children skips rendering intermediary nodes', () => { + const root = new TraceTreeNode( + null, + makeRawSpan({span_id: 'span1', description: 'span1', op: 'db'}), + { + project_slug: '', + event_id: '', + } + ); + + const child = new TraceTreeNode( + root, + makeRawSpan({span_id: 'span2', description: 'span2', op: 'http'}), + { + project_slug: '', + event_id: '', + } + ); + root.children.push(child); + + const grandChild = new TraceTreeNode( + child, + makeRawSpan({span_id: 'span3', description: 'span3', op: 'http'}), + { + project_slug: '', + event_id: '', + } + ); + child.children.push(grandChild); + + expect(root.children.length).toBe(1); + expect(root.children[0].children.length).toBe(1); + + TraceTree.AutogroupDirectChildrenSpanNodes(root); + + expect(root.children.length).toBe(1); + + const autoGroupedNode = root.children[0]; + expect(autoGroupedNode.children.length).toBe(0); + + autoGroupedNode.expanded = true; + + expect((autoGroupedNode.children[0].value as RawSpanType).description).toBe( + 'span2' + ); + }); + + it('renders children of autogrouped sibling nodes', async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + transaction: '/', + project_slug: 'project', + event_id: 'event_id', + }), + ], + }) + ); + + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent({}, [ + makeRawSpan({description: 'parent span', op: 'http', span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), + ]), + }); + + expect(tree.list.length).toBe(2); + tree.zoomIn(tree.list[1], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + expect(tree.list.length).toBe(4); + }); + + const autogroupedNode = tree.list[tree.list.length - 1]; + + assertAutogroupedNode(autogroupedNode); + expect(autogroupedNode.value.autogrouped_by).toBeTruthy(); + expect(autogroupedNode.children.length).toBe(5); + tree.expand(autogroupedNode, true); + expect(tree.list.length).toBe(9); + }); + + it('renders children of autogrouped direct children nodes', async () => { + const tree = TraceTree.FromTrace( + makeTrace({ + transactions: [ + makeTransaction({ + transaction: '/', + project_slug: 'project', + event_id: 'event_id', + }), + ], + }) + ); + + 
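      // Editor's note (illustrative, not part of the patch): per the assertions
      // in this spec, sibling autogrouping collapses five or more same-op spans
      // under one parent into a single SiblingAutogroupNode, while parent
      // autogrouping (exercised in this test) collapses a single-child chain of
      // same-op spans into one ParentAutogroupNode that keeps head/tail
      // references to the original chain.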
MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/project:event_id/', + method: 'GET', + body: makeEvent({}, [ + makeRawSpan({description: 'parent span', op: 'http', span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', span_id: '2', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', span_id: '3', parent_span_id: '2'}), + makeRawSpan({description: 'span', op: 'db', span_id: '4', parent_span_id: '3'}), + makeRawSpan({description: 'span', op: 'db', span_id: '5', parent_span_id: '4'}), + makeRawSpan({ + description: 'span', + op: 'redis', + span_id: '6', + parent_span_id: '5', + }), + makeRawSpan({description: 'span', op: 'https', parent_span_id: '1'}), + ]), + }); + + expect(tree.list.length).toBe(2); + tree.zoomIn(tree.list[1], true, { + api: new MockApiClient(), + organization: OrganizationFixture(), + }); + + await waitFor(() => { + expect(tree.list.length).toBe(6); + }); + + const autogroupedNode = tree.list[tree.list.length - 3]; + assertParentAutogroupedNode(autogroupedNode); + expect('autogrouped_by' in autogroupedNode?.value).toBeTruthy(); + expect(autogroupedNode.groupCount).toBe(4); + + expect(autogroupedNode.head.value.span_id).toBe('2'); + expect(autogroupedNode.tail.value.span_id).toBe('5'); + + // Expand autogrouped node + expect(tree.expand(autogroupedNode, true)).toBe(true); + expect(tree.list.length).toBe(10); + + // Collapse autogrouped node + expect(tree.expand(autogroupedNode, false)).toBe(true); + expect(tree.list.length).toBe(6); + + expect(autogroupedNode.children[0].depth).toBe(4); + }); + }); +}); diff --git a/static/app/views/performance/newTraceDetails/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceTree.tsx new file mode 100644 index 00000000000000..b8b8cc025abd07 --- /dev/null +++ b/static/app/views/performance/newTraceDetails/traceTree.tsx @@ -0,0 +1,884 @@ +import type {Client} from 'sentry/api'; +import type {RawSpanType} from 'sentry/components/events/interfaces/spans/types'; +import type {Organization} from 'sentry/types'; +import type {Event, EventTransaction} from 'sentry/types/event'; +import type { + TraceError as TraceErrorType, + TraceFullDetailed, + TraceSplitResults, +} from 'sentry/utils/performance/quickTrace/types'; + +import { + isAutogroupedNode, + isMissingInstrumentationNode, + isParentAutogroupedNode, + isSiblingAutogroupedNode, + isSpanNode, + isTransactionNode, +} from './guards'; + +/** + * + * This file implements the tree data structure that is used to represent a trace. We do + * this both for performance reasons as well as flexibility. The requirement for a tree + * is to support incremental patching and updates. This is important because we want to + * be able to fetch more data as the user interacts with the tree, and we want to be able + * efficiently update the tree as we receive more data. + * + * The trace is represented as a tree with different node value types (transaction or span) + * Each tree node contains a reference to its parent and a list of references to its children, + * as well as a reference to the value that the node holds. Each node also contains + * some meta data and state about the node, such as if it is expanded or zoomed in. The benefit + * of abstracting parts of the UI state is that the tree will persist user actions such as expanding + * or collapsing nodes which would have otherwise been lost when individual nodes are remounted in the tree. 
+ *
+ * Each tree holds a list reference, which is a live reference to a flattened representation
+ * of the tree (used to render the tree in the UI). Since the list is mutable (and we want to keep it that way for performance
+ * reasons as we want to support mutations on traces with ~100k+ nodes), callers need to manage reactivity themselves.
+ *
+ * An alternative, but not recommended, approach is to call build() on the tree after each mutation,
+ * which will iterate over all of the children and build a fresh list reference.
+ *
+ * In most cases, the initial tree is a list of transactions containing other transactions. Each transaction can
+ * then be expanded into a list of spans which can also in some cases be expanded.
+ *
+ * - trace - trace
+ * |- parent transaction --> when expanding |- parent transaction
+ * |- child transaction |- span
+ * |- span this used to be a transaction,
+ * |- child transaction span <- but is now a list of spans
+ * |- span belonging to the transaction
+ * this results in child txns being lost,
+ * which is a confusing user experience
+ *
+ * The tree supports autogrouping of spans vertically or as siblings. When that happens, an autogrouped node of either a vertical or
+ * sibling type is inserted as an intermediary node. In the vertical case, the autogrouped node
+ * holds the reference to the head and tail of the autogrouped sequence. In the sibling case, the autogrouped node
+ * holds a reference to the children that are part of the autogrouped sequence. When expanding and collapsing these nodes,
+ * the tree performs a reference swap to either point to the head (when expanded) or tail (when collapsed) of the autogrouped sequence.
+ *
+ * In the vertical grouping case, the following happens:
+ *
+ * - root - root
+ * - trace - trace
+ * |- transaction |- transaction
+ * |- span 1 <-| these become autogrouped |- autogrouped (head=span1, tail=span3, children points to children of tail)
+ * |- span 2 |- as they are inserted into |- other span (parent points to autogrouped node)
+ * |- span 3 <-| the tree.
+ * |- other span
+ *
+ * When the autogrouped node is expanded the UI needs to show the entire collapsed chain, so we swap the tail children to point
+ * back to the tail, and have the autogrouped node point to its head as its children.
+ *
+ * - root - root
+ * - trace - trace
+ * |- transaction |- transaction
+ * |- autogrouped (head=span1, tail=span3) <- when expanding |- autogrouped (head=span1, tail=span3, children points to head)
+ * | other span (parent points to autogrouped) |- span 1 (head)
+ * |- span 2
+ * |- span 3 (tail)
+ * |- other span (children of tail, parent points to tail)
+ *
+ * Notes and improvements:
+ * - collecting children should be O(n); it is currently O(n^2) as we are missing a proper queue implementation
+ * - the notion of expanded and zoomed is confusing; they stand for the same idea from a UI pov
+ * - there is an annoying thing wrt span and transaction nodes where we either store data on _children or _spanChildren.
+ * This is because we want to be able to store both transaction and span nodes in the same tree, but it makes for an
+ * annoying API. A better design would have been to create an invisible meta node that just points to the correct children
+ * - connector generation should live in the UI layer, not in the tree. Same with depth calculation.
It is more convenient + * to calculate this when rendering the tree, as we can only calculate it only for the visible nodes and avoid an extra tree pass + * - instead of storing span children separately, we should have meta tree nodes that handle pointing to the correct children + * + */ + +export declare namespace TraceTree { + type Transaction = TraceFullDetailed; + type Span = RawSpanType; + type Trace = TraceSplitResults; + type TraceError = TraceErrorType; + + interface MissingInstrumentationSpan { + start_timestamp: number; + timestamp: number; + type: 'missing_instrumentation'; + } + interface SiblingAutogroup extends RawSpanType { + autogrouped_by: { + description: string; + op: string; + }; + } + + interface ChildrenAutogroup { + autogrouped_by: { + op: string; + }; + } + + type NodeValue = + | Trace + | Transaction + | TraceError + | Span + | MissingInstrumentationSpan + | SiblingAutogroup + | ChildrenAutogroup + | null; + + type Metadata = { + event_id: string | undefined; + project_slug: string | undefined; + }; +} + +function fetchTransactionSpans( + api: Client, + organization: Organization, + project_slug: string, + event_id: string +): Promise { + return api.requestPromise( + `/organizations/${organization.slug}/events/${project_slug}:${event_id}/` + ); +} + +function maybeInsertMissingInstrumentationSpan( + parent: TraceTreeNode, + node: TraceTreeNode +) { + const lastInsertedSpan = parent.spanChildren[parent.spanChildren.length - 1]; + if (!lastInsertedSpan) { + return; + } + + if (node.value.start_timestamp - lastInsertedSpan.value.timestamp < 100) { + return; + } + + const missingInstrumentationSpan = + new TraceTreeNode( + parent, + { + type: 'missing_instrumentation', + start_timestamp: lastInsertedSpan.value.timestamp, + timestamp: node.value.start_timestamp, + }, + { + event_id: undefined, + project_slug: undefined, + } + ); + + parent.spanChildren.push(missingInstrumentationSpan); +} +export class TraceTree { + root: TraceTreeNode = TraceTreeNode.Root(); + private _spanPromises: Map, Promise> = + new Map(); + private _list: TraceTreeNode[] = []; + + static Empty() { + return new TraceTree().build(); + } + + static FromTrace(trace: TraceTree.Trace): TraceTree { + const tree = new TraceTree(); + + function visit( + parent: TraceTreeNode, + value: TraceTree.NodeValue + ) { + const node = new TraceTreeNode(parent, value, { + project_slug: value && 'project_slug' in value ? value.project_slug : undefined, + event_id: value && 'event_id' in value ? 
value.event_id : undefined, + }); + node.canFetchData = true; + + if (parent) { + parent.children.push(node as TraceTreeNode); + } + + if (value && 'children' in value) { + for (const child of value.children) { + visit(node, child); + } + } + + return node; + } + + const traceNode = new TraceTreeNode(tree.root, trace, { + event_id: undefined, + project_slug: undefined, + }); + + // Trace is always expanded by default + traceNode.expanded = true; + tree.root.children.push(traceNode); + + for (const transaction of trace.transactions) { + visit(traceNode, transaction); + } + + for (const trace_error of trace.orphan_errors) { + visit(traceNode, trace_error); + } + + return tree.build(); + } + + static FromSpans( + parent: TraceTreeNode, + spans: RawSpanType[] + ): TraceTreeNode { + const parentIsSpan = isSpanNode(parent); + const lookuptable: Record> = {}; + + if (parent.spanChildren.length > 0) { + parent.zoomedIn = true; + return parent; + } + + if (parentIsSpan) { + if (parent.value && 'span_id' in parent.value) { + lookuptable[parent.value.span_id] = parent as TraceTreeNode; + } + } + + const childrenLinks = new Map(); + for (const child of parent.children) { + if ( + child.value && + 'parent_span_id' in child.value && + typeof child.value.parent_span_id === 'string' + ) { + childrenLinks.set(child.value.parent_span_id, child.metadata); + } + continue; + } + + for (const span of spans) { + const node = new TraceTreeNode(null, span, { + event_id: undefined, + project_slug: undefined, + }); + + const parentLinkMetadata = childrenLinks.get(span.span_id); + node.expanded = true; + node.canFetchData = !!parentLinkMetadata; + + if (parentLinkMetadata) { + node.metadata = parentLinkMetadata; + } + + lookuptable[span.span_id] = node; + + if (span.parent_span_id) { + const parentNode = lookuptable[span.parent_span_id]; + + if (parentNode) { + node.parent = parentNode; + maybeInsertMissingInstrumentationSpan(parentNode, node); + parentNode.spanChildren.push(node); + continue; + } + } + + // Orphaned span + maybeInsertMissingInstrumentationSpan(parent, node); + parent.spanChildren.push(node); + node.parent = parent as TraceTreeNode; + } + + parent.zoomedIn = true; + TraceTree.AutogroupSiblingSpanNodes(parent); + TraceTree.AutogroupDirectChildrenSpanNodes(parent); + return parent; + } + + get list(): ReadonlyArray> { + return this._list; + } + + // Span chain grouping is when multiple spans with the same op are nested as direct and only children + // @TODO Abdk: simplify the chaining logic + static AutogroupDirectChildrenSpanNodes( + root: TraceTreeNode + ): void { + const queue = [root]; + + while (queue.length > 0) { + const node = queue.pop()!; + + if (node.children.length > 1 || !isSpanNode(node)) { + for (const child of node.children) { + queue.push(child); + } + continue; + } + + const head = node; + let tail = node; + let groupMatchCount = 0; + + while ( + tail && + tail.children.length === 1 && + isSpanNode(tail.children[0]) && + tail.children[0].value.op === head.value.op + ) { + groupMatchCount++; + tail = tail.children[0]; + } + + if (groupMatchCount < 1) { + for (const child of head.children) { + queue.push(child); + } + continue; + } + + const autoGroupedNode = new ParentAutogroupNode( + node.parent, + { + ...head.value, + autogrouped_by: { + op: head.value && 'op' in head.value ? head.value.op ?? 
'' : '', + }, + }, + { + event_id: undefined, + project_slug: undefined, + }, + head as TraceTreeNode, + tail as TraceTreeNode + ); + + if (!node.parent) { + throw new Error('Parent node is missing, this should be unreachable code'); + } + + autoGroupedNode.groupCount = groupMatchCount + 1; + + for (const c of tail.children) { + c.parent = autoGroupedNode; + } + + const index = node.parent.children.indexOf(node); + node.parent.children[index] = autoGroupedNode; + } + } + + static AutogroupSiblingSpanNodes(root: TraceTreeNode): void { + // Span sibling grouping is when min 5 consecutive spans without children have matching op and description + // Span chain grouping is when multiple spans with the same op are nested as direct and only children + const queue = [root]; + + while (queue.length > 0) { + const node = queue.pop()!; + + if (node.children.length < 5) { + for (const child of node.children) { + queue.push(child); + } + continue; + } + + let startIndex = 0; + let matchCount = 0; + + for (let i = 0; i < node.children.length - 1; i++) { + const current = node.children[i] as TraceTreeNode; + const next = node.children[i + 1] as TraceTreeNode; + + if ( + next.children.length === 0 && + current.children.length === 0 && + // @TODO this should check for typeof op and description + // to be of type string for runtime safety. Afaik it is impossible + // for these to be anything else but a string, but we should still check + next.value.op === current.value.op && + next.value.description === current.value.description + ) { + matchCount++; + if (i < node.children.length - 2) { + continue; + } + } + + if (matchCount >= 4) { + const autoGroupedNode = new SiblingAutogroupNode( + node, + { + ...current.value, + autogrouped_by: { + op: current.value.op ?? '', + description: current.value.description ?? '', + }, + }, + { + event_id: undefined, + project_slug: undefined, + } + ); + + // Copy the children under the new node. + autoGroupedNode.children = node.children.slice(startIndex, matchCount + 1); + autoGroupedNode.groupCount = matchCount + 1; + + // Remove the old children from the parent and insert the new node. 
+ node.children.splice(startIndex, matchCount + 1, autoGroupedNode); + + for (let j = 0; j < autoGroupedNode.children.length; j++) { + autoGroupedNode.children[j].parent = autoGroupedNode; + } + } + + startIndex = i; + matchCount = 0; + } + } + } + + // Returns boolean to indicate if node was updated + expand(node: TraceTreeNode, expanded: boolean): boolean { + if (expanded === node.expanded) { + return false; + } + + // Expanding is not allowed for zoomed in nodes + if (node.zoomedIn) { + return false; + } + + if (node instanceof ParentAutogroupNode) { + // In parent autogrouping, we perform a node swap and either point the + // head or tails of the autogrouped sequence to the autogrouped node + if (node.expanded) { + const index = this._list.indexOf(node); + + const autogroupedChildren = node.getVisibleChildren(); + this._list.splice(index + 1, autogroupedChildren.length); + + const newChildren = node.tail.getVisibleChildren(); + + for (const c of node.tail.children) { + c.parent = node; + } + + this._list.splice(index + 1, 0, ...newChildren); + } else { + node.head.parent = node; + const index = this._list.indexOf(node); + const childrenCount = node.getVisibleChildrenCount(); + + this._list.splice(index + 1, childrenCount); + + node.getVisibleChildrenCount(); + const newChildren = [node.head].concat( + node.head.getVisibleChildren() as TraceTreeNode[] + ); + + for (const c of node.children) { + c.parent = node.tail; + } + + this._list.splice(index + 1, 0, ...newChildren); + } + + node.invalidate(node); + node.expanded = expanded; + return true; + } + + if (node.expanded) { + const index = this._list.indexOf(node); + this._list.splice(index + 1, node.getVisibleChildrenCount()); + } else { + const index = this._list.indexOf(node); + this._list.splice(index + 1, 0, ...node.getVisibleChildren()); + } + node.expanded = expanded; + return true; + } + + zoomIn( + node: TraceTreeNode, + zoomedIn: boolean, + options: { + api: Client; + organization: Organization; + } + ): Promise { + if (zoomedIn === node.zoomedIn) { + return Promise.resolve(null); + } + + if (!zoomedIn) { + const index = this._list.indexOf(node); + const childrenCount = node.getVisibleChildrenCount(); + this._list.splice(index + 1, childrenCount); + + node.zoomedIn = zoomedIn; + + if (node.expanded) { + this._list.splice(index + 1, 0, ...node.getVisibleChildren()); + } + + return Promise.resolve(null); + } + + const promise = + this._spanPromises.get(node) ?? + fetchTransactionSpans( + options.api, + options.organization, + node.metadata.project_slug!, + node.metadata.event_id! 
+ ); + + promise.then(data => { + const spans = data.entries.find(s => s.type === 'spans'); + if (!spans) { + return data; + } + + // Remove existing entries from the list + const index = this._list.indexOf(node); + if (node.expanded) { + const childrenCount = node.getVisibleChildrenCount(); + this._list.splice(index + 1, childrenCount); + } + + // Api response is not sorted + if (spans.data) { + spans.data.sort((a, b) => a.start_timestamp - b.start_timestamp); + } + + TraceTree.FromSpans(node, spans.data); + + const spanChildren = node.getVisibleChildren(); + this._list.splice(index + 1, 0, ...spanChildren); + return data; + }); + + this._spanPromises.set(node, promise); + return promise; + } + + toList(): TraceTreeNode[] { + const list: TraceTreeNode[] = []; + + function visit(node: TraceTreeNode) { + list.push(node); + + if (!node.expanded) { + return; + } + + for (const child of node.children) { + visit(child); + } + } + + for (const child of this.root.children) { + visit(child); + } + + return list; + } + + /** + * Prints the tree in a human readable format, useful for debugging and testing + */ + print() { + const print = this.list + .map(t => { + const padding = ' '.repeat(t.depth); + + if (isAutogroupedNode(t)) { + if (isParentAutogroupedNode(t)) { + return padding + 'parent autogroup'; + } + if (isSiblingAutogroupedNode(t)) { + return padding + 'sibling autogroup'; + } + + return padding + 'autogroup'; + } + if (isSpanNode(t)) { + return padding + t.value?.op ?? 'unknown span op'; + } + if (isTransactionNode(t)) { + return padding + t.value.transaction ?? 'unknown transaction'; + } + if (isMissingInstrumentationNode(t)) { + return padding + 'missing_instrumentation'; + } + throw new Error('Not implemented'); + }) + .filter(Boolean) + .join('\n'); + + // eslint-disable-next-line no-console + console.log(print); + } + + build() { + this._list = this.toList(); + return this; + } +} + +export class TraceTreeNode { + parent: TraceTreeNode | null = null; + value: T; + expanded: boolean = false; + zoomedIn: boolean = false; + canFetchData: boolean = false; + metadata: TraceTree.Metadata = { + project_slug: undefined, + event_id: undefined, + }; + + private _depth: number | undefined; + private _children: TraceTreeNode[] = []; + private _spanChildren: TraceTreeNode< + TraceTree.Span | TraceTree.MissingInstrumentationSpan + >[] = []; + private _connectors: number[] | undefined = undefined; + + constructor( + parent: TraceTreeNode | null, + value: T, + metadata: TraceTree.Metadata + ) { + this.parent = parent ?? null; + this.value = value; + this.metadata = metadata; + + if (isTransactionNode(this)) { + this.expanded = true; + } + } + + get isOrphaned() { + return this.parent?.value && 'orphan_errors' in this.parent.value; + } + + get isLastChild() { + return this.parent?.children[this.parent.children.length - 1] === this; + } + + /** + * Return a lazily calculated depth of the node in the tree. + * Root node has a value of -1 as it is abstract. 
+ */ + get depth(): number { + if (typeof this._depth === 'number') { + return this._depth; + } + + let depth = -2; + let node: TraceTreeNode | null = this; + + while (node) { + if (typeof node.parent?.depth === 'number') { + this._depth = node.parent.depth + 1; + return this._depth; + } + depth++; + node = node.parent; + } + + this._depth = depth; + return this._depth; + } + + /** + * Returns the depth levels at which the row should draw vertical connectors + * negative values mean connector points to an orphaned node + */ + get connectors(): number[] { + if (this._connectors !== undefined) { + return this._connectors!; + } + + this._connectors = []; + + if (this.parent?.connectors !== undefined) { + this._connectors = [...this.parent.connectors]; + + if (this.isLastChild || this.value === null) { + return this._connectors; + } + + this.connectors.push(this.isOrphaned ? -this.depth : this.depth); + return this._connectors; + } + + let node: TraceTreeNode | TraceTreeNode | null = this.parent; + + while (node) { + if (node.value === null) { + break; + } + + if (node.isLastChild) { + node = node.parent; + continue; + } + + this._connectors.push(node.isOrphaned ? -node.depth : node.depth); + node = node.parent; + } + + return this._connectors; + } + + /** + * Returns the children that the node currently points to. + * The logic here is a consequence of the tree design, where we want to be able to store + * both transaction and span nodes in the same tree. This results in an annoying API where + * we either store span children separately or transaction children separately. A better design + * would have been to create an invisible meta node that always points to the correct children. + */ + get children(): TraceTreeNode[] { + // if node is not a autogrouped node, return children + if (isAutogroupedNode(this)) { + return this._children; + } + + if (isSpanNode(this)) { + return this.canFetchData && !this.zoomedIn ? [] : this.spanChildren; + } + + // if a node is zoomed in, return span children, else return transaction children + return this.zoomedIn ? this._spanChildren : this._children; + } + + set children(children: TraceTreeNode[]) { + this._children = children; + } + + get spanChildren(): TraceTreeNode< + TraceTree.Span | TraceTree.MissingInstrumentationSpan + >[] { + return this._spanChildren; + } + + /** + * Invalidate the visual data used to render the tree, forcing it + * to be recalculated on the next render. 
This is useful when for example + * the tree is expanded or collapsed, or when the tree is mutated and + * the visual data is no longer valid as the indentation changes + */ + invalidate(root?: TraceTreeNode) { + this._connectors = undefined; + this._depth = undefined; + + if (root) { + const queue = [...this.children]; + + while (queue.length > 0) { + const next = queue.pop()!; + next.invalidate(); + for (let i = 0; i < next.children.length; i++) { + queue.push(next.children[i]); + } + } + } + } + + getVisibleChildrenCount(): number { + if (!this.children.length) { + return 0; + } + + let count = 0; + const queue = [...this.children]; + + while (queue.length > 0) { + count++; + const next = queue.pop()!; + + if (next.expanded || isParentAutogroupedNode(next)) { + for (let i = 0; i < next.children.length; i++) { + queue.push(next.children[i]); + } + } + } + + return count; + } + + getVisibleChildren(): TraceTreeNode[] { + if (!this.children.length) { + return []; + } + + // @TODO: should be a proper FIFO queue as shift is O(n) + const visibleChildren: TraceTreeNode[] = []; + + function visit(node) { + visibleChildren.push(node); + + if (node.expanded || isParentAutogroupedNode(node)) { + for (let i = 0; i < node.children.length; i++) { + visit(node.children[i]); + } + } + } + + for (const child of this.children) { + visit(child); + } + + return visibleChildren; + } + + static Root() { + return new TraceTreeNode(null, null, { + event_id: undefined, + project_slug: undefined, + }); + } +} + +export class ParentAutogroupNode extends TraceTreeNode { + head: TraceTreeNode; + tail: TraceTreeNode; + groupCount: number = 0; + + constructor( + parent: TraceTreeNode | null, + node: TraceTree.ChildrenAutogroup, + metadata: TraceTree.Metadata, + head: TraceTreeNode, + tail: TraceTreeNode + ) { + super(parent, node, metadata); + + this.head = head; + this.tail = tail; + } + + get children() { + if (this.expanded) { + return [this.head]; + } + return this.tail.children; + } +} + +export class SiblingAutogroupNode extends TraceTreeNode { + groupCount: number = 0; + + constructor( + parent: TraceTreeNode | null, + node: TraceTree.SiblingAutogroup, + metadata: TraceTree.Metadata + ) { + super(parent, node, metadata); + } +} diff --git a/static/app/views/performance/traceDetails/index.tsx b/static/app/views/performance/traceDetails/index.tsx index b8fa4116f8324d..6f9dcbf43f0c8d 100644 --- a/static/app/views/performance/traceDetails/index.tsx +++ b/static/app/views/performance/traceDetails/index.tsx @@ -22,6 +22,7 @@ import {decodeScalar} from 'sentry/utils/queryString'; import withApi from 'sentry/utils/withApi'; import withOrganization from 'sentry/utils/withOrganization'; +import {TraceView as TraceViewV1} from './../newTraceDetails'; import TraceDetailsContent from './content'; import {DEFAULT_TRACE_ROWS_LIMIT} from './limitExceededMessage'; import NewTraceDetailsContent from './newTraceDetailsContent'; @@ -180,6 +181,10 @@ class TraceSummary extends Component { render() { const {organization} = this.props; + if (organization.features.includes('trace-view-v1')) { + return ; + } + return ( From 74c9d180c74dc70e0b1e0b3a7c2a03a6b6cea16b Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 12 Feb 2024 09:25:37 -0500 Subject: [PATCH 244/357] ref: use a smaller page size in test_get_key_transaction_list_pagination test (#64270) --- src/sentry/api/base.py | 28 +++++++++++-------- .../endpoints/organization_user_reports.py | 8 +++++- 
.../api/endpoints/project_user_reports.py | 8 +++++- src/sentry/testutils/helpers/pagination.py | 11 ++++++++ .../test_discover_key_transactions.py | 8 ++++-- 5 files changed, 47 insertions(+), 16 deletions(-) create mode 100644 src/sentry/testutils/helpers/pagination.py diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index d91dc3765136ca..1ca5140c06c288 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -44,6 +44,13 @@ ) from sentry.utils.sdk import capture_exception, merge_context_into_scope +from ..services.hybrid_cloud import rpcmetrics +from ..utils.pagination_factory import ( + annotate_span_with_pagination_args, + clamp_pagination_per_page, + get_cursor, + get_paginator, +) from .authentication import ( ApiKeyAuthentication, OrgAuthTokenAuthentication, @@ -56,6 +63,7 @@ SuperuserOrStaffFeatureFlaggedPermission, SuperuserPermission, ) +from .utils import generate_organization_url __all__ = [ "Endpoint", @@ -65,14 +73,7 @@ "region_silo_endpoint", ] -from ..services.hybrid_cloud import rpcmetrics -from ..utils.pagination_factory import ( - annotate_span_with_pagination_args, - clamp_pagination_per_page, - get_cursor, - get_paginator, -) -from .utils import generate_organization_url +PAGINATION_DEFAULT_PER_PAGE = 100 ONE_MINUTE = 60 ONE_HOUR = ONE_MINUTE * 60 @@ -438,7 +439,12 @@ def respond(self, context: object | None = None, **kwargs: Any) -> Response: def respond_with_text(self, text): return self.respond({"text": text}) - def get_per_page(self, request: Request, default_per_page=100, max_per_page=100): + def get_per_page( + self, request: Request, default_per_page: int | None = None, max_per_page: int | None = None + ): + default_per_page = default_per_page or PAGINATION_DEFAULT_PER_PAGE + max_per_page = max_per_page or 100 + try: return clamp_pagination_per_page( request.GET.get("per_page", default_per_page), @@ -460,8 +466,8 @@ def paginate( on_results=None, paginator=None, paginator_cls=Paginator, - default_per_page=100, - max_per_page=100, + default_per_page: int | None = None, + max_per_page: int | None = None, cursor_cls=Cursor, response_cls=Response, response_kwargs=None, diff --git a/src/sentry/api/endpoints/organization_user_reports.py b/src/sentry/api/endpoints/organization_user_reports.py index e79ffadb3a2699..f4c935c6e17d90 100644 --- a/src/sentry/api/endpoints/organization_user_reports.py +++ b/src/sentry/api/endpoints/organization_user_reports.py @@ -1,3 +1,5 @@ +from typing import NotRequired, TypedDict + from rest_framework.request import Request from rest_framework.response import Response @@ -13,6 +15,10 @@ from sentry.models.userreport import UserReport +class _PaginateKwargs(TypedDict): + post_query_filter: NotRequired[object] + + @region_silo_endpoint class OrganizationUserReportsEndpoint(OrganizationEndpoint): owner = ApiOwner.FEEDBACK @@ -52,7 +58,7 @@ def get(self, request: Request, organization) -> Response: ) status = request.GET.get("status", "unresolved") - paginate_kwargs = {} + paginate_kwargs: _PaginateKwargs = {} if status == "unresolved": paginate_kwargs["post_query_filter"] = user_reports_filter_to_unresolved elif status: diff --git a/src/sentry/api/endpoints/project_user_reports.py b/src/sentry/api/endpoints/project_user_reports.py index 5e9276f1c7b390..67783f080b5a60 100644 --- a/src/sentry/api/endpoints/project_user_reports.py +++ b/src/sentry/api/endpoints/project_user_reports.py @@ -1,3 +1,5 @@ +from typing import NotRequired, TypedDict + from rest_framework import serializers from rest_framework.request import 
Request from rest_framework.response import Response @@ -23,6 +25,10 @@ class Meta: fields = ("name", "email", "comments", "event_id") +class _PaginateKwargs(TypedDict): + post_query_filter: NotRequired[object] + + @region_silo_endpoint class ProjectUserReportsEndpoint(ProjectEndpoint, EnvironmentMixin): owner = ApiOwner.FEEDBACK @@ -47,7 +53,7 @@ def get(self, request: Request, project) -> Response: if isinstance(request.auth, ProjectKey): return self.respond(status=401) - paginate_kwargs = {} + paginate_kwargs: _PaginateKwargs = {} try: environment = self._get_environment_from_request(request, project.organization_id) except Environment.DoesNotExist: diff --git a/src/sentry/testutils/helpers/pagination.py b/src/sentry/testutils/helpers/pagination.py new file mode 100644 index 00000000000000..13acbaeb73847c --- /dev/null +++ b/src/sentry/testutils/helpers/pagination.py @@ -0,0 +1,11 @@ +import contextlib +from collections.abc import Generator +from unittest import mock + +from sentry.api import base + + +@contextlib.contextmanager +def override_pagination_limit(n: int) -> Generator[None, None, None]: + with mock.patch.object(base, "PAGINATION_DEFAULT_PER_PAGE", n): + yield diff --git a/tests/snuba/api/endpoints/test_discover_key_transactions.py b/tests/snuba/api/endpoints/test_discover_key_transactions.py index bb86ae29a2f240..89e384248ec205 100644 --- a/tests/snuba/api/endpoints/test_discover_key_transactions.py +++ b/tests/snuba/api/endpoints/test_discover_key_transactions.py @@ -9,6 +9,7 @@ from sentry.models.projectteam import ProjectTeam from sentry.testutils.cases import APITestCase, SnubaTestCase from sentry.testutils.helpers import parse_link_header +from sentry.testutils.helpers.pagination import override_pagination_limit from sentry.testutils.silo import region_silo_test from sentry.utils.samples import load_data @@ -868,6 +869,7 @@ def test_get_key_transaction_list_mixed_my_and_other_teams(self): }, ] + @override_pagination_limit(5) def test_get_key_transaction_list_pagination(self): user = self.create_user() self.login_as(user=user) @@ -875,7 +877,7 @@ def test_get_key_transaction_list_pagination(self): project = self.create_project(name="baz", organization=org) teams = [] - for i in range(123): + for i in range(8): team = self.create_team(organization=org, name=f"Team {i:02d}") self.create_team_membership(team, user=user) project.add_team(team) @@ -893,7 +895,7 @@ def test_get_key_transaction_list_pagination(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 100 + assert len(response.data) == 5 links = { link["rel"]: {"url": url, **link} for url, link in parse_link_header(response["Link"]).items() @@ -915,7 +917,7 @@ def test_get_key_transaction_list_pagination(self): ) assert response.status_code == 200, response.content - assert len(response.data) == 23 + assert len(response.data) == 3 links = { link["rel"]: {"url": url, **link} for url, link in parse_link_header(response["Link"]).items() From 333527b8a5187a967238b8e1d327be0c291ac481 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Mon, 12 Feb 2024 09:45:55 -0500 Subject: [PATCH 245/357] fix(similarity-embedding): Fix seer url (#64961) Fix url to settings.SEER_AUTOFIX_URL --- src/sentry/seer/utils.py | 11 ++++++++++- .../endpoints/test_group_similar_issues_embeddings.py | 10 +++++----- tests/sentry/seer/test_utils.py | 4 ++-- 3 files changed, 17 insertions(+), 8 deletions(-) diff --git a/src/sentry/seer/utils.py b/src/sentry/seer/utils.py 
index a17152805d0334..1e93bcdfba5233 100644 --- a/src/sentry/seer/utils.py +++ b/src/sentry/seer/utils.py @@ -35,6 +35,15 @@ class BreakpointResponse(TypedDict): timeout=settings.ANOMALY_DETECTION_TIMEOUT, ) +seer_staging_connection_pool = connection_from_url( + settings.SEER_AUTOFIX_URL, + retries=Retry( + total=5, + status_forcelist=[408, 429, 502, 503, 504], + ), + timeout=settings.ANOMALY_DETECTION_TIMEOUT, +) + def detect_breakpoints(breakpoint_request) -> BreakpointResponse: response = seer_connection_pool.urlopen( @@ -73,7 +82,7 @@ def get_similar_issues_embeddings( similar_issues_request: SimilarIssuesEmbeddingsRequest, ) -> SimilarIssuesEmbeddingsResponse: """Call /v0/issues/similar-issues endpoint from timeseries-analysis-service.""" - response = seer_connection_pool.urlopen( + response = seer_staging_connection_pool.urlopen( "POST", "/v0/issues/similar-issues", body=json.dumps(similar_issues_request), diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 57d5ec4ecff515..43085592879581 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -229,7 +229,7 @@ def test_no_feature_flag(self): assert response.status_code == 404, response.content @with_feature("projects:similarity-embeddings") - @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_simple(self, mock_seer_request): seer_return_value: SimilarIssuesEmbeddingsResponse = { "responses": [ @@ -270,7 +270,7 @@ def test_simple(self, mock_seer_request): @with_feature("projects:similarity-embeddings") @mock.patch("sentry.analytics.record") - @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_multiple(self, mock_seer_request, mock_record): similar_group_over_threshold = self.create_group(project=self.project) similar_group_under_threshold = self.create_group(project=self.project) @@ -324,7 +324,7 @@ def test_multiple(self, mock_seer_request, mock_record): ) @with_feature("projects:similarity-embeddings") - @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_invalid_return(self, mock_seer_request): """ The seer API can return groups that do not exist if they have been deleted/merged. @@ -354,7 +354,7 @@ def test_invalid_return(self, mock_seer_request): @with_feature("projects:similarity-embeddings") @mock.patch("sentry.analytics.record") - @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_empty_return(self, mock_seer_request, mock_record): mock_seer_request.return_value = HTTPResponse([]) response = self.client.get(self.path) @@ -370,7 +370,7 @@ def test_empty_return(self, mock_seer_request, mock_record): ) @with_feature("projects:similarity-embeddings") - @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_no_optional_params(self, mock_seer_request): """ Test that optional parameters, k and threshold, can not be included. 
diff --git a/tests/sentry/seer/test_utils.py b/tests/sentry/seer/test_utils.py index bd43e1e88a4432..47558d3be328c2 100644 --- a/tests/sentry/seer/test_utils.py +++ b/tests/sentry/seer/test_utils.py @@ -7,7 +7,7 @@ class TestSimilarIssuesEmbeddingsUtils(TestCase): - @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_simple_similar_issues_embeddings(self, mock_seer_request): """Test that valid responses are decoded and returned.""" @@ -34,7 +34,7 @@ def test_simple_similar_issues_embeddings(self, mock_seer_request): response = get_similar_issues_embeddings(params) assert response == expected_return_value - @mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_empty_similar_issues_embeddings(self, mock_seer_request): """Test that empty responses are returned.""" From da2bcd30c433829f87c9d7265410838dcd2c11b3 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 12 Feb 2024 09:03:49 -0600 Subject: [PATCH 246/357] feat(metrics): Change metrics samples list to remote flag (#64960) --- src/sentry/features/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index f4e87bc0e873f6..54410139676627 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -146,7 +146,7 @@ default_manager.add("organizations:metrics-api-new-metrics-layer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:metrics-blocking", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:metrics-extraction", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:metrics-samples-list", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("organizations:metrics-samples-list", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:minute-resolution-sessions", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:mobile-cpu-memory-in-transactions", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:mobile-ttid-ttfd-contribution", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From a03dd84f7104e88fd1c05f2816c8a4724151f36c Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 12 Feb 2024 09:04:26 -0600 Subject: [PATCH 247/357] chore(stats-detectors): Remove unused stats detectors tags code (#64957) We've opted to not show tags for stats detector issues. 
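For reference, the effect on callers is that `useFetchIssueTags` now has a single query path. A minimal sketch of the simplified call shape, using the hook, types, and parameters from the diff below (the wrapper name `useGroupTags` is hypothetical):

```tsx
import {useFetchIssueTags} from 'sentry/actionCreators/group';
import type {Group} from 'sentry/types';
import useOrganization from 'sentry/utils/useOrganization';

// Hypothetical wrapper illustrating the simplified call: with the
// statistical-detector branch removed, callers no longer pass
// isStatisticalDetector or statisticalDetectorParameters, and the hook
// always resolves to the plain issue-tags query key.
function useGroupTags(group: Group, environments: string[]) {
  const organization = useOrganization();
  return useFetchIssueTags({
    orgSlug: organization.slug,
    groupId: group.id,
    environment: environments,
  });
}
```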
--- static/app/actionCreators/group.tsx | 35 +-------- .../app/views/issueDetails/groupTags.spec.tsx | 23 ------ static/app/views/issueDetails/groupTags.tsx | 75 ++----------------- 3 files changed, 7 insertions(+), 126 deletions(-) diff --git a/static/app/actionCreators/group.tsx b/static/app/actionCreators/group.tsx index 453ef47ab4085f..ee9dca999fb28d 100644 --- a/static/app/actionCreators/group.tsx +++ b/static/app/actionCreators/group.tsx @@ -3,7 +3,6 @@ import * as Sentry from '@sentry/react'; import type {Tag} from 'sentry/actionCreators/events'; import type {RequestCallbacks, RequestOptions} from 'sentry/api'; import {Client} from 'sentry/api'; -import {getSampleEventQuery} from 'sentry/components/events/eventStatisticalDetector/eventComparison/eventDisplay'; import GroupStore from 'sentry/stores/groupStore'; import type { Actor, @@ -446,32 +445,6 @@ export const makeFetchIssueTagsQueryKey = ({ {query: {environment, readable, limit}}, ]; -const makeFetchStatisticalDetectorTagsQueryKey = ({ - orgSlug, - environment, - statisticalDetectorParameters, -}: FetchIssueTagsParameters): ApiQueryKey => { - const {transaction, durationBaseline, start, end} = statisticalDetectorParameters ?? { - transaction: '', - durationBaseline: 0, - start: undefined, - end: undefined, - }; - return [ - `/organizations/${orgSlug}/events-facets/`, - { - query: { - environment, - transaction, - includeAll: true, - query: getSampleEventQuery({transaction, durationBaseline, addUpperBound: false}), - start, - end, - }, - }, - ]; -}; - export const useFetchIssueTags = ( parameters: FetchIssueTagsParameters, { @@ -479,13 +452,7 @@ export const useFetchIssueTags = ( ...options }: Partial> = {} ) => { - let queryKey = makeFetchIssueTagsQueryKey(parameters); - if (parameters.isStatisticalDetector) { - // Statistical detector issues need to use a Discover query for tags - queryKey = makeFetchStatisticalDetectorTagsQueryKey(parameters); - } - - return useApiQuery(queryKey, { + return useApiQuery(makeFetchIssueTagsQueryKey(parameters), { staleTime: 30000, enabled: defined(parameters.groupId) && enabled, ...options, diff --git a/static/app/views/issueDetails/groupTags.spec.tsx b/static/app/views/issueDetails/groupTags.spec.tsx index f78c172dee691a..888c86fcf565ae 100644 --- a/static/app/views/issueDetails/groupTags.spec.tsx +++ b/static/app/views/issueDetails/groupTags.spec.tsx @@ -4,7 +4,6 @@ import {TagsFixture} from 'sentry-fixture/tags'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; -import {IssueType} from 'sentry/types'; import GroupTags from 'sentry/views/issueDetails/groupTags'; describe('GroupTags', function () { @@ -54,28 +53,6 @@ describe('GroupTags', function () { }); }); - it('navigates correctly when duration regression issue > tags key is clicked', async function () { - render( - , - {context: routerContext, organization} - ); - - await screen.findAllByTestId('tag-title'); - await userEvent.click(screen.getByText('browser')); - - expect(router.push).toHaveBeenCalledWith({ - pathname: '/organizations/org-slug/performance/summary/tags/', - query: expect.objectContaining({ - tagKey: 'browser', - }), - }); - }); - it('shows an error message when the request fails', async function () { MockApiClient.addMockResponse({ url: '/organizations/org-slug/issues/1/tags/', diff --git a/static/app/views/issueDetails/groupTags.tsx b/static/app/views/issueDetails/groupTags.tsx index 4caf9ce4d671a3..2f0c13d6b26aab 100644 --- 
a/static/app/views/issueDetails/groupTags.tsx +++ b/static/app/views/issueDetails/groupTags.tsx @@ -1,14 +1,10 @@ -import {useRef} from 'react'; import styled from '@emotion/styled'; -import type {Tag} from 'sentry/actionCreators/events'; -import type {GroupTagsResponse} from 'sentry/actionCreators/group'; import {useFetchIssueTags} from 'sentry/actionCreators/group'; import {Alert} from 'sentry/components/alert'; import Count from 'sentry/components/count'; import {DeviceName} from 'sentry/components/deviceName'; import GlobalSelectionLink from 'sentry/components/globalSelectionLink'; -import {sumTagFacetsForTopValues} from 'sentry/components/group/tagFacets'; import * as Layout from 'sentry/components/layouts/thirds'; import ExternalLink from 'sentry/components/links/externalLink'; import Link from 'sentry/components/links/link'; @@ -20,20 +16,15 @@ import PanelBody from 'sentry/components/panels/panelBody'; import Version from 'sentry/components/version'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Event, Group} from 'sentry/types'; -import {IssueType} from 'sentry/types'; -import {defined, percent} from 'sentry/utils'; -import {useRelativeDateTime} from 'sentry/utils/profiling/hooks/useRelativeDateTime'; +import type {Group} from 'sentry/types'; +import {percent} from 'sentry/utils'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; -import {generateTagsRoute} from '../performance/transactionSummary/transactionTags/utils'; - type GroupTagsProps = { baseUrl: string; environments: string[]; group: Group; - event?: Event; }; type SimpleTag = { @@ -47,34 +38,9 @@ type SimpleTag = { totalValues: number; }; -function isTagFacetsResponse( - _: GroupTagsResponse | Tag[] | undefined, - shouldUseTagFacetsEndpoint: boolean -): _ is Tag[] { - return shouldUseTagFacetsEndpoint; -} - -function GroupTags({group, baseUrl, environments, event}: GroupTagsProps) { +function GroupTags({group, baseUrl, environments}: GroupTagsProps) { const organization = useOrganization(); const location = useLocation(); - const now = useRef(Date.now()).current; - - const {transaction, aggregateRange2, breakpoint} = - event?.occurrence?.evidenceData ?? {}; - - const {start: beforeDateTime, end: afterDateTime} = useRelativeDateTime({ - anchor: breakpoint, - relativeDays: 14, - }); - - const isRegressionIssue = - group.issueType === IssueType.PERFORMANCE_DURATION_REGRESSION || - group.issueType === IssueType.PERFORMANCE_ENDPOINT_REGRESSION; - - const shouldUseTagFacetsEndpoint = - organization.features.includes('performance-duration-regression-visible') && - defined(event) && - isRegressionIssue; const { data = [], @@ -85,23 +51,9 @@ function GroupTags({group, baseUrl, environments, event}: GroupTagsProps) { orgSlug: organization.slug, groupId: group.id, environment: environments, - isStatisticalDetector: shouldUseTagFacetsEndpoint, - statisticalDetectorParameters: shouldUseTagFacetsEndpoint - ? { - transaction, - start: new Date(breakpoint * 1000).toISOString(), - end: new Date(now).toISOString(), - durationBaseline: aggregateRange2, - } - : undefined, }); - // useFetchIssueTags can return two different types of responses, depending on shouldUseTagFacetsEndpoint - // This line will convert the response to a common type for rendering - const tagList: SimpleTag[] = isTagFacetsResponse(data, shouldUseTagFacetsEndpoint) - ? 
data.filter(({key}) => key !== 'transaction')?.map(sumTagFacetsForTopValues) - : data; - const alphabeticalTags = tagList.sort((a, b) => a.key.localeCompare(b.key)); + const alphabeticalTags = data.sort((a, b) => a.key.localeCompare(b.key)); if (isLoading) { return ; @@ -117,24 +69,9 @@ function GroupTags({group, baseUrl, environments, event}: GroupTagsProps) { } const getTagKeyTarget = (tag: SimpleTag) => { - const pathname = isRegressionIssue - ? generateTagsRoute({orgSlug: organization.slug}) - : `${baseUrl}tags/${tag.key}/`; - - const query = isRegressionIssue - ? { - ...extractSelectionParameters(location.query), - start: (beforeDateTime as Date).toISOString(), - end: (afterDateTime as Date).toISOString(), - statsPeriod: undefined, - tagKey: tag.key, - transaction, - } - : extractSelectionParameters(location.query); - return { - pathname, - query, + pathname: `${baseUrl}tags/${tag.key}/`, + query: extractSelectionParameters(location.query), }; }; From 89f52249b5393d0da8e1b180f1b40c99cd8922db Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:21:22 -0500 Subject: [PATCH 248/357] Revert "Revert "ref: projectconfig_cache stores binary in redis so use decode_responses=False (#64816)"" (#64996) this was reverted out of caution but it did not make a difference in redis connection counts This reverts commit ef7a9fc2696c70aef9418f98790261b10d71b85a. --- src/sentry/relay/projectconfig_cache/redis.py | 4 +-- src/sentry/utils/redis.py | 34 +++++++++++-------- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/src/sentry/relay/projectconfig_cache/redis.py b/src/sentry/relay/projectconfig_cache/redis.py index f540fd2d0a20d5..b3ae6237096afb 100644 --- a/src/sentry/relay/projectconfig_cache/redis.py +++ b/src/sentry/relay/projectconfig_cache/redis.py @@ -15,10 +15,10 @@ class RedisProjectConfigCache(ProjectConfigCache): def __init__(self, **options): cluster_key = options.get("cluster", "default") - self.cluster = redis.redis_clusters.get(cluster_key) + self.cluster = redis.redis_clusters.get(cluster_key, decode_responses=False) read_cluster_key = options.get("read_cluster", cluster_key) - self.cluster_read = redis.redis_clusters.get(read_cluster_key) + self.cluster_read = redis.redis_clusters.get(read_cluster_key, decode_responses=False) super().__init__(**options) diff --git a/src/sentry/utils/redis.py b/src/sentry/utils/redis.py index 16ae7f1dda1967..f9f1984af865d2 100644 --- a/src/sentry/utils/redis.py +++ b/src/sentry/utils/redis.py @@ -60,7 +60,9 @@ class _RBCluster: def supports(self, config): return not config.get("is_redis_cluster", False) - def factory(self, **config): + def factory(self, *, decode_responses: bool, **config): + if not decode_responses: + raise NotImplementedError("decode_responses=False mode is not implemented for `rb`") # rb expects a dict of { host, port } dicts where the key is the host # ID. Coerce the configuration into the correct format if necessary. hosts = config["hosts"] @@ -107,7 +109,7 @@ def supports(self, config): # in non-cluster mode. return config.get("is_redis_cluster", False) or len(config.get("hosts")) == 1 - def factory(self, **config): + def factory(self, *, decode_responses: bool, **config): # StrictRedisCluster expects a list of { host, port } dicts. Coerce the # configuration into the correct format if necessary. 
hosts = config.get("hosts") @@ -133,7 +135,7 @@ def cluster_factory(): # # https://github.com/Grokzen/redis-py-cluster/blob/73f27edf7ceb4a408b3008ef7d82dac570ab9c6a/rediscluster/nodemanager.py#L385 startup_nodes=deepcopy(hosts), - decode_responses=True, + decode_responses=decode_responses, skip_full_coverage_check=True, max_connections=16, max_connections_per_node=True, @@ -142,7 +144,7 @@ def cluster_factory(): ) else: host = hosts[0].copy() - host["decode_responses"] = True + host["decode_responses"] = decode_responses return ( import_string(config["client_class"]) if "client_class" in config @@ -170,17 +172,19 @@ def __init__( ... def __init__(self, options_manager, cluster_type=_RBCluster): - self.__clusters = {} + self.__clusters: dict[tuple[str, bool], TCluster] = {} self.__options_manager = options_manager self.__cluster_type = cluster_type() - def get(self, key) -> TCluster: - cluster = self.__clusters.get(key) + def get(self, key: str, *, decode_responses: bool = True) -> TCluster: + cache_key = (key, decode_responses) + try: + return self.__clusters[cache_key] + except KeyError: + # Do not access attributes of the `cluster` object to prevent + # setup/init of lazy objects. The _RedisCluster type will try to + # connect to the cluster during initialization. - # Do not access attributes of the `cluster` object to prevent - # setup/init of lazy objects. The _RedisCluster type will try to - # connect to the cluster during initialization. - if cluster is None: # TODO: This would probably be safer with a lock, but I'm not sure # that it's necessary. configuration = self.__options_manager.get("redis.clusters").get(key) @@ -190,9 +194,11 @@ def get(self, key) -> TCluster: if not self.__cluster_type.supports(configuration): raise KeyError(f"Invalid cluster type, expected: {self.__cluster_type}") - cluster = self.__clusters[key] = self.__cluster_type.factory(**configuration) - - return cluster + ret = self.__clusters[cache_key] = self.__cluster_type.factory( + **configuration, + decode_responses=decode_responses, + ) + return ret # TODO(epurkhiser): When migration of all rb cluster to true redis clusters has From 29ea8cc8869af408d903c8570091fa01e0b51acf Mon Sep 17 00:00:00 2001 From: Lukas Stracke Date: Mon, 12 Feb 2024 16:29:16 +0100 Subject: [PATCH 249/357] chore(api-docs): Transfer ownership of endpoints to processing team (#64997) This PR transfers the ownership of 5 endpoints from the web frontend team to the processing team: ``` "unknown": [ "OrganizationArtifactBundleAssembleEndpoint::POST", "ProjectArtifactBundleFileDetailsEndpoint::GET", "ProjectArtifactBundleFilesEndpoint::GET", "ProjectArtifactLookupEndpoint::GET", "SourceMapsEndpoint::DELETE", "SourceMapsEndpoint::GET" ] ``` These endpoints were never owned by the Web SDK Frontend team and after discussing this internally, they belong to the processing team (cc @loewenheim). 
--- src/sentry/api/endpoints/artifact_lookup.py | 2 +- src/sentry/api/endpoints/debug_files.py | 2 +- .../api/endpoints/organization_artifactbundle_assemble.py | 2 +- .../api/endpoints/project_artifact_bundle_file_details.py | 2 +- src/sentry/api/endpoints/project_artifact_bundle_files.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/sentry/api/endpoints/artifact_lookup.py b/src/sentry/api/endpoints/artifact_lookup.py index 0b259feba93111..f5667244e646f4 100644 --- a/src/sentry/api/endpoints/artifact_lookup.py +++ b/src/sentry/api/endpoints/artifact_lookup.py @@ -37,7 +37,7 @@ @region_silo_endpoint class ProjectArtifactLookupEndpoint(ProjectEndpoint): - owner = ApiOwner.WEB_FRONTEND_SDKS + owner = ApiOwner.OWNERS_PROCESSING publish_status = { "GET": ApiPublishStatus.UNKNOWN, } diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py index e818ef85d60bb3..f33a5a72f36e0b 100644 --- a/src/sentry/api/endpoints/debug_files.py +++ b/src/sentry/api/endpoints/debug_files.py @@ -507,7 +507,7 @@ def post(self, request: Request, project) -> Response: @region_silo_endpoint class SourceMapsEndpoint(ProjectEndpoint): - owner = ApiOwner.WEB_FRONTEND_SDKS + owner = ApiOwner.OWNERS_PROCESSING publish_status = { "DELETE": ApiPublishStatus.UNKNOWN, "GET": ApiPublishStatus.UNKNOWN, diff --git a/src/sentry/api/endpoints/organization_artifactbundle_assemble.py b/src/sentry/api/endpoints/organization_artifactbundle_assemble.py index aa89830c2cd2a5..1ef00198e0ad04 100644 --- a/src/sentry/api/endpoints/organization_artifactbundle_assemble.py +++ b/src/sentry/api/endpoints/organization_artifactbundle_assemble.py @@ -23,7 +23,7 @@ @region_silo_endpoint class OrganizationArtifactBundleAssembleEndpoint(OrganizationReleasesBaseEndpoint): - owner = ApiOwner.WEB_FRONTEND_SDKS + owner = ApiOwner.OWNERS_PROCESSING publish_status = { "POST": ApiPublishStatus.UNKNOWN, } diff --git a/src/sentry/api/endpoints/project_artifact_bundle_file_details.py b/src/sentry/api/endpoints/project_artifact_bundle_file_details.py index 316ca6277b056f..54d5816ef20d5a 100644 --- a/src/sentry/api/endpoints/project_artifact_bundle_file_details.py +++ b/src/sentry/api/endpoints/project_artifact_bundle_file_details.py @@ -48,7 +48,7 @@ def download_file_from_artifact_bundle( class ProjectArtifactBundleFileDetailsEndpoint( ProjectEndpoint, ProjectArtifactBundleFileDetailsMixin ): - owner = ApiOwner.WEB_FRONTEND_SDKS + owner = ApiOwner.OWNERS_PROCESSING publish_status = { "GET": ApiPublishStatus.UNKNOWN, } diff --git a/src/sentry/api/endpoints/project_artifact_bundle_files.py b/src/sentry/api/endpoints/project_artifact_bundle_files.py index abe3d9239dc125..641edb684344c6 100644 --- a/src/sentry/api/endpoints/project_artifact_bundle_files.py +++ b/src/sentry/api/endpoints/project_artifact_bundle_files.py @@ -53,7 +53,7 @@ def __getitem__(self, range): @region_silo_endpoint class ProjectArtifactBundleFilesEndpoint(ProjectEndpoint): - owner = ApiOwner.WEB_FRONTEND_SDKS + owner = ApiOwner.OWNERS_PROCESSING publish_status = { "GET": ApiPublishStatus.UNKNOWN, } From dca855df65c8dc3031b1468d1b00dac614eec750 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 12 Feb 2024 09:29:26 -0600 Subject: [PATCH 250/357] feat(metrics): PoC for new metrics samples list (#64899) This adds a new metrics samples list behind a feature flag to feature a spans first experience in metrics. 
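The table is rendered only for organizations with the `metrics-samples-list` feature flag; a minimal sketch of that gating, assuming the new component's props (`mri`, `query`) from its interface in the diff below — the wrapper name `SamplesPanel` is hypothetical and the existing `SampleTable` fallback is elided:

```tsx
import {MetricSamplesTable} from 'sentry/components/ddm/metricSamplesTable';
import type {MRI} from 'sentry/types';
import useOrganization from 'sentry/utils/useOrganization';

// Sketch only: orgs with the `metrics-samples-list` flag get the new
// spans-first samples table; everyone else keeps the existing SampleTable.
function SamplesPanel({mri, query}: {mri?: MRI; query?: string}) {
  const organization = useOrganization();
  if (organization.features.includes('metrics-samples-list')) {
    return <MetricSamplesTable mri={mri} query={query} />;
  }
  return null; // the real widget renders the existing SampleTable here
}
```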
Requires #64895 --- .../app/components/ddm/metricSamplesTable.tsx | 311 ++++++++++++++++++ static/app/views/ddm/widgetDetails.tsx | 20 +- 2 files changed, 325 insertions(+), 6 deletions(-) create mode 100644 static/app/components/ddm/metricSamplesTable.tsx diff --git a/static/app/components/ddm/metricSamplesTable.tsx b/static/app/components/ddm/metricSamplesTable.tsx new file mode 100644 index 00000000000000..f62a9d58d9bbd3 --- /dev/null +++ b/static/app/components/ddm/metricSamplesTable.tsx @@ -0,0 +1,311 @@ +import {useEffect, useMemo, useState} from 'react'; + +import EmptyStateWarning from 'sentry/components/emptyStateWarning'; +import GridEditable, {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; +import ProjectBadge from 'sentry/components/idBadge/projectBadge'; +import Link from 'sentry/components/links/link'; +import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse'; +import PerformanceDuration from 'sentry/components/performanceDuration'; +import {t, tct} from 'sentry/locale'; +import type {MRI} from 'sentry/types'; +import {defined} from 'sentry/utils'; +import {Container, FieldDateTime} from 'sentry/utils/discover/styles'; +import {DiscoverDatasets} from 'sentry/utils/discover/types'; +import {getShortEventId} from 'sentry/utils/events'; +import {formatPercentage} from 'sentry/utils/formatters'; +import {getTransactionDetailsUrl} from 'sentry/utils/performance/urls'; +import {generateProfileFlamechartRoute} from 'sentry/utils/profiling/routes'; +import Projects from 'sentry/utils/projects'; +import {useApiQuery} from 'sentry/utils/queryClient'; +import {decodeScalar} from 'sentry/utils/queryString'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import usePageFilters from 'sentry/utils/usePageFilters'; +import usePrevious from 'sentry/utils/usePrevious'; +import {getTraceDetailsUrl} from 'sentry/views/performance/traceDetails/utils'; +import ColorBar from 'sentry/views/performance/vitalDetail/colorBar'; + +interface MetricSamplesTableProps { + mri?: MRI; + query?: string; +} + +export function MetricSamplesTable({mri, query}: MetricSamplesTableProps) { + const location = useLocation(); + + const [offset, setOffset] = useState(0); + + const emptyMessage = useMemo(() => { + if (defined(mri)) { + return null; + } + + return ( + +
<EmptyStateWarning> + <p>{t('Choose a metric to display samples')}</p> + </EmptyStateWarning>
    + ); + }, [mri]); + + const previousMri = usePrevious(mri); + useEffect(() => { + if (mri !== previousMri) { + setOffset(0); + } + }, [previousMri, mri]); + + // TODO: this is just a temporary solution for the spans.exlusive_time MRI + // long term, we should use an unified endpoint + const result = useMetricSamples({ + fields: [ + 'project', + 'id', + 'span.op', + 'span.description', + 'span.duration', + 'span.self_time', + 'timestamp', + 'trace', + 'transaction.id', + 'profile_id', + ], + query: [query, 'has:profile_id'].filter(Boolean).join(' '), + sort: {field: 'timestamp', kind: 'desc'}, + // TODO: support other MRIs later + enabled: mri === 'd:spans/exclusive_time@millisecond', + limit: 100, + referrer: 'foo', + }); + + const data = useMemo(() => { + // This is just some POC code, so not going to fix this type error + // @ts-ignore + return (result.data?.data ?? []).slice(offset, offset + 10); + }, [result.data, offset]); + + return ( + + ); +} + +interface UseMetricSamplesOptions { + fields: F[]; + referrer: string; + sort: {field: F; kind: 'asc' | 'desc'}; + cursor?: string; + enabled?: boolean; + limit?: number; + query?: string; +} + +function useMetricSamples({ + cursor, + enabled, + fields, + referrer, + limit, + query, + sort, +}: UseMetricSamplesOptions) { + const organization = useOrganization(); + const {selection} = usePageFilters(); + + const path = `/organizations/${organization.slug}/events/`; + + const endpointOptions = { + query: { + dataset: DiscoverDatasets.SPANS_INDEXED, + referrer, + project: selection.projects, + environment: selection.environments, + ...normalizeDateTimeParams(selection.datetime), + field: fields, + per_page: limit, + query, + sort: sort.kind === 'asc' ? sort.field : `-${sort.field}`, + cursor, + }, + }; + + return useApiQuery([path, endpointOptions], { + staleTime: 0, + refetchOnWindowFocus: false, + retry: false, + enabled, + }); +} + +const COLUMN_ORDER = [ + {key: 'project', width: COL_WIDTH_UNDEFINED, name: 'Project'}, + {key: 'id', width: COL_WIDTH_UNDEFINED, name: 'Span ID'}, + {key: 'span.op', width: COL_WIDTH_UNDEFINED, name: 'Span Op'}, + {key: 'span.description', width: COL_WIDTH_UNDEFINED, name: 'Span Description'}, + {key: 'span.self_time', width: COL_WIDTH_UNDEFINED, name: 'Span Self Time'}, + {key: 'timestamp', width: COL_WIDTH_UNDEFINED, name: 'Timestamp'}, + {key: 'trace', width: COL_WIDTH_UNDEFINED, name: 'Trace'}, + {key: 'profile_id', width: COL_WIDTH_UNDEFINED, name: 'Profile'}, +]; + +function renderBodyCell(col, dataRow) { + if (col.key === 'id') { + return ( + + ); + } + + if (col.key === 'project') { + return ; + } + + if (col.key === 'span.self_time') { + return ( + + ); + } + + if (col.key === 'timestamp') { + return ; + } + + if (col.key === 'trace') { + return ; + } + + if (col.key === 'profile_id') { + return ( + + ); + } + + return {dataRow[col.key]}; +} + +function ProjectRenderer({projectSlug}) { + const organization = useOrganization(); + + return ( + + + {({projects}) => { + const project = projects.find(p => p.slug === projectSlug); + return ( + + ); + }} + + + ); +} + +function SpanId({project, spanId, transactionId}) { + const organization = useOrganization(); + const target = getTransactionDetailsUrl( + organization.slug, + `${project}:${transactionId}`, + undefined, + undefined, + spanId + ); + return ( + + {getShortEventId(spanId)} + + ); +} + +function SpanSelfTimeRenderer({selfTime, duration}) { + // duration is stored as an UInt32 while self time is stored + // as a Float64. 
So in cases where duration should equal self time, + // it can be truncated. + // + // When this happens, we just take the self time as the duration. + const spanDuration = Math.max(selfTime, duration); + const percentage = selfTime / spanDuration; + + const colorStops = useMemo(() => { + return [ + {color: '#694D99', percent: percentage}, + {color: 'gray100', percent: 1 - percentage}, + ]; + }, [percentage]); + + return ( + + {tct('[selfTime] ([percentage] of duration)', { + selfTime: , + percentage: formatPercentage(percentage), + })} + + + ); +} + +function TimestampRenderer({timestamp}) { + const location = useLocation(); + + return ( + + ); +} + +function TraceId({traceId}) { + const organization = useOrganization(); + const {selection} = usePageFilters(); + const target = getTraceDetailsUrl( + organization, + traceId, + { + start: selection.datetime.start, + end: selection.datetime.end, + statsPeriod: selection.datetime.period, + }, + {} + ); + return ( + + {getShortEventId(traceId)} + + ); +} + +function ProfileId({projectSlug, profileId}) { + const organization = useOrganization(); + const target = generateProfileFlamechartRoute({ + orgSlug: organization.slug, + projectSlug, + profileId, + }); + return ( + + {getShortEventId(profileId)} + + ); +} diff --git a/static/app/views/ddm/widgetDetails.tsx b/static/app/views/ddm/widgetDetails.tsx index 8844d273f39c33..d89375c70db8eb 100644 --- a/static/app/views/ddm/widgetDetails.tsx +++ b/static/app/views/ddm/widgetDetails.tsx @@ -1,6 +1,7 @@ import {useCallback, useMemo, useState} from 'react'; import styled from '@emotion/styled'; +import {MetricSamplesTable} from 'sentry/components/ddm/metricSamplesTable'; import {TabList, TabPanels, Tabs} from 'sentry/components/tabs'; import {Tooltip} from 'sentry/components/tooltip'; import {t} from 'sentry/locale'; @@ -82,12 +83,19 @@ export function WidgetDetails() { - + {organization.features.includes('metrics-samples-list') ? ( + + ) : ( + + )} From 99f03b5229ade6873c210530ce1eef05f621c353 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Mon, 12 Feb 2024 10:56:53 -0500 Subject: [PATCH 251/357] build: update eslint config and clean-up rules (#64999) Cleans up the rules since it is now added to `sentry-eslint-config` and adds `noVar` and `useConst` rules to Biome. --- .eslintrc.js | 37 ----------------- biome.json | 14 +++++++ package.json | 2 +- .../templates/sentry/error-page-embed.js | 2 +- static/app/utils/statics-setup.tsx | 2 +- tests/js/setup.ts | 4 +- yarn.lock | 40 +++++++++---------- 7 files changed, 39 insertions(+), 62 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index cc3fcd04bede16..97e5ca127ffb2e 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -31,24 +31,6 @@ module.exports = { {additionalHooks: ADDITIONAL_HOOKS_TO_CHECK_DEPS_FOR}, ], ...(!isRelaxed && !isCi ? strictRulesNotCi : {}), - - // TODO(@anonrig): Remove these rules from eslint-sentry-config. - 'import/no-nodejs-modules': 'off', - semi: 'off', - 'use-isnan': 'off', - curly: 'off', - eqeqeq: 'off', - 'no-extra-semi': 'off', - 'no-eq-null': 'off', - 'comma-dangle': 'off', - 'react/jsx-no-target-blank': 'off', - 'react/jsx-no-duplicate-props': 'off', - 'react-hooks/rules-of-hooks': 'off', - 'no-duplicate-case': 'off', - 'no-dupe-keys': 'off', - 'no-redeclare': 'off', - 'no-debugger': 'off', - 'no-unreachable': 'off', }, // JSON file formatting is handled by Biome. ESLint should not be linting // and formatting these files. 
@@ -57,25 +39,6 @@ module.exports = { { files: ['tests/js/**/*.{ts,js}'], extends: ['plugin:testing-library/react', 'sentry-app/strict'], - rules: { - // TODO(@anonrig): Remove these rules from eslint-sentry-config. - 'import/no-nodejs-modules': 'off', - semi: 'off', - 'use-isnan': 'off', - curly: 'off', - eqeqeq: 'off', - 'no-extra-semi': 'off', - 'no-eq-null': 'off', - 'comma-dangle': 'off', - 'react/jsx-no-target-blank': 'off', - 'react/jsx-no-duplicate-props': 'off', - 'react-hooks/rules-of-hooks': 'off', - 'no-duplicate-case': 'off', - 'no-dupe-keys': 'off', - 'no-redeclare': 'off', - 'no-debugger': 'off', - 'no-unreachable': 'off', - }, }, { files: ['*.ts', '*.tsx'], diff --git a/biome.json b/biome.json index c95af0f8d39367..06e4f6ea962d29 100644 --- a/biome.json +++ b/biome.json @@ -39,6 +39,10 @@ "noDuplicateCase": "error", "noRedeclare": "error", "useIsArray": "error" + }, + "style": { + "noVar": "error", + "useConst": "error" } } }, @@ -103,6 +107,16 @@ } } } + }, + { + "include": ["src/sentry/templates/sentry/error-page-embed.js"], + "linter": { + "rules": { + "style": { + "noVar": "off" + } + } + } } ] } diff --git a/package.json b/package.json index 3e770de06be568..593ed2b628be9e 100644 --- a/package.json +++ b/package.json @@ -192,7 +192,7 @@ "babel-plugin-dynamic-import-node": "^2.3.3", "benchmark": "^2.1.4", "eslint": "8.49.0", - "eslint-config-sentry-app": "2.1.0", + "eslint-config-sentry-app": "2.3.0", "html-webpack-plugin": "^5.5.0", "jest": "29.6.2", "jest-canvas-mock": "^2.5.2", diff --git a/src/sentry/templates/sentry/error-page-embed.js b/src/sentry/templates/sentry/error-page-embed.js index 90b60bb1e249d9..78a98d32f638b7 100644 --- a/src/sentry/templates/sentry/error-page-embed.js +++ b/src/sentry/templates/sentry/error-page-embed.js @@ -1,4 +1,4 @@ -/* eslint no-var:0,strict:0,block-scoped-var:0 */ +/* eslint strict:0,block-scoped-var:0 */ /* global sentryEmbedCallback:false */ (function (window, document, JSON) { 'use strict'; diff --git a/static/app/utils/statics-setup.tsx b/static/app/utils/statics-setup.tsx index 8d6eea7a36caa2..6414e08c63724d 100644 --- a/static/app/utils/statics-setup.tsx +++ b/static/app/utils/statics-setup.tsx @@ -1,6 +1,6 @@ /* eslint no-native-reassign:0 */ -// eslint-disable-next-line no-var +// biome-ignore lint/style/noVar: Not required declare var __webpack_public_path__: string; /** diff --git a/tests/js/setup.ts b/tests/js/setup.ts index 78c599dd406287..b4ad608c5d2892 100644 --- a/tests/js/setup.ts +++ b/tests/js/setup.ts @@ -149,12 +149,12 @@ declare global { /** * Generates a promise that resolves on the next macro-task */ - // eslint-disable-next-line no-var + // biome-ignore lint/style/noVar: Not required var tick: () => Promise; /** * Used to mock API requests */ - // eslint-disable-next-line no-var + // biome-ignore lint/style/noVar: Not required var MockApiClient: typeof Client; } diff --git a/yarn.lock b/yarn.lock index 2db61ec20bca7b..4d5bb9bae88d09 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6083,40 +6083,40 @@ escodegen@^2.0.0: optionalDependencies: source-map "~0.6.1" -eslint-config-sentry-app@2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-config-sentry-app/-/eslint-config-sentry-app-2.1.0.tgz#8cc162bcb9958a5fe13c97fff6b5157b06a3e24d" - integrity sha512-q8YMWXnEYujid/3/g9MqVQ/TlOr85r8b18+ow5mS0xBEl1Kjwr5/dWPlRCZN8XuRVatDqxI8JIj/Lto2JNtoig== +eslint-config-sentry-app@2.3.0: + version "2.3.0" + resolved 
"https://registry.yarnpkg.com/eslint-config-sentry-app/-/eslint-config-sentry-app-2.3.0.tgz#b634a0ab498e4b37127eb68825673473a76b682c" + integrity sha512-A9rjtxM/+UnGn1UKNYbeCjf/dgdHLbZ6bcgZ1OFr8LpYI0yNnLJjbyjSTJsN8RpEIhKKYWbzsw9gvt3b5B1lHA== dependencies: "@emotion/eslint-plugin" "^11.11.0" "@typescript-eslint/eslint-plugin" "^6.19.0" "@typescript-eslint/parser" "^6.19.0" - eslint-config-sentry "^2.1.0" - eslint-config-sentry-react "^2.1.0" + eslint-config-sentry "^2.3.0" + eslint-config-sentry-react "^2.3.0" eslint-import-resolver-typescript "^2.7.1" eslint-import-resolver-webpack "^0.13.8" eslint-plugin-import "^2.29.1" eslint-plugin-jest "^27.6.3" eslint-plugin-no-lookahead-lookbehind-regexp "0.1.0" eslint-plugin-react "^7.33.2" - eslint-plugin-sentry "^2.1.0" + eslint-plugin-sentry "^2.3.0" eslint-plugin-simple-import-sort "^10.0.0" -eslint-config-sentry-react@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-config-sentry-react/-/eslint-config-sentry-react-2.1.0.tgz#9471fde40bcf6f98d05625dc9b9b4f54acf9baa6" - integrity sha512-6YXhXA2wXiB0H90jaDA8JEq02oFJVLb9UegTrWdV/ml//ya9D5pqameK9LY5YIqf4n1osi9lWdeSQSWvhAko3w== +eslint-config-sentry-react@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/eslint-config-sentry-react/-/eslint-config-sentry-react-2.3.0.tgz#cfb73157d7e9e6575d360a9122f0e2091e10de86" + integrity sha512-A4LCCArBMpGm7djCbw58uDTn8/FUPz882MYxAXagDpIZadIKi2CC+OTEcHul9HNApTz1JrrJ+hxcJv1dI5W6XA== dependencies: - eslint-config-sentry "^2.1.0" + eslint-config-sentry "^2.3.0" eslint-plugin-jest-dom "^5.1.0" eslint-plugin-react-hooks "^4.6.0" eslint-plugin-testing-library "^6.2.0" eslint-plugin-typescript-sort-keys "^3.1.0" -eslint-config-sentry@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-config-sentry/-/eslint-config-sentry-2.1.0.tgz#70bf894554212250f1d6ec1b8366ae7f8230944a" - integrity sha512-gefNirUXLh8FczVJ6OYMmoKa93VTw3tb+O+ZX3GHAeJlzUwHM3GNo2XHohoGmI3VO1mnMopIGGjJp8J8TP7kxA== +eslint-config-sentry@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/eslint-config-sentry/-/eslint-config-sentry-2.3.0.tgz#df8951e7b645c6a525449e2aae31d45e8eec11d7" + integrity sha512-H4dt4FkJ78DN/MHhxrx3o2BznncXN0/1SQpHQRtEfX2Vv9rGanvDVDH3RG/dreFAP19pqobMHXONLH4cYJAvTw== eslint-import-resolver-node@^0.3.9: version "0.3.9" @@ -6235,10 +6235,10 @@ eslint-plugin-react@^7.33.2: semver "^6.3.1" string.prototype.matchall "^4.0.8" -eslint-plugin-sentry@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-sentry/-/eslint-plugin-sentry-2.1.0.tgz#5ce40f57e6e0c934b111a2b09a3527028d0b7ff1" - integrity sha512-mUrbUJzrJ714ksg15tFyZMmWWHaq50SNFkOFPRs2Ff5OCzsOPzCKNBqZgkBKTMpLYpv9OvRxaW5h/7Lh/VHVDA== +eslint-plugin-sentry@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-sentry/-/eslint-plugin-sentry-2.3.0.tgz#ef04352e92bee9e53f6f73f1ecebabef7e01c89e" + integrity sha512-kHcP4GfcLO4SAEMLZ/LsK+7RBC5wZEZIwoExgUXsWbtOQ+VkMjuLDBrDXlLtyte0IjstIuGB4nXJ+SAL9HuHug== dependencies: requireindex "~1.2.0" From 43d1db1829319a291207c7ebbabb597940402f37 Mon Sep 17 00:00:00 2001 From: Jonas Date: Mon, 12 Feb 2024 10:58:29 -0500 Subject: [PATCH 252/357] feat(tracetree): use queue and convert recursion to iteration (#64973) Convert recursive logic to stack based dfs iterative approach --------- Co-authored-by: Abdullah Khan --- .../performance/newTraceDetails/guards.tsx | 12 ++++ .../newTraceDetails/traceTree.spec.tsx | 40 ++++++----- .../performance/newTraceDetails/traceTree.tsx | 71 
++++++++++--------- 3 files changed, 72 insertions(+), 51 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/guards.tsx b/static/app/views/performance/newTraceDetails/guards.tsx index 68548074cc8b59..c65d4c16fcab0b 100644 --- a/static/app/views/performance/newTraceDetails/guards.tsx +++ b/static/app/views/performance/newTraceDetails/guards.tsx @@ -50,3 +50,15 @@ export function isTraceErrorNode( ): node is TraceTreeNode { return !!(node.value && 'level' in node.value); } + +export function isRootNode( + node: TraceTreeNode +): node is TraceTreeNode { + return !!(node.value && node.value === null); +} + +export function isTraceNode( + node: TraceTreeNode +): node is TraceTreeNode { + return !!(node.value && 'orphan_errors' in node.value); +} diff --git a/static/app/views/performance/newTraceDetails/traceTree.spec.tsx b/static/app/views/performance/newTraceDetails/traceTree.spec.tsx index dfbd46bb6f1621..a7d60bb15f1f23 100644 --- a/static/app/views/performance/newTraceDetails/traceTree.spec.tsx +++ b/static/app/views/performance/newTraceDetails/traceTree.spec.tsx @@ -37,6 +37,7 @@ function makeTransaction(overrides: Partial = {}): TraceFullD children: [], start_timestamp: 0, timestamp: 1, + transaction: 'transaction', 'transaction.op': '', 'transaction.status': '', ...overrides, @@ -312,9 +313,11 @@ describe('TraceTree', () => { }) ); - expect(tree.list).toHaveLength(3); + expect(tree.list).toHaveLength(5); - tree.expand(tree.list[1], true); + expect(tree.expand(tree.list[1], false)).toBe(true); + expect(tree.list).toHaveLength(3); + expect(tree.expand(tree.list[1], true)).toBe(true); expect(tree.list).toHaveLength(5); expect(tree.list[2].value).toBe(firstChild); expect(tree.list[3].value).toBe(secondChild); @@ -337,8 +340,7 @@ describe('TraceTree', () => { }) ); - expect(tree.list).toHaveLength(3); - tree.expand(tree.list[1], true); + expect(tree.list).toHaveLength(4); expect(tree.list[2].parent?.value).toBe(tree.list[1].value); }); @@ -477,6 +479,8 @@ describe('TraceTree', () => { const node = tree.list[1]; + expect(tree.expand(node, false)).toBe(true); + expect(tree.list.length).toBe(2); expect(node.expanded).toBe(false); expect(tree.expand(node, true)).toBe(true); @@ -503,30 +507,28 @@ describe('TraceTree', () => { }); it('preserves children expanded state', () => { - const lastChildExpandedTxn = makeTransaction({start_timestamp: 1000}); - const lastTransaction = makeTransaction({start_timestamp: 5}); const tree = TraceTree.FromTrace( makeTrace({ transactions: [ makeTransaction({ children: [ - makeTransaction({children: [lastChildExpandedTxn]}), - lastTransaction, + makeTransaction({children: [makeTransaction({start_timestamp: 1000})]}), + makeTransaction({start_timestamp: 5}), ], }), ], }) ); - expect(tree.expand(tree.list[1], true)).toBe(true); - expect(tree.expand(tree.list[2], true)).toBe(true); + expect(tree.expand(tree.list[2], false)).toBe(true); // Assert that the list has been updated - expect(tree.list).toHaveLength(5); + expect(tree.list).toHaveLength(4); - expect(tree.expand(tree.list[2], false)).toBe(true); - expect(tree.list.length).toBe(4); expect(tree.expand(tree.list[2], true)).toBe(true); - expect(tree.list[tree.list.length - 1].value).toBe(lastTransaction); + expect(tree.list.length).toBe(5); + expect(tree.list[tree.list.length - 1].value).toEqual( + makeTransaction({start_timestamp: 5}) + ); }); it('expanding or collapsing a zoomed in node doesnt do anything', async () => { @@ -992,11 +994,11 @@ describe('TraceTree', () => { method: 'GET', body: 
makeEvent({}, [ makeRawSpan({description: 'parent span', op: 'http', span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', span_id: '2', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', span_id: '3', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', span_id: '4', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', span_id: '5', parent_span_id: '1'}), + makeRawSpan({description: 'span', op: 'db', span_id: '5', parent_span_id: '1'}), ]), }); diff --git a/static/app/views/performance/newTraceDetails/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceTree.tsx index b8b8cc025abd07..fa477f8545bf7a 100644 --- a/static/app/views/performance/newTraceDetails/traceTree.tsx +++ b/static/app/views/performance/newTraceDetails/traceTree.tsx @@ -12,8 +12,10 @@ import { isAutogroupedNode, isMissingInstrumentationNode, isParentAutogroupedNode, + isRootNode, isSiblingAutogroupedNode, isSpanNode, + isTraceNode, isTransactionNode, } from './guards'; @@ -88,7 +90,6 @@ import { * - connector generation should live in the UI layer, not in the tree. Same with depth calculation. It is more convenient * to calculate this when rendering the tree, as we can only calculate it only for the visible nodes and avoid an extra tree pass * - instead of storing span children separately, we should have meta tree nodes that handle pointing to the correct children - * */ export declare namespace TraceTree { @@ -171,6 +172,7 @@ function maybeInsertMissingInstrumentationSpan( parent.spanChildren.push(missingInstrumentationSpan); } + export class TraceTree { root: TraceTreeNode = TraceTreeNode.Root(); private _spanPromises: Map, Promise> = @@ -300,8 +302,6 @@ export class TraceTree { return this._list; } - // Span chain grouping is when multiple spans with the same op are nested as direct and only children - // @TODO Abdk: simplify the chaining logic static AutogroupDirectChildrenSpanNodes( root: TraceTreeNode ): void { @@ -370,8 +370,6 @@ export class TraceTree { } static AutogroupSiblingSpanNodes(root: TraceTreeNode): void { - // Span sibling grouping is when min 5 consecutive spans without children have matching op and description - // Span chain grouping is when multiple spans with the same op are nested as direct and only children const queue = [root]; while (queue.length > 0) { @@ -615,6 +613,13 @@ export class TraceTree { if (isMissingInstrumentationNode(t)) { return padding + 'missing_instrumentation'; } + if (isRootNode(t)) { + return padding + 'Root'; + } + if (isTraceNode(t)) { + return padding + 'Trace'; + } + throw new Error('Not implemented'); }) .filter(Boolean) @@ -744,7 +749,6 @@ export class TraceTreeNode { * would have been to create an invisible meta node that always points to the correct children. 
*/ get children(): TraceTreeNode[] { - // if node is not a autogrouped node, return children if (isAutogroupedNode(this)) { return this._children; } @@ -791,20 +795,23 @@ export class TraceTreeNode { } getVisibleChildrenCount(): number { - if (!this.children.length) { - return 0; - } - + const stack: TraceTreeNode[] = []; let count = 0; - const queue = [...this.children]; - while (queue.length > 0) { - count++; - const next = queue.pop()!; + for (let i = this.children.length - 1; i >= 0; i--) { + if (this.children[i].expanded || isParentAutogroupedNode(this.children[i])) { + stack.push(this.children[i]); + } + } - if (next.expanded || isParentAutogroupedNode(next)) { - for (let i = 0; i < next.children.length; i++) { - queue.push(next.children[i]); + while (stack.length > 0) { + const node = stack.pop()!; + count++; + // Since we're using a stack and it's LIFO, reverse the children before pushing them + // to ensure they are processed in the original left-to-right order. + if (node.expanded || isParentAutogroupedNode(node)) { + for (let i = node.children.length - 1; i >= 0; i--) { + stack.push(node.children[i]); } } } @@ -813,28 +820,28 @@ export class TraceTreeNode { } getVisibleChildren(): TraceTreeNode[] { - if (!this.children.length) { - return []; - } - - // @TODO: should be a proper FIFO queue as shift is O(n) - const visibleChildren: TraceTreeNode[] = []; + const stack: TraceTreeNode[] = []; + const children: TraceTreeNode[] = []; - function visit(node) { - visibleChildren.push(node); + for (let i = this.children.length - 1; i >= 0; i--) { + if (this.children[i].expanded || isParentAutogroupedNode(this.children[i])) { + stack.push(this.children[i]); + } + } + while (stack.length > 0) { + const node = stack.pop()!; + children.push(node); + // Since we're using a stack and it's LIFO, reverse the children before pushing them + // to ensure they are processed in the original left-to-right order. 
if (node.expanded || isParentAutogroupedNode(node)) {
+        for (let i = node.children.length - 1; i >= 0; i--) {
+          stack.push(node.children[i]);
         }
       }
     }
 
-    for (const child of this.children) {
-      visit(child);
-    }
-
-    return visibleChildren;
+    return children;
   }
 
   static Root() {

From 5d3476b988989ac4d73659c97eac4d53ac15643d Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Mon, 12 Feb 2024 17:07:40 +0100
Subject: [PATCH 253/357] fix(ddm): dashboard feature name (#64991)

---
 static/app/views/ddm/contextMenu.tsx       | 2 +-
 static/app/views/ddm/pageHeaderActions.tsx | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/static/app/views/ddm/contextMenu.tsx b/static/app/views/ddm/contextMenu.tsx
index 8562806df3f1ea..000b259112d0c2 100644
--- a/static/app/views/ddm/contextMenu.tsx
+++ b/static/app/views/ddm/contextMenu.tsx
@@ -98,7 +98,7 @@ export function MetricQueryContextMenu({
 
 
diff --git a/static/app/views/ddm/pageHeaderActions.tsx b/static/app/views/ddm/pageHeaderActions.tsx
index 7e478cb4d7ae5e..1e93932da203cf 100644
--- a/static/app/views/ddm/pageHeaderActions.tsx
+++ b/static/app/views/ddm/pageHeaderActions.tsx
@@ -93,7 +93,7 @@ export function PageHeaderActions({
 

From 8513c300fabae92421ae4244884f807887d821fb Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Mon, 12 Feb 2024 11:13:44 -0500
Subject: [PATCH 254/357] ref(rq): Allow methods and data in useQuery (#65005)

### Summary

useApiQuery shouldn't be limited to `GET`: if you aren't actually performing
mutations but still need to send a data payload (to do pre-flight validation,
for example), that request is a `POST` by convention.

---
 static/app/utils/queryClient.tsx | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/static/app/utils/queryClient.tsx b/static/app/utils/queryClient.tsx
index af0afc67a556d1..883d3132c14499 100644
--- a/static/app/utils/queryClient.tsx
+++ b/static/app/utils/queryClient.tsx
@@ -9,7 +9,7 @@ import type {
 } from '@tanstack/react-query';
 import {useInfiniteQuery, useQuery} from '@tanstack/react-query';
 
-import type {ApiResult, Client, ResponseMeta} from 'sentry/api';
+import type {APIRequestMethod, ApiResult, Client, ResponseMeta} from 'sentry/api';
 import type {ParsedHeader} from 'sentry/utils/parseLinkHeader';
 import parseLinkHeader from 'sentry/utils/parseLinkHeader';
 import type RequestError from 'sentry/utils/requestError/requestError';
@@ -43,8 +43,11 @@ const PERSIST_IN_FLIGHT = true;
 
 type QueryKeyEndpointOptions<
   Headers = Record,
   Query = Record,
+  Data = Record,
 > = {
+  data?: Data;
   headers?: Headers;
+  method?: APIRequestMethod;
   query?: Query;
 };
 
 export type ApiQueryKey =
   | readonly [url: string]
   | readonly [
       url: string,
-      options: QueryKeyEndpointOptions, Record>,
+      options: QueryKeyEndpointOptions<
+        Record,
+        Record,
+        Record
+      >,
     ];
 
 export interface UseApiQueryOptions
@@ -146,7 +153,8 @@ export function fetchDataQuery(api: Client) {
 
     return api.requestPromise(url, {
      includeAllArgs: true,
-      method: 'GET',
+      method: opts?.method ??
'GET', + data: opts?.data, query: opts?.query, headers: opts?.headers, }); From af96d67cdb567fa843ce69b5fcda17559a6aabcc Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Mon, 12 Feb 2024 08:59:50 -0800 Subject: [PATCH 255/357] fix(inbound-filters): Make edits to copy in settings (#64976) this pr makes a few small copy edits to the inbound filter settings --- .../project/projectFilters/projectFiltersSettings.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx b/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx index 639567829b79a2..71caa0ed0bdb5b 100644 --- a/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx +++ b/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx @@ -58,7 +58,7 @@ const filterDescriptions = { 'Filter transactions that match most [commonNamingPatterns:common naming patterns] for health checks.', { commonNamingPatterns: ( - + ), } ), @@ -262,7 +262,7 @@ class LegacyBrowserFilterRow extends Component { {!disabled && (
    - {t('Legacy Browser Filters')}: + {t('Filter out legacy browsers')}:
    From 7c008d6abf1d909b570b081fbc65e8ab9753b07c Mon Sep 17 00:00:00 2001 From: Isabella Enriquez Date: Mon, 12 Feb 2024 12:10:07 -0500 Subject: [PATCH 256/357] chore(api): Make `List Issues to be Resolved in a Particular Release` Endpoint Experimental (#64886) https://docs.sentry.io/api/releases/list-issues-to-be-resolved-in-a-particular-release/ should not be public. The endpoint file has publish status to private but these (incomplete) docs come from the old documentation system with JSON. This PR removes the outdated documentation to line up with the status in the endpoint file. --- api-docs/openapi.json | 3 - .../project-issues-resolved-in-release.json | 52 -------------- .../project_issues_resolved_in_release.py | 2 +- .../test_project_issues_solved_in_release.py | 70 ------------------- 4 files changed, 1 insertion(+), 126 deletions(-) delete mode 100644 api-docs/paths/releases/project-issues-resolved-in-release.json delete mode 100644 tests/apidocs/endpoints/releases/test_project_issues_solved_in_release.py diff --git a/api-docs/openapi.json b/api-docs/openapi.json index e83a335ae808e4..4825a045061497 100644 --- a/api-docs/openapi.json +++ b/api-docs/openapi.json @@ -195,9 +195,6 @@ "/api/0/organizations/{organization_slug}/releases/{version}/commitfiles/": { "$ref": "paths/releases/organization-release-commit-files.json" }, - "/api/0/projects/{organization_slug}/{project_slug}/releases/{version}/resolved/": { - "$ref": "paths/releases/project-issues-resolved-in-release.json" - }, "/api/0/organizations/{organization_slug}/releases/{version}/deploys/": { "$ref": "paths/releases/deploys.json" }, diff --git a/api-docs/paths/releases/project-issues-resolved-in-release.json b/api-docs/paths/releases/project-issues-resolved-in-release.json deleted file mode 100644 index a9be8f22e53dff..00000000000000 --- a/api-docs/paths/releases/project-issues-resolved-in-release.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "get": { - "tags": ["Releases"], - "description": "List issues to be resolved in a particular release.", - "operationId": "List Issues to be Resolved in a Particular Release", - "parameters": [ - { - "name": "organization_slug", - "in": "path", - "description": "The slug of the organization.", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "project_slug", - "in": "path", - "description": "The slug of the project.", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "version", - "in": "path", - "description": "The version identifier of the release.", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Success" - }, - "403": { - "description": "Forbidden" - }, - "404": { - "description": "Not Found" - } - }, - "security": [ - { - "auth_token": ["project:releases"] - } - ] - } -} diff --git a/src/sentry/api/endpoints/project_issues_resolved_in_release.py b/src/sentry/api/endpoints/project_issues_resolved_in_release.py index 8d547202136990..89677ed08924d5 100644 --- a/src/sentry/api/endpoints/project_issues_resolved_in_release.py +++ b/src/sentry/api/endpoints/project_issues_resolved_in_release.py @@ -15,7 +15,7 @@ class ProjectIssuesResolvedInReleaseEndpoint(ProjectEndpoint, EnvironmentMixin): owner = ApiOwner.ISSUES publish_status = { - "GET": ApiPublishStatus.PRIVATE, + "GET": ApiPublishStatus.EXPERIMENTAL, } permission_classes = (ProjectPermission,) diff --git a/tests/apidocs/endpoints/releases/test_project_issues_solved_in_release.py 
b/tests/apidocs/endpoints/releases/test_project_issues_solved_in_release.py deleted file mode 100644 index 64f890e38d7ad1..00000000000000 --- a/tests/apidocs/endpoints/releases/test_project_issues_solved_in_release.py +++ /dev/null @@ -1,70 +0,0 @@ -from uuid import uuid1 - -from django.test.client import RequestFactory -from django.urls import reverse - -from fixtures.apidocs_test_case import APIDocsTestCase -from sentry.models.commit import Commit -from sentry.models.grouplink import GroupLink -from sentry.models.groupresolution import GroupResolution -from sentry.models.releasecommit import ReleaseCommit -from sentry.models.repository import Repository -from sentry.testutils.silo import region_silo_test - - -@region_silo_test -class ProjectIssuesResolvedInReleaseEndpointTest(APIDocsTestCase): - endpoint = "sentry-api-0-project-release-resolved" - method = "get" - - def setUp(self): - super().setUp() - self.user = self.create_user() - self.org = self.create_organization() - self.team = self.create_team(organization=self.org) - self.create_member(organization=self.org, user=self.user, teams=[self.team]) - self.project = self.create_project(teams=[self.team]) - self.release = self.create_release(project=self.project) - self.group = self.create_group(project=self.project) - self.login_as(self.user) - - repo = Repository.objects.create(organization_id=self.org.id, name=self.project.name) - commit = Commit.objects.create( - organization_id=self.org.id, repository_id=repo.id, key=uuid1().hex - ) - commit2 = Commit.objects.create( - organization_id=self.org.id, repository_id=repo.id, key=uuid1().hex - ) - ReleaseCommit.objects.create( - organization_id=self.org.id, release=self.release, commit=commit, order=1 - ) - ReleaseCommit.objects.create( - organization_id=self.org.id, release=self.release, commit=commit2, order=0 - ) - GroupLink.objects.create( - group_id=self.group.id, - project_id=self.group.project_id, - linked_type=GroupLink.LinkedType.commit, - relationship=GroupLink.Relationship.resolves, - linked_id=commit.id, - ) - - GroupResolution.objects.create( - group=self.group, - release=self.release, - type=GroupResolution.Type.in_release, - ) - self.url = reverse( - "sentry-api-0-project-release-resolved", - kwargs={ - "organization_slug": self.project.organization.slug, - "project_slug": self.project.slug, - "version": self.release.version, - }, - ) - - def test_get(self): - response = self.client.get(self.url) - request = RequestFactory().get(self.url) - - self.validate_schema(request, response) From 1cfb0bf4071ea6133f52babbe1258050a8777ade Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Mon, 12 Feb 2024 09:18:06 -0800 Subject: [PATCH 257/357] ref(slack): Fix empty text (#64891) I see a few `invalid_blocks` [errors](https://console.cloud.google.com/logs/query;query=resource.type%3D%22k8s_container%22%0Alabels.name%3D~%22sentry%22%0AjsonPayload.event%20%3D%20%22rule.fail.slack_post%22%0Ajson_payload.error%3D%22invalid_blocks%22;summaryFields=:true:32:beginning;lfeCustomFields=jsonPayload%252Ferror;cursorTimestamp=2024-02-08T18:56:57.727384762Z;duration=PT10M?project=internal-sentry&rapt=AEjHL4OrXdLbKrht86-HuSjck8ZerOjuSSiY6ZnMnpy12mCMG2egSebe97-lXg7C0LRv8OHYAcj0tn9v98A-FI11k6e8Vq0OIZNzfEn3bASivMtyEWxTnvI) for one specific project where the text seems to be `" "` so after we strip the space, there is nothing, resulting in an error from Slack. 
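A minimal sketch of the new guard (a standalone stand-in, not the actual
message builder class, and with the block shape simplified): a text value of
`" "` becomes `""` after `lstrip`, and an empty string is falsy, so no block
is emitted for Slack to reject.

```python
def build_text_blocks(text: str | None) -> list[dict]:
    # Stand-in for the builder logic: strip leading spaces first, then only
    # emit a block when something is left, since Slack rejects blocks whose
    # text is an empty string. The dict shape here is illustrative only.
    blocks: list[dict] = []
    if text:
        text = text.lstrip(" ")
        if text:
            blocks.append({"type": "rich_text_preformatted", "text": text})
    return blocks


assert build_text_blocks(" ") == []  # previously produced invalid_blocks
assert build_text_blocks(" boom") == [
    {"type": "rich_text_preformatted", "text": "boom"}
]
```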
--- .../slack/message_builder/issues.py | 4 +- .../slack/test_message_builder.py | 88 ++++++++++++++++++- 2 files changed, 88 insertions(+), 4 deletions(-) diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index 9cce6b23ef30f6..ba351081e23607 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -629,7 +629,9 @@ def build(self, notification_uuid: str | None = None) -> SlackBlock | SlackAttac # build up text block if text: text = text.lstrip(" ") - blocks.append(self.get_rich_text_preformatted_block(text)) + # XXX(CEO): sometimes text is " " and slack will error if we pass an empty string (now "") + if text: + blocks.append(self.get_rich_text_preformatted_block(text)) # build up actions text if self.actions and self.identity and not action_text: diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index a19798dd6f2c04..e42d4419247144 100644 --- a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -10,6 +10,7 @@ from sentry.eventstore.models import Event from sentry.incidents.logic import CRITICAL_TRIGGER_LABEL from sentry.incidents.models import IncidentStatus +from sentry.integrations.message_builder import build_attachment_text, build_attachment_title from sentry.integrations.slack.message_builder import LEVEL_TO_COLOR from sentry.integrations.slack.message_builder.base.block import BlockSlackMessageBuilder from sentry.integrations.slack.message_builder.incidents import SlackIncidentsMessageBuilder @@ -67,7 +68,8 @@ def build_test_message_blocks( ) -> dict[str, Any]: project = group.project - title = group.title + title = build_attachment_title(group) + text = build_attachment_text(group) title_link = f"http://testserver/organizations/{project.organization.slug}/issues/{group.id}" formatted_title = title if event: @@ -86,6 +88,20 @@ def build_test_message_blocks( "block_id": f'{{"issue":{group.id}}}', }, ] + if text: + new_text = text.lstrip(" ") + if new_text: + text_section = { + "type": "rich_text", + "elements": [ + { + "type": "rich_text_preformatted", + "elements": [{"type": "text", "text": new_text}], + "border": 0, + } + ], + } + blocks.append(text_section) tags_text = "" if not tags: @@ -182,9 +198,12 @@ def build_test_message_blocks( blocks.append({"type": "divider"}) + popup_text = ( + f"[{project.slug}] {title}: {text}" if text is not None else f"[{project.slug}] {title}" + ) return { "blocks": blocks, - "text": f"[{project.slug}] {title}", + "text": popup_text, } @@ -311,7 +330,6 @@ def test_build_group_attachment(self): @with_feature("organizations:slack-block-kit") def test_build_group_block(self): - release = self.create_release(project=self.project) event = self.store_event( data={ @@ -403,6 +421,70 @@ def test_build_group_block(self): assert SlackIssuesMessageBuilder(group).build() == test_message + @with_feature("organizations:slack-block-kit") + def test_build_group_block_with_message(self): + event_data = { + "event_id": "a" * 32, + "message": "IntegrationError", + "fingerprint": ["group-1"], + "exception": { + "values": [ + { + "type": "IntegrationError", + "value": "Identity not found.", + } + ] + }, + } + event = self.store_event( + data=event_data, + project_id=self.project.id, + ) + assert event.group + group = event.group + self.project.flags.has_releases = True + 
self.project.save(update_fields=["flags"]) + base_tags = {"level": "error"} + + assert SlackIssuesMessageBuilder(group).build() == build_test_message_blocks( + teams={self.team}, + users={self.user}, + group=group, + tags=base_tags, + ) + + @with_feature("organizations:slack-block-kit") + def test_build_group_block_with_empty_string_message(self): + event_data = { + "event_id": "a" * 32, + "message": "IntegrationError", + "fingerprint": ["group-1"], + "exception": { + "values": [ + { + "type": "IntegrationError", + "value": " ", + } + ] + }, + } + event = self.store_event( + data=event_data, + project_id=self.project.id, + ) + assert event.group + group = event.group + self.project.flags.has_releases = True + self.project.save(update_fields=["flags"]) + base_tags = {"level": "error"} + + assert SlackIssuesMessageBuilder(group).build() == build_test_message_blocks( + teams={self.team}, + users={self.user}, + group=group, + tags=base_tags, + ) + @patch( "sentry.integrations.slack.message_builder.issues.get_option_groups", wraps=get_option_groups, From 7340c816173b6d2f1afdc30cb4f7ffd6f4df8199 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 12 Feb 2024 09:20:14 -0800 Subject: [PATCH 258/357] fix(issues): Adjust timeline dots, menu header (#64979) --- .../views/issueDetails/traceTimeline/traceTimelineEvents.tsx | 4 ++-- .../views/issueDetails/traceTimeline/traceTimelineTooltip.tsx | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index ebd52556163227..37a91061570881 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -169,7 +169,7 @@ function NodeGroup({ )} {!isCurrentNode && groupEvents - .slice(0, 4) + .slice(0, 5) .map(groupEvent => 'event.type' in groupEvent ? ( @@ -226,12 +226,12 @@ const IconNode = styled('div')` box-shadow: ${p => p.theme.dropShadowLight}; user-select: none; background-color: ${p => color(p.theme.red200).alpha(0.3).string()}; - border: 1px solid ${p => p.theme.red300}; margin-left: -8px; `; const PerformanceIconNode = styled(IconNode)` background-color: unset; + border: 1px solid ${p => p.theme.red300}; `; const CurrentNodeContainer = styled('div')` diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx index c742e64c94b9c5..ac0c4b8611a42e 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx @@ -40,7 +40,9 @@ export function TraceTimelineTooltip({event, timelineEvents}: TraceTimelineToolt {displayYouAreHere && {t('You are here')}} - {t('Around the same time')} + {(filteredTimelineEvents.length > 1 || displayYouAreHere) && ( + {t('Around the same time')} + )} {filteredTimelineEvents.slice(0, 3).map(timelineEvent => { const project = projects.find(p => p.slug === timelineEvent.project); return ( From 817027d8f8a74676413a8c1e68ad88bf5ac6eaac Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Mon, 12 Feb 2024 09:23:39 -0800 Subject: [PATCH 259/357] chore(issues): Remove usage of issue-alert-fallback-targeting flag from the frontend (#64965) It's GA'd so I'll delete it from the frontend and try to get rid of any unused components as well. 
--- .../views/alerts/rules/issue/index.spec.tsx | 8 --- static/app/views/alerts/rules/issue/index.tsx | 13 +--- .../app/views/alerts/rules/issue/ruleNode.tsx | 71 +------------------ .../views/alerts/rules/issue/ruleNodeList.tsx | 5 +- .../projectInstall/issueAlertOptions.spec.tsx | 5 +- .../projectInstall/issueAlertOptions.tsx | 10 +-- .../project/projectOwnership/index.spec.tsx | 8 +-- .../project/projectOwnership/index.tsx | 15 ---- 8 files changed, 10 insertions(+), 125 deletions(-) diff --git a/static/app/views/alerts/rules/issue/index.spec.tsx b/static/app/views/alerts/rules/issue/index.spec.tsx index ff5ae9c4f7cc2b..34d531dbed638b 100644 --- a/static/app/views/alerts/rules/issue/index.spec.tsx +++ b/static/app/views/alerts/rules/issue/index.spec.tsx @@ -140,14 +140,6 @@ describe('IssueRuleEditor', function () { url: `/projects/org-slug/project-slug/?expand=hasAlertIntegration`, body: {}, }); - MockApiClient.addMockResponse({ - url: `/projects/org-slug/project-slug/ownership/`, - method: 'GET', - body: { - fallthrough: false, - autoAssignment: false, - }, - }); MockApiClient.addMockResponse({ url: '/projects/org-slug/project-slug/rules/preview/', method: 'POST', diff --git a/static/app/views/alerts/rules/issue/index.tsx b/static/app/views/alerts/rules/issue/index.tsx index bd6d0e4264aff8..acf84fb29ea1d1 100644 --- a/static/app/views/alerts/rules/issue/index.tsx +++ b/static/app/views/alerts/rules/issue/index.tsx @@ -45,14 +45,7 @@ import {ALL_ENVIRONMENTS_KEY} from 'sentry/constants'; import {IconChevron, IconNot} from 'sentry/icons'; import {t, tct, tn} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type { - Environment, - IssueOwnership, - Member, - Organization, - Project, - Team, -} from 'sentry/types'; +import type {Environment, Member, Organization, Project, Team} from 'sentry/types'; import {OnboardingTaskKey} from 'sentry/types'; import type { IssueAlertConfiguration, @@ -163,7 +156,6 @@ type State = DeprecatedAsyncView['state'] & { uuid: null | string; acceptedNoisyAlert?: boolean; duplicateTargetRule?: UnsavedIssueAlertRule | IssueAlertRule | null; - ownership?: null | IssueOwnership; rule?: UnsavedIssueAlertRule | IssueAlertRule | null; }; @@ -282,7 +274,6 @@ class IssueRuleEditor extends DeprecatedAsyncView { }, ], ['configs', `/projects/${organization.slug}/${project.slug}/rules/configuration/`], - ['ownership', `/projects/${organization.slug}/${project.slug}/ownership/`], ]; if (ruleId) { @@ -1125,7 +1116,6 @@ class IssueRuleEditor extends DeprecatedAsyncView { rule, detailedError, loading, - ownership, sendingNotification, incompatibleConditions, incompatibleFilters, @@ -1390,7 +1380,6 @@ class IssueRuleEditor extends DeprecatedAsyncView { organization={organization} project={project} disabled={disabled} - ownership={ownership} error={ this.hasError('actions') && ( diff --git a/static/app/views/alerts/rules/issue/ruleNode.tsx b/static/app/views/alerts/rules/issue/ruleNode.tsx index 4e89a389e92593..4b453a2cde54ab 100644 --- a/static/app/views/alerts/rules/issue/ruleNode.tsx +++ b/static/app/views/alerts/rules/issue/ruleNode.tsx @@ -13,7 +13,7 @@ import {releaseHealth} from 'sentry/data/platformCategories'; import {IconDelete, IconSettings} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Choices, IssueOwnership, Organization, Project} from 'sentry/types'; +import type {Choices, Organization, Project} from 'sentry/types'; import type { IssueAlertConfiguration, 
IssueAlertRuleAction, @@ -232,7 +232,6 @@ interface Props { incompatibleBanner?: boolean; incompatibleRule?: boolean; node?: IssueAlertConfiguration[keyof IssueAlertConfiguration][number] | null; - ownership?: null | IssueOwnership; } function RuleNode({ @@ -245,7 +244,6 @@ function RuleNode({ onDelete, onPropertyChange, onReset, - ownership, incompatibleRule, incompatibleBanner, }: Props) { @@ -315,8 +313,7 @@ function RuleNode({ if ( data.id === IssueAlertActionType.NOTIFY_EMAIL && - data.targetType !== MailActionTargetType.ISSUE_OWNERS && - organization.features.includes('issue-alert-fallback-targeting') + data.targetType !== MailActionTargetType.ISSUE_OWNERS ) { // Hide the fallback options when targeting team or member label = 'Send a notification to {targetType}'; @@ -492,70 +489,6 @@ function RuleNode({ ); } - if ( - data.id === IssueAlertActionType.NOTIFY_EMAIL && - data.targetType === MailActionTargetType.ISSUE_OWNERS && - !organization.features.includes('issue-alert-fallback-targeting') - ) { - return ( - - {!ownership - ? tct( - 'If there are no matching [issueOwners], ownership is determined by the [ownershipSettings].', - { - issueOwners: ( - - {t('issue owners')} - - ), - ownershipSettings: ( - - {t('ownership settings')} - - ), - } - ) - : ownership.fallthrough - ? tct( - 'If there are no matching [issueOwners], all project members will receive this alert. To change this behavior, see [ownershipSettings].', - { - issueOwners: ( - - {t('issue owners')} - - ), - ownershipSettings: ( - - {t('ownership settings')} - - ), - } - ) - : tct( - 'If there are no matching [issueOwners], this action will have no effect. To change this behavior, see [ownershipSettings].', - { - issueOwners: ( - - {t('issue owners')} - - ), - ownershipSettings: ( - - {t('ownership settings')} - - ), - } - )} - - ); - } - return null; } diff --git a/static/app/views/alerts/rules/issue/ruleNodeList.tsx b/static/app/views/alerts/rules/issue/ruleNodeList.tsx index 05fd851512323a..4751d66815a6eb 100644 --- a/static/app/views/alerts/rules/issue/ruleNodeList.tsx +++ b/static/app/views/alerts/rules/issue/ruleNodeList.tsx @@ -4,7 +4,7 @@ import styled from '@emotion/styled'; import SelectControl from 'sentry/components/forms/controls/selectControl'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {IssueOwnership, Organization, Project} from 'sentry/types'; +import type {Organization, Project} from 'sentry/types'; import type { IssueAlertConfiguration, IssueAlertGenericConditionConfig, @@ -50,7 +50,6 @@ type Props = { project: Project; incompatibleBanner?: number | null; incompatibleRules?: number[] | null; - ownership?: null | IssueOwnership; selectType?: 'grouped'; }; @@ -244,7 +243,6 @@ class RuleNodeList extends Component { placeholder, items, organization, - ownership, project, disabled, error, @@ -277,7 +275,6 @@ class RuleNodeList extends Component { organization={organization} project={project} disabled={disabled} - ownership={ownership} incompatibleRule={incompatibleRules?.includes(idx)} incompatibleBanner={incompatibleBanner === idx} /> diff --git a/static/app/views/projectInstall/issueAlertOptions.spec.tsx b/static/app/views/projectInstall/issueAlertOptions.spec.tsx index dea59abe1a9f5f..c26064158723b8 100644 --- a/static/app/views/projectInstall/issueAlertOptions.spec.tsx +++ b/static/app/views/projectInstall/issueAlertOptions.spec.tsx @@ -121,14 +121,13 @@ describe('IssueAlertOptions', function () { expect(screen.getByTestId('range-input')).toHaveValue(10); }); 
- it('should provide fallthroughType with issue action for issue-alert-fallback-targeting', async () => { + it('should provide fallthroughType with issue action', async () => { MockApiClient.addMockResponse({ url: URL, body: MOCK_RESP_VERBOSE, }); - const org = {...organization, features: ['issue-alert-fallback-targeting']}; - render(); + render(); await userEvent.click(screen.getByLabelText(/When there are more than/i)); expect(props.onChange).toHaveBeenCalledWith( expect.objectContaining({ diff --git a/static/app/views/projectInstall/issueAlertOptions.tsx b/static/app/views/projectInstall/issueAlertOptions.tsx index 94d0e768748ec6..354b394c86f1e8 100644 --- a/static/app/views/projectInstall/issueAlertOptions.tsx +++ b/static/app/views/projectInstall/issueAlertOptions.tsx @@ -31,6 +31,7 @@ const ISSUE_ALERT_DEFAULT_ACTION: Omit< > = { id: IssueAlertActionType.NOTIFY_EMAIL, targetType: 'IssueOwners', + fallthroughType: 'ActiveMembers', }; const METRIC_CONDITION_MAP = { @@ -234,14 +235,7 @@ class IssueAlertOptions extends DeprecatedAsyncComponent { ), ] : undefined, - actions: [ - { - ...ISSUE_ALERT_DEFAULT_ACTION, - ...(this.props.organization.features.includes('issue-alert-fallback-targeting') - ? {fallthroughType: 'ActiveMembers'} - : {}), - }, - ], + actions: [ISSUE_ALERT_DEFAULT_ACTION], actionMatch: 'all', frequency: 5, }; diff --git a/static/app/views/settings/project/projectOwnership/index.spec.tsx b/static/app/views/settings/project/projectOwnership/index.spec.tsx index d5db9c8258a37e..a31a758fdd7d0a 100644 --- a/static/app/views/settings/project/projectOwnership/index.spec.tsx +++ b/static/app/views/settings/project/projectOwnership/index.spec.tsx @@ -140,16 +140,12 @@ describe('Project Ownership', () => { }); }); - it('should hide issue owners for issue-alert-fallback-targeting flag', () => { - const org = { - ...organization, - features: ['issue-alert-fallback-targeting'], - }; + it('should hide issue owners', () => { render( ); diff --git a/static/app/views/settings/project/projectOwnership/index.tsx b/static/app/views/settings/project/projectOwnership/index.tsx index a6bcd5c3f0dd96..e9e94336c78077 100644 --- a/static/app/views/settings/project/projectOwnership/index.tsx +++ b/static/app/views/settings/project/projectOwnership/index.tsx @@ -235,21 +235,6 @@ tags.sku_class:enterprise #enterprise`; ], disabled, }, - ...(organization.features.includes('issue-alert-fallback-targeting') - ? [] - : [ - { - name: 'fallthrough', - type: 'boolean' as const, - label: t( - 'Send alert to project members if there’s no assigned owner' - ), - help: t( - 'Alerts will be sent to all users who have access to this project.' - ), - disabled, - }, - ]), { name: 'codeownersAutoSync', type: 'boolean', From 6d6c96440d74b606849a7d297851341f3e02b541 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 12 Feb 2024 11:25:20 -0600 Subject: [PATCH 260/357] feat(metrics): Add new endpoint for metrics samples (#65011) This adds the new endpoint that will be the standard way of fetching samples for a metric. 
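A hedged usage sketch, based on the request shape the tests below exercise
(the host, org slug, project id, and token are placeholders; the endpoint
does not return sample rows yet in this PR):

```python
import requests

# Requires the organizations:metrics-samples-list feature, otherwise the
# endpoint 404s; an invalid MRI or a missing field produces a 400.
resp = requests.get(
    "https://sentry.example.com/api/0/organizations/my-org/metrics/samples/",
    headers={"Authorization": "Bearer {token}"},
    params={
        "mri": "d:transactions/duration@millisecond",
        "field": ["id"],  # lists are sent as repeated query params
        "project": [42],
    },
)
assert resp.status_code == 200  # the response body is not populated yet
```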
Closes #65000 --- .../api/endpoints/organization_metrics.py | 50 +++++++++++++++- src/sentry/api/urls.py | 6 ++ .../endpoints/test_organization_metrics.py | 57 +++++++++++++++++++ 3 files changed, 112 insertions(+), 1 deletion(-) diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py index ec323cb2797207..b1ebe2c6733bd1 100644 --- a/src/sentry/api/endpoints/organization_metrics.py +++ b/src/sentry/api/endpoints/organization_metrics.py @@ -1,15 +1,23 @@ +from rest_framework import serializers from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response +from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint -from sentry.api.bases.organization import OrganizationEndpoint, OrganizationMetricsPermission +from sentry.api.bases import OrganizationEventsV2EndpointBase +from sentry.api.bases.organization import ( + NoProjects, + OrganizationEndpoint, + OrganizationMetricsPermission, +) from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.paginator import GenericOffsetPaginator from sentry.api.utils import get_date_range_from_params from sentry.exceptions import InvalidParams +from sentry.models.organization import Organization from sentry.sentry_metrics.querying.data import run_metrics_query from sentry.sentry_metrics.querying.data_v2 import run_metrics_queries_plan from sentry.sentry_metrics.querying.data_v2.plan import MetricsQueriesPlan, QueryOrder @@ -28,6 +36,7 @@ get_single_metric_info, get_tag_values, ) +from sentry.snuba.metrics.naming_layer.mri import is_mri from sentry.snuba.metrics.utils import DerivedMetricException, DerivedMetricParseException from sentry.snuba.referrer import Referrer from sentry.snuba.sessions_v2 import InvalidField @@ -410,3 +419,42 @@ def post(self, request: Request, organization) -> Response: return Response(status=500, data={"detail": str(e)}) return Response(status=200, data=results) + + +class MetricsSamplesSerializer(serializers.Serializer): + mri = serializers.CharField(required=True) + field = serializers.ListField(required=True, allow_empty=False, child=serializers.CharField()) + + def validate_mri(self, mri: str): + if not is_mri(mri): + raise serializers.ValidationError(f"Invalid MRI: {mri}") + + return mri + + +@region_silo_endpoint +class OrganizationMetricsSamplesEndpoint(OrganizationEventsV2EndpointBase): + publish_status = { + "GET": ApiPublishStatus.EXPERIMENTAL, + } + owner = ApiOwner.TELEMETRY_EXPERIENCE + + def get(self, request: Request, organization: Organization) -> Response: + if not features.has("organizations:metrics-samples-list", organization, actor=request.user): + return Response(status=404) + + try: + params = self.get_snuba_params(request, organization) + except NoProjects: + return Response(status=404) + + serializer = MetricsSamplesSerializer(data=request.GET) + if not serializer.is_valid(): + return Response(serializer.errors, status=400) + + serialized = serializer.validated_data + + assert params + assert serialized + + return Response(status=200) diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index a28c71db3a6660..c284cdb2919f0a 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -386,6 +386,7 @@ OrganizationMetricsDataEndpoint, OrganizationMetricsDetailsEndpoint, OrganizationMetricsQueryEndpoint, + 
OrganizationMetricsSamplesEndpoint, OrganizationMetricsTagDetailsEndpoint, OrganizationMetricsTagsEndpoint, ) @@ -1986,6 +1987,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationMetricsQueryEndpoint.as_view(), name="sentry-api-0-organization-metrics-query", ), + re_path( + r"^(?P[^/]+)/metrics/samples/$", + OrganizationMetricsSamplesEndpoint.as_view(), + name="sentry-api-0-organization-metrics-samples", + ), re_path( r"^(?P[^/]+)/metrics/tags/$", OrganizationMetricsTagsEndpoint.as_view(), diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py index 20b5a6d00dd74b..b913f9980046c4 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics.py +++ b/tests/sentry/api/endpoints/test_organization_metrics.py @@ -3,6 +3,7 @@ import pytest from django.urls import reverse +from rest_framework.exceptions import ErrorDetail from sentry.models.apitoken import ApiToken from sentry.sentry_metrics import indexer @@ -97,3 +98,59 @@ def test_permissions(self): for method, endpoint, *rest in self.endpoints: response = self.send_request(token, method, endpoint, *rest) assert response.status_code in (200, 400, 404) + + +@region_silo_test +class OrganizationMetricsSamplesEndpointTest(APITestCase): + view = "sentry-api-0-organization-metrics-samples" + + def setUp(self): + self.login_as(user=self.user) + + def do_request(self, query, features=None, **kwargs): + if features is None: + features = [] + with self.feature(features): + return self.client.get( + reverse(self.view, kwargs={"organization_slug": self.organization.slug}), + query, + format="json", + **kwargs, + ) + + def test_feature_flag(self): + query = { + "mri": "d:transactions/duration@millisecond", + "field": ["id"], + "project": [self.project.id], + } + + response = self.do_request(query) + assert response.status_code == 404 + + response = self.do_request(query, features=["organizations:metrics-samples-list"]) + assert response.status_code == 200 + + def test_no_project(self): + query = { + "mri": "d:transactions/duration@millisecond", + "field": ["id"], + "project": [], + } + + response = self.do_request(query, features=["organizations:metrics-samples-list"]) + assert response.status_code == 404 + + def test_bad_params(self): + query = { + "mri": "foo", + "field": [], + "project": [self.project.id], + } + + response = self.do_request(query, features=["organizations:metrics-samples-list"]) + assert response.status_code == 400 + assert response.data == { + "mri": [ErrorDetail(string="Invalid MRI: foo", code="invalid")], + "field": [ErrorDetail(string="This field is required.", code="required")], + } From 72f77cec6ac381595aa91555f7cdc52f313edb5e Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 12 Feb 2024 09:35:17 -0800 Subject: [PATCH 261/357] feat(issues): Add current event to the trace timeline (#64958) --- .../traceTimeline/traceLink.spec.tsx | 2 - .../issueDetails/traceTimeline/traceLink.tsx | 4 +- .../traceTimeline/traceTimeline.spec.tsx | 39 ++++++++++--------- .../traceTimeline/useTraceTimelineEvents.tsx | 21 ++++++++-- 4 files changed, 42 insertions(+), 24 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx b/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx index 26f8abf1947de9..8a2361dc545f6c 100644 --- a/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceLink.spec.tsx @@ -29,7 +29,6 @@ 
describe('TraceLink', () => { 'project.name': project.name, title: 'Slow DB Query', id: 'abc', - issue: 'SENTRY-ABC1', transaction: '/api/slow/', }, ], @@ -44,7 +43,6 @@ describe('TraceLink', () => { 'project.name': project.name, title: 'AttributeError: Something Failed', id: event.id, - issue: 'SENTRY-2EYS', transaction: 'important.task', 'event.type': 'error', }, diff --git a/static/app/views/issueDetails/traceTimeline/traceLink.tsx b/static/app/views/issueDetails/traceTimeline/traceLink.tsx index 57a8e623e54bec..4fa32243903d7e 100644 --- a/static/app/views/issueDetails/traceTimeline/traceLink.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceLink.tsx @@ -48,7 +48,9 @@ export function TraceLink({event}: TraceLinkProps) { > {t('View Full Trace')} - {data.length > 0 && tn(' (%s issue)', ' (%s issues)', data.length)} + {data.length >= 100 + ? t(' (100+ issues)') + : tn(' (%s issue)', ' (%s issues)', data.length)} diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx index c96641dfc59dd1..c64bc21c15ccdc 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.spec.tsx @@ -15,6 +15,7 @@ jest.mock('sentry/utils/routeAnalytics/useRouteAnalyticsParams'); describe('TraceTimeline', () => { const organization = OrganizationFixture({features: ['issues-trace-timeline']}); const event = EventFixture({ + dateCreated: '2024-01-24T09:09:03+00:00', contexts: { trace: { trace_id: '123', @@ -23,6 +24,7 @@ describe('TraceTimeline', () => { }); const project = ProjectFixture(); + const emptyBody: TraceEventResponse = {data: [], meta: {fields: {}, units: {}}}; const issuePlatformBody: TraceEventResponse = { data: [ { @@ -32,7 +34,6 @@ describe('TraceTimeline', () => { 'project.name': project.name, title: 'Slow DB Query', id: 'abc', - issue: 'SENTRY-ABC1', transaction: '/api/slow/', }, ], @@ -47,7 +48,6 @@ describe('TraceTimeline', () => { 'project.name': project.name, title: 'AttributeError: Something Failed', id: event.id, - issue: 'SENTRY-2EYS', transaction: 'important.task', 'event.type': 'error', 'stack.function': ['important.task', 'task.run'], @@ -85,10 +85,7 @@ describe('TraceTimeline', () => { it('displays nothing if the only event is the current event', async () => { MockApiClient.addMockResponse({ url: `/organizations/${organization.slug}/events/`, - body: { - data: [], - meta: {fields: {}, units: {}}, - }, + body: emptyBody, match: [MockApiClient.matchQuery({dataset: 'issuePlatform'})], }); MockApiClient.addMockResponse({ @@ -106,18 +103,12 @@ describe('TraceTimeline', () => { it('displays nothing if there are no events', async () => { MockApiClient.addMockResponse({ url: `/organizations/${organization.slug}/events/`, - body: { - data: [], - meta: {fields: {}, units: {}}, - }, + body: emptyBody, match: [MockApiClient.matchQuery({dataset: 'issuePlatform'})], }); MockApiClient.addMockResponse({ url: `/organizations/${organization.slug}/events/`, - body: { - data: [], - meta: {fields: {}, units: {}}, - }, + body: emptyBody, match: [MockApiClient.matchQuery({dataset: 'discover'})], }); render(, {organization}); @@ -135,14 +126,26 @@ describe('TraceTimeline', () => { }); MockApiClient.addMockResponse({ url: `/organizations/${organization.slug}/events/`, - body: { - data: [], - meta: {fields: {}, units: {}}, - }, + body: emptyBody, match: [MockApiClient.matchQuery({dataset: 'discover'})], }); render(, {organization}); 
// Checking for the presence of seconds expect(await screen.findAllByText(/\d{1,2}:\d{2}:\d{2} (AM|PM)/)).toHaveLength(5); }); + + it('adds the current event if not in the api response', async () => { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/events/`, + body: issuePlatformBody, + match: [MockApiClient.matchQuery({dataset: 'issuePlatform'})], + }); + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/events/`, + body: emptyBody, + match: [MockApiClient.matchQuery({dataset: 'discover'})], + }); + render(, {organization}); + expect(await screen.findByLabelText('Current Event')).toBeInTheDocument(); + }); }); diff --git a/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx index 63a7fc19140473..e9d6e9efaec347 100644 --- a/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx @@ -8,7 +8,6 @@ import useOrganization from 'sentry/utils/useOrganization'; interface BaseEvent { id: string; - issue: string; 'issue.id': number; project: string; 'project.name': string; @@ -54,7 +53,7 @@ export function useTraceTimelineEvents( query: { // Get performance issues dataset: DiscoverDatasets.ISSUE_PLATFORM, - field: ['title', 'project', 'timestamp', 'issue.id', 'issue', 'transaction'], + field: ['title', 'project', 'timestamp', 'issue.id', 'transaction'], per_page: 100, query: `trace:${traceId}`, referrer: 'api.issues.issue_events', @@ -85,7 +84,6 @@ export function useTraceTimelineEvents( 'project', 'timestamp', 'issue.id', - 'issue', 'transaction', 'event.type', 'stack.function', @@ -116,7 +114,23 @@ export function useTraceTimelineEvents( }; } + // Events is unsorted since they're grouped by date later const events = [...issuePlatformData.data, ...discoverData.data]; + + // The current event might be missing when there is a large number of issues + const hasCurrentEvent = events.some(e => e.id === event.id); + if (!hasCurrentEvent) { + events.push({ + id: event.id, + 'issue.id': Number(event.groupID), + project: event.projectID, + // The project name for current event is not used + 'project.name': '', + timestamp: event.dateCreated!, + title: event.title, + transaction: '', + }); + } const timestamps = events.map(e => new Date(e.timestamp).getTime()); const startTimestamp = Math.min(...timestamps); const endTimestamp = Math.max(...timestamps); @@ -126,6 +140,7 @@ export function useTraceTimelineEvents( endTimestamp, }; }, [ + event, issuePlatformData, discoverData, isLoadingIssuePlatform, From 702d660cd5cbd318c90595204deadbc463b15076 Mon Sep 17 00:00:00 2001 From: Kyle Mumma Date: Mon, 12 Feb 2024 12:42:07 -0500 Subject: [PATCH 262/357] ref(snuba): add typing to utils/snuba.py (SNS-2588) (#64310) This pr is associated with [SNS-2588](https://getsentry.atlassian.net/browse/SNS-2588) The goal of this pr is to fully type utils/snuba.py up to the standard deemed in the codebase's pyproject.toml, and remove it from the do-not-typecheck list. * most of the changes are adding type annotation to functions signatures and some local variables. 
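One representative fix, lifted from the diff below: `to_start_of_hour` was
annotated as returning a `datetime`, even though `isoformat()` returns a
string, so the annotation is corrected to match the existing behavior.

```python
from datetime import datetime


def to_start_of_hour(dt: datetime) -> str:  # was annotated `-> datetime`
    """This is a function that mimics toStartOfHour from Clickhouse"""
    return dt.replace(minute=0, second=0, microsecond=0).isoformat()


print(to_start_of_hour(datetime(2024, 2, 5, 11, 26, 43)))  # 2024-02-05T11:00:00
```

The changes, in summary: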
* there were a few cases where I needed to slightly rewrite lines to equivalent logic, to help with typing * break variables out when they are being re-assigned multiple types * a few asserts were added, that should be guaranteed to pass, for mypy [SNS-2588]: https://getsentry.atlassian.net/browse/SNS-2588?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ --- pyproject.toml | 1 - src/sentry/utils/snuba.py | 89 +++++++++++++++++++++------------------ 2 files changed, 48 insertions(+), 42 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 07c0102a5d518e..dd9ebd701dbe12 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -529,7 +529,6 @@ module = [ "sentry.utils.sentry_apps.webhooks", "sentry.utils.services", "sentry.utils.snowflake", - "sentry.utils.snuba", "sentry.utils.suspect_resolutions.get_suspect_resolutions", "sentry.utils.suspect_resolutions_releases.get_suspect_resolutions_releases", "sentry.web.client_config", diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index ef0d34f72cbf88..8f2a0155b02b4e 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -6,7 +6,7 @@ import re import time from collections import namedtuple -from collections.abc import Callable, Mapping, MutableMapping, Sequence +from collections.abc import Callable, Collection, Mapping, MutableMapping, Sequence from concurrent.futures import ThreadPoolExecutor from contextlib import contextmanager from copy import deepcopy @@ -428,7 +428,7 @@ def to_naive_timestamp(value): return (value - epoch_naive).total_seconds() -def to_start_of_hour(dt: datetime) -> datetime: +def to_start_of_hour(dt: datetime) -> str: """This is a function that mimics toStartOfHour from Clickhouse""" return dt.replace(minute=0, second=0, microsecond=0).isoformat() @@ -522,7 +522,9 @@ def infer_project_ids_from_related_models(filter_keys: Mapping[str, Sequence[int return list(set.union(*ids)) -def get_query_params_to_update_for_projects(query_params, with_org=False): +def get_query_params_to_update_for_projects( + query_params: SnubaQueryParams, with_org: bool = False +) -> tuple[int, dict[str, Any]]: """ Get the project ID and query params that need to be updated for project based datasets, before we send the query to Snuba. 
@@ -549,7 +551,7 @@ def get_query_params_to_update_for_projects(query_params, with_org=False): organization_id = get_organization_id_from_project_ids(project_ids) - params = {"project": project_ids} + params: dict[str, Any] = {"project": project_ids} if with_org: params["organization"] = organization_id @@ -907,9 +909,11 @@ def _apply_cache_and_build_results( if to_query: query_results = _bulk_snuba_query([item[1] for item in to_query], headers) - for result, (query_pos, _, cache_key) in zip(query_results, to_query): - if cache_key: - cache.set(cache_key, json.dumps(result), settings.SENTRY_SNUBA_CACHE_TTL_SECONDS) + for result, (query_pos, _, opt_cache_key) in zip(query_results, to_query): + if opt_cache_key: + cache.set( + opt_cache_key, json.dumps(result), settings.SENTRY_SNUBA_CACHE_TTL_SECONDS + ) results.append((query_pos, result)) # Sort so that we get the results back in the original param list order @@ -979,19 +983,7 @@ def _bulk_snuba_query( raise UnexpectedResponseError(f"Could not decode JSON response: {response.data!r}") if response.status != 200: - error_query = snuba_param_list[index][0] - if isinstance(error_query, Request): - query_str = error_query.serialize() - query_type = "mql" if isinstance(error_query.query, MetricsQuery) else "snql" - else: - query_str = json.dumps(error_query) - query_type = "snql" - sentry_sdk.add_breadcrumb( - category="query_info", - level="info", - message=f"{query_type}_query", - data={query_type: query_str}, - ) + _log_request_query(snuba_param_list[index][0]) if body.get("error"): error = body["error"] @@ -1015,6 +1007,18 @@ def _bulk_snuba_query( return results +def _log_request_query(req: Request) -> None: + """Given a request, logs its associated query in sentry breadcrumbs""" + query_str = req.serialize() + query_type = "mql" if isinstance(req.query, MetricsQuery) else "snql" + sentry_sdk.add_breadcrumb( + category="query_info", + level="info", + message=f"{query_type}_query", + data={query_type: query_str}, + ) + + RawResult = tuple[urllib3.response.HTTPResponse, Callable[[Any], Any], Callable[[Any], Any]] @@ -1147,7 +1151,11 @@ def query( return nest_groups(body["data"], groupby, aggregate_names + selected_names) -def nest_groups(data, groups, aggregate_cols): +def nest_groups( + data: Sequence[MutableMapping], + groups: Sequence[str] | None, + aggregate_cols: Sequence[str], +) -> dict | Any: """ Build a nested mapping from query response rows. 
Each group column gives a new level of nesting and the leaf result is the aggregate @@ -1161,14 +1169,14 @@ def nest_groups(data, groups, aggregate_cols): return {c: data[0][c] for c in aggregate_cols} if data else None else: g, rest = groups[0], groups[1:] - inter = {} + inter: dict[Any, Any] = {} for d in data: inter.setdefault(d[g], []).append(d) return {k: nest_groups(v, rest, aggregate_cols) for k, v in inter.items()} -def resolve_column(dataset) -> Callable[[str], str]: - def _resolve_column(col: str) -> str: +def resolve_column(dataset) -> Callable: + def _resolve_column(col): if col is None: return col if isinstance(col, int) or isinstance(col, float): @@ -1208,7 +1216,7 @@ def _resolve_column(col: str) -> str: return _resolve_column -def resolve_condition(cond, column_resolver): +def resolve_condition(cond: list, column_resolver: Callable[[Any], Any]) -> list: """ When conditions have been parsed by the api.event_search module we can end up with conditions that are not valid on the current dataset @@ -1238,7 +1246,7 @@ def _passthrough_arg(arg): func_args = cond[index + 1] for i, arg in enumerate(func_args): if i == 0: - if isinstance(arg, (list, tuple)): + if isinstance(arg, list): func_args[i] = resolve_condition(arg, column_resolver) else: func_args[i] = column_resolver(arg) @@ -1252,7 +1260,7 @@ def _passthrough_arg(arg): for i, arg in enumerate(func_args): # Nested function try: - if isinstance(arg, (list, tuple)): + if isinstance(arg, list): func_args[i] = resolve_condition(arg, column_resolver) else: func_args[i] = column_resolver(arg) @@ -1267,7 +1275,7 @@ def _passthrough_arg(arg): if isinstance(cond[0], str) and len(cond) == 3: cond[0] = column_resolver(cond[0]) return cond - if isinstance(cond[0], (list, tuple)): + if isinstance(cond[0], list): if get_function_index(cond[0]) is not None: cond[0] = resolve_condition(cond[0], column_resolver) return cond @@ -1298,8 +1306,8 @@ def _aliased_query_impl(**kwargs): def resolve_conditions( - conditions: Sequence[Any] | None, column_resolver: Callable[[str], str] -) -> Sequence[Any] | None: + conditions: Sequence | None, column_resolver: Callable[[Any], Any] +) -> list | None: if conditions is None: return conditions @@ -1324,9 +1332,9 @@ def aliased_query_params( having=None, dataset=None, orderby=None, - condition_resolver=None, + condition_resolver: Callable | None = None, **kwargs, -) -> Mapping[str, Any]: +) -> dict[str, Any]: if dataset is None: raise ValueError("A dataset is required, and is no longer automatically detected.") @@ -1345,11 +1353,10 @@ def aliased_query_params( derived_columns.append(aggregation[2]) if conditions: - column_resolver = ( - functools.partial(condition_resolver, dataset=dataset) - if condition_resolver - else resolve_func - ) + if condition_resolver: + column_resolver: Callable = functools.partial(condition_resolver, dataset=dataset) + else: + column_resolver = resolve_func resolved_conditions = resolve_conditions(conditions, column_resolver) else: resolved_conditions = conditions @@ -1490,7 +1497,7 @@ def get_snuba_translators(filter_keys, is_grouprelease=False): Release.objects.filter(id__in=[x[2] for x in gr_map]).values_list("id", "version") ) fwd_map = {gr: (group, ver[release]) for (gr, group, release) in gr_map} - rev_map = dict(reversed(t) for t in fwd_map.items()) + rev_map = {v: k for k, v in fwd_map.items()} fwd = ( lambda col, trans: lambda filters: replace( filters, col, [trans[k][1] for k in filters[col]] @@ -1510,7 +1517,7 @@ def get_snuba_translators(filter_keys, 
is_grouprelease=False): fwd_map = { k: fmt(v) for k, v in model.objects.filter(id__in=ids).values_list("id", field) } - rev_map = dict(reversed(t) for t in fwd_map.items()) + rev_map = {v: k for k, v in fwd_map.items()} fwd = ( lambda col, trans: lambda filters: replace( filters, col, [trans[k] for k in filters[col] if k] @@ -1569,7 +1576,7 @@ def get_related_project_ids(column, ids): return [] -def shrink_time_window(issues, start): +def shrink_time_window(issues: Collection | None, start: datetime) -> datetime: """\ If a single issue is passed in, shrink the `start` parameter to be briefly before the `first_seen` in order to hopefully eliminate a large percentage of rows scanned. @@ -1588,7 +1595,7 @@ def shrink_time_window(issues, start): return start -def naiveify_datetime(dt): +def naiveify_datetime(dt: datetime) -> datetime: return dt if not dt.tzinfo else dt.astimezone(timezone.utc).replace(tzinfo=None) From e78dd77f1584df01f67eab94ad9804240cc40dd6 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 12 Feb 2024 09:45:24 -0800 Subject: [PATCH 263/357] feat(tests): Add utility function to capture return values of mocks (#64856) When pytest mocks or spies on a function, it keeps track of the input each time the function is called. It does not, however, keep track of the output. (Why?? Because you shouldn't be testing implementation details? But then why keep track of the input?) In order to test certain side effects, though, sometimes you need to know what your spies return. This PR solves that by adding a helper, `capture_return_values`, which adds the return values of all calls of the given function to a given list, so the return values can be accessed later. (It also accepts a dictionary of lists, in case you want to keep track of the return values of more than one function at a time.) --- src/sentry/testutils/pytest/mocking.py | 81 +++++++++++++++++++ .../pytest/mocking/animals/__init__.py | 14 ++++ .../testutils/pytest/mocking/test_mocking.py | 49 +++++++++++ 3 files changed, 144 insertions(+) create mode 100644 src/sentry/testutils/pytest/mocking.py create mode 100644 tests/sentry/testutils/pytest/mocking/animals/__init__.py create mode 100644 tests/sentry/testutils/pytest/mocking/test_mocking.py diff --git a/src/sentry/testutils/pytest/mocking.py b/src/sentry/testutils/pytest/mocking.py new file mode 100644 index 00000000000000..a393229ea46502 --- /dev/null +++ b/src/sentry/testutils/pytest/mocking.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import ParamSpec, TypeVar + +# TODO: Once we're on python 3.12, we can get rid of these and change the first line of the +# signature of `capture_return_values` to +# def capture_return_values[T, **P]( +P = ParamSpec("P") +T = TypeVar("T") + + +def capture_return_values( + fn: Callable[P, T], + return_values: list[T] | dict[str, list[T]], +) -> Callable[P, T]: + """ + Create a wrapped version of the given function, which stores the return value each time that + function is called. This is useful when you want to spy on a given function and make assertions + based on what it returns. 
+
+    In a test, this can be used in concert with a patching context manager like so:
+
+        from unittest import mock
+        from wherever import capture_return_values
+        from animals import get_dog, get_cat
+
+        def test_getting_animals():
+            # If you're only planning to patch one function, use a list:
+
+            get_dog_return_values = []
+            wrapped_get_dog = capture_return_values(
+                get_dog, get_dog_return_values
+            )
+
+            with mock.patch(
+                "animals.get_dog", wraps=wrapped_get_dog
+            ) as get_dog_spy:
+                a_function_that_calls_get_dog()
+                assert get_dog_spy.call_count == 1
+                assert get_dog_return_values[0] == "maisey"
+
+            # Alternatively, if you're planning to patch more than one function,
+            # you can pass a dictionary:
+
+            return_values = {}
+            wrapped_get_dog = capture_return_values(
+                get_dog, return_values
+            )
+            wrapped_get_cat = capture_return_values(
+                get_cat, return_values
+            )
+
+            with (
+                mock.patch(
+                    "animals.get_dog", wraps=wrapped_get_dog
+                ) as get_dog_spy,
+                mock.patch(
+                    "animals.get_cat", wraps=wrapped_get_cat
+                ) as get_cat_spy,
+            ):
+                a_function_that_calls_get_dog()
+                assert get_dog_spy.call_count == 1
+                assert return_values["get_dog"][0] == "maisey"
+
+                a_function_that_calls_get_cat()
+                assert get_cat_spy.call_count == 1
+                assert return_values["get_cat"][0] == "piper"
+    """
+
+    def wrapped_fn(*args: P.args, **kwargs: P.kwargs) -> T:
+        returned_value = fn(*args, **kwargs)
+
+        if isinstance(return_values, list):
+            return_values.append(returned_value)
+        elif isinstance(return_values, dict):
+            return_values.setdefault(fn.__name__, []).append(returned_value)
+
+        return returned_value
+
+    return wrapped_fn
diff --git a/tests/sentry/testutils/pytest/mocking/animals/__init__.py b/tests/sentry/testutils/pytest/mocking/animals/__init__.py
new file mode 100644
index 00000000000000..779b4e65810030
--- /dev/null
+++ b/tests/sentry/testutils/pytest/mocking/animals/__init__.py
@@ -0,0 +1,14 @@
+def get_dog():
+    return "maisey"
+
+
+def get_cat():
+    return "piper"
+
+
+def a_function_that_calls_get_dog():
+    return f"{get_dog()} is a good dog!"
+
+
+def a_function_that_calls_get_cat():
+    return f"{get_cat()} is a good cat, because she thinks she's a dog!"
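To make the gap this helper fills concrete: `unittest.mock` records the arguments of every call on a spy, but nothing records what the wrapped function returned. A minimal, standalone illustration (the `get_dog` stub below is local to the example, not the fixture from the `animals` module above):

    from unittest import mock

    def get_dog():
        return "maisey"

    spy = mock.Mock(wraps=get_dog)
    result = spy()

    assert result == "maisey"
    assert spy.call_args_list == [mock.call()]  # the input side is recorded
    # ...but no attribute on `spy` retains the "maisey" the call returned;
    # capture_return_values exists to write that value into a list or dict.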
diff --git a/tests/sentry/testutils/pytest/mocking/test_mocking.py b/tests/sentry/testutils/pytest/mocking/test_mocking.py
new file mode 100644
index 00000000000000..58cf5ba960c00f
--- /dev/null
+++ b/tests/sentry/testutils/pytest/mocking/test_mocking.py
@@ -0,0 +1,49 @@
+from typing import Any
+from unittest import TestCase, mock
+
+from sentry.testutils.pytest.mocking import capture_return_values
+from tests.sentry.testutils.pytest.mocking.animals import (
+    a_function_that_calls_get_cat,
+    a_function_that_calls_get_dog,
+    get_cat,
+    get_dog,
+)
+
+
+class CaptureReturnValuesTest(TestCase):
+    def test_return_values_as_list(self):
+        get_dog_return_values: list[Any] = []
+
+        wrapped_get_dog = capture_return_values(get_dog, get_dog_return_values)
+
+        with mock.patch(
+            "tests.sentry.testutils.pytest.mocking.animals.get_dog",
+            wraps=wrapped_get_dog,
+        ) as get_dog_spy:
+            a_function_that_calls_get_dog()
+            assert get_dog_spy.call_count == 1
+            assert get_dog_return_values[0] == "maisey"
+
+    def test_return_values_as_dict(self):
+        return_values: dict[str, list[Any]] = {}
+
+        wrapped_get_dog = capture_return_values(get_dog, return_values)
+        wrapped_get_cat = capture_return_values(get_cat, return_values)
+
+        with (
+            mock.patch(
+                "tests.sentry.testutils.pytest.mocking.animals.get_dog",
+                wraps=wrapped_get_dog,
+            ) as get_dog_spy,
+            mock.patch(
+                "tests.sentry.testutils.pytest.mocking.animals.get_cat",
+                wraps=wrapped_get_cat,
+            ) as get_cat_spy,
+        ):
+            a_function_that_calls_get_dog()
+            assert get_dog_spy.call_count == 1
+            assert return_values["get_dog"][0] == "maisey"
+
+            a_function_that_calls_get_cat()
+            assert get_cat_spy.call_count == 1
+            assert return_values["get_cat"][0] == "piper"

From 2a2f1f835517d900afe41471b4859d1902e692b8 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Mon, 12 Feb 2024 09:45:59 -0800
Subject: [PATCH 264/357] test(issues): Cleanup alert ownership test (#65013)

---
 .../project/projectOwnership/index.spec.tsx | 16 ----------------
 1 file changed, 16 deletions(-)

diff --git a/static/app/views/settings/project/projectOwnership/index.spec.tsx b/static/app/views/settings/project/projectOwnership/index.spec.tsx
index a31a758fdd7d0a..9bb14369ca02cc 100644
--- a/static/app/views/settings/project/projectOwnership/index.spec.tsx
+++ b/static/app/views/settings/project/projectOwnership/index.spec.tsx
@@ -139,21 +139,5 @@ describe('Project Ownership', () => {
       );
     });
   });
-
-  it('should hide issue owners', () => {
-    render(
-
-    );
-
-    expect(screen.getByText('Prioritize Auto Assignment')).toBeInTheDocument();
-    expect(
-      screen.queryByText('Send alert to project members if there’s no assigned owner')
-    ).not.toBeInTheDocument();
-  });
 });
});

From 2a82f0b04eefc9ad92c1dba9c64bae8498126362 Mon Sep 17 00:00:00 2001
From: Mark Story
Date: Mon, 12 Feb 2024 12:50:09 -0500
Subject: [PATCH 265/357] feat(hybridcloud): Make number of retries options-based (#64949)

This will allow us to dial up and down the number of retries without
deploys. This could be useful in a future incident where retries are
causing more harm than help.
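As a sketch of the operational lever this adds, assuming the runtime `set` API on `sentry.options` (the option keys are the ones registered in this patch), an operator could turn retries off during an incident without deploying:

    from sentry import options

    # Retries are amplifying load during an incident: dial them to zero.
    options.set("hybrid_cloud.rpc.retries", 0)
    options.set("hybrid_cloud.integrationproxy.retries", 0)

    # New sessions now build their adapters with Retry(total=0), i.e. no retries.
    # Once the incident is over, restore the registered defaults:
    options.set("hybrid_cloud.rpc.retries", 5)
    options.set("hybrid_cloud.integrationproxy.retries", 5)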
--- src/sentry/options/defaults.py | 8 +++++++- src/sentry/services/hybrid_cloud/rpc.py | 3 ++- src/sentry/shared_integrations/client/proxy.py | 3 ++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 6f9bb46b029df7..b0e4eeb3bab4d6 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1604,10 +1604,16 @@ flags=FLAG_MODIFIABLE_RATE | FLAG_AUTOMATOR_MODIFIABLE, ) -register("hybrid_cloud.outbox_rate", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) +# === Hybrid cloud subsystem options === +# UI rollout register("hybrid_cloud.multi-region-selector", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE) register("hybrid_cloud.region-user-allow-list", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) +# Retry controls +register("hybrid_cloud.rpc.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE) +register("hybrid_cloud.integrationproxy.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE) +# == End hybrid cloud subsystem + # Decides whether an incoming transaction triggers an update of the clustering rule applied to it. register("txnames.bump-lifetime-sample-rate", default=0.1, flags=FLAG_AUTOMATOR_MODIFIABLE) # Decides whether an incoming span triggers an update of the clustering rule applied to it. diff --git a/src/sentry/services/hybrid_cloud/rpc.py b/src/sentry/services/hybrid_cloud/rpc.py index 769f7c255bf011..b549ea8be44f73 100644 --- a/src/sentry/services/hybrid_cloud/rpc.py +++ b/src/sentry/services/hybrid_cloud/rpc.py @@ -18,6 +18,7 @@ from django.conf import settings from requests.adapters import HTTPAdapter, Retry +from sentry import options from sentry.services.hybrid_cloud import ArgumentDict, DelegatedBySiloMode, RpcModel from sentry.services.hybrid_cloud.rpcmetrics import RpcMetricRecord from sentry.services.hybrid_cloud.sig import SerializableFunctionSignature @@ -563,7 +564,7 @@ def _fire_test_request(self, headers: Mapping[str, str], data: bytes) -> Any: def _fire_request(self, headers: MutableMapping[str, str], data: bytes) -> requests.Response: retry_adapter = HTTPAdapter( max_retries=Retry( - total=5, + total=options.get("hybrid_cloud.rpc.retries"), backoff_factor=0.1, status_forcelist=[503], allowed_methods=["POST"], diff --git a/src/sentry/shared_integrations/client/proxy.py b/src/sentry/shared_integrations/client/proxy.py index fb2d5aff32eba4..9aa60ef5bc004c 100644 --- a/src/sentry/shared_integrations/client/proxy.py +++ b/src/sentry/shared_integrations/client/proxy.py @@ -15,6 +15,7 @@ from requests import PreparedRequest from requests.adapters import Retry +from sentry import options from sentry.db.postgres.transactions import in_test_hide_transaction_boundary from sentry.http import build_session from sentry.integrations.client import ApiClient @@ -153,7 +154,7 @@ def build_session(self) -> SafeSession: return build_session( is_ipaddress_permitted=is_control_silo_ip_address, max_retries=Retry( - total=5, + total=options.get("hybrid_cloud.integrationproxy.retries"), backoff_factor=0.1, status_forcelist=[503], allowed_methods=["PATCH", "HEAD", "PUT", "GET", "DELETE", "POST"], From 6e08e690d12eb3933e6c6a2e38a37a0a309f3f5b Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Mon, 12 Feb 2024 09:56:50 -0800 Subject: [PATCH 266/357] bug(replay): Add useMemo to add the replay-count hook helpers (#64800) Adding useMemo so that we're not returning new objects & new anon-functions each time. 
I think the assumption is that hooks return stable references whenever possible, so conforming to that will make it easier for callsites to prevent excess re-renders. --- .../useReplayCountForFeedbacks.tsx | 13 ++++--- .../replayCount/useReplayCountForIssues.tsx | 34 +++++++++++++------ .../useReplayCountForTransactions.tsx | 17 ++++++---- .../app/utils/replayCount/useReplayExists.tsx | 13 ++++--- 4 files changed, 53 insertions(+), 24 deletions(-) diff --git a/static/app/utils/replayCount/useReplayCountForFeedbacks.tsx b/static/app/utils/replayCount/useReplayCountForFeedbacks.tsx index e1c32fe996d76b..19ddb6fd4b43de 100644 --- a/static/app/utils/replayCount/useReplayCountForFeedbacks.tsx +++ b/static/app/utils/replayCount/useReplayCountForFeedbacks.tsx @@ -1,3 +1,5 @@ +import {useMemo} from 'react'; + import useReplayCount from 'sentry/utils/replayCount/useReplayCount'; import useOrganization from 'sentry/utils/useOrganization'; @@ -14,8 +16,11 @@ export default function useReplayCountForFeedbacks() { statsPeriod: '90d', }); - return { - feedbackHasReplay: hasOne, - feedbacksHaveReplay: hasMany, - }; + return useMemo( + () => ({ + feedbackHasReplay: hasOne, + feedbacksHaveReplay: hasMany, + }), + [hasMany, hasOne] + ); } diff --git a/static/app/utils/replayCount/useReplayCountForIssues.tsx b/static/app/utils/replayCount/useReplayCountForIssues.tsx index 15a310e8833027..a109724e44a3f3 100644 --- a/static/app/utils/replayCount/useReplayCountForIssues.tsx +++ b/static/app/utils/replayCount/useReplayCountForIssues.tsx @@ -1,3 +1,5 @@ +import {useMemo} from 'react'; + import {IssueCategory} from 'sentry/types'; import useReplayCount from 'sentry/utils/replayCount/useReplayCount'; import useOrganization from 'sentry/utils/useOrganization'; @@ -32,14 +34,26 @@ export default function useReplayCountForIssues() { statsPeriod: '14d', }); - return { - getReplayCountForIssue: (id: string, category: IssueCategory) => - category === IssueCategory.ERROR ? getOneError(id) : getOneIssue(id), - getReplayCountForIssues: (id: readonly string[], category: IssueCategory) => - category === IssueCategory.ERROR ? getManyError(id) : getManyIssue(id), - issueHasReplay: (id: string, category: IssueCategory) => - category === IssueCategory.ERROR ? hasOneError(id) : hasOneIssue(id), - issuesHaveReplay: (id: readonly string[], category: IssueCategory) => - category === IssueCategory.ERROR ? hasManyError(id) : hasManyIssue(id), - }; + return useMemo( + () => ({ + getReplayCountForIssue: (id: string, category: IssueCategory) => + category === IssueCategory.ERROR ? getOneError(id) : getOneIssue(id), + getReplayCountForIssues: (id: readonly string[], category: IssueCategory) => + category === IssueCategory.ERROR ? getManyError(id) : getManyIssue(id), + issueHasReplay: (id: string, category: IssueCategory) => + category === IssueCategory.ERROR ? hasOneError(id) : hasOneIssue(id), + issuesHaveReplay: (id: readonly string[], category: IssueCategory) => + category === IssueCategory.ERROR ? 
hasManyError(id) : hasManyIssue(id), + }), + [ + getManyError, + getManyIssue, + getOneError, + getOneIssue, + hasManyError, + hasManyIssue, + hasOneError, + hasOneIssue, + ] + ); } diff --git a/static/app/utils/replayCount/useReplayCountForTransactions.tsx b/static/app/utils/replayCount/useReplayCountForTransactions.tsx index 819ac22efdec42..54164447c88ad2 100644 --- a/static/app/utils/replayCount/useReplayCountForTransactions.tsx +++ b/static/app/utils/replayCount/useReplayCountForTransactions.tsx @@ -1,3 +1,5 @@ +import {useMemo} from 'react'; + import useReplayCount from 'sentry/utils/replayCount/useReplayCount'; import useOrganization from 'sentry/utils/useOrganization'; @@ -14,10 +16,13 @@ export default function useReplayCountForTransactions() { statsPeriod: '90d', }); - return { - getReplayCountForTransaction: getOne, - getReplayCountForTransactions: getMany, - transactionHasReplay: hasOne, - transactionsHaveReplay: hasMany, - }; + return useMemo( + () => ({ + getReplayCountForTransaction: getOne, + getReplayCountForTransactions: getMany, + transactionHasReplay: hasOne, + transactionsHaveReplay: hasMany, + }), + [getMany, getOne, hasMany, hasOne] + ); } diff --git a/static/app/utils/replayCount/useReplayExists.tsx b/static/app/utils/replayCount/useReplayExists.tsx index 802bfb060fac58..c5b57c0d8daf98 100644 --- a/static/app/utils/replayCount/useReplayExists.tsx +++ b/static/app/utils/replayCount/useReplayExists.tsx @@ -1,3 +1,5 @@ +import {useMemo} from 'react'; + import useReplayCount from 'sentry/utils/replayCount/useReplayCount'; import useOrganization from 'sentry/utils/useOrganization'; @@ -14,8 +16,11 @@ export default function useReplayExists() { statsPeriod: '90d', }); - return { - replayExists: hasOne, - replaysExist: hasMany, - }; + return useMemo( + () => ({ + replayExists: hasOne, + replaysExist: hasMany, + }), + [hasMany, hasOne] + ); } From 28513ea1739dd597161dc1df044d63ee049af0f8 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Mon, 12 Feb 2024 09:58:02 -0800 Subject: [PATCH 267/357] feat(feedback): Create a feature flag to test Replay Clips inside User Feedback (#64920) Flag is setup: https://flagr.getsentry.net/#/flags/550 --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 5f640c44e61e04..d787dc50805d3e 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1932,6 +1932,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:use-metrics-layer-in-alerts": False, # Enable User Feedback v2 ingest "organizations:user-feedback-ingest": False, + # Use ReplayClipPreview inside the User Feedback Details panel + "organizations:user-feedback-replay-clip": False, # Enable User Feedback spam auto filtering feature UI "organizations:user-feedback-spam-filter-ui": False, # Enable User Feedback spam auto filtering feature ingest diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 54410139676627..842d3b71b2df34 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -285,6 +285,7 @@ default_manager.add("organizations:use-metrics-layer-in-alerts", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:use-metrics-layer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:user-feedback-ingest", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) 
+default_manager.add("organizations:user-feedback-replay-clip", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:user-feedback-spam-filter-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:user-feedback-spam-filter-ingest", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:user-feedback-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From e88984dc85ec2b3fd722cfe48019dcfd39076863 Mon Sep 17 00:00:00 2001 From: Isabella Enriquez Date: Mon, 12 Feb 2024 13:01:20 -0500 Subject: [PATCH 268/357] fix(slack): Check that message is a group of blocks (#64910) resolves SENTRY-2KT0 This PR adds an extra condition for sending workflow notifications when the block kit flag is enabled for when block kit is GA'd. An attribute error would occur when the flag was enabled but a notification built with the old system was still being sent. --- .../integrations/slack/notifications.py | 8 +- .../integrations/slack/test_notifications.py | 73 ++++++++++++++++++- 2 files changed, 78 insertions(+), 3 deletions(-) diff --git a/src/sentry/integrations/slack/notifications.py b/src/sentry/integrations/slack/notifications.py index 2c368a84f2c493..dc6e257675589d 100644 --- a/src/sentry/integrations/slack/notifications.py +++ b/src/sentry/integrations/slack/notifications.py @@ -64,7 +64,7 @@ def _get_attachments( def _notify_recipient( notification: BaseNotification, recipient: RpcActor, - attachments: list[SlackAttachment], + attachments: list[SlackAttachment] | SlackBlock, channel: str, integration: Integration, shared_context: Mapping[str, Any], @@ -75,7 +75,11 @@ def _notify_recipient( text = notification.get_notification_title(ExternalProviders.SLACK, shared_context) - if features.has("organizations:slack-block-kit", notification.organization): + # NOTE(isabella): we check that attachments consists of blocks in case the flag is turned on + # while a notification with the legacy format is being sent + if features.has("organizations:slack-block-kit", notification.organization) and isinstance( + local_attachments, dict + ): blocks = [] if text: # NOTE(isabella): with legacy attachments, the notification title was diff --git a/tests/sentry/integrations/slack/test_notifications.py b/tests/sentry/integrations/slack/test_notifications.py index a6287dd188207f..9d32403abe7109 100644 --- a/tests/sentry/integrations/slack/test_notifications.py +++ b/tests/sentry/integrations/slack/test_notifications.py @@ -3,7 +3,7 @@ import responses -from sentry.integrations.slack.notifications import send_notification_as_slack +from sentry.integrations.slack.notifications import _get_attachments, send_notification_as_slack from sentry.notifications.additional_attachment_manager import manager from sentry.testutils.cases import SlackActivityNotificationTest from sentry.testutils.helpers.notifications import DummyNotification @@ -17,6 +17,13 @@ def additional_attachment_generator(integration, organization): return {"title": organization.slug, "text": integration.id} +def additional_attachment_generator_block_kit(integration, organization): + return [ + {"type": "section", "text": {"type": "mrkdwn", "text": organization.slug}}, + {"type": "section", "text": {"type": "mrkdwn", "text": integration.id}}, + ] + + @region_silo_test class SlackNotificationsTest(SlackActivityNotificationTest): def setUp(self): @@ -44,6 +51,29 @@ def test_additional_attachment(self): assert attachments[1]["title"] == self.organization.slug 
assert attachments[1]["text"] == self.integration.id + @responses.activate + def test_additional_attachment_block_kit(self): + with self.feature("organizations:slack-block-kit"), mock.patch.dict( + manager.attachment_generators, + {ExternalProviders.SLACK: additional_attachment_generator_block_kit}, + ): + with self.tasks(): + send_notification_as_slack(self.notification, [self.user], {}, {}) + + data = parse_qs(responses.calls[0].request.body) + + assert "blocks" in data + assert "text" in data + assert data["text"][0] == "Notification Title" + + blocks = json.loads(data["blocks"][0]) + assert len(blocks) == 4 + + assert blocks[0]["text"]["text"] == "Notification Title" + assert blocks[1]["text"]["text"] == "*My Title* \n" + assert blocks[2]["text"]["text"] == self.organization.slug + assert blocks[3]["text"]["text"] == self.integration.id + @responses.activate def test_no_additional_attachment(self): with self.tasks(): @@ -58,3 +88,44 @@ def test_no_additional_attachment(self): assert len(attachments) == 1 assert attachments[0]["title"] == "My Title" + + @responses.activate + def test_no_additional_attachment_block_kit(self): + with self.feature("organizations:slack-block-kit"): + with self.tasks(): + send_notification_as_slack(self.notification, [self.user], {}, {}) + + data = parse_qs(responses.calls[0].request.body) + + assert "blocks" in data + assert "text" in data + assert data["text"][0] == "Notification Title" + + blocks = json.loads(data["blocks"][0]) + assert len(blocks) == 2 + + assert blocks[0]["text"]["text"] == "Notification Title" + assert blocks[1]["text"]["text"] == "*My Title* \n" + + @responses.activate + @mock.patch("sentry.integrations.slack.notifications._get_attachments") + def test_attachment_with_block_kit_flag(self, mock_attachment): + """ + Tests that notifications built with the legacy system can still send successfully with + the block kit flag enabled. 
+ """ + mock_attachment.return_value = _get_attachments(self.notification, self.user, {}, {}) + + with self.feature("organizations:slack-block-kit"): + with self.tasks(): + send_notification_as_slack(self.notification, [self.user], {}, {}) + + data = parse_qs(responses.calls[0].request.body) + + assert "attachments" in data + assert data["text"][0] == "Notification Title" + + attachments = json.loads(data["attachments"][0]) + assert len(attachments) == 1 + + assert attachments[0]["title"] == "My Title" From 40d2f44cdefe63e2070ea0157008de9bc3756855 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Mon, 12 Feb 2024 13:21:34 -0500 Subject: [PATCH 269/357] fix(similarity-embedding): Add exception and stacktrace check (#65014) Fixes SENTRY-2M4X Fixes SENTRY-2M51 --- .../group_similar_issues_embeddings.py | 8 +- .../test_group_similar_issues_embeddings.py | 92 +++++++++++++++++++ 2 files changed, 97 insertions(+), 3 deletions(-) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index 75c7d2ee923da5..ca80a5deebd41d 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -28,13 +28,13 @@ def get_stacktrace_string(exception: Mapping[Any, Any], event: GroupEvent) -> str: """Get the stacktrace string from an exception dictionary.""" - if not exception["values"]: + if not exception.get("values"): return "" frame_count = 0 output = [] for exc in exception["values"]: - if not exc: + if not exc or not exc.get("stacktrace"): continue if exc["stacktrace"] and exc["stacktrace"].get("frames"): @@ -120,7 +120,9 @@ def get(self, request: Request, group) -> Response: return Response(status=404) latest_event = group.get_latest_event() - stacktrace_string = get_stacktrace_string(latest_event.data["exception"], latest_event) + stacktrace_string = "" + if latest_event.data.get("exception"): + stacktrace_string = get_stacktrace_string(latest_event.data["exception"], latest_event) similar_issues_params: SimilarIssuesEmbeddingsRequest = { "group_id": group.id, diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 43085592879581..c1b6314ace394f 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -369,6 +369,98 @@ def test_empty_return(self, mock_seer_request, mock_record): user_id=self.user.id, ) + @with_feature("projects:similarity-embeddings") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + def test_no_stacktrace(self, mock_seer_request): + seer_return_value: SimilarIssuesEmbeddingsResponse = { + "responses": [ + { + "message_similarity": 0.95, + "parent_group_id": self.similar_group.id, + "should_group": False, + "stacktrace_similarity": 0.00, + } + ] + } + mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8")) + + error_trace_no_stacktrace = { + "fingerprint": ["my-route", "{{ default }}"], + "exception": {"values": []}, + } + event_no_stacktrace = self.store_event( + data=error_trace_no_stacktrace, project_id=self.project + ) + group_no_stacktrace = event_no_stacktrace.group + assert group_no_stacktrace + response = self.client.get( + f"/api/0/issues/{group_no_stacktrace.id}/similar-issues-embeddings/", + data={"k": "1", "threshold": 
"0.98"}, + ) + + assert response.data == self.get_expected_response( + [self.similar_group.id], [0.95], [0.00], ["No"] + ) + + mock_seer_request.assert_called_with( + "POST", + "/v0/issues/similar-issues", + body=json.dumps( + { + "group_id": group_no_stacktrace.id, + "project_id": self.project.id, + "stacktrace": "", + "message": group_no_stacktrace.message, + "k": 1, + "threshold": 0.98, + }, + ), + headers={"Content-Type": "application/json;charset=utf-8"}, + ) + + @with_feature("projects:similarity-embeddings") + @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + def test_no_exception(self, mock_seer_request): + seer_return_value: SimilarIssuesEmbeddingsResponse = { + "responses": [ + { + "message_similarity": 0.95, + "parent_group_id": self.similar_group.id, + "should_group": False, + "stacktrace_similarity": 0.00, + } + ] + } + mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8")) + + event_no_exception = self.store_event(data={}, project_id=self.project) + group_no_exception = event_no_exception.group + assert group_no_exception + response = self.client.get( + f"/api/0/issues/{group_no_exception.id}/similar-issues-embeddings/", + data={"k": "1", "threshold": "0.98"}, + ) + + assert response.data == self.get_expected_response( + [self.similar_group.id], [0.95], [0.00], ["No"] + ) + + mock_seer_request.assert_called_with( + "POST", + "/v0/issues/similar-issues", + body=json.dumps( + { + "group_id": group_no_exception.id, + "project_id": self.project.id, + "stacktrace": "", + "message": group_no_exception.message, + "k": 1, + "threshold": 0.98, + }, + ), + headers={"Content-Type": "application/json;charset=utf-8"}, + ) + @with_feature("projects:similarity-embeddings") @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_no_optional_params(self, mock_seer_request): From 78ceaaf64284a9ff75e09cfbb2f375c0c7111f2c Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:21:45 -0800 Subject: [PATCH 270/357] feat(stories): add component search (#64962) implement file tree search https://github.com/getsentry/sentry/assets/56095982/d3bd5707-e5c2-4799-bc5c-bc95d272b431 --- static/app/views/stories/index.tsx | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/static/app/views/stories/index.tsx b/static/app/views/stories/index.tsx index ed2ac04bcd62b3..1144ecb8e2ed83 100644 --- a/static/app/views/stories/index.tsx +++ b/static/app/views/stories/index.tsx @@ -1,6 +1,8 @@ +import {useState} from 'react'; import type {RouteComponentProps} from 'react-router'; import styled from '@emotion/styled'; +import Input from 'sentry/components/input'; import {space} from 'sentry/styles/space'; import EmptyStory from 'sentry/views/stories/emptyStory'; import ErrorStory from 'sentry/views/stories/errorStory'; @@ -15,13 +17,31 @@ type Props = RouteComponentProps<{}, {}, any, StoriesQuery>; export default function Stories({location}: Props) { const story = useStoriesLoader({filename: location.query.name}); + const [searchTerm, setSearchTerm] = useState(''); return ( +
    + setSearchTerm(e.target.value.toLowerCase())} + /> + + s.toLowerCase().includes(searchTerm))} + /> + +
    - - - {story.error ? ( From 1695527afb38426b7fd56485a1d36b7fdf35b400 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:40:57 -0800 Subject: [PATCH 271/357] fix(feedback): don't show replay cta for unsupported platforms (#65017) fixes https://github.com/getsentry/sentry/issues/65006 we shouldn't show this CTA for platforms we don't support: ![image](https://github.com/getsentry/sentry/assets/56095982/f0971888-942b-431d-b84b-f81affe95a8a) --- static/app/components/feedback/feedbackItem/feedbackReplay.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/components/feedback/feedbackItem/feedbackReplay.tsx b/static/app/components/feedback/feedbackItem/feedbackReplay.tsx index 35d6764ac8becc..896a7ff5ad3abc 100644 --- a/static/app/components/feedback/feedbackItem/feedbackReplay.tsx +++ b/static/app/components/feedback/feedbackItem/feedbackReplay.tsx @@ -53,7 +53,7 @@ export default function FeedbackReplay({eventData, feedbackItem, organization}: return ; } - if (!hasSentOneReplay) { + if (!hasSentOneReplay && platformSupported) { return ; } From 9b44a4849ca6f9f165588a8d4bcc695992e287f7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 12 Feb 2024 10:45:39 -0800 Subject: [PATCH 272/357] feat(grouping): Add `assign_event_to_group` flag-off tests (#64689) This adds the first set of integration tests for the new `assign_event_to_group` function in `event_manager.py`, those testing the branch with the feature flag off. It also adds a number of helper functions to make the tests easier to understand. Notes: - Since these tests only test the branch of `assign_event_to_group` where the feature flag is off, they're essentially just testing the different branches of `_save_aggregate` (minus any branches involving hierarchical grouping, which are tested elsewhere). - Though these tests don't cover anything that wasn't already covered by unit tests, once the new assignment logic is implemented, they can be adapted to make sure it works as expected (has different logic but the same side effects and result). Knowing that this is coming, they test slightly more implementation detail than I'd probably normally test. - In order to keep track (even for myself) of what the results _should_ be for each test, I've structured them to run by calling a helper, `get_results_from_saving_event`, which saves an event and determines (but doesn't make assertions about) which side effects happened when the saving process called `assign_event_to_group`. The tests themselves then assert that those side effects are what was expected. My theory here is that a) it makes things easier to reason about by letting the reader understand the logic of determining the side effects (which is universal) separately from understanding the expectations of a particular test, and b) by giving the side effects human-readable names, it makes it easier to both verify that each test is testing the right thing and to figure out which side effects are the right ones in the given scenario. 
--- .../grouping/test_assign_to_group.py | 410 ++++++++++++++++++ 1 file changed, 410 insertions(+) create mode 100644 tests/sentry/event_manager/grouping/test_assign_to_group.py diff --git a/tests/sentry/event_manager/grouping/test_assign_to_group.py b/tests/sentry/event_manager/grouping/test_assign_to_group.py new file mode 100644 index 00000000000000..2ec1dbfb06aeaa --- /dev/null +++ b/tests/sentry/event_manager/grouping/test_assign_to_group.py @@ -0,0 +1,410 @@ +from __future__ import annotations + +from contextlib import contextmanager +from time import time +from typing import Any +from unittest import mock + +import pytest + +from sentry.event_manager import _create_group +from sentry.eventstore.models import Event +from sentry.grouping.ingest import ( + _calculate_primary_hash, + _calculate_secondary_hash, + find_existing_grouphash, +) +from sentry.models.grouphash import GroupHash +from sentry.models.project import Project +from sentry.testutils.helpers.eventprocessing import save_new_event +from sentry.testutils.pytest.fixtures import django_db_all +from sentry.testutils.pytest.mocking import capture_return_values +from sentry.testutils.skips import requires_snuba + +pytestmark = [requires_snuba] + + +LEGACY_CONFIG = "legacy:2019-03-12" +NEWSTYLE_CONFIG = "newstyle:2023-01-11" + + +@contextmanager +def patch_grouping_helpers(return_values: dict[str, Any]): + wrapped_find_existing_grouphash = capture_return_values(find_existing_grouphash, return_values) + wrapped_calculate_primary_hash = capture_return_values(_calculate_primary_hash, return_values) + wrapped_calculate_secondary_hash = capture_return_values( + _calculate_secondary_hash, return_values + ) + + with ( + mock.patch( + "sentry.event_manager.find_existing_grouphash", + wraps=wrapped_find_existing_grouphash, + ) as find_existing_grouphash_spy, + mock.patch( + "sentry.grouping.ingest._calculate_primary_hash", + wraps=wrapped_calculate_primary_hash, + ) as calculate_primary_hash_spy, + mock.patch( + "sentry.grouping.ingest._calculate_secondary_hash", + wraps=wrapped_calculate_secondary_hash, + ) as calculate_secondary_hash_spy, + mock.patch( + "sentry.event_manager._create_group", + # No return-value-wrapping necessary here, since all we need + # is the group id, and that's stored on the event + wraps=_create_group, + ) as create_group_spy, + ): + yield { + "find_existing_grouphash": find_existing_grouphash_spy, + "_calculate_primary_hash": calculate_primary_hash_spy, + "_calculate_secondary_hash": calculate_secondary_hash_spy, + "_create_group": create_group_spy, + } + + +def set_grouping_configs( + project: Project, + primary_config: str, + secondary_config: str | None, + transition_expiry: float | None = None, + in_transition: bool = False, +): + project.update_option("sentry:grouping_config", primary_config) + project.update_option("sentry:secondary_grouping_config", secondary_config) + if in_transition: + project.update_option( + "sentry:secondary_grouping_expiry", transition_expiry or time() + 3600 + ) + else: + project.update_option("sentry:secondary_grouping_expiry", None) + + +def save_event_with_grouping_config( + event_data: dict[str, Any], + project: Project, + primary_config: str, + secondary_config: str | None = None, + in_transition: bool = False, +) -> Event: + """ + Create an event with the given grouping config, by temporarily changing project options before + saving an event. Resets options to current values once the event is saved. 
+ """ + current_primary_config = project.get_option("sentry:grouping_config") + current_secondary_config = project.get_option("sentry:secondary_grouping_config") + current_transition_expiry = project.get_option("sentry:secondary_grouping_expiry") + + set_grouping_configs( + project=project, + primary_config=primary_config, + secondary_config=secondary_config, + in_transition=in_transition, + ) + event = save_new_event(event_data, project) + + # Reset project options + set_grouping_configs( + project=project, + primary_config=current_primary_config, + secondary_config=current_secondary_config, + transition_expiry=current_transition_expiry, + in_transition=True, # Force transition expiry to be set, even if it's None + ) + + return event + + +def get_results_from_saving_event( + event_data: dict[str, Any], + project: Project, + primary_config: str, + secondary_config: str, + in_transition: bool, + existing_group_id: int | None = None, +): + # Whether or not these are assigned a value depends on the values of `in_transition` and + # `existing_group_id`. Everything else we'll return will definitely get a value and therefore + # doesn't need to be initialized. + secondary_hash_calculated = None + hashes_different = None + secondary_hash_found = None + event_assigned_to_given_existing_group = None + secondary_grouphash_existed_already = None + secondary_grouphash_exists_now = None + + existing_grouphashes = { + gh.hash: gh.group_id for gh in GroupHash.objects.filter(project_id=project.id) + } + + return_values: dict[str, list[Any]] = {} + + with patch_grouping_helpers(return_values) as spies: + calculate_secondary_hash_spy = spies["_calculate_secondary_hash"] + create_group_spy = spies["_create_group"] + calculate_primary_hash_spy = spies["_calculate_primary_hash"] + + set_grouping_configs( + project=project, + primary_config=primary_config, + secondary_config=secondary_config, + in_transition=in_transition, + ) + + new_event = save_new_event(event_data, project) + hash_search_result = return_values["find_existing_grouphash"][0][0] + post_save_grouphashes = { + gh.hash: gh.group_id for gh in GroupHash.objects.filter(project_id=project.id) + } + + # We should never call any of these more than once, regardless of the test + assert calculate_primary_hash_spy.call_count <= 1 + assert calculate_secondary_hash_spy.call_count <= 1 + assert create_group_spy.call_count <= 1 + + primary_hash_calculated = calculate_primary_hash_spy.call_count == 1 + secondary_hash_calculated = calculate_secondary_hash_spy.call_count == 1 + + primary_hash = return_values["_calculate_primary_hash"][0].hashes[0] + primary_hash_found = bool(hash_search_result) and hash_search_result.hash == primary_hash + + new_group_created = create_group_spy.call_count == 1 + + primary_grouphash_existed_already = primary_hash in existing_grouphashes + primary_grouphash_exists_now = primary_hash in post_save_grouphashes + + # Sanity checks + if primary_grouphash_existed_already: + existing_primary_hash_group_id = existing_grouphashes.get(primary_hash) + post_save_primary_hash_group_id = post_save_grouphashes.get(primary_hash) + assert ( + post_save_primary_hash_group_id == existing_primary_hash_group_id + ), "Existing primary hash's group id changed" + assert ( + existing_group_id + ), "Primary grouphash already exists. 
Either something's wrong or you forgot to pass an existing group id" + + if existing_group_id: + event_assigned_to_given_existing_group = ( + new_event.group_id == existing_group_id if existing_group_id else None + ) + + if secondary_hash_calculated: + secondary_hash = return_values["_calculate_secondary_hash"][0].hashes[0] + hashes_different = secondary_hash != primary_hash + secondary_hash_found = ( + bool(hash_search_result) and hash_search_result.hash == secondary_hash + ) + secondary_grouphash_existed_already = secondary_hash in existing_grouphashes + secondary_grouphash_exists_now = secondary_hash in post_save_grouphashes + + # More sanity checks + if secondary_grouphash_existed_already: + existing_secondary_hash_group_id = existing_grouphashes.get(secondary_hash) + post_save_secondary_hash_group_id = post_save_grouphashes.get(secondary_hash) + assert ( + post_save_secondary_hash_group_id == existing_secondary_hash_group_id + ), "Existing secondary hash's group id changed" + assert ( + existing_group_id + ), "Secondary grouphash already exists. Either something's wrong or you forgot to pass an existing group id" + + return { + "primary_hash_calculated": primary_hash_calculated, + "secondary_hash_calculated": secondary_hash_calculated, + "hashes_different": hashes_different, + "primary_hash_found": primary_hash_found, + "secondary_hash_found": secondary_hash_found, + "new_group_created": new_group_created, + "event_assigned_to_given_existing_group": event_assigned_to_given_existing_group, + "primary_grouphash_existed_already": primary_grouphash_existed_already, + "secondary_grouphash_existed_already": secondary_grouphash_existed_already, + "primary_grouphash_exists_now": primary_grouphash_exists_now, + "secondary_grouphash_exists_now": secondary_grouphash_exists_now, + } + + +@django_db_all +@pytest.mark.parametrize( + "in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ") +) +def test_new_group( + in_transition: bool, + default_project: Project, +): + project = default_project + event_data = {"message": "testing, testing, 123"} + + results = get_results_from_saving_event( + event_data=event_data, + project=project, + primary_config=NEWSTYLE_CONFIG, + secondary_config=LEGACY_CONFIG, + in_transition=in_transition, + ) + + if in_transition: + assert results == { + "primary_hash_calculated": True, + "secondary_hash_calculated": True, + "hashes_different": True, + "primary_hash_found": False, + "secondary_hash_found": False, + "new_group_created": True, + "primary_grouphash_existed_already": False, + "secondary_grouphash_existed_already": False, + "primary_grouphash_exists_now": True, + "secondary_grouphash_exists_now": True, + # Moot since no existing group was passed + "event_assigned_to_given_existing_group": None, + } + else: + assert results == { + "primary_hash_calculated": True, + "secondary_hash_calculated": False, + "primary_hash_found": False, + "new_group_created": True, + "primary_grouphash_existed_already": False, + "primary_grouphash_exists_now": True, + # The rest are moot since no existing group was passed and no secondary hash was + # calculated. 
+ "event_assigned_to_given_existing_group": None, + "hashes_different": None, + "secondary_hash_found": None, + "secondary_grouphash_existed_already": None, + "secondary_grouphash_exists_now": None, + } + + +@django_db_all +@pytest.mark.parametrize( + "in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ") +) +def test_existing_group_no_new_hash( + in_transition: bool, + default_project: Project, +): + project = default_project + event_data = {"message": "testing, testing, 123"} + + # Set the stage by creating a group with the soon-to-be-secondary hash + existing_event = save_event_with_grouping_config(event_data, project, LEGACY_CONFIG) + + # Now save a new, identical, event with an updated grouping config + results = get_results_from_saving_event( + event_data=event_data, + project=project, + primary_config=NEWSTYLE_CONFIG, + secondary_config=LEGACY_CONFIG, + in_transition=in_transition, + existing_group_id=existing_event.group_id, + ) + + if in_transition: + assert results == { + "primary_hash_calculated": True, + "secondary_hash_calculated": True, + "hashes_different": True, + "primary_hash_found": False, + "secondary_hash_found": True, + "new_group_created": False, + "event_assigned_to_given_existing_group": True, + "primary_grouphash_existed_already": False, + "secondary_grouphash_existed_already": True, + "primary_grouphash_exists_now": True, + "secondary_grouphash_exists_now": True, + } + else: + assert results == { + "primary_hash_calculated": True, + "secondary_hash_calculated": False, + "primary_hash_found": False, + "new_group_created": True, + "event_assigned_to_given_existing_group": False, + "primary_grouphash_existed_already": False, + "primary_grouphash_exists_now": True, + # The rest are moot since no secondary hash was calculated. 
+ "hashes_different": None, + "secondary_hash_found": None, + "secondary_grouphash_existed_already": None, + "secondary_grouphash_exists_now": None, + } + + +@django_db_all +@pytest.mark.parametrize( + "in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ") +) +@pytest.mark.parametrize( + "secondary_hash_exists", + (True, False), + ids=(" secondary_hash_exists: True ", " secondary_hash_exists: False "), +) +def test_existing_group_new_hash_exists( + secondary_hash_exists: bool, + in_transition: bool, + default_project: Project, +): + project = default_project + event_data = {"message": "testing, testing, 123"} + + # Set the stage by creating a group tied to the new hash (and possibly the legacy hash as well) + if secondary_hash_exists: + existing_event = save_event_with_grouping_config( + event_data, project, NEWSTYLE_CONFIG, LEGACY_CONFIG, True + ) + assert ( + GroupHash.objects.filter( + project_id=project.id, group_id=existing_event.group_id + ).count() + == 2 + ) + else: + existing_event = save_event_with_grouping_config(event_data, project, NEWSTYLE_CONFIG) + assert ( + GroupHash.objects.filter( + project_id=project.id, group_id=existing_event.group_id + ).count() + == 1 + ) + + # Now save a new, identical, event + results = get_results_from_saving_event( + event_data=event_data, + project=project, + primary_config=NEWSTYLE_CONFIG, + secondary_config=LEGACY_CONFIG, + in_transition=in_transition, + existing_group_id=existing_event.group_id, + ) + + if in_transition: + assert results == { + "primary_hash_calculated": True, + "secondary_hash_calculated": True, + "hashes_different": True, + "primary_hash_found": True, + "secondary_hash_found": False, # We found the new hash first and quit looking + "new_group_created": False, + "event_assigned_to_given_existing_group": True, + "primary_grouphash_existed_already": True, + "secondary_grouphash_existed_already": secondary_hash_exists, + "primary_grouphash_exists_now": True, + "secondary_grouphash_exists_now": True, + } + else: + assert results == { + "primary_hash_calculated": True, + "secondary_hash_calculated": False, + "primary_hash_found": True, + "new_group_created": False, + "event_assigned_to_given_existing_group": True, + "primary_grouphash_existed_already": True, + "primary_grouphash_exists_now": True, + # The rest are moot since no secondary hash was calculated. + "hashes_different": None, + "secondary_hash_found": None, + "secondary_grouphash_existed_already": None, + "secondary_grouphash_exists_now": None, + } From 0db2753e81f8d6fd77f5e3ccde3eb8e773a0963e Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 12 Feb 2024 12:48:57 -0600 Subject: [PATCH 273/357] chore(stats-detectors): Remove experimental regression issue (#64969) This was the experimental regression issue code that can now be removed. 
--- .../issue_platform_adapter.py | 14 +++--------- src/sentry/tasks/statistical_detectors.py | 3 +-- .../test_issue_platform_adapter.py | 22 ++++--------------- .../sentry/tasks/test_auto_resolve_issues.py | 4 ++-- tests/sentry/tasks/test_post_process.py | 6 ++--- .../tasks/test_weekly_escalating_forecast.py | 10 ++++----- 6 files changed, 18 insertions(+), 41 deletions(-) diff --git a/src/sentry/statistical_detectors/issue_platform_adapter.py b/src/sentry/statistical_detectors/issue_platform_adapter.py index 421f4b81c41aa9..f87dad875151f3 100644 --- a/src/sentry/statistical_detectors/issue_platform_adapter.py +++ b/src/sentry/statistical_detectors/issue_platform_adapter.py @@ -2,11 +2,7 @@ import uuid from datetime import datetime, timezone -from sentry.issues.grouptype import ( - GroupType, - PerformanceDurationRegressionGroupType, - PerformanceP95EndpointRegressionGroupType, -) +from sentry.issues.grouptype import GroupType, PerformanceP95EndpointRegressionGroupType from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka from sentry.seer.utils import BreakpointData @@ -21,7 +17,7 @@ def fingerprint_regression(transaction, full=False): return fingerprint -def send_regression_to_platform(regression: BreakpointData, released: bool): +def send_regression_to_platform(regression: BreakpointData): current_timestamp = datetime.utcnow().replace(tzinfo=timezone.utc) displayed_old_baseline = round(float(regression["aggregate_range_1"]), 2) @@ -31,11 +27,7 @@ def send_regression_to_platform(regression: BreakpointData, released: bool): # TODO fix this in the breakpoint microservice and in trends v2 project_id = int(regression["project"]) - issue_type: type[GroupType] = ( - PerformanceP95EndpointRegressionGroupType - if released - else PerformanceDurationRegressionGroupType - ) + issue_type: type[GroupType] = PerformanceP95EndpointRegressionGroupType occurrence = IssueOccurrence( id=uuid.uuid4().hex, diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py index 1716bebc59bae1..2f480ddaaafba6 100644 --- a/src/sentry/tasks/statistical_detectors.py +++ b/src/sentry/tasks/statistical_detectors.py @@ -387,7 +387,7 @@ def _detect_transaction_change_points( for regression in regressions: breakpoint_count += 1 - send_regression_to_platform(regression, True) + send_regression_to_platform(regression) metrics.incr( "statistical_detectors.breakpoint.emitted", @@ -565,7 +565,6 @@ def emit_function_regression_issue( "trend_percentage": regression["trend_percentage"], "unweighted_p_value": regression["unweighted_p_value"], "unweighted_t_value": regression["unweighted_t_value"], - "released": True, } ) diff --git a/tests/sentry/statistical_detectors/test_issue_platform_adapter.py b/tests/sentry/statistical_detectors/test_issue_platform_adapter.py index 67cf011ea23f28..fd07819a627a77 100644 --- a/tests/sentry/statistical_detectors/test_issue_platform_adapter.py +++ b/tests/sentry/statistical_detectors/test_issue_platform_adapter.py @@ -1,27 +1,13 @@ from unittest import mock -import pytest - -from sentry.issues.grouptype import ( - PerformanceDurationRegressionGroupType, - PerformanceP95EndpointRegressionGroupType, -) +from sentry.issues.grouptype import PerformanceP95EndpointRegressionGroupType from sentry.seer.utils import BreakpointData from sentry.statistical_detectors.issue_platform_adapter import send_regression_to_platform -@pytest.mark.parametrize( - ["released", 
"issue_type"], - [ - pytest.param(False, PerformanceDurationRegressionGroupType, id="unreleased"), - pytest.param(True, PerformanceP95EndpointRegressionGroupType, id="released"), - ], -) @mock.patch("sentry.statistical_detectors.issue_platform_adapter.produce_occurrence_to_kafka") def test_send_regressions_to_plaform( mock_produce_occurrence_to_kafka, - released, - issue_type, ): project_id = "123" @@ -38,7 +24,7 @@ def test_send_regressions_to_plaform( "breakpoint": 1691366400, } - send_regression_to_platform(mock_regression, released) + send_regression_to_platform(mock_regression) assert len(mock_produce_occurrence_to_kafka.mock_calls) == 1 @@ -51,7 +37,7 @@ def test_send_regressions_to_plaform( occurrence, **{ "project_id": 123, - "issue_title": issue_type.description, + "issue_title": PerformanceP95EndpointRegressionGroupType.description, "subtitle": "Increased from 14.0ms to 28.0ms (P95)", "resource_id": None, "evidence_data": mock_regression, @@ -63,7 +49,7 @@ def test_send_regressions_to_plaform( }, {"name": "Transaction", "value": "foo", "important": True}, ], - "type": issue_type.type_id, + "type": PerformanceP95EndpointRegressionGroupType.type_id, "level": "info", "culprit": "foo", }, diff --git a/tests/sentry/tasks/test_auto_resolve_issues.py b/tests/sentry/tasks/test_auto_resolve_issues.py index aa149ac0033b22..1b40b95c2dec47 100644 --- a/tests/sentry/tasks/test_auto_resolve_issues.py +++ b/tests/sentry/tasks/test_auto_resolve_issues.py @@ -5,7 +5,7 @@ from django.utils import timezone from sentry.issues.grouptype import ( - PerformanceDurationRegressionGroupType, + PerformanceP95EndpointRegressionGroupType, PerformanceSlowDBQueryGroupType, ) from sentry.models.group import Group, GroupStatus @@ -122,7 +122,7 @@ def test_aggregate_performance(self, mock_backend): project=project, status=GroupStatus.UNRESOLVED, last_seen=timezone.now() - timedelta(days=1), - type=PerformanceDurationRegressionGroupType.type_id, # Test that auto_resolve is disabled for SD + type=PerformanceP95EndpointRegressionGroupType.type_id, # Test that auto_resolve is disabled for SD ) mock_backend.get_size.return_value = 0 diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index b16b0d5df9b658..ce586fb926d858 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -24,8 +24,8 @@ from sentry.issues.grouptype import ( FeedbackGroup, GroupCategory, - PerformanceDurationRegressionGroupType, PerformanceNPlusOneGroupType, + PerformanceP95EndpointRegressionGroupType, ProfileFileIOGroupType, ) from sentry.issues.ingest import save_issue_occurrence @@ -2486,7 +2486,7 @@ class PostProcessGroupAggregateEventTest( ): def create_event(self, data, project_id): group = self.create_group( - type=PerformanceDurationRegressionGroupType.type_id, + type=PerformanceP95EndpointRegressionGroupType.type_id, ) event = self.store_event(data=data, project_id=project_id) @@ -2513,7 +2513,7 @@ def call_post_process_group( if cache_key is None: cache_key = write_event_to_cache(event) with self.feature( - PerformanceDurationRegressionGroupType.build_post_process_group_feature_name() + PerformanceP95EndpointRegressionGroupType.build_post_process_group_feature_name() ): post_process_group( is_new=is_new, diff --git a/tests/sentry/tasks/test_weekly_escalating_forecast.py b/tests/sentry/tasks/test_weekly_escalating_forecast.py index 32e0f7c6bd9bfa..a4bdb33f1fd666 100644 --- a/tests/sentry/tasks/test_weekly_escalating_forecast.py +++ 
b/tests/sentry/tasks/test_weekly_escalating_forecast.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock, patch from sentry.issues.escalating_group_forecast import ONE_EVENT_FORECAST, EscalatingGroupForecast -from sentry.issues.grouptype import ErrorGroupType, PerformanceDurationRegressionGroupType +from sentry.issues.grouptype import ErrorGroupType, PerformanceP95EndpointRegressionGroupType from sentry.models.group import Group, GroupStatus from sentry.tasks.weekly_escalating_forecast import run_escalating_forecast from sentry.testutils.cases import APITestCase, SnubaTestCase @@ -58,7 +58,7 @@ def test_empty_sd_escalating_forecast( """ with self.tasks(): group_list = self.create_archived_until_escalating_groups( - num_groups=1, group_type=PerformanceDurationRegressionGroupType.type_id + num_groups=1, group_type=PerformanceP95EndpointRegressionGroupType.type_id ) mock_query_groups_past_counts.return_value = {} @@ -107,7 +107,7 @@ def test_single_sd_group_escalating_forecast( ) -> None: with self.tasks(): group_list = self.create_archived_until_escalating_groups( - num_groups=1, group_type=PerformanceDurationRegressionGroupType.type_id + num_groups=1, group_type=PerformanceP95EndpointRegressionGroupType.type_id ) mock_query_groups_past_counts.return_value = get_mock_groups_past_counts_response( @@ -162,7 +162,7 @@ def test_multiple_sd_groups_escalating_forecast( ) -> None: with self.tasks(): group_list = self.create_archived_until_escalating_groups( - num_groups=3, group_type=PerformanceDurationRegressionGroupType.type_id + num_groups=3, group_type=PerformanceP95EndpointRegressionGroupType.type_id ) mock_query_groups_past_counts.return_value = get_mock_groups_past_counts_response( @@ -216,7 +216,7 @@ def test_update_sd_group_escalating_forecast( ) -> None: with self.tasks(): group_list = self.create_archived_until_escalating_groups( - num_groups=1, group_type=PerformanceDurationRegressionGroupType.type_id + num_groups=1, group_type=PerformanceP95EndpointRegressionGroupType.type_id ) mock_query_groups_past_counts.return_value = get_mock_groups_past_counts_response( From 2aedf1d3df0cfaa396601952eeb177bf86ecdf69 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 12 Feb 2024 14:01:56 -0500 Subject: [PATCH 274/357] ref: prevent sentry.auth.authenticators.u2f from reading the database at import time (#64952) reproducible with `PYTHONWARNINGS=error::RuntimeWarning mypy` --- src/sentry/auth/authenticators/u2f.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/src/sentry/auth/authenticators/u2f.py b/src/sentry/auth/authenticators/u2f.py index beb1de9e1e132d..17ba3f90286dce 100644 --- a/src/sentry/auth/authenticators/u2f.py +++ b/src/sentry/auth/authenticators/u2f.py @@ -1,4 +1,5 @@ from base64 import urlsafe_b64encode +from functools import cached_property from time import time from urllib.parse import urlparse @@ -53,11 +54,20 @@ class U2fInterface(AuthenticatorInterface): "Chrome)." 
) allow_multi_enrollment = True - # rp is a relying party for webauthn, this would be sentry.io for SAAS - # and the prefix for self-hosted / dev environments - rp_id = urlparse(_get_url_prefix()).hostname - rp = PublicKeyCredentialRpEntity(rp_id, "Sentry") - webauthn_registration_server = Fido2Server(rp) + + @cached_property + def rp_id(self) -> str | None: + # rp is a relying party for webauthn, this would be sentry.io for SAAS + # and the prefix for self-hosted / dev environments + return urlparse(_get_url_prefix()).hostname + + @cached_property + def rp(self) -> PublicKeyCredentialRpEntity: + return PublicKeyCredentialRpEntity(self.rp_id, "Sentry") + + @cached_property + def webauthn_registration_server(self) -> Fido2Server: + return Fido2Server(self.rp) def __init__(self, authenticator=None, status=EnrollmentStatus.EXISTING): super().__init__(authenticator, status) From b21cc4d0bb8cc74e60bdb2300a8f8ac1a35627ff Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Mon, 12 Feb 2024 11:02:47 -0800 Subject: [PATCH 275/357] ref(superuser): allow read-only superusers to view audit logs (#64892) --- src/sentry/api/bases/organization.py | 13 +++++++++++++ .../endpoints/test_organization_auditlogs.py | 19 +++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py index ee58564ea16107..eac8559886f82f 100644 --- a/src/sentry/api/bases/organization.py +++ b/src/sentry/api/bases/organization.py @@ -98,6 +98,19 @@ class OrganizationAndStaffPermission(StaffPermissionMixin, OrganizationPermissio class OrganizationAuditPermission(OrganizationPermission): scope_map = {"GET": ["org:write"]} + def has_object_permission( + self, + request: Request, + view: object, + organization: Organization | RpcOrganization | RpcUserOrganizationContext, + ) -> bool: + if super().has_object_permission(request, view, organization): + return True + + # the GET requires org:write, but we want both superuser read-only + + # write to be able to access this GET. 
Read-only superusers only have :read scopes.
+        return is_active_superuser(request)
+

 class OrganizationEventPermission(OrganizationPermission):
     scope_map = {
diff --git a/tests/sentry/api/endpoints/test_organization_auditlogs.py b/tests/sentry/api/endpoints/test_organization_auditlogs.py
index cfc6f6b361cac6..c71452ecb4fdca 100644
--- a/tests/sentry/api/endpoints/test_organization_auditlogs.py
+++ b/tests/sentry/api/endpoints/test_organization_auditlogs.py
@@ -1,11 +1,13 @@
 from datetime import timedelta

+from django.test import override_settings
 from django.utils import timezone
 from rest_framework.exceptions import ErrorDetail

 from sentry import audit_log
 from sentry.models.auditlogentry import AuditLogEntry
 from sentry.testutils.cases import APITestCase
+from sentry.testutils.helpers.features import with_feature
 from sentry.testutils.silo import control_silo_test


@@ -177,3 +179,20 @@ def test_options_data_included(self):
         response = self.get_success_response(self.organization.slug)
         assert len(response.data) == 2
         assert set(response.data["options"]) == audit_log_api_names
+
+    @override_settings(SENTRY_SELF_HOSTED=False)
+    @with_feature("auth:enterprise-superuser-read-write")
+    def test_superuser_read_write_can_see_audit_logs(self):
+        superuser = self.create_user(is_superuser=True)
+        self.login_as(superuser, superuser=True)
+
+        AuditLogEntry.objects.create(
+            organization_id=self.organization.id,
+            event=audit_log.get_event_id("ORG_EDIT"),
+            actor=self.user,
+            datetime=timezone.now(),
+        )
+        self.get_success_response(self.organization.slug)
+
+        self.add_user_permission(superuser, "superuser.write")
+        self.get_success_response(self.organization.slug)

From a1ae8365cdd2ebc0b822f00015907aa68cb79907 Mon Sep 17 00:00:00 2001
From: Armen Zambrano G <44410+armenzg@users.noreply.github.com>
Date: Mon, 12 Feb 2024 14:12:13 -0500
Subject: [PATCH 276/357] ref(metrics_extraction): auto fixes + do_request refactor (#64894)

Automatic formatting changes and move `do_request` to the parent class.
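As a sketch, a subclass after the refactor only needs to configure `self.url` and `self.features` (the class and test below are hypothetical; the real helper lives on `MetricsEnhancedPerformanceTestCase` in the diff that follows):

```python
from typing import Any

from django.urls import reverse

from sentry.testutils.cases import MetricsEnhancedPerformanceTestCase


class MyEndpointTest(MetricsEnhancedPerformanceTestCase):  # hypothetical subclass
    def setUp(self) -> None:
        super().setUp()
        # do_request falls back to these when no explicit features are passed
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
        self.features = {"organizations:on-demand-metrics-extraction-widgets": True}

    def test_simple(self) -> None:
        data: dict[str, Any] = {"field": ["count()"]}
        response = self.do_request(data)  # wraps self.feature(...) and the GET
        assert response.status_code == 200, response.content
```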
--- src/sentry/search/events/builder/metrics.py | 32 ++++++++++------- src/sentry/search/events/datasets/metrics.py | 22 +++++++----- src/sentry/snuba/discover.py | 36 +++++++++++-------- src/sentry/snuba/metrics/extraction.py | 2 +- src/sentry/testutils/cases.py | 9 +++++ .../endpoints/test_organization_events_mep.py | 11 ++---- .../test_organization_events_stats_mep.py | 22 +++--------- 7 files changed, 71 insertions(+), 63 deletions(-) diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py index f8cb3df6be5eaf..d3ea64d3cfede9 100644 --- a/src/sentry/search/events/builder/metrics.py +++ b/src/sentry/search/events/builder/metrics.py @@ -1024,9 +1024,11 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any: # Need this otherwise top_events returns only 1 item groupbys = [Column(col) for col in self._get_group_bys()] groupby_aliases = [ - groupby.alias - if isinstance(groupby, (AliasedExpression, CurriedFunction)) - else groupby.name + ( + groupby.alias + if isinstance(groupby, (AliasedExpression, CurriedFunction)) + else groupby.name + ) for groupby in groupbys if not ( isinstance(groupby, CurriedFunction) and groupby.function == "team_key_transaction" @@ -1074,9 +1076,11 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any: Function( "tuple", [ - groupby.exp - if isinstance(groupby, AliasedExpression) - else groupby + ( + groupby.exp + if isinstance(groupby, AliasedExpression) + else groupby + ) for groupby in self.groupby if not ( isinstance(groupby, CurriedFunction) @@ -1172,9 +1176,11 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any: Function( "tuple", [ - groupby.exp - if isinstance(groupby, AliasedExpression) - else groupby + ( + groupby.exp + if isinstance(groupby, AliasedExpression) + else groupby + ) for groupby in self.groupby ], ), @@ -1845,9 +1851,11 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any: get_series( projects=self.params.projects, metrics_query=metrics_query, - use_case_id=UseCaseID.TRANSACTIONS - if self.is_performance - else UseCaseID.SESSIONS, + use_case_id=( + UseCaseID.TRANSACTIONS + if self.is_performance + else UseCaseID.SESSIONS + ), include_meta=True, tenant_ids=self.tenant_ids, ) diff --git a/src/sentry/search/events/datasets/metrics.py b/src/sentry/search/events/datasets/metrics.py index ed2f2ce1658053..52a4fa98ed586b 100644 --- a/src/sentry/search/events/datasets/metrics.py +++ b/src/sentry/search/events/datasets/metrics.py @@ -918,9 +918,9 @@ def _resolve_project_threshold_config(self) -> SelectType: column_name_resolver=lambda _use_case_id, _org_id, value: self.builder.resolve_column_name( value ), - org_id=self.builder.params.organization.id - if self.builder.params.organization - else None, + org_id=( + self.builder.params.organization.id if self.builder.params.organization else None + ), project_ids=self.builder.params.project_ids, ) @@ -1104,9 +1104,11 @@ def _resolve_histogram_function( f"histogramIf({num_buckets})", [ Column("value"), - Function("and", [zoom_params, metric_condition]) - if zoom_params - else metric_condition, + ( + Function("and", [zoom_params, metric_condition]) + if zoom_params + else metric_condition + ), ], alias, ) @@ -1725,9 +1727,11 @@ def _resolve_rate( condition, ], ), - args["interval"] - if interval is None - else Function("divide", [args["interval"], interval]), + ( + args["interval"] + if interval is None + else Function("divide", [args["interval"], interval]) + ), ], alias, ) diff --git 
a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index ca4578fcf8be56..2c9feb2c7e48c8 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -344,15 +344,17 @@ def timeseries_query( for snql_query, result in zip(query_list, query_results): results.append( { - "data": zerofill( - result["data"], - snql_query.params.start, - snql_query.params.end, - rollup, - "time", - ) - if zerofill_results - else result["data"], + "data": ( + zerofill( + result["data"], + snql_query.params.start, + snql_query.params.end, + rollup, + "time", + ) + if zerofill_results + else result["data"] + ), "meta": result["meta"], } ) @@ -509,9 +511,11 @@ def top_events_timeseries( ): return SnubaTSResult( { - "data": zerofill([], params["start"], params["end"], rollup, "time") - if zerofill_results - else [], + "data": ( + zerofill([], params["start"], params["end"], rollup, "time") + if zerofill_results + else [] + ), }, params["start"], params["end"], @@ -553,9 +557,11 @@ def top_events_timeseries( for key, item in results.items(): results[key] = SnubaTSResult( { - "data": zerofill(item["data"], params["start"], params["end"], rollup, "time") - if zerofill_results - else item["data"], + "data": ( + zerofill(item["data"], params["start"], params["end"], rollup, "time") + if zerofill_results + else item["data"] + ), "order": item["order"], }, params["start"], diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py index b53a0c5dbec137..a39188dbf3ff38 100644 --- a/src/sentry/snuba/metrics/extraction.py +++ b/src/sentry/snuba/metrics/extraction.py @@ -224,7 +224,7 @@ def get_default_spec_version(cls: Any) -> SpecVersion: "p95": "p95", "p99": "p99", # p100 is not supported in the metrics layer, so we convert to max which is equivalent. - "p100": "max" + "p100": "max", # generic percentile is not supported by metrics layer. } diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index cee6c62b6f1f44..43dd4a3894eaf6 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -40,6 +40,7 @@ from requests.utils import CaseInsensitiveDict, get_encoding_from_headers from rest_framework import status from rest_framework.request import Request +from rest_framework.response import Response from rest_framework.test import APITestCase as BaseAPITestCase from sentry_relay.consts import SPAN_STATUS_NAME_TO_CODE from snuba_sdk import Granularity, Limit, Offset @@ -2012,8 +2013,16 @@ class MetricsEnhancedPerformanceTestCase(BaseMetricsLayerTestCase, TestCase): def setUp(self): super().setUp() + self.login_as(user=self.user) self._index_metric_strings() + def do_request(self, data: dict[str, Any], features: dict[str, str] = None) -> Response: + """Set up self.features and self.url in the inheriting classes. + You can pass your own features if you do not want to use the default used by the subclass. 
+ """ + with self.feature(self.features if features is None else features): + return self.client.get(self.url, data=data, format="json") + def _index_metric_strings(self): strings = [ "transaction", diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index f73ddfd89cc55c..d625586db65fc6 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -3134,15 +3134,8 @@ class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithOnDemandMetric def setUp(self) -> None: super().setUp() - - def do_request(self, query: Any) -> Any: - self.login_as(user=self.user) - url = reverse( - self.viewname, - kwargs={"organization_slug": self.organization.slug}, - ) - with self.feature({"organizations:on-demand-metrics-extraction-widgets": True}): - return self.client.get(url, query, format="json") + self.url = reverse(self.viewname, kwargs={"organization_slug": self.organization.slug}) + self.features = {"organizations:on-demand-metrics-extraction-widgets": True} def _on_demand_query_check( self, diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py index 338d00fd24af2c..784e84bfadee71 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py @@ -43,13 +43,6 @@ def setUp(self): self.additional_params = dict() - def do_request(self, data, url=None, features=None): - if features is None: - features = {"organizations:discover-basic": True} - features.update(self.features) - with self.feature(features): - return self.client.get(self.url if url is None else url, data=data, format="json") - # These throughput tests should roughly match the ones in OrganizationEventsStatsEndpointTest def test_throughput_epm_hour_rollup(self): # Each of these denotes how many events to create in each hour @@ -976,14 +969,10 @@ def setUp(self): "sentry-api-0-organization-events-stats", kwargs={"organization_slug": self.project.organization.slug}, ) - self.features = {"organizations:on-demand-metrics-extraction-widgets": True} - - def do_request(self, data, url=None, features=None): - if features is None: - features = {"organizations:discover-basic": True} - features.update(self.features) - with self.feature(features): - return self.client.get(self.url if url is None else url, data=data, format="json") + self.features = { + "organizations:on-demand-metrics-extraction-widgets": True, + "organizations:on-demand-metrics-extraction": True, + } def test_top_events_wrong_on_demand_type(self): query = "transaction.duration:>=100" @@ -1392,7 +1381,6 @@ def test_glob_http_referer_on_demand(self): def _test_is_metrics_extracted_data( self, params: dict[str, Any], expected_on_demand_query: bool, dataset: str ) -> None: - features = {"organizations:on-demand-metrics-extraction": True} spec = OnDemandMetricSpec( field="count()", query="transaction.duration:>1s", @@ -1400,7 +1388,7 @@ def _test_is_metrics_extracted_data( ) self.store_on_demand_metric(1, spec=spec) - response = self.do_request(params, features=features) + response = self.do_request(params) assert response.status_code == 200, response.content meta = response.data["meta"] From 97be10a027053c63338787f6374eb8a2f1908640 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Mon, 12 Feb 2024 15:27:50 -0500 Subject: [PATCH 277/357] fix(device-class): Allow 
for unknown device class in spans datasets (#65010) Discover stores Unknown device class as `null` but in spans data it's encoded as `""`. This conditionally adds `"Unknown"` to the available device class filters so passing it in doesn't throw an error during querying. --- .../search/events/datasets/filter_aliases.py | 11 ++++++--- .../search/events/datasets/spans_indexed.py | 10 +++++--- .../search/events/datasets/spans_metrics.py | 10 +++++--- .../test_organization_events_span_indexed.py | 23 +++++++++++++++++++ .../test_organization_events_span_metrics.py | 23 +++++++++++++++++++ 5 files changed, 68 insertions(+), 9 deletions(-) diff --git a/src/sentry/search/events/datasets/filter_aliases.py b/src/sentry/search/events/datasets/filter_aliases.py index 662cbb2067fd6e..8494d214fcde2d 100644 --- a/src/sentry/search/events/datasets/filter_aliases.py +++ b/src/sentry/search/events/datasets/filter_aliases.py @@ -287,9 +287,14 @@ def semver_build_filter_converter( def device_class_converter( - builder: builder.QueryBuilder, search_filter: SearchFilter + builder: builder.QueryBuilder, + search_filter: SearchFilter, + device_class_map: Mapping[str, set[str]] | None = None, ) -> WhereType | None: + if not device_class_map: + device_class_map = DEVICE_CLASS + value = search_filter.value.value - if value not in DEVICE_CLASS: + if value not in device_class_map: raise InvalidSearchQuery(f"{value} is not a supported device.class") - return Condition(builder.column("device.class"), Op.IN, list(DEVICE_CLASS[value])) + return Condition(builder.column("device.class"), Op.IN, list(device_class_map[value])) diff --git a/src/sentry/search/events/datasets/spans_indexed.py b/src/sentry/search/events/datasets/spans_indexed.py index d91eea0e910fff..af03d382f15184 100644 --- a/src/sentry/search/events/datasets/spans_indexed.py +++ b/src/sentry/search/events/datasets/spans_indexed.py @@ -21,6 +21,7 @@ with_default, ) from sentry.search.events.types import SelectType, WhereType +from sentry.search.utils import DEVICE_CLASS class SpansIndexedDatasetConfig(DatasetConfig): @@ -36,9 +37,7 @@ def search_filter_converter( return { constants.PROJECT_ALIAS: self._project_slug_filter_converter, constants.PROJECT_NAME_ALIAS: self._project_slug_filter_converter, - constants.DEVICE_CLASS_ALIAS: lambda search_filter: filter_aliases.device_class_converter( - self.builder, search_filter - ), + constants.DEVICE_CLASS_ALIAS: self._device_class_filter_converter, constants.SPAN_IS_SEGMENT_ALIAS: filter_aliases.span_is_segment_converter, } @@ -210,6 +209,11 @@ def orderby_converter(self) -> Mapping[str, Callable[[Direction], OrderBy]]: def _project_slug_filter_converter(self, search_filter: SearchFilter) -> WhereType | None: return filter_aliases.project_slug_converter(self.builder, search_filter) + def _device_class_filter_converter(self, search_filter: SearchFilter) -> SelectType: + return filter_aliases.device_class_converter( + self.builder, search_filter, {**DEVICE_CLASS, "Unknown": {""}} + ) + def _resolve_project_slug_alias(self, alias: str) -> SelectType: return field_aliases.resolve_project_slug_alias(self.builder, alias) diff --git a/src/sentry/search/events/datasets/spans_metrics.py b/src/sentry/search/events/datasets/spans_metrics.py index 7252a4a62c6fce..c808c09ac4ee91 100644 --- a/src/sentry/search/events/datasets/spans_metrics.py +++ b/src/sentry/search/events/datasets/spans_metrics.py @@ -11,6 +11,7 @@ from sentry.search.events.datasets import field_aliases, filter_aliases, function_aliases from 
sentry.search.events.datasets.base import DatasetConfig from sentry.search.events.types import SelectType, WhereType +from sentry.search.utils import DEVICE_CLASS from sentry.snuba.metrics.naming_layer.mri import SpanMRI from sentry.snuba.referrer import Referrer @@ -28,9 +29,7 @@ def search_filter_converter( ) -> Mapping[str, Callable[[SearchFilter], WhereType | None]]: return { constants.SPAN_DOMAIN_ALIAS: self._span_domain_filter_converter, - constants.DEVICE_CLASS_ALIAS: lambda search_filter: filter_aliases.device_class_converter( - self.builder, search_filter - ), + constants.DEVICE_CLASS_ALIAS: self._device_class_filter_converter, } @property @@ -482,6 +481,11 @@ def _span_domain_filter_converter(self, search_filter: SearchFilter) -> WhereTyp 0, ) + def _device_class_filter_converter(self, search_filter: SearchFilter) -> SelectType: + return filter_aliases.device_class_converter( + self.builder, search_filter, {**DEVICE_CLASS, "Unknown": {""}} + ) + def _resolve_span_module(self, alias: str) -> SelectType: return field_aliases.resolve_span_module(self.builder, alias) diff --git a/tests/snuba/api/endpoints/test_organization_events_span_indexed.py b/tests/snuba/api/endpoints/test_organization_events_span_indexed.py index 3e1ff94268c41d..4193e42dafc016 100644 --- a/tests/snuba/api/endpoints/test_organization_events_span_indexed.py +++ b/tests/snuba/api/endpoints/test_organization_events_span_indexed.py @@ -95,3 +95,26 @@ def test_sentry_tags_syntax(self): assert len(data) == 1 assert data[0]["sentry_tags[transaction.method]"] == "foo" assert meta["dataset"] == "spansIndexed" + + def test_device_class_filter_unknown(self): + self.store_spans( + [ + self.create_span({"sentry_tags": {"device.class": ""}}, start_ts=self.ten_mins_ago), + ] + ) + response = self.do_request( + { + "field": ["device.class", "count()"], + "query": "device.class:Unknown", + "orderby": "count()", + "project": self.project.id, + "dataset": "spansIndexed", + } + ) + + assert response.status_code == 200, response.content + data = response.data["data"] + meta = response.data["meta"] + assert len(data) == 1 + assert data[0]["device.class"] == "Unknown" + assert meta["dataset"] == "spansIndexed" diff --git a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py index 96d4f2cd98b448..7fc6de9bd94f43 100644 --- a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py +++ b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py @@ -996,6 +996,29 @@ def test_device_class_filter(self): assert data[0]["device.class"] == level assert meta["fields"]["device.class"] == "string" + def test_device_class_filter_unknown(self): + self.store_span_metric( + 123, + internal_metric=constants.SELF_TIME_LIGHT, + timestamp=self.min_ago, + tags={"device.class": ""}, + ) + response = self.do_request( + { + "field": ["device.class", "count()"], + "query": "device.class:Unknown", + "orderby": "count()", + "project": self.project.id, + "dataset": "spansMetrics", + } + ) + assert response.status_code == 200, response.content + data = response.data["data"] + meta = response.data["meta"] + assert len(data) == 1 + assert data[0]["device.class"] == "Unknown" + assert meta["fields"]["device.class"] == "string" + @region_silo_test class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithMetricLayer( From 2d13378ac250362d9ea3273446a08fec2af2d610 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 12 Feb 2024 12:38:07 -0800 Subject: [PATCH 
278/357] ref(grouping): Call `_save_aggregate_new` when feature flag enabled (#64851) This PR is a step towards updating the logic of `_save_aggregate`. The new logic will be contained in a separate function, so as a first step, this PR creates the new function, with identical logic to `_save_aggregate`. That way, when changes are made, they'll be starting from the same baseline as they would be were `_save_aggregate` itself being modified, which should hopefully make reviewing easier. It also switches to calling this new function in the flag-on branch of `assign_event_to_group`. Finally, to make sure that everything still works as expected, the `assign_event_to_group` tests have been modified to now run both branches, flag on and flag off. --- src/sentry/event_manager.py | 241 +++++++++++++++++- .../grouping/test_assign_to_group.py | 28 +- 2 files changed, 266 insertions(+), 3 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 0ad38db4f301d8..19b6d56a156ad8 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -1331,8 +1331,7 @@ def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> G ) and not has_mobile_config ): - # This will be updated to the new logic once it's written - group_info = _save_aggregate( + group_info = _save_aggregate_new( event=event, job=job, release=job["release"], @@ -1593,6 +1592,244 @@ def _save_aggregate( return GroupInfo(group, is_new, is_regression) +def _save_aggregate_new( + event: Event, + job: Job, + release: Release | None, + received_timestamp: int | float, + metric_tags: MutableTags, +) -> GroupInfo | None: + project = event.project + + primary_hashes, secondary_hashes, hashes = get_hash_values(project, job, metric_tags) + + # Now that we've used the current and possibly secondary grouping config(s) to calculate the + # hashes, we're free to perform a config update if needed. Future events will use the new + # config, but will also be grandfathered into the current config for a week, so as not to + # erroneously create new groups. + update_grouping_config_if_needed(project) + + _materialize_metadata_many([job]) + metadata = dict(job["event_metadata"]) + + group_creation_kwargs = _get_group_creation_kwargs(job) + + # Because this logic is not complex enough we want to special case the situation where we + # migrate from a hierarchical hash to a non hierarchical hash. The reason being that + # there needs to be special logic to not create orphaned hashes in migration cases + # but it wants a different logic to implement splitting of hierarchical hashes. + migrate_off_hierarchical = bool( + secondary_hashes + and secondary_hashes.hierarchical_hashes + and not primary_hashes.hierarchical_hashes + ) + + flat_grouphashes = [ + GroupHash.objects.get_or_create(project=project, hash=hash)[0] for hash in hashes.hashes + ] + + # The root_hierarchical_hash is the least specific hash within the tree, so + # typically hierarchical_hashes[0], unless a hash `n` has been split in + # which case `root_hierarchical_hash = hierarchical_hashes[n + 1]`. Chosing + # this for select_for_update mostly provides sufficient synchronization + # when groups are created and also relieves contention by locking a more + # specific hash than `hierarchical_hashes[0]`. 
+ existing_grouphash, root_hierarchical_hash = find_existing_grouphash( + project, flat_grouphashes, hashes.hierarchical_hashes + ) + + if root_hierarchical_hash is not None: + root_hierarchical_grouphash = GroupHash.objects.get_or_create( + project=project, hash=root_hierarchical_hash + )[0] + + metadata.update( + hashes.group_metadata_from_hash( + existing_grouphash.hash + if existing_grouphash is not None + else root_hierarchical_hash + ) + ) + + else: + root_hierarchical_grouphash = None + + # In principle the group gets the same metadata as the event, so common + # attributes can be defined in eventtypes. + # + # Additionally the `last_received` key is set for group metadata, later in + # _save_aggregate + group_creation_kwargs["data"] = materialize_metadata( + event.data, + get_event_type(event.data), + metadata, + ) + group_creation_kwargs["data"]["last_received"] = received_timestamp + + if existing_grouphash is None: + if killswitch_matches_context( + "store.load-shed-group-creation-projects", + { + "project_id": project.id, + "platform": event.platform, + }, + ): + raise HashDiscarded("Load shedding group creation", reason="load_shed") + + with sentry_sdk.start_span( + op="event_manager.create_group_transaction" + ) as span, metrics.timer( + "event_manager.create_group_transaction" + ) as metric_tags, transaction.atomic( + router.db_for_write(GroupHash) + ): + span.set_tag("create_group_transaction.outcome", "no_group") + metric_tags["create_group_transaction.outcome"] = "no_group" + + all_grouphash_ids = [h.id for h in flat_grouphashes] + if root_hierarchical_grouphash is not None: + all_grouphash_ids.append(root_hierarchical_grouphash.id) + + all_grouphashes = list( + GroupHash.objects.filter(id__in=all_grouphash_ids).select_for_update() + ) + + flat_grouphashes = [gh for gh in all_grouphashes if gh.hash in hashes.hashes] + + existing_grouphash, root_hierarchical_hash = find_existing_grouphash( + project, flat_grouphashes, hashes.hierarchical_hashes + ) + + if root_hierarchical_hash is not None: + root_hierarchical_grouphash = GroupHash.objects.get_or_create( + project=project, hash=root_hierarchical_hash + )[0] + else: + root_hierarchical_grouphash = None + + if existing_grouphash is None: + group = _create_group(project, event, **group_creation_kwargs) + + if ( + features.has("projects:first-event-severity-calculation", event.project) + and group.data.get("metadata", {}).get("severity") is None + ): + logger.error( + "Group created without severity score", + extra={ + "event_id": event.data["event_id"], + "group_id": group.id, + }, + ) + + if root_hierarchical_grouphash is not None: + new_hashes = [root_hierarchical_grouphash] + else: + new_hashes = list(flat_grouphashes) + + GroupHash.objects.filter(id__in=[h.id for h in new_hashes]).exclude( + state=GroupHash.State.LOCKED_IN_MIGRATION + ).update(group=group) + + is_new = True + is_regression = False + + span.set_tag("create_group_transaction.outcome", "new_group") + metric_tags["create_group_transaction.outcome"] = "new_group" + + metrics.incr( + "group.created", + skip_internal=True, + tags={ + "platform": event.platform or "unknown", + "sdk": normalized_sdk_tag_from_event(event), + }, + ) + + # This only applies to events with stacktraces + frame_mix = event.get_event_metadata().get("in_app_frame_mix") + if frame_mix: + metrics.incr( + "grouping.in_app_frame_mix", + sample_rate=1.0, + tags={ + "platform": event.platform or "unknown", + "sdk": normalized_sdk_tag_from_event(event), + "frame_mix": frame_mix, + }, + ) + + 
return GroupInfo(group, is_new, is_regression) + + group = Group.objects.get(id=existing_grouphash.group_id) + if group.issue_category != GroupCategory.ERROR: + logger.info( + "event_manager.category_mismatch", + extra={ + "issue_category": group.issue_category, + "event_type": "error", + }, + ) + return None + + is_new = False + + # For the migration from hierarchical to non hierarchical we want to associate + # all group hashes + if migrate_off_hierarchical: + new_hashes = [h for h in flat_grouphashes if h.group_id is None] + if root_hierarchical_grouphash and root_hierarchical_grouphash.group_id is None: + new_hashes.append(root_hierarchical_grouphash) + elif root_hierarchical_grouphash is None: + # No hierarchical grouping was run, only consider flat hashes + new_hashes = [h for h in flat_grouphashes if h.group_id is None] + elif root_hierarchical_grouphash.group_id is None: + # The root hash is not assigned to a group. + # We ran multiple grouping algorithms + # (see secondary grouping), and the hierarchical hash is new + new_hashes = [root_hierarchical_grouphash] + else: + new_hashes = [] + + if new_hashes: + # There may still be secondary hashes that we did not use to find an + # existing group. A classic example is when grouping makes changes to + # the app-hash (changes to in_app logic), but the system hash stays + # stable and is used to find an existing group. Associate any new + # hashes with the group such that event saving continues to be + # resilient against grouping algorithm changes. + # + # There is a race condition here where two processes could "steal" + # hashes from each other. In practice this should not be user-visible + # as group creation is synchronized. Meaning the only way hashes could + # jump between groups is if there were two processes that: + # + # 1) have BOTH found an existing group + # (otherwise at least one of them would be in the group creation + # codepath which has transaction isolation/acquires row locks) + # 2) AND are looking at the same set, or an overlapping set of hashes + # (otherwise they would not operate on the same rows) + # 3) yet somehow also sort their event into two different groups each + # (otherwise the update would not change anything) + # + # We think this is a very unlikely situation. A previous version of + # _save_aggregate had races around group creation which made this race + # more user visible. For more context, see 84c6f75a and d0e22787, as + # well as GH-5085. 
+ GroupHash.objects.filter(id__in=[h.id for h in new_hashes]).exclude( + state=GroupHash.State.LOCKED_IN_MIGRATION + ).update(group=group) + + is_regression = _process_existing_aggregate( + group=group, + event=event, + incoming_group_values=group_creation_kwargs, + release=release, + ) + + return GroupInfo(group, is_new, is_regression) + + def _create_group(project: Project, event: Event, **kwargs: Any) -> Group: try: short_id = project.next_short_id() diff --git a/tests/sentry/event_manager/grouping/test_assign_to_group.py b/tests/sentry/event_manager/grouping/test_assign_to_group.py index 2ec1dbfb06aeaa..3121afefb9e086 100644 --- a/tests/sentry/event_manager/grouping/test_assign_to_group.py +++ b/tests/sentry/event_manager/grouping/test_assign_to_group.py @@ -17,6 +17,7 @@ from sentry.models.grouphash import GroupHash from sentry.models.project import Project from sentry.testutils.helpers.eventprocessing import save_new_event +from sentry.testutils.helpers.features import Feature from sentry.testutils.pytest.fixtures import django_db_all from sentry.testutils.pytest.mocking import capture_return_values from sentry.testutils.skips import requires_snuba @@ -123,6 +124,7 @@ def get_results_from_saving_event( secondary_config: str, in_transition: bool, existing_group_id: int | None = None, + new_logic_enabled: bool = False, ): # Whether or not these are assigned a value depends on the values of `in_transition` and # `existing_group_id`. Everything else we'll return will definitely get a value and therefore @@ -140,7 +142,10 @@ def get_results_from_saving_event( return_values: dict[str, list[Any]] = {} - with patch_grouping_helpers(return_values) as spies: + with ( + patch_grouping_helpers(return_values) as spies, + Feature({"organizations:grouping-suppress-unnecessary-secondary-hash": new_logic_enabled}), + ): calculate_secondary_hash_spy = spies["_calculate_secondary_hash"] create_group_spy = spies["_create_group"] calculate_primary_hash_spy = spies["_calculate_primary_hash"] @@ -229,7 +234,13 @@ def get_results_from_saving_event( @pytest.mark.parametrize( "in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ") ) +@pytest.mark.parametrize( + "new_logic_enabled", + (True, False), + ids=(" new_logic_enabled: True ", " new_logic_enabled: False "), +) def test_new_group( + new_logic_enabled: bool, in_transition: bool, default_project: Project, ): @@ -242,6 +253,7 @@ def test_new_group( primary_config=NEWSTYLE_CONFIG, secondary_config=LEGACY_CONFIG, in_transition=in_transition, + new_logic_enabled=new_logic_enabled, ) if in_transition: @@ -281,7 +293,13 @@ def test_new_group( @pytest.mark.parametrize( "in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ") ) +@pytest.mark.parametrize( + "new_logic_enabled", + (True, False), + ids=(" new_logic_enabled: True ", " new_logic_enabled: False "), +) def test_existing_group_no_new_hash( + new_logic_enabled: bool, in_transition: bool, default_project: Project, ): @@ -299,6 +317,7 @@ def test_existing_group_no_new_hash( secondary_config=LEGACY_CONFIG, in_transition=in_transition, existing_group_id=existing_event.group_id, + new_logic_enabled=new_logic_enabled, ) if in_transition: @@ -336,6 +355,11 @@ def test_existing_group_no_new_hash( @pytest.mark.parametrize( "in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ") ) +@pytest.mark.parametrize( + "new_logic_enabled", + (True, False), + ids=(" new_logic_enabled: True ", " new_logic_enabled: False "), +) 
@pytest.mark.parametrize( "secondary_hash_exists", (True, False), @@ -343,6 +367,7 @@ def test_existing_group_no_new_hash( ) def test_existing_group_new_hash_exists( secondary_hash_exists: bool, + new_logic_enabled: bool, in_transition: bool, default_project: Project, ): @@ -377,6 +402,7 @@ def test_existing_group_new_hash_exists( secondary_config=LEGACY_CONFIG, in_transition=in_transition, existing_group_id=existing_event.group_id, + new_logic_enabled=new_logic_enabled, ) if in_transition: From 6cfc89e2cdb91213b2de93510e528af7fbe36ea7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 12 Feb 2024 13:24:12 -0800 Subject: [PATCH 279/357] ref(grouping): Call `find_existing_grouphash_new` when feature flag enabled (#64857) This is a follow-up to https://github.com/getsentry/sentry/pull/64851, and does a similar thing: Before modifying the logic of `find_existing_grouphash`, it creates an identical copy, `find_existing_grouphash_new`, to be called in the feature-flag-on branch of `assign_event_to_group` (via `_save_aggregate_new`). As in that PR, the reason for doing this is so that it will be easier to tell what actually changes between the current version of the function and the new version once those changes are made. --- src/sentry/event_manager.py | 5 +- src/sentry/grouping/ingest.py | 80 +++++++++++++++++++ .../grouping/test_assign_to_group.py | 15 +++- 3 files changed, 97 insertions(+), 3 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 19b6d56a156ad8..d282bf29966bdb 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -48,6 +48,7 @@ from sentry.grouping.api import GroupingConfig, get_grouping_config_dict_for_project from sentry.grouping.ingest import ( find_existing_grouphash, + find_existing_grouphash_new, get_hash_values, update_grouping_config_if_needed, ) @@ -1634,7 +1635,7 @@ def _save_aggregate_new( # this for select_for_update mostly provides sufficient synchronization # when groups are created and also relieves contention by locking a more # specific hash than `hierarchical_hashes[0]`. - existing_grouphash, root_hierarchical_hash = find_existing_grouphash( + existing_grouphash, root_hierarchical_hash = find_existing_grouphash_new( project, flat_grouphashes, hashes.hierarchical_hashes ) @@ -1696,7 +1697,7 @@ def _save_aggregate_new( flat_grouphashes = [gh for gh in all_grouphashes if gh.hash in hashes.hashes] - existing_grouphash, root_hierarchical_hash = find_existing_grouphash( + existing_grouphash, root_hierarchical_hash = find_existing_grouphash_new( project, flat_grouphashes, hashes.hierarchical_hashes ) diff --git a/src/sentry/grouping/ingest.py b/src/sentry/grouping/ingest.py index 6afb94dd5bf155..268556df7e5da6 100644 --- a/src/sentry/grouping/ingest.py +++ b/src/sentry/grouping/ingest.py @@ -303,6 +303,86 @@ def find_existing_grouphash( return None, root_hierarchical_hash +def find_existing_grouphash_new( + project: Project, + flat_grouphashes: Sequence[GroupHash], + hierarchical_hashes: Sequence[str] | None, +) -> tuple[GroupHash | None, str | None]: + all_grouphashes = [] + root_hierarchical_hash = None + + found_split = False + + if hierarchical_hashes: + hierarchical_grouphashes = { + h.hash: h + for h in GroupHash.objects.filter(project=project, hash__in=hierarchical_hashes) + } + + # Look for splits: + # 1. If we find a hash with SPLIT state at `n`, we want to use + # `n + 1` as the root hash. + # 2. 
If we find a hash associated to a group that is more specific + # than the primary hash, we want to use that hash as root hash. + for hash in reversed(hierarchical_hashes): + group_hash = hierarchical_grouphashes.get(hash) + + if group_hash is not None and group_hash.state == GroupHash.State.SPLIT: + found_split = True + break + + root_hierarchical_hash = hash + + if group_hash is not None: + all_grouphashes.append(group_hash) + + if group_hash.group_id is not None: + # Even if we did not find a hash with SPLIT state, we want to use + # the most specific hierarchical hash as root hash if it was already + # associated to a group. + # See `move_all_events` test case + break + + if root_hierarchical_hash is None: + # All hashes were split, so we group by most specific hash. This is + # a legitimate usecase when there are events whose stacktraces are + # suffixes of other event's stacktraces. + root_hierarchical_hash = hierarchical_hashes[-1] + group_hash = hierarchical_grouphashes.get(root_hierarchical_hash) + + if group_hash is not None: + all_grouphashes.append(group_hash) + + if not found_split: + # In case of a split we want to avoid accidentally finding the split-up + # group again via flat hashes, which are very likely associated with + # whichever group is attached to the split hash. This distinction will + # become irrelevant once we start moving existing events into child + # groups and delete the parent group. + all_grouphashes.extend(flat_grouphashes) + + for group_hash in all_grouphashes: + if group_hash.group_id is not None: + return group_hash, root_hierarchical_hash + + # When refactoring for hierarchical grouping, we noticed that a + # tombstone may get ignored entirely if there is another hash *before* + # that happens to have a group_id. This bug may not have been noticed + # for a long time because most events only ever have 1-2 hashes. It + # will definitely get more noticeable with hierarchical grouping and + # it's not clear what good behavior would look like. Do people want to + # be able to tombstone `hierarchical_hashes[4]` while still having a + # group attached to `hierarchical_hashes[0]`? Maybe. 
+ if group_hash.group_tombstone_id is not None: + raise HashDiscarded( + "Matches group tombstone %s" % group_hash.group_tombstone_id, + reason="discard", + tombstone_id=group_hash.group_tombstone_id, + ) + + return None, root_hierarchical_hash + + def get_hash_values( project: Project, job: Job, diff --git a/tests/sentry/event_manager/grouping/test_assign_to_group.py b/tests/sentry/event_manager/grouping/test_assign_to_group.py index 3121afefb9e086..ab0e6ef4a2eeac 100644 --- a/tests/sentry/event_manager/grouping/test_assign_to_group.py +++ b/tests/sentry/event_manager/grouping/test_assign_to_group.py @@ -13,6 +13,7 @@ _calculate_primary_hash, _calculate_secondary_hash, find_existing_grouphash, + find_existing_grouphash_new, ) from sentry.models.grouphash import GroupHash from sentry.models.project import Project @@ -32,6 +33,9 @@ @contextmanager def patch_grouping_helpers(return_values: dict[str, Any]): wrapped_find_existing_grouphash = capture_return_values(find_existing_grouphash, return_values) + wrapped_find_existing_grouphash_new = capture_return_values( + find_existing_grouphash_new, return_values + ) wrapped_calculate_primary_hash = capture_return_values(_calculate_primary_hash, return_values) wrapped_calculate_secondary_hash = capture_return_values( _calculate_secondary_hash, return_values @@ -42,6 +46,10 @@ def patch_grouping_helpers(return_values: dict[str, Any]): "sentry.event_manager.find_existing_grouphash", wraps=wrapped_find_existing_grouphash, ) as find_existing_grouphash_spy, + mock.patch( + "sentry.event_manager.find_existing_grouphash_new", + wraps=wrapped_find_existing_grouphash_new, + ) as find_existing_grouphash_new_spy, mock.patch( "sentry.grouping.ingest._calculate_primary_hash", wraps=wrapped_calculate_primary_hash, @@ -59,6 +67,7 @@ def patch_grouping_helpers(return_values: dict[str, Any]): ): yield { "find_existing_grouphash": find_existing_grouphash_spy, + "find_existing_grouphash_new": find_existing_grouphash_new_spy, "_calculate_primary_hash": calculate_primary_hash_spy, "_calculate_secondary_hash": calculate_secondary_hash_spy, "_create_group": create_group_spy, @@ -126,6 +135,10 @@ def get_results_from_saving_event( existing_group_id: int | None = None, new_logic_enabled: bool = False, ): + find_existing_grouphash_fn = ( + "find_existing_grouphash_new" if new_logic_enabled else "find_existing_grouphash" + ) + # Whether or not these are assigned a value depends on the values of `in_transition` and # `existing_group_id`. Everything else we'll return will definitely get a value and therefore # doesn't need to be initialized. @@ -158,7 +171,7 @@ def get_results_from_saving_event( ) new_event = save_new_event(event_data, project) - hash_search_result = return_values["find_existing_grouphash"][0][0] + hash_search_result = return_values[find_existing_grouphash_fn][0][0] post_save_grouphashes = { gh.hash: gh.group_id for gh in GroupHash.objects.filter(project_id=project.id) } From b33b201838686f99f7674df9fae9bdf2c6f072f2 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Mon, 12 Feb 2024 16:27:51 -0500 Subject: [PATCH 280/357] fix(app-start): Pass along start type to samples components (#65031) The start type wasn't being passed along to the event samples drawer. I added some props to let me pass along this filter. Along the way I noticed the hooks we use for querying were using different APIs for passing along filters. Some wanted a list of `key:value` or others wanted an object with a `{key: value}` mapping. 
Some components used `query` in the `key:value` format, but only applied it to some of the hooks. I've passed along both and added filters to wherever it was necessary, but this feels pretty tech-debt-y and I'd like to fix it in a future PR. --- .../views/appStartup/screenSummary/index.tsx | 8 +++++++- .../screenSummary/spanOperationTable.tsx | 1 + .../screens/screenLoadSpans/samples/index.tsx | 4 ++++ .../screenLoadSpans/samples/samplesContainer.tsx | 16 +++++++++++++++- .../sampleList/durationChart/index.tsx | 4 +++- .../sampleList/sampleTable/sampleTable.tsx | 4 +++- 6 files changed, 33 insertions(+), 4 deletions(-) diff --git a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx index 1648b8b41aef86..5375d21aa527c2 100644 --- a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx +++ b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx @@ -34,6 +34,7 @@ type Query = { primaryRelease: string; project: string; secondaryRelease: string; + spanAppStartType: string; spanDescription: string; spanGroup: string; spanOp: string; @@ -52,6 +53,7 @@ function ScreenSummary() { spanGroup, spanDescription, spanOp, + spanAppStartType, } = location.query; const startupModule: LocationDescriptor = { @@ -61,6 +63,7 @@ function ScreenSummary() { QueryParameterNames.SPANS_SORT, 'transaction', SpanMetricsField.SPAN_OP, + SpanMetricsField.APP_START_TYPE, ]), }, }; @@ -192,8 +195,11 @@ function ScreenSummary() { - {spanGroup && spanOp && ( + {spanGroup && spanOp && spanAppStartType && ( ; onClose?: () => void; spanDescription?: string; spanOp?: string; @@ -36,6 +37,7 @@ export function ScreenLoadSpanSamples({ onClose, transactionRoute = '/performance/summary/', spanOp, + additionalFilters, }: Props) { const router = useRouter(); @@ -118,6 +120,7 @@ export function ScreenLoadSpanSamples({ sectionTitle={t('Release 1')} project={project} spanOp={spanOp} + additionalFilters={additionalFilters} /> @@ -129,6 +132,7 @@ export function ScreenLoadSpanSamples({ sectionTitle={t('Release 2')} project={project} spanOp={spanOp} + additionalFilters={additionalFilters} /> diff --git a/static/app/views/starfish/views/screens/screenLoadSpans/samples/samplesContainer.tsx b/static/app/views/starfish/views/screens/screenLoadSpans/samples/samplesContainer.tsx index 0b6d39ecee8983..2ee2e11ef7e49f 100644 --- a/static/app/views/starfish/views/screens/screenLoadSpans/samples/samplesContainer.tsx +++ b/static/app/views/starfish/views/screens/screenLoadSpans/samples/samplesContainer.tsx @@ -35,6 +35,7 @@ const {SPAN_SELF_TIME, SPAN_OP} = SpanMetricsField; type Props = { groupId: string; transactionName: string; + additionalFilters?: Record; project?: Project | null; release?: string; sectionSubtitle?: string; @@ -50,6 +51,7 @@ export function ScreenLoadSampleContainer({ release, project, spanOp, + additionalFilters, }: Props) { const router = useRouter(); const location = useLocation(); @@ -97,7 +99,7 @@ export function ScreenLoadSampleContainer({ } const {data} = useSpanMetrics({ - filters, + filters: {...filters, ...additionalFilters}, fields: [`avg(${SPAN_SELF_TIME})`, 'count()', SPAN_OP], referrer: 'api.starfish.span-summary-panel-samples-table-avg', }); @@ -148,6 +150,12 @@ export function ScreenLoadSampleContainer({ `${key}:${value}`) + : undefined + } + additionalFilters={additionalFilters} groupId={groupId} transactionName={transactionName} transactionMethod={transactionMethod} @@ -167,6 +175,12 @@ export function 
ScreenLoadSampleContainer({ } /> `${key}:${value}`) + : undefined + } + additionalFilters={additionalFilters} highlightedSpanId={highlightedSpanId} transactionMethod={transactionMethod} onMouseLeaveSample={() => setHighlightedSpanId(undefined)} diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx index df9a788bcce548..1c3f32bda3bc00 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx @@ -31,6 +31,7 @@ type Props = { groupId: string; transactionName: string; additionalFields?: string[]; + additionalFilters?: Record; highlightedSpanId?: string; onClickSample?: (sample: SpanSample) => void; onMouseLeaveSample?: () => void; @@ -77,6 +78,7 @@ function DurationChart({ release, query, platform, + additionalFilters, }: Props) { const theme = useTheme(); const {setPageError} = usePageAlert(); @@ -104,7 +106,7 @@ function DurationChart({ data: spanMetricsSeriesData, error: spanMetricsSeriesError, } = useSpanMetricsSeries({ - filters, + filters: {...filters, ...additionalFilters}, yAxis: [`avg(${SPAN_SELF_TIME})`], referrer: 'api.starfish.sidebar-span-metrics-chart', }); diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx index bb5781671ecfc2..13bd65b4a99570 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx @@ -28,6 +28,7 @@ type Props = { groupId: string; transactionName: string; additionalFields?: string[]; + additionalFilters?: Record; columnOrder?: SamplesTableColumnHeader[]; highlightedSpanId?: string; onMouseLeaveSample?: () => void; @@ -48,6 +49,7 @@ function SampleTable({ release, query, additionalFields, + additionalFilters, }: Props) { const filters: SpanMetricsQueryFilters = { 'span.group': groupId, @@ -63,7 +65,7 @@ function SampleTable({ } const {data, isFetching: isFetchingSpanMetrics} = useSpanMetrics({ - filters, + filters: {...filters, ...additionalFilters}, fields: [`avg(${SPAN_SELF_TIME})`, SPAN_OP], referrer: 'api.starfish.span-summary-panel-samples-table-avg', }); From 0d6e7094a8c4aaad22ffa82df38212176ddffa60 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Mon, 12 Feb 2024 13:30:19 -0800 Subject: [PATCH 281/357] chore(hc): Fixes for region mode by default in testing (#64849) --- tests/relay_integration/test_sdk.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/tests/relay_integration/test_sdk.py b/tests/relay_integration/test_sdk.py index 22038e5a9bd2d9..e09817076f5362 100644 --- a/tests/relay_integration/test_sdk.py +++ b/tests/relay_integration/test_sdk.py @@ -130,29 +130,27 @@ def test_bind_organization_context(default_organization): @no_silo_test @override_settings(SENTRY_PROJECT=1) @django_db_all -def test_bind_organization_context_with_callback(settings, default_organization): +def test_bind_organization_context_with_callback(default_organization): create_default_projects() configure_sdk() def add_context(scope, organization, **kwargs): scope.set_tag("organization.test", "1") - settings.SENTRY_ORGANIZATION_CONTEXT_HELPER = add_context - bind_organization_context(default_organization) - - assert 
Hub.current.scope._tags["organization.test"] == "1" + with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): + bind_organization_context(default_organization) + assert Hub.current.scope._tags["organization.test"] == "1" @no_silo_test @override_settings(SENTRY_PROJECT=1) @django_db_all -def test_bind_organization_context_with_callback_error(settings, default_organization): +def test_bind_organization_context_with_callback_error(default_organization): configure_sdk() def add_context(scope, organization, **kwargs): 1 / 0 - settings.SENTRY_ORGANIZATION_CONTEXT_HELPER = add_context - bind_organization_context(default_organization) - - assert Hub.current.scope._tags["organization"] == default_organization.id + with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): + bind_organization_context(default_organization) + assert Hub.current.scope._tags["organization"] == default_organization.id From 8fbfffd01d520070910f46f99437818882902447 Mon Sep 17 00:00:00 2001 From: Ryan Skonnord Date: Mon, 12 Feb 2024 13:52:30 -0800 Subject: [PATCH 282/357] test(hc): Tag TestAlertRuleActionRequester (#65027) --- .../external_requests/test_alert_rule_action_requester.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/sentry/mediators/external_requests/test_alert_rule_action_requester.py b/tests/sentry/mediators/external_requests/test_alert_rule_action_requester.py index ddc62e0fa73414..e08e65940b7ec1 100644 --- a/tests/sentry/mediators/external_requests/test_alert_rule_action_requester.py +++ b/tests/sentry/mediators/external_requests/test_alert_rule_action_requester.py @@ -6,10 +6,12 @@ AlertRuleActionRequester, ) from sentry.testutils.cases import TestCase +from sentry.testutils.silo import control_silo_test from sentry.utils import json from sentry.utils.sentry_apps import SentryAppWebhookRequestsBuffer +@control_silo_test class TestAlertRuleActionRequester(TestCase): def setUp(self): super().setUp() From b583df71ffa3b6197b15b8e614132c7fce3a558d Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 12 Feb 2024 14:31:09 -0800 Subject: [PATCH 283/357] fix(releases): Display only the selected repo's commits (#65029) --- fixtures/js-stubs/commit.ts | 4 +- .../detail/commitsAndFiles/commits.spec.tsx | 50 +++++++++++++++++++ .../detail/commitsAndFiles/commits.tsx | 23 +++++---- 3 files changed, 64 insertions(+), 13 deletions(-) diff --git a/fixtures/js-stubs/commit.ts b/fixtures/js-stubs/commit.ts index 994e7e20a44dc6..fb38159eceb382 100644 --- a/fixtures/js-stubs/commit.ts +++ b/fixtures/js-stubs/commit.ts @@ -1,9 +1,9 @@ import {CommitAuthorFixture} from 'sentry-fixture/commitAuthor'; import {RepositoryFixture} from 'sentry-fixture/repository'; -import {type Commit} from 'sentry/types'; +import type {Commit} from 'sentry/types'; -export function CommitFixture(params = {}): Commit { +export function CommitFixture(params: Partial = {}): Commit { return { dateCreated: '2018-11-30T18:46:31Z', message: diff --git a/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx b/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx index 34f93005d051ef..f1e3e85e178e0a 100644 --- a/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx +++ b/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx @@ -123,4 +123,54 @@ describe('Commits', () => { selectEvent.openMenu(screen.getByRole('button')); expect(screen.getByText('getsentry/sentry-frontend')).toBeInTheDocument(); }); + + it('should render the commits from the selected repo', async () => { 
+    const otherRepo = RepositoryFixture({
+      id: '5',
+      name: 'getsentry/sentry-frontend',
+      integrationId: '1',
+    });
+    // Current repo is stored in query parameter activeRepo
+    const {routerContext: newRouterContext, routerProps: newRouterProps} = initializeOrg({
+      router: {
+        params: {release: release.version},
+        location: {query: {activeRepo: otherRepo.name}},
+      },
+    });
+    MockApiClient.addMockResponse({
+      url: `/projects/${organization.slug}/${project.slug}/releases/${encodeURIComponent(
+        release.version
+      )}/repositories/`,
+      body: [repos[0]!, otherRepo],
+    });
+    MockApiClient.addMockResponse({
+      url: `/organizations/org-slug/releases/${encodeURIComponent(
+        release.version
+      )}/commits/`,
+      body: [
+        CommitFixture(),
+        CommitFixture({
+          repository: otherRepo,
+        }),
+      ],
+    });
+    render(
+      {},
+          hasHealthData: false,
+          releaseBounds: {} as any,
+          releaseMeta: {} as any,
+        }}
+      >
+
+      ,
+      {context: newRouterContext}
+    );
+    expect(await screen.findByRole('button')).toHaveTextContent(otherRepo.name);
+    expect(screen.queryByText('example/repo-name')).not.toBeInTheDocument();
+  });
 });
diff --git a/static/app/views/releases/detail/commitsAndFiles/commits.tsx b/static/app/views/releases/detail/commitsAndFiles/commits.tsx
index 5c398aca4f5132..c088061c2a8fbe 100644
--- a/static/app/views/releases/detail/commitsAndFiles/commits.tsx
+++ b/static/app/views/releases/detail/commitsAndFiles/commits.tsx
@@ -59,6 +59,9 @@ function Commits({activeReleaseRepo, releaseRepos, projectSlug}: CommitsProps) {
   const commitsByRepository = getCommitsByRepository(commitList);
   const reposToRender = getReposToRender(Object.keys(commitsByRepository));
+  const activeRepoName: string | undefined = activeReleaseRepo
+    ? activeReleaseRepo.name
+    : reposToRender[0];

   return (
@@ -80,18 +83,16 @@ function Commits({activeReleaseRepo, releaseRepos, projectSlug}: CommitsProps) {
         {commitListError && }
         {isLoadingCommitList ? (
-        ) : commitList.length ? (
+        ) : commitList.length && activeRepoName ? (
-          {reposToRender.map(repoName => (
-
-              {repoName}
-
-                {commitsByRepository[repoName]?.map(commit => (
-
-                ))}
-
-
-          ))}
+
+            {activeRepoName}
+
+              {commitsByRepository[activeRepoName]?.map(commit => (
+
+              ))}
+
+
         ) : (

From a91ddc2f6f8110284d4d2fbd614cea23b6d3f199 Mon Sep 17 00:00:00 2001
From: Ryan Albrecht
Date: Mon, 12 Feb 2024 14:35:41 -0800
Subject: [PATCH 284/357] fix(replay): Improve render of Hydration Diffs when
 missing next-mutation breadcrumb (#65036)

This improves the Hydration Diff modal in a few ways, especially when the
'next' mutation isn't found (or doesn't exist!). Specifically:
- `stopPropagation()` when the button is clicked to open the modal. Before, we
  were opening the modal and changing the timestamp of the replay at the same
  time.
- Adds the 'Before Hydration' and 'After Hydration' headers to the left/right
  sides of the Visual Diff tab.
- When the `next` mutation is not found, render nothing in the right side of
  the Visual Diff tab. Before, we would render the replay at time=0, which is
  not correct.
- Render a text diff when the `next` mutation is not found, where the right
  side text is assumed to be `''`. Before, we passed in `null` and no html
  diff was rendered.
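In code, the two fallbacks amount to roughly this (a simplified sketch with
hypothetical helper names, not the actual component code):

```tsx
type RecordingFrame = {timestamp: number};
type Mutations = {prev: RecordingFrame | null; next: RecordingFrame | null};

// When no `next` mutation exists, fall back to 0 instead of reading
// `mutations.next.timestamp` off of null.
function rightTimestampMs(mutations: Mutations, replayStartMs: number): number {
  return (mutations.next?.timestamp ?? 0) - replayStartMs;
}

// Diff against '' when the right side is missing, so a text diff still renders.
function diffBodies(leftBody: string | null, rightBody: string | null) {
  return {base: leftBody ?? '', target: rightBody ?? ''};
}
```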
![SCR-20240212-mokv](https://github.com/getsentry/sentry/assets/187460/8073c5fe-546d-407a-918c-d17b484d8287) ![SCR-20240212-molj](https://github.com/getsentry/sentry/assets/187460/471ecca7-375c-4a96-97f6-289d75bd17dc) Fixes #65023 --- .../replays/breadcrumbs/breadcrumbItem.tsx | 2 +- .../openReplayComparisonButton.tsx | 3 +- .../breadcrumbs/replayComparisonModal.tsx | 104 +++++++++++------- .../app/utils/replays/hydrateBreadcrumbs.tsx | 4 +- 4 files changed, 72 insertions(+), 41 deletions(-) diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx index 4ba181025b271d..7592683eaebedb 100644 --- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx +++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx @@ -123,7 +123,7 @@ function BreadcrumbItem({ replay={replay} leftTimestamp={frame.offsetMs} rightTimestamp={ - (frame.data.mutations.next.timestamp as number) - + (frame.data.mutations.next?.timestamp ?? 0) - (replay?.getReplay().started_at.getTime() ?? 0) } /> diff --git a/static/app/components/replays/breadcrumbs/openReplayComparisonButton.tsx b/static/app/components/replays/breadcrumbs/openReplayComparisonButton.tsx index 40d34fab7ff958..ec5f201733003d 100644 --- a/static/app/components/replays/breadcrumbs/openReplayComparisonButton.tsx +++ b/static/app/components/replays/breadcrumbs/openReplayComparisonButton.tsx @@ -31,7 +31,8 @@ export function OpenReplayComparisonButton({ size="xs" analyticsEventKey="replay.details-hydration-modal-opened" analyticsEventName="Replay Details Hydration Modal Opened" - onClick={() => { + onClick={event => { + event.stopPropagation(); openModal( deps => ( ('html'); + const [activeTab, setActiveTab] = useState(Tab.HTML); const [leftBody, setLeftBody] = useState(null); const [rightBody, setRightBody] = useState(null); @@ -89,55 +94,71 @@ export default function ReplayComparisonModal({ setActiveTab(tab as 'visual' | 'html')} + onSelectionChange={tab => setActiveTab(tab as Tab)} > - Html Diff - Visual Diff + {t('Html Diff')} + {t('Visual Diff')} + - - - - - - - - - - + + + {t('Before Hydration')} + + + {t('After Hydration')} + + + + + + + + + + + {rightTimestamp > 0 ? ( + + ) : ( +
    + )} + + + - {activeTab === 'html' && leftBody && rightBody ? ( + {activeTab === Tab.HTML ? ( {t('Before Hydration')} {t('After Hydration')} - + ) : null} @@ -214,11 +235,20 @@ const DiffHeader = styled('div')` font-weight: 600; line-height: 1.2; + div { + height: 28px; /* div with and without buttons inside are the same height */ + } + div:last-child { padding-left: ${space(2)}; } `; +const ReplayGrid = styled('div')` + display: grid; + grid-template-columns: 1fr 1fr; +`; + const StyledParagraph = styled('p')` padding-top: ${space(0.5)}; margin-bottom: ${space(1)}; diff --git a/static/app/utils/replays/hydrateBreadcrumbs.tsx b/static/app/utils/replays/hydrateBreadcrumbs.tsx index 3011a2c4333764..8f7ce6ca1f5c4e 100644 --- a/static/app/utils/replays/hydrateBreadcrumbs.tsx +++ b/static/app/utils/replays/hydrateBreadcrumbs.tsx @@ -19,8 +19,8 @@ function findCloseMutations(date: Date, rrwebFrames: RecordingFrame[]) { const framesBefore = incrementalFrames.filter(frame => frame.timestamp <= timeMS); const framesAfter = incrementalFrames.filter(frame => frame.timestamp > timeMS); return { - prev: framesBefore.slice(-1)[0] ?? null, - next: framesAfter[0] ?? null, + prev: framesBefore.at(-1) ?? null, + next: framesAfter.at(0) ?? null, }; } From ee63ee56994a2ee3d0a9167156d467900ce60af7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 12 Feb 2024 15:00:53 -0800 Subject: [PATCH 285/357] ref(grouping): Remove hierarchical code from `_save_aggregate_new` (#64858) Since the new group assignment logic in `event_manager` won't apply to anyone on the mobile grouping config, we have the freedom to remove the hierarchical code required by that config. This should make further refactoring easier, since it simplifies the code quite a bit. --- src/sentry/event_manager.py | 85 ++++++------------------------------- 1 file changed, 12 insertions(+), 73 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index d282bf29966bdb..1e0c48b156b6af 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -1602,7 +1602,7 @@ def _save_aggregate_new( ) -> GroupInfo | None: project = event.project - primary_hashes, secondary_hashes, hashes = get_hash_values(project, job, metric_tags) + _, _, hashes = get_hash_values(project, job, metric_tags) # Now that we've used the current and possibly secondary grouping config(s) to calculate the # hashes, we're free to perform a config update if needed. Future events will use the new @@ -1615,46 +1615,14 @@ def _save_aggregate_new( group_creation_kwargs = _get_group_creation_kwargs(job) - # Because this logic is not complex enough we want to special case the situation where we - # migrate from a hierarchical hash to a non hierarchical hash. The reason being that - # there needs to be special logic to not create orphaned hashes in migration cases - # but it wants a different logic to implement splitting of hierarchical hashes. - migrate_off_hierarchical = bool( - secondary_hashes - and secondary_hashes.hierarchical_hashes - and not primary_hashes.hierarchical_hashes - ) - - flat_grouphashes = [ + grouphashes = [ GroupHash.objects.get_or_create(project=project, hash=hash)[0] for hash in hashes.hashes ] - # The root_hierarchical_hash is the least specific hash within the tree, so - # typically hierarchical_hashes[0], unless a hash `n` has been split in - # which case `root_hierarchical_hash = hierarchical_hashes[n + 1]`. 
Chosing - # this for select_for_update mostly provides sufficient synchronization - # when groups are created and also relieves contention by locking a more - # specific hash than `hierarchical_hashes[0]`. - existing_grouphash, root_hierarchical_hash = find_existing_grouphash_new( - project, flat_grouphashes, hashes.hierarchical_hashes + existing_grouphash, _ = find_existing_grouphash_new( + project, grouphashes, hashes.hierarchical_hashes ) - if root_hierarchical_hash is not None: - root_hierarchical_grouphash = GroupHash.objects.get_or_create( - project=project, hash=root_hierarchical_hash - )[0] - - metadata.update( - hashes.group_metadata_from_hash( - existing_grouphash.hash - if existing_grouphash is not None - else root_hierarchical_hash - ) - ) - - else: - root_hierarchical_grouphash = None - # In principle the group gets the same metadata as the event, so common # attributes can be defined in eventtypes. # @@ -1687,27 +1655,16 @@ def _save_aggregate_new( span.set_tag("create_group_transaction.outcome", "no_group") metric_tags["create_group_transaction.outcome"] = "no_group" - all_grouphash_ids = [h.id for h in flat_grouphashes] - if root_hierarchical_grouphash is not None: - all_grouphash_ids.append(root_hierarchical_grouphash.id) - - all_grouphashes = list( - GroupHash.objects.filter(id__in=all_grouphash_ids).select_for_update() + grouphashes = list( + GroupHash.objects.filter( + id__in=[h.id for h in grouphashes], + ).select_for_update() ) - flat_grouphashes = [gh for gh in all_grouphashes if gh.hash in hashes.hashes] - - existing_grouphash, root_hierarchical_hash = find_existing_grouphash_new( - project, flat_grouphashes, hashes.hierarchical_hashes + existing_grouphash, _ = find_existing_grouphash_new( + project, grouphashes, hashes.hierarchical_hashes ) - if root_hierarchical_hash is not None: - root_hierarchical_grouphash = GroupHash.objects.get_or_create( - project=project, hash=root_hierarchical_hash - )[0] - else: - root_hierarchical_grouphash = None - if existing_grouphash is None: group = _create_group(project, event, **group_creation_kwargs) @@ -1723,10 +1680,7 @@ def _save_aggregate_new( }, ) - if root_hierarchical_grouphash is not None: - new_hashes = [root_hierarchical_grouphash] - else: - new_hashes = list(flat_grouphashes) + new_hashes = list(grouphashes) GroupHash.objects.filter(id__in=[h.id for h in new_hashes]).exclude( state=GroupHash.State.LOCKED_IN_MIGRATION @@ -1775,22 +1729,7 @@ def _save_aggregate_new( is_new = False - # For the migration from hierarchical to non hierarchical we want to associate - # all group hashes - if migrate_off_hierarchical: - new_hashes = [h for h in flat_grouphashes if h.group_id is None] - if root_hierarchical_grouphash and root_hierarchical_grouphash.group_id is None: - new_hashes.append(root_hierarchical_grouphash) - elif root_hierarchical_grouphash is None: - # No hierarchical grouping was run, only consider flat hashes - new_hashes = [h for h in flat_grouphashes if h.group_id is None] - elif root_hierarchical_grouphash.group_id is None: - # The root hash is not assigned to a group. 
- # We ran multiple grouping algorithms - # (see secondary grouping), and the hierarchical hash is new - new_hashes = [root_hierarchical_grouphash] - else: - new_hashes = [] + new_hashes = [h for h in grouphashes if h.group_id is None] if new_hashes: # There may still be secondary hashes that we did not use to find an From 1dcfdc2a7f7b73320ed76eccac8bf11e67742901 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 12 Feb 2024 15:05:50 -0800 Subject: [PATCH 286/357] fix(feedback): change ticket linking wording to say sentry feedback (#65015) fixes https://github.com/getsentry/sentry/issues/64981 --- src/sentry/integrations/github/issues.py | 17 +++++++--- .../sentry/integrations/github/test_issues.py | 32 +++++++++++++++++++ 2 files changed, 45 insertions(+), 4 deletions(-) diff --git a/src/sentry/integrations/github/issues.py b/src/sentry/integrations/github/issues.py index 62fd62c133b7ec..de92e51fa2e4b2 100644 --- a/src/sentry/integrations/github/issues.py +++ b/src/sentry/integrations/github/issues.py @@ -206,6 +206,12 @@ def get_link_issue_config(self, group: Group, **kwargs: Any) -> list[dict[str, A "sentry-integration-github-search", args=[org.slug, self.model.id] ) + def get_linked_issue_comment_prefix(group: Group) -> str: + if group.issue_category == GroupCategory.FEEDBACK: + return "Sentry feedback" + else: + return "Sentry issue" + return [ { "name": "repo", @@ -229,11 +235,14 @@ def get_link_issue_config(self, group: Group, **kwargs: Any) -> list[dict[str, A { "name": "comment", "label": "Comment", - "default": "Sentry issue: [{issue_id}]({url})".format( - url=absolute_uri( - group.get_absolute_url(params={"referrer": "github_integration"}) + "default": ( + get_linked_issue_comment_prefix(group) + + ": [{issue_id}]({url})".format( + url=absolute_uri( + group.get_absolute_url(params={"referrer": "github_integration"}) + ), + issue_id=group.qualified_short_id, ), - issue_id=group.qualified_short_id, ), "type": "textarea", "required": False, diff --git a/tests/sentry/integrations/github/test_issues.py b/tests/sentry/integrations/github/test_issues.py index f45085f44451bf..e7a3978fcbe2d1 100644 --- a/tests/sentry/integrations/github/test_issues.py +++ b/tests/sentry/integrations/github/test_issues.py @@ -10,6 +10,7 @@ from sentry.integrations.github import client from sentry.integrations.github.integration import GitHubIntegration from sentry.integrations.github.issues import GitHubIssueBasic +from sentry.issues.grouptype import FeedbackGroup from sentry.models.integrations.external_issue import ExternalIssue from sentry.services.hybrid_cloud.integration import integration_service from sentry.silo.util import PROXY_BASE_URL_HEADER, PROXY_OI_HEADER, PROXY_SIGNATURE_HEADER @@ -397,6 +398,37 @@ def test_repo_dropdown_choices(self): ("getsentry/sentry", "sentry"), ] + @responses.activate + def test_linked_issue_comment(self): + issue_event = self.store_event( + data={"event_id": "a" * 32, "timestamp": self.min_ago}, project_id=self.project.id + ) + feedback_issue = self.create_group(project=self.project, type=FeedbackGroup.type_id) + + responses.add( + responses.GET, + "https://api.github.com/installation/repositories", + json={ + "total_count": 2, + "repositories": [ + {"full_name": "getsentry/sentry", "name": "sentry"}, + {"full_name": "getsentry/other", "name": "other", "archived": True}, + ], + }, + ) + + # link an issue + data = {"params": {"repo": "getsentry/hello"}} + assert issue_event.group is not None + resp = 
self.install.get_link_issue_config(group=issue_event.group, **data) + # assert comment wording for linked issue is correct + assert "Sentry issue" in resp[2]["default"][0] + + # link a feedback issue + resp = self.install.get_link_issue_config(group=feedback_issue, **data) + # assert comment wording for linked feedback is correct + assert "Sentry feedback" in resp[2]["default"][0] + @responses.activate def after_link_issue(self): responses.add( From 0ac4aaf20f7de8f8ec74e377cd40052940e6420f Mon Sep 17 00:00:00 2001 From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com> Date: Mon, 12 Feb 2024 15:57:01 -0800 Subject: [PATCH 287/357] publish release threshold status api (#64807) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit publish api documentation Screenshot 2024-02-12 at 3 06 42 PM --------- Co-authored-by: schew2381 --- .../release_threshold_status_index.py | 73 +++++++++---------- .../api/endpoints/release_thresholds/types.py | 2 +- .../examples/release_threshold_examples.py | 35 +++++++++ 3 files changed, 69 insertions(+), 41 deletions(-) create mode 100644 src/sentry/apidocs/examples/release_threshold_examples.py diff --git a/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py b/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py index 2f1dcdb4a9b29d..a32fae2a69e49e 100644 --- a/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py +++ b/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py @@ -7,6 +7,7 @@ from django.db.models import F, Q from django.http import HttpResponse +from drf_spectacular.utils import extend_schema from rest_framework import serializers from rest_framework.request import Request from rest_framework.response import Response @@ -22,12 +23,17 @@ is_error_count_healthy, is_new_issue_count_healthy, ) +from sentry.api.endpoints.release_thresholds.types import EnrichedThreshold from sentry.api.endpoints.release_thresholds.utils import ( fetch_sessions_data, get_errors_counts_timeseries_by_project_and_release, get_new_issue_counts, ) from sentry.api.serializers import serialize +from sentry.apidocs.constants import RESPONSE_BAD_REQUEST +from sentry.apidocs.examples.release_threshold_examples import ReleaseThresholdExamples +from sentry.apidocs.parameters import GlobalParams +from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.models.release import Release from sentry.models.release_threshold.constants import ReleaseThresholdType from sentry.services.hybrid_cloud.organization import RpcOrganization @@ -36,7 +42,6 @@ logger = logging.getLogger("sentry.release_threshold_status") if TYPE_CHECKING: - from sentry.api.endpoints.release_thresholds.types import EnrichedThreshold from sentry.models.deploy import Deploy from sentry.models.organization import Organization from sentry.models.project import Project @@ -46,14 +51,14 @@ class ReleaseThresholdStatusIndexSerializer(serializers.Serializer): start = serializers.DateTimeField( - help_text="This defines the start of the time series range as an explicit datetime, either in UTC ISO8601 or epoch seconds." - "Use along with `end`", + help_text="The start of the time series range as an explicit datetime, either in UTC ISO8601 or epoch seconds. 
" + "Use along with `end`.", required=True, ) end = serializers.DateTimeField( help_text=( - "This defines the inclusive end of the time series range as an explicit datetime, either in UTC ISO8601 or epoch seconds." - "Use along with `start`" + "The inclusive end of the time series range as an explicit datetime, either in UTC ISO8601 or epoch seconds. " + "Use along with `start`." ), required=True, ) @@ -61,19 +66,19 @@ class ReleaseThresholdStatusIndexSerializer(serializers.Serializer): required=False, allow_empty=True, child=serializers.CharField(), - help_text=("Provide a list of environment names to filter your results by"), + help_text=("A list of environment names to filter your results by."), ) projectSlug = serializers.ListField( required=False, allow_empty=True, child=serializers.CharField(), - help_text=("Provide a list of project slugs to filter your results by"), + help_text=("A list of project slugs to filter your results by."), ) release = serializers.ListField( required=False, allow_empty=True, child=serializers.CharField(), - help_text=("Provide a list of release versions to filter your results by"), + help_text=("A list of release versions to filter your results by."), ) def validate(self, data): @@ -83,47 +88,35 @@ def validate(self, data): @region_silo_endpoint +@extend_schema(tags=["Releases"]) class ReleaseThresholdStatusIndexEndpoint(OrganizationReleasesBaseEndpoint, EnvironmentMixin): owner: ApiOwner = ApiOwner.ENTERPRISE publish_status = { - "GET": ApiPublishStatus.EXPERIMENTAL, + "GET": ApiPublishStatus.PUBLIC, } + @extend_schema( + operation_id="Retrieve Statuses of Release Thresholds (Alpha)", + parameters=[GlobalParams.ORG_SLUG, ReleaseThresholdStatusIndexSerializer], + request=None, + responses={ + 200: inline_sentry_response_serializer( + "ReleaseThresholdStatusResponse", dict[str, list[EnrichedThreshold]] + ), + 400: RESPONSE_BAD_REQUEST, + }, + examples=ReleaseThresholdExamples.THRESHOLD_STATUS_RESPONSE, + ) def get(self, request: Request, organization: Organization | RpcOrganization) -> HttpResponse: - """ - List all derived statuses of releases that fall within the provided start/end datetimes - - Constructs a response key'd off release_version, project_slug, and lists thresholds with their status for *specified* projects - Each returned enriched threshold will contain the full serialized release_threshold instance as well as it's derived health status - - { - {proj}-{release}: [ - { - project_id, - project_slug, - environment, - ..., - key: {release}-{proj}, - release_version: '', - is_healthy: True/False, - start: datetime, - end: datetime, - metric_value: int, - }, - {...}, - {...} - ], - {proj}-{release}: [...], - } - - `````````````````` + r""" + **`[WARNING]`**: This API is an experimental Alpha feature and is subject to change! - :param start: timestamp of the beginning of the specified date range - :param end: timestamp of the end of the specified date range + List all derived statuses of releases that fall within the provided start/end datetimes. - TODO: - - should we limit/paginate results? (this could get really bulky) + Constructs a response key'd off \{`release_version`\}-\{`project_slug`\} that lists thresholds with their status for *specified* projects. + Each returned enriched threshold will contain the full serialized `release_threshold` instance as well as it's derived health statuses. 
""" + # TODO: We should limit/paginate results (this could get really bulky) # ======================================================================== # STEP 1: Validate request data # diff --git a/src/sentry/api/endpoints/release_thresholds/types.py b/src/sentry/api/endpoints/release_thresholds/types.py index ad1f5cd9000b36..44d97faa5ae7c5 100644 --- a/src/sentry/api/endpoints/release_thresholds/types.py +++ b/src/sentry/api/endpoints/release_thresholds/types.py @@ -2,7 +2,7 @@ from typing import Any, TypedDict -class SerializedThreshold(TypedDict): +class SerializedThreshold(TypedDict, total=False): id: str date: datetime environment: dict[str, Any] | None diff --git a/src/sentry/apidocs/examples/release_threshold_examples.py b/src/sentry/apidocs/examples/release_threshold_examples.py new file mode 100644 index 00000000000000..0f63ed4809e6d7 --- /dev/null +++ b/src/sentry/apidocs/examples/release_threshold_examples.py @@ -0,0 +1,35 @@ +from drf_spectacular.utils import OpenApiExample + +from .project_examples import BASE_PROJECT + + +class ReleaseThresholdExamples: + THRESHOLD_STATUS_RESPONSE = [ + OpenApiExample( + "Client key with rate limiting", + value={ + f"{BASE_PROJECT['slug']}-v1.0.0": [ + { + "project_id": 0, + "project_slug": BASE_PROJECT["slug"], + "environment": { + "name": "production", + }, + "project": BASE_PROJECT, + "threshold_type": 0, + "trigger_type": "over", + "value": 100, + "window_in_seconds": 600, + "key": "foobar-v1.0.0", + "release": f"{BASE_PROJECT['slug']}-v1.0.0", + "is_healthy": True, + "start": "2022-02-14T19:00:00Z", + "end": "2022-02-28T18:03:00Z", + "metric_value": 0.1, + }, + ], + }, + status_codes=["200"], + response_only=True, + ), + ] From d4773d880f4502d508e703b3ac0369cfefaf04da Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Mon, 12 Feb 2024 16:01:00 -0800 Subject: [PATCH 288/357] feat(replay): Update replay loading placeholders in Issues and Feedback for consistency (#64908) This updates the Issue Details page (with or without replay clip previews) and Feedback to show a loading spinner with the diagonal lines background while the replay is loading. This is consistent with what appears while loading a replay on the replay details page. **Issue Details** https://github.com/getsentry/sentry/assets/187460/d9061cbc-803d-4b0d-aeae-59d285e25710 There is some flickering here because the component does re-render a couple times as loading progresses, so the spinner animation restarts. Not ideal, but something we can followup on. On this page especially it's loading below the fold, so people are not likely to see it as often. 
**Feedback Details** https://github.com/getsentry/sentry/assets/187460/6940462c-1196-4d6a-a4ec-b139ad268167 Fixes https://github.com/getsentry/sentry/issues/64382 --- .../negativeSpaceContainer.stories.tsx | 13 +++++++----- .../container/negativeSpaceContainer.tsx | 19 ++++++++++++++++- .../components/events/eventReplay/index.tsx | 13 ++++++++++++ .../events/eventReplay/replayClipPreview.tsx | 14 ++++++------- .../events/eventReplay/replayPreview.tsx | 16 +++++++------- static/app/components/lazyLoad.tsx | 21 +++++++++++++++---- 6 files changed, 71 insertions(+), 25 deletions(-) diff --git a/static/app/components/container/negativeSpaceContainer.stories.tsx b/static/app/components/container/negativeSpaceContainer.stories.tsx index d410b10a7c1757..2f37d4947bbb84 100644 --- a/static/app/components/container/negativeSpaceContainer.stories.tsx +++ b/static/app/components/container/negativeSpaceContainer.stories.tsx @@ -11,13 +11,16 @@ export default storyBook(NegativeSpaceContainer, story => { story('Empty', () => (

    - A is a container that will preserve the - aspect ratio of whatever is inside it. It's a flex element, so the children are - free to expand/contract depending on whether things like flex-grow: 1{' '} - are set. + A is a container with a diagonal pattern + for a background. It will preserve the aspect ratio of whatever is inside it. It's + a flex element, so the children are free to expand/contract depending on whether + things like flex-grow: 1 are set.

    Here's one with nothing inside it:

    - +
    )); diff --git a/static/app/components/container/negativeSpaceContainer.tsx b/static/app/components/container/negativeSpaceContainer.tsx index e3d6bf048090fb..bb2177efbd8ad2 100644 --- a/static/app/components/container/negativeSpaceContainer.tsx +++ b/static/app/components/container/negativeSpaceContainer.tsx @@ -1,6 +1,23 @@ +import type {CSSProperties, ForwardedRef, ReactNode} from 'react'; +import {forwardRef} from 'react'; import styled from '@emotion/styled'; -const NegativeSpaceContainer = styled('div')` +interface Props { + children?: ReactNode; + className?: string; + style?: CSSProperties; + testId?: string; +} + +const NegativeSpaceContainer = styled( + forwardRef(({children, testId, ...props}: Props, ref: ForwardedRef) => { + return ( +
    + {children} +
    + ); + }) +)` width: 100%; display: flex; flex-grow: 1; diff --git a/static/app/components/events/eventReplay/index.tsx b/static/app/components/events/eventReplay/index.tsx index bdc09fa261945f..0425ed6ec0cf4f 100644 --- a/static/app/components/events/eventReplay/index.tsx +++ b/static/app/components/events/eventReplay/index.tsx @@ -2,11 +2,14 @@ import {useCallback} from 'react'; import ReactLazyLoad from 'react-lazyload'; import styled from '@emotion/styled'; +import NegativeSpaceContainer from 'sentry/components/container/negativeSpaceContainer'; import ErrorBoundary from 'sentry/components/errorBoundary'; import {EventReplaySection} from 'sentry/components/events/eventReplay/eventReplaySection'; import LazyLoad from 'sentry/components/lazyLoad'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; import {ReplayGroupContextProvider} from 'sentry/components/replays/replayGroupContext'; import {replayBackendPlatforms} from 'sentry/data/platformCategories'; +import {space} from 'sentry/styles/space'; import type {Group} from 'sentry/types'; import type {Event} from 'sentry/types/event'; import {getAnalyticsDataForEvent, getAnalyticsDataForGroup} from 'sentry/utils/events'; @@ -84,6 +87,11 @@ function EventReplayContent({ organization, }, }, + loadingFallback: ( + + + + ), }; return ( @@ -129,3 +137,8 @@ export default function EventReplay({event, group, projectSlug}: Props) { const ReplaySectionMinHeight = styled(EventReplaySection)` min-height: 508px; `; + +const StyledNegativeSpaceContainer = styled(NegativeSpaceContainer)` + height: 400px; + margin-bottom: ${space(2)}; +`; diff --git a/static/app/components/events/eventReplay/replayClipPreview.tsx b/static/app/components/events/eventReplay/replayClipPreview.tsx index 860718dd6381bf..e99fdc664bbf59 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.tsx @@ -5,10 +5,11 @@ import styled from '@emotion/styled'; import {Alert} from 'sentry/components/alert'; import {LinkButton} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; +import NegativeSpaceContainer from 'sentry/components/container/negativeSpaceContainer'; import ErrorBoundary from 'sentry/components/errorBoundary'; import {StaticReplayPreview} from 'sentry/components/events/eventReplay/staticReplayPreview'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; import Panel from 'sentry/components/panels/panel'; -import Placeholder from 'sentry/components/placeholder'; import {Flex} from 'sentry/components/profiling/flex'; import MissingReplayAlert from 'sentry/components/replays/alerts/missingReplayAlert'; import { @@ -184,11 +185,9 @@ function ReplayClipPreview({ if (fetching || !replayRecord || !replay) { return ( - + + + ); } @@ -272,7 +271,8 @@ const StaticPanel = styled(FluidHeight)` border-radius: ${p => p.theme.borderRadius}; `; -const StyledPlaceholder = styled(Placeholder)` +const StyledNegativeSpaceContainer = styled(NegativeSpaceContainer)` + height: 400px; margin-bottom: ${space(2)}; `; diff --git a/static/app/components/events/eventReplay/replayPreview.tsx b/static/app/components/events/eventReplay/replayPreview.tsx index afbfbd2435966c..dee70c91421702 100644 --- a/static/app/components/events/eventReplay/replayPreview.tsx +++ b/static/app/components/events/eventReplay/replayPreview.tsx @@ -4,8 +4,9 @@ import styled from '@emotion/styled'; import {Alert} from 'sentry/components/alert'; import type {LinkButton} 
from 'sentry/components/button'; +import NegativeSpaceContainer from 'sentry/components/container/negativeSpaceContainer'; import {StaticReplayPreview} from 'sentry/components/events/eventReplay/staticReplayPreview'; -import Placeholder from 'sentry/components/placeholder'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; import {Flex} from 'sentry/components/profiling/flex'; import MissingReplayAlert from 'sentry/components/replays/alerts/missingReplayAlert'; import {IconDelete} from 'sentry/icons'; @@ -89,13 +90,11 @@ function ReplayPreview({ return ; } - if (fetching || !replayRecord) { + if (fetching || !replayRecord || !replay) { return ( - + + + ); } @@ -112,7 +111,8 @@ function ReplayPreview({ ); } -const StyledPlaceholder = styled(Placeholder)` +const StyledNegativeSpaceContainer = styled(NegativeSpaceContainer)` + height: 400px; margin-bottom: ${space(2)}; `; diff --git a/static/app/components/lazyLoad.tsx b/static/app/components/lazyLoad.tsx index 3419bb82eb9502..f2b80633f8ca68 100644 --- a/static/app/components/lazyLoad.tsx +++ b/static/app/components/lazyLoad.tsx @@ -18,6 +18,13 @@ type Props = React.ComponentProps & { * Accepts a function to trigger the import resolution of the component. */ component: () => PromisedImport; + + /** + * Override the default fallback component. + * + * Try not to load too many unique components for the fallback! + */ + loadingFallback?: React.ReactNode | undefined; }; /** @@ -26,7 +33,11 @@ type Props = React.ComponentProps & { * * import('./myComponent')} someComponentProps={...} /> */ -function LazyLoad({component, ...props}: Props) { +function LazyLoad({ + component, + loadingFallback, + ...props +}: Props) { const LazyComponent = useMemo( () => lazy(() => retryableImport(component)), [component] @@ -36,9 +47,11 @@ function LazyLoad({component, ...props}: Props) { - - + loadingFallback ?? ( + + + + ) } > )} /> From 90e4b0c49f7427996930867b63ffab28a4315275 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Mon, 12 Feb 2024 16:01:08 -0800 Subject: [PATCH 289/357] feat(feedback): Let User Feedback render a replay clip instead of a static image (#64925) ![SCR-20240208-nrsx](https://github.com/getsentry/sentry/assets/187460/c9a94dae-3777-4a39-ba29-855fa76fdfcb) Depends on https://github.com/getsentry/sentry/pull/64920 being deployed. --- .../components/events/eventReplay/index.tsx | 11 ++++- .../eventReplay/replayClipPreview.spec.tsx | 4 ++ .../events/eventReplay/replayClipPreview.tsx | 14 +++--- .../feedback/feedbackItem/replaySection.tsx | 48 ++++++++++++------- 4 files changed, 54 insertions(+), 23 deletions(-) diff --git a/static/app/components/events/eventReplay/index.tsx b/static/app/components/events/eventReplay/index.tsx index 0425ed6ec0cf4f..96ae7f273c1329 100644 --- a/static/app/components/events/eventReplay/index.tsx +++ b/static/app/components/events/eventReplay/index.tsx @@ -25,6 +25,11 @@ type Props = { group?: Group; }; +const CLIP_OFFSETS = { + durationAfterMs: 5_000, + durationBeforeMs: 5_000, +}; + function EventReplayContent({ event, group, @@ -100,7 +105,11 @@ function EventReplayContent({ {hasReplayClipFeature ? 
( - + ) : ( )} diff --git a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx index 8cd32dcea3cb34..d19e6bf419f738 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx @@ -123,6 +123,10 @@ describe('ReplayClipPreview', () => { orgSlug: mockOrgSlug, replaySlug: mockReplaySlug, eventTimestampMs: mockEventTimestampMs, + clipOffsets: { + durationAfterMs: 5_000, + durationBeforeMs: 5_000, + }, }; it('Should render a placeholder when is fetching the replay data', () => { diff --git a/static/app/components/events/eventReplay/replayClipPreview.tsx b/static/app/components/events/eventReplay/replayClipPreview.tsx index e99fdc664bbf59..eff2cc359c95c8 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.tsx @@ -43,6 +43,10 @@ import type {ReplayRecord} from 'sentry/views/replays/types'; type Props = { analyticsContext: string; + clipOffsets: { + durationAfterMs: number; + durationBeforeMs: number; + }; eventTimestampMs: number; orgSlug: string; replaySlug: string; @@ -50,9 +54,6 @@ type Props = { fullReplayButtonProps?: Partial>; }; -const CLIP_DURATION_BEFORE_EVENT = 5_000; -const CLIP_DURATION_AFTER_EVENT = 5_000; - function getReplayAnalyticsStatus({ fetchError, replayRecord, @@ -145,6 +146,7 @@ function ReplayPreviewPlayer({ function ReplayClipPreview({ analyticsContext, + clipOffsets, eventTimestampMs, orgSlug, replaySlug, @@ -152,10 +154,10 @@ function ReplayClipPreview({ }: Props) { const clipWindow = useMemo( () => ({ - startTimestampMs: eventTimestampMs - CLIP_DURATION_BEFORE_EVENT, - endTimestampMs: eventTimestampMs + CLIP_DURATION_AFTER_EVENT, + startTimestampMs: eventTimestampMs - clipOffsets.durationBeforeMs, + endTimestampMs: eventTimestampMs + clipOffsets.durationAfterMs, }), - [eventTimestampMs] + [clipOffsets.durationBeforeMs, clipOffsets.durationAfterMs, eventTimestampMs] ); const {fetching, replay, replayRecord, fetchError, replayId} = useReplayReader({ diff --git a/static/app/components/feedback/feedbackItem/replaySection.tsx b/static/app/components/feedback/feedbackItem/replaySection.tsx index ec049937cff990..4a985f96edc21d 100644 --- a/static/app/components/feedback/feedbackItem/replaySection.tsx +++ b/static/app/components/feedback/feedbackItem/replaySection.tsx @@ -10,27 +10,43 @@ interface Props { replayId: string; } +const CLIP_OFFSETS = { + durationAfterMs: 0, + durationBeforeMs: 20_000, +}; + export default function ReplaySection({eventTimestampMs, organization, replayId}: Props) { + const hasUserFeedbackReplayClip = organization.features.includes( + 'user-feedback-replay-clip' + ); + const replayPreview = useCallback( () => import('sentry/components/events/eventReplay/replayPreview'), [] ); + const replayClipPreview = useCallback( + () => import('sentry/components/events/eventReplay/replayClipPreview'), + [] + ); + + const props = { + analyticsContext: 'feedback', + eventTimestampMs, + focusTab: TabKey.BREADCRUMBS, + orgSlug: organization.slug, + replaySlug: replayId, + fullReplayButtonProps: { + analyticsEventKey: 'feedback_details.open_replay_details_clicked', + analyticsEventName: 'Feedback Details: Open Replay Details Clicked', + analyticsParams: { + organization, + }, + }, + }; - return ( - + return hasUserFeedbackReplayClip ? 
( + + ) : ( + ); } From 738dc6b3f4bc47c645088b41600a6e10ad71b081 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 12 Feb 2024 16:24:19 -0800 Subject: [PATCH 290/357] chore(logging): Remove missing-severity-score debug logging (#65042) This removes debug logging, and its corresponding tests, which was left over from the initial POC of first-event severity scoring. Since the bug in question was solved four months ago, we've never hit this error. Note: The reason for doing this now (in addition to general tidiness) is that it has already been removed from the updated version of `save_aggregate` (`save_aggregate_new`) and this prevents us needing to update tests for now-obsolete behavior when we switch to using the new version. --- src/sentry/event_manager.py | 24 --- tests/sentry/event_manager/test_severity.py | 155 -------------------- 2 files changed, 179 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 1e0c48b156b6af..edfb6362d462b6 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -1473,18 +1473,6 @@ def _save_aggregate( if existing_grouphash is None: group = _create_group(project, event, **group_creation_kwargs) - if ( - features.has("projects:first-event-severity-calculation", event.project) - and group.data.get("metadata", {}).get("severity") is None - ): - logger.error( - "Group created without severity score", - extra={ - "event_id": event.data["event_id"], - "group_id": group.id, - }, - ) - if root_hierarchical_grouphash is not None: new_hashes = [root_hierarchical_grouphash] else: @@ -2557,18 +2545,6 @@ def _save_grouphash_and_group( group = _create_group(project, event, **group_kwargs) group_hash.update(group=group) - if ( - features.has("projects:first-event-severity-calculation", event.project) - and group.data.get("metadata", {}).get("severity") is None - ): - logger.error( - "Group created without severity score", - extra={ - "event_id": event.data["event_id"], - "group_id": group.id, - }, - ) - if group is None: # If we failed to create the group it means another worker beat us to # it. 
Since a GroupHash can only be created in a transaction with the diff --git a/tests/sentry/event_manager/test_severity.py b/tests/sentry/event_manager/test_severity.py index cf589f6de4dca9..ab51993e4fdd47 100644 --- a/tests/sentry/event_manager/test_severity.py +++ b/tests/sentry/event_manager/test_severity.py @@ -10,10 +10,7 @@ from sentry.event_manager import ( NON_TITLE_EVENT_TITLES, EventManager, - _get_event_instance, _get_severity_score, - _save_aggregate, - _save_grouphash_and_group, severity_connection_pool, ) from sentry.models.group import Group @@ -406,155 +403,3 @@ def test_score_not_clobbered_by_second_event(self, mock_get_severity_score: Magi # Metadata has been updated, but severity hasn't been clobbered in the process assert group.data["metadata"]["type"] == "BrokenStuffError" assert group.get_event_metadata()["severity"] == 0.1121 - - -@region_silo_test -class TestSaveAggregateSeverity(TestCase): - @patch("sentry.event_manager._save_aggregate", wraps=_save_aggregate) - @patch("sentry.event_manager.logger.error") - @patch("sentry.event_manager._get_severity_score", return_value=(None, None)) - def test_error_logged_on_no_score_when_enabled( - self, - mock_get_severity_score: MagicMock, - mock_logger_error: MagicMock, - mock_save_aggregate: MagicMock, - ): - with self.feature({"projects:first-event-severity-calculation": True}): - manager = EventManager( - make_event( - exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]} - ) - ) - event = manager.save(self.project.id) - - mock_save_aggregate.assert_called() - mock_get_severity_score.assert_called() - mock_logger_error.assert_called_with( - "Group created without severity score", - extra={ - "event_id": event.event_id, - "group_id": event.group_id, - }, - ) - - @patch("sentry.event_manager._save_aggregate", wraps=_save_aggregate) - @patch("sentry.event_manager.logger.error") - @patch("sentry.event_manager._get_severity_score", return_value=None) - def test_no_error_logged_on_no_score_when_disabled( - self, - mock_get_severity_score: MagicMock, - mock_logger_error: MagicMock, - mock_save_aggregate: MagicMock, - ): - with self.feature({"projects:first-event-severity-calculation": False}): - manager = EventManager( - make_event( - exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]} - ) - ) - manager.save(self.project.id) - - logger_messages = [call.args[0] for call in mock_logger_error.mock_calls] - - mock_save_aggregate.assert_called() - mock_get_severity_score.assert_not_called() - assert "Group created without severity score" not in logger_messages - - @patch("sentry.event_manager._save_aggregate", wraps=_save_aggregate) - @patch("sentry.event_manager.logger.error") - @patch("sentry.event_manager._get_severity_score", return_value=(0.0, "ml")) - def test_no_error_logged_on_zero_score_when_enabled( - self, - mock_get_severity_score: MagicMock, - mock_logger_error: MagicMock, - mock_save_aggregate: MagicMock, - ): - with self.feature({"projects:first-event-severity-calculation": True}): - manager = EventManager( - make_event( - exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]} - ) - ) - event = manager.save(self.project.id) - - logger_messages = [call.args[0] for call in mock_logger_error.mock_calls] - - mock_save_aggregate.assert_called() - mock_get_severity_score.assert_called() - assert event.group and event.group.data["metadata"]["severity"] == 0.0 - assert "Group created without severity score" not in logger_messages - - -@region_silo_test -class 
TestSaveGroupHashAndGroupSeverity(TestCase): - @patch("sentry.event_manager.logger.error") - @patch("sentry.event_manager._get_severity_score", return_value=(None, None)) - def test_error_logged_on_no_score_when_enabled( - self, - mock_get_severity_score: MagicMock, - mock_logger_error: MagicMock, - ): - with self.feature({"projects:first-event-severity-calculation": True}): - event = _get_event_instance( - make_event( - exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]} - ), - self.project.id, - ) - - group, created = _save_grouphash_and_group(self.project, event, "dogs are great") - - assert created is True - mock_get_severity_score.assert_called() - mock_logger_error.assert_called_with( - "Group created without severity score", - extra={ - "event_id": event.event_id, - "group_id": group.id, - }, - ) - - @patch("sentry.event_manager.logger.error") - @patch("sentry.event_manager._get_severity_score", return_value=(None, None)) - def test_no_error_logged_on_no_score_when_disabled( - self, - mock_get_severity_score: MagicMock, - mock_logger_error: MagicMock, - ): - with self.feature({"projects:first-event-severity-calculation": False}): - event = _get_event_instance( - make_event( - exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]} - ), - self.project.id, - ) - - _save_grouphash_and_group(self.project, event, "dogs are great") - - logger_messages = [call.args[0] for call in mock_logger_error.mock_calls] - - mock_get_severity_score.assert_not_called() - assert "Group created without severity score" not in logger_messages - - @patch("sentry.event_manager.logger.error") - @patch("sentry.event_manager._get_severity_score", return_value=(0.0, "ml")) - def test_no_error_logged_on_zero_score_when_enabled( - self, - mock_get_severity_score: MagicMock, - mock_logger_error: MagicMock, - ): - with self.feature({"projects:first-event-severity-calculation": True}): - event = _get_event_instance( - make_event( - exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]} - ), - self.project.id, - ) - - group, _ = _save_grouphash_and_group(self.project, event, "dogs are great") - - logger_messages = [call.args[0] for call in mock_logger_error.mock_calls] - - mock_get_severity_score.assert_called() - assert group.data["metadata"]["severity"] == 0.0 - assert "Group created without severity score" not in logger_messages From eccad82b3757f3665837913ba5508788622f0968 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Mon, 12 Feb 2024 17:00:28 -0800 Subject: [PATCH 291/357] fix(hybrid-cloud): Sets region mode to default test, unless monolith dbs specified (#65021) --- src/sentry/testutils/pytest/sentry.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sentry/testutils/pytest/sentry.py b/src/sentry/testutils/pytest/sentry.py index e95eef0c40d6d5..8afb8b21bf0b28 100644 --- a/src/sentry/testutils/pytest/sentry.py +++ b/src/sentry/testutils/pytest/sentry.py @@ -54,7 +54,10 @@ def configure_split_db() -> None: def _configure_test_env_regions() -> None: - settings.SILO_MODE = DEFAULT_SILO_MODE_FOR_TEST_CASES + SENTRY_USE_MONOLITH_DBS = os.environ.get("SENTRY_USE_MONOLITH_DBS", "0") == "1" + settings.SILO_MODE = ( + DEFAULT_SILO_MODE_FOR_TEST_CASES if not SENTRY_USE_MONOLITH_DBS else SiloMode.MONOLITH + ) # Assign a random name on every test run, as a reminder that test setup and # assertions should not depend on this value. 
If you need to test behavior that

From 121ccc4dee2e3fbfd40b074ba61afcb292f6e634 Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Mon, 12 Feb 2024 17:03:24 -0800
Subject: [PATCH 292/357] fix(crons): Fix muted sort order on the monitor index
 (#65048)

Fixing the sort order here; in other sorts, muted/disabled has a higher
value than a valid status.
---
 .../endpoints/organization_monitor_index.py      |  4 ++--
 .../endpoints/test_organization_monitor_index.py | 16 ++++++++--------
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/src/sentry/monitors/endpoints/organization_monitor_index.py b/src/sentry/monitors/endpoints/organization_monitor_index.py
index dbc73cbd236747..8219c2cf980e6a 100644
--- a/src/sentry/monitors/endpoints/organization_monitor_index.py
+++ b/src/sentry/monitors/endpoints/organization_monitor_index.py
@@ -184,9 +184,9 @@ def get(self, request: Request, organization: Organization) -> Response:
         elif sort == "muted":
             queryset = queryset.annotate(
                 muted_ordering=Case(
-                    When(is_muted=True, then=Value(0)),
+                    When(is_muted=True, then=Value(2)),
                     When(Exists(monitor_environments_query.filter(is_muted=True)), then=Value(1)),
-                    default=2,
+                    default=0,
                 ),
             )
             sort_fields = ["muted_ordering", "name"]
diff --git a/tests/sentry/monitors/endpoints/test_organization_monitor_index.py b/tests/sentry/monitors/endpoints/test_organization_monitor_index.py
index 9bc40b3eb781c3..97334395429e51 100644
--- a/tests/sentry/monitors/endpoints/test_organization_monitor_index.py
+++ b/tests/sentry/monitors/endpoints/test_organization_monitor_index.py
@@ -142,7 +142,7 @@ def test_sort_muted(self):
             self._create_monitor(name="A monitor"),
             self._create_monitor(name="ZA monitor"),
         ]
-        monitors.sort(key=lambda m: (not m.is_muted, m.name))
+        monitors.sort(key=lambda m: (m.is_muted, m.name))

         response = self.get_success_response(self.organization.slug, sort="muted")
         self.check_valid_response(response, monitors)
@@ -182,13 +182,13 @@ def test_sort_muted_envs(self):

         response = self.get_success_response(self.organization.slug, sort="muted")
         expected = [
-            muted_monitor_2,
-            muted_monitor_1,
-            muted_env_monitor,
-            muted_other_env_monitor,
             non_muted_monitor_2,
             non_muted_monitor_1,
             not_muted_env_monitor,
+            muted_env_monitor,
+            muted_other_env_monitor,
+            muted_monitor_2,
+            muted_monitor_1,
         ]
         self.check_valid_response(response, expected)
@@ -200,13 +200,13 @@ def test_sort_muted_envs(self):
             self.organization.slug, sort="muted", environment=["prod"]
         )
         expected = [
-            muted_monitor_2,
-            muted_monitor_1,
-            muted_env_monitor,
             non_muted_monitor_2,
             non_muted_monitor_1,
             muted_other_env_monitor,
             not_muted_env_monitor,
+            muted_env_monitor,
+            muted_monitor_2,
+            muted_monitor_1,
         ]
         self.check_valid_response(response, expected)

From e882180d00b5d91c33ac1d6cceb44e488e5ab156 Mon Sep 17 00:00:00 2001
From: Bartek Ogryczak
Date: Mon, 12 Feb 2024 17:55:48 -0800
Subject: [PATCH 293/357] ref(grouping): just the platform name for platform
 rules (#65040)

Bringing the naming in line with [the convention](https://github.com/getsentry/sentry/blob/master/src/sentry/grouping/fingerprinting/configs/README.md#naming-convetions),
stating that platform-wide rules should only use the name of the platform
rather than a partial SDK name. This doesn't change anything functionally.
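For illustration, the convention amounts to this (a hypothetical check, not
code from this change):

```python
# Platform-wide fingerprinting bases are keyed by platform name plus date,
# with no SDK prefix.
OLD_ID = "sentry.javascript@2024-02-02"  # partial SDK name (old)
NEW_ID = "javascript@2024-02-02"  # platform name only (new)


def follows_platform_naming(config_id: str) -> bool:
    """Hypothetical helper: the part before '@' must be a bare platform name."""
    platform = config_id.split("@", 1)[0]
    return "." not in platform


assert not follows_platform_naming(OLD_ID)
assert follows_platform_naming(NEW_ID)
```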
--- .../001_chunkload_errors.txt | 0 .../002_hydration_errors.txt | 0 src/sentry/grouping/strategies/configurations.py | 2 +- tests/sentry/grouping/test_builtin_fingerprinting.py | 4 ++-- 4 files changed, 3 insertions(+), 3 deletions(-) rename src/sentry/grouping/fingerprinting/configs/{sentry.javascript@2024-02-02 => javascript@2024-02-02}/001_chunkload_errors.txt (100%) rename src/sentry/grouping/fingerprinting/configs/{sentry.javascript@2024-02-02 => javascript@2024-02-02}/002_hydration_errors.txt (100%) diff --git a/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/001_chunkload_errors.txt b/src/sentry/grouping/fingerprinting/configs/javascript@2024-02-02/001_chunkload_errors.txt similarity index 100% rename from src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/001_chunkload_errors.txt rename to src/sentry/grouping/fingerprinting/configs/javascript@2024-02-02/001_chunkload_errors.txt diff --git a/src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/002_hydration_errors.txt b/src/sentry/grouping/fingerprinting/configs/javascript@2024-02-02/002_hydration_errors.txt similarity index 100% rename from src/sentry/grouping/fingerprinting/configs/sentry.javascript@2024-02-02/002_hydration_errors.txt rename to src/sentry/grouping/fingerprinting/configs/javascript@2024-02-02/002_hydration_errors.txt diff --git a/src/sentry/grouping/strategies/configurations.py b/src/sentry/grouping/strategies/configurations.py index 51a0b1bf34868d..667bb65d2f79ed 100644 --- a/src/sentry/grouping/strategies/configurations.py +++ b/src/sentry/grouping/strategies/configurations.py @@ -201,7 +201,7 @@ def register_strategy_config(id: str, **kwargs) -> type[StrategyConfiguration]: "java_cglib_hibernate_logic": True, }, enhancements_base="newstyle:2023-01-11", - fingerprinting_bases=["sentry.javascript@2024-02-02"], + fingerprinting_bases=["javascript@2024-02-02"], ) diff --git a/tests/sentry/grouping/test_builtin_fingerprinting.py b/tests/sentry/grouping/test_builtin_fingerprinting.py index ba87419f53f651..ccd434b9560b65 100644 --- a/tests/sentry/grouping/test_builtin_fingerprinting.py +++ b/tests/sentry/grouping/test_builtin_fingerprinting.py @@ -24,7 +24,7 @@ @pytest.fixture def default_bases(): - return ["sentry.javascript@2024-02-02"] + return ["javascript@2024-02-02"] def test_default_bases(default_bases): @@ -33,7 +33,7 @@ def test_default_bases(default_bases): assert { k: [r._to_config_structure() for r in rs] for k, rs in FINGERPRINTING_BASES.items() } == { - "sentry.javascript@2024-02-02": [ + "javascript@2024-02-02": [ { "matchers": [["family", "javascript"], ["type", "ChunkLoadError"]], "fingerprint": ["chunkloaderror"], From 242ecf00ccd564d4595574545be846160f7abb77 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Tue, 13 Feb 2024 08:10:42 +0100 Subject: [PATCH 294/357] feat(ddm): Fog of war spanning too many buckets (#65009) - closes https://github.com/getsentry/sentry/issues/65008 --------- Co-authored-by: Ogi <86684834+obostjancic@users.noreply.github.com> --- static/app/views/ddm/chart.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/static/app/views/ddm/chart.tsx b/static/app/views/ddm/chart.tsx index 8d8e2d3b44ecd0..103c06bcc8b99e 100644 --- a/static/app/views/ddm/chart.tsx +++ b/static/app/views/ddm/chart.tsx @@ -344,12 +344,12 @@ const createFogOfWarAreaSeries = (series: Series, fogBucketCnt = 0) => ({ }); function getWidthFactor(bucketSize: number) { - // In general, fog of war should cover the last bucket - 
if (bucketSize > 30 * 60_000) { + // If the bucket size is >= 5 minutes the fog of war should only cover the last bucket + if (bucketSize >= 5 * 60_000) { return 1; } - // for 10s timeframe we want to show a fog of war that spans last 10 buckets + // for buckets <= 10s we want to show a fog of war that spans last 10 buckets // because on average, we are missing last 90 seconds of data if (bucketSize <= 10_000) { return 10; From 2393cec1633173d19de6bce4f021a8c13e1ca682 Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Tue, 13 Feb 2024 10:00:04 +0100 Subject: [PATCH 295/357] feat(ddm): Return None when no data is present (#64995) --- .../querying/data/transformation.py | 8 +- .../sentry_metrics/querying/data/utils.py | 17 ---- .../sentry_metrics/querying/data/test_api.py | 89 ++++++++++--------- 3 files changed, 48 insertions(+), 66 deletions(-) diff --git a/src/sentry/sentry_metrics/querying/data/transformation.py b/src/sentry/sentry_metrics/querying/data/transformation.py index cbd72b3b2d7937..897646deb55703 100644 --- a/src/sentry/sentry_metrics/querying/data/transformation.py +++ b/src/sentry/sentry_metrics/querying/data/transformation.py @@ -6,7 +6,7 @@ from sentry.search.utils import parse_datetime_string from sentry.sentry_metrics.querying.data.execution import QueryResult -from sentry.sentry_metrics.querying.data.utils import get_identity, nan_to_none +from sentry.sentry_metrics.querying.data.utils import nan_to_none from sentry.sentry_metrics.querying.errors import MetricsQueryExecutionError from sentry.sentry_metrics.querying.types import GroupKey, ResultValue, Series, Total @@ -182,11 +182,9 @@ def transform(self) -> Mapping[str, Any]: series = metric_values.series total = metric_values.total - # We generate the full series by passing as default value the identity of the totals, which is the - # default value applied in the timeseries. This function already aligns the series by sorting it in - # ascending order so there is no need to have the series elements sorted beforehand. + # We generate the full series with a default value of `null` in case no series data is returned. translated_serieses[metric_name] = _generate_full_series( - int(start.timestamp()), len(intervals), interval, series, get_identity(total) + int(start.timestamp()), len(intervals), interval, series ) # In case we get nan, we will cast it to None but this can be changed in case there is the need. translated_totals[metric_name] = nan_to_none(total) diff --git a/src/sentry/sentry_metrics/querying/data/utils.py b/src/sentry/sentry_metrics/querying/data/utils.py index 4d9b8a3d9f285c..0b367089051f3c 100644 --- a/src/sentry/sentry_metrics/querying/data/utils.py +++ b/src/sentry/sentry_metrics/querying/data/utils.py @@ -3,23 +3,6 @@ from sentry.sentry_metrics.querying.types import ResultValue -def get_identity(value: ResultValue) -> ResultValue: - """ - Computes the identity of a value. - - For nan, we want to return None instead of 0.0 but this is just a design decision that conforms - to the previous implementation of the layer. - """ - if value is None: - return None - - if is_nan(value): - return None - - # We might decide in the future to have identity values specific to each aggregate. - return type(value)() - - def nan_to_none(value: ResultValue) -> ResultValue: """ Converts a nan value to None or returns the original value. 
diff --git a/tests/sentry/sentry_metrics/querying/data/test_api.py b/tests/sentry/sentry_metrics/querying/data/test_api.py index 25fd309c7b92d7..d576fb4f1ad648 100644 --- a/tests/sentry/sentry_metrics/querying/data/test_api.py +++ b/tests/sentry/sentry_metrics/querying/data/test_api.py @@ -75,7 +75,7 @@ def ts(self, dt: datetime) -> int: return int(dt.timestamp()) def test_query_with_empty_results(self) -> None: - for aggregate, expected_identity in ( + for aggregate, expected_total in ( ("count", 0.0), ("avg", None), ("sum", 0.0), @@ -94,13 +94,14 @@ def test_query_with_empty_results(self) -> None: environments=[], referrer="metrics.data.api", ) + # Right now we are returning data since there is a weird behavior in the metrics layer that returns an + # aggregate value even if there is no data (grouping by something results in the right result being + # returned). When the layer will be updated, this test should be asserted to have empty groups. groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == { - field: [expected_identity, expected_identity, expected_identity] - } - assert groups[0]["totals"] == {field: expected_identity} + assert groups[0]["series"] == {field: [None, None, None]} + assert groups[0]["totals"] == {field: expected_total} def test_query_with_one_aggregation(self) -> None: field = f"sum({TransactionMRI.DURATION.value})" @@ -119,7 +120,7 @@ def test_query_with_one_aggregation(self) -> None: groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == {field: [0.0, 12.0, 9.0]} + assert groups[0]["series"] == {field: [None, 12.0, 9.0]} assert groups[0]["totals"] == {field: 21.0} def test_query_with_one_aggregation_and_environment(self) -> None: @@ -139,7 +140,7 @@ def test_query_with_one_aggregation_and_environment(self) -> None: groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == {field: [0.0, 6.0, 4.0]} + assert groups[0]["series"] == {field: [None, 6.0, 4.0]} assert groups[0]["totals"] == {field: 10.0} def test_query_with_one_aggregation_and_latest_release(self) -> None: @@ -159,7 +160,7 @@ def test_query_with_one_aggregation_and_latest_release(self) -> None: groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == {field: [0.0, 6.0, 7.0]} + assert groups[0]["series"] == {field: [None, 6.0, 7.0]} assert groups[0]["totals"] == {field: 13.0} def test_query_with_percentile(self) -> None: @@ -179,7 +180,7 @@ def test_query_with_percentile(self) -> None: groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == {field: [0.0, pytest.approx(5.8), 3.8]} + assert groups[0]["series"] == {field: [None, pytest.approx(5.8), 3.8]} assert groups[0]["totals"] == {field: 5.5} def test_query_with_valid_percentiles(self) -> None: @@ -236,13 +237,13 @@ def test_query_with_group_by(self) -> None: groups = sorted(results["groups"], key=lambda value: value["by"]["platform"]) assert len(groups) == 3 assert groups[0]["by"] == {"platform": "android", "transaction": "/hello"} - assert groups[0]["series"] == {field: [0.0, 1.0, 2.0]} + assert groups[0]["series"] == {field: [None, 1.0, 2.0]} assert groups[0]["totals"] == {field: 3.0} assert groups[1]["by"] == {"platform": "ios", "transaction": "/hello"} - assert groups[1]["series"] == {field: [0.0, 6.0, 3.0]} + assert groups[1]["series"] == {field: [None, 6.0, 3.0]} assert 
groups[1]["totals"] == {field: 9.0} assert groups[2]["by"] == {"platform": "windows", "transaction": "/world"} - assert groups[2]["series"] == {field: [0.0, 5.0, 4.0]} + assert groups[2]["series"] == {field: [None, 5.0, 4.0]} assert groups[2]["totals"] == {field: 9.0} def test_query_with_group_by_on_null_tag(self) -> None: @@ -304,10 +305,10 @@ def test_query_with_parenthesized_filter(self) -> None: groups = sorted(results["groups"], key=lambda value: value["by"]["platform"]) assert len(groups) == 2 assert groups[0]["by"] == {"platform": "android"} - assert groups[0]["series"] == {field: [0.0, 1.0, 2.0]} + assert groups[0]["series"] == {field: [None, 1.0, 2.0]} assert groups[0]["totals"] == {field: 3.0} assert groups[1]["by"] == {"platform": "ios"} - assert groups[1]["series"] == {field: [0.0, 6.0, 3.0]} + assert groups[1]["series"] == {field: [None, 6.0, 3.0]} assert groups[1]["totals"] == {field: 9.0} def test_query_with_and_filter(self) -> None: @@ -327,7 +328,7 @@ def test_query_with_and_filter(self) -> None: groups = sorted(results["groups"], key=lambda value: value["by"]["platform"]) assert len(groups) == 1 assert groups[0]["by"] == {"platform": "ios"} - assert groups[0]["series"] == {field: [0.0, 6.0, 3.0]} + assert groups[0]["series"] == {field: [None, 6.0, 3.0]} assert groups[0]["totals"] == {field: 9.0} def test_query_with_or_filter(self) -> None: @@ -347,10 +348,10 @@ def test_query_with_or_filter(self) -> None: groups = sorted(results["groups"], key=lambda value: value["by"]["platform"]) assert len(groups) == 2 assert groups[0]["by"] == {"platform": "android"} - assert groups[0]["series"] == {field: [0.0, 1.0, 2.0]} + assert groups[0]["series"] == {field: [None, 1.0, 2.0]} assert groups[0]["totals"] == {field: 3.0} assert groups[1]["by"] == {"platform": "ios"} - assert groups[1]["series"] == {field: [0.0, 6.0, 3.0]} + assert groups[1]["series"] == {field: [None, 6.0, 3.0]} assert groups[1]["totals"] == {field: 9.0} def test_query_one_negated_filter(self) -> None: @@ -370,7 +371,7 @@ def test_query_one_negated_filter(self) -> None: groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {"platform": "android"} - assert groups[0]["series"] == {field: [0.0, 1.0, 2.0]} + assert groups[0]["series"] == {field: [None, 1.0, 2.0]} assert groups[0]["totals"] == {field: 3.0} def test_query_one_in_filter(self) -> None: @@ -390,10 +391,10 @@ def test_query_one_in_filter(self) -> None: groups = sorted(results["groups"], key=lambda value: value["by"]["platform"]) assert len(groups) == 2 assert groups[0]["by"] == {"platform": "android"} - assert groups[0]["series"] == {field: [0.0, 1.0, 2.0]} + assert groups[0]["series"] == {field: [None, 1.0, 2.0]} assert groups[0]["totals"] == {field: 3.0} assert groups[1]["by"] == {"platform": "ios"} - assert groups[1]["series"] == {field: [0.0, 6.0, 3.0]} + assert groups[1]["series"] == {field: [None, 6.0, 3.0]} assert groups[1]["totals"] == {field: 9.0} def test_query_one_not_in_filter(self) -> None: @@ -413,7 +414,7 @@ def test_query_one_not_in_filter(self) -> None: groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {"platform": "windows"} - assert groups[0]["series"] == {field: [0.0, 5.0, 4.0]} + assert groups[0]["series"] == {field: [None, 5.0, 4.0]} assert groups[0]["totals"] == {field: 9.0} def test_query_with_multiple_aggregations(self) -> None: @@ -434,7 +435,7 @@ def test_query_with_multiple_aggregations(self) -> None: groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - 
assert groups[0]["series"] == {field_2: [0.0, 6.0, 4.0], field_1: [0.0, 1.0, 2.0]} + assert groups[0]["series"] == {field_2: [None, 6.0, 4.0], field_1: [None, 1.0, 2.0]} assert groups[0]["totals"] == {field_2: 6.0, field_1: 1.0} def test_query_with_multiple_aggregations_and_single_group_by(self) -> None: @@ -456,8 +457,8 @@ def test_query_with_multiple_aggregations_and_single_group_by(self) -> None: assert len(groups) == 3 assert groups[0]["by"] == {"platform": "android"} assert sorted(groups[0]["series"].items(), key=lambda v: v[0]) == [ - (field_2, [0.0, 1.0, 2.0]), - (field_1, [0.0, 1.0, 2.0]), + (field_2, [None, 1.0, 2.0]), + (field_1, [None, 1.0, 2.0]), ] assert sorted(groups[0]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 2.0), @@ -465,8 +466,8 @@ def test_query_with_multiple_aggregations_and_single_group_by(self) -> None: ] assert groups[1]["by"] == {"platform": "ios"} assert sorted(groups[1]["series"].items(), key=lambda v: v[0]) == [ - (field_2, [0.0, 6.0, 3.0]), - (field_1, [0.0, 6.0, 3.0]), + (field_2, [None, 6.0, 3.0]), + (field_1, [None, 6.0, 3.0]), ] assert sorted(groups[1]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 6.0), @@ -474,8 +475,8 @@ def test_query_with_multiple_aggregations_and_single_group_by(self) -> None: ] assert groups[2]["by"] == {"platform": "windows"} assert sorted(groups[2]["series"].items(), key=lambda v: v[0]) == [ - (field_2, [0.0, 5.0, 4.0]), - (field_1, [0.0, 5.0, 4.0]), + (field_2, [None, 5.0, 4.0]), + (field_1, [None, 5.0, 4.0]), ] assert sorted(groups[2]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 5.0), @@ -502,8 +503,8 @@ def test_query_with_multiple_aggregations_and_single_group_by_and_order_by(self) assert len(groups) == 3 assert groups[0]["by"] == {"platform": "ios"} assert sorted(groups[0]["series"].items(), key=lambda v: v[0]) == [ - (field_2, [0.0, 6.0, 3.0]), - (field_1, [0.0, 6.0, 3.0]), + (field_2, [None, 6.0, 3.0]), + (field_1, [None, 6.0, 3.0]), ] assert sorted(groups[0]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 6.0), @@ -511,8 +512,8 @@ def test_query_with_multiple_aggregations_and_single_group_by_and_order_by(self) ] assert groups[1]["by"] == {"platform": "windows"} assert sorted(groups[1]["series"].items(), key=lambda v: v[0]) == [ - (field_2, [0.0, 5.0, 4.0]), - (field_1, [0.0, 5.0, 4.0]), + (field_2, [None, 5.0, 4.0]), + (field_1, [None, 5.0, 4.0]), ] assert sorted(groups[1]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 5.0), @@ -520,8 +521,8 @@ def test_query_with_multiple_aggregations_and_single_group_by_and_order_by(self) ] assert groups[2]["by"] == {"platform": "android"} assert sorted(groups[2]["series"].items(), key=lambda v: v[0]) == [ - (field_2, [0.0, 1.0, 2.0]), - (field_1, [0.0, 1.0, 2.0]), + (field_2, [None, 1.0, 2.0]), + (field_1, [None, 1.0, 2.0]), ] assert sorted(groups[2]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 2.0), @@ -551,8 +552,8 @@ def test_query_with_multiple_aggregations_and_single_group_by_and_order_by_with_ assert len(groups) == 2 assert groups[0]["by"] == {"platform": "android"} assert sorted(groups[0]["series"].items(), key=lambda v: v[0]) == [ - (field_2, [0.0, 1.0, 2.0]), - (field_1, [0.0, 1.0, 2.0]), + (field_2, [None, 1.0, 2.0]), + (field_1, [None, 1.0, 2.0]), ] assert sorted(groups[0]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 2.0), @@ -560,8 +561,8 @@ def test_query_with_multiple_aggregations_and_single_group_by_and_order_by_with_ ] assert groups[1]["by"] == {"platform": "ios"} assert sorted(groups[1]["series"].items(), 
key=lambda v: v[0]) == [ - (field_2, [0.0, 6.0, 3.0]), - (field_1, [0.0, 6.0, 3.0]), + (field_2, [None, 6.0, 3.0]), + (field_1, [None, 6.0, 3.0]), ] assert sorted(groups[1]["totals"].items(), key=lambda v: v[0]) == [ (field_2, 6.0), @@ -655,7 +656,7 @@ def test_query_with_custom_set(self): groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == {field: [0, 2, 0]} + assert groups[0]["series"] == {field: [None, 2, None]} assert groups[0]["totals"] == {field: 2} @patch("sentry.sentry_metrics.querying.data.execution.SNUBA_QUERY_LIMIT", 5) @@ -684,13 +685,13 @@ def test_query_with_too_many_results(self) -> None: groups = sorted(results["groups"], key=lambda value: value["by"]["platform"]) assert len(groups) == 3 assert groups[0]["by"] == {"platform": "android", "transaction": "/hello"} - assert groups[0]["series"] == {field: [0.0, 3.0]} + assert groups[0]["series"] == {field: [None, 3.0]} assert groups[0]["totals"] == {field: 3.0} assert groups[1]["by"] == {"platform": "ios", "transaction": "/hello"} - assert groups[1]["series"] == {field: [0.0, 9.0]} + assert groups[1]["series"] == {field: [None, 9.0]} assert groups[1]["totals"] == {field: 9.0} assert groups[2]["by"] == {"platform": "windows", "transaction": "/world"} - assert groups[2]["series"] == {field: [0.0, 9.0]} + assert groups[2]["series"] == {field: [None, 9.0]} assert groups[2]["totals"] == {field: 9.0} @patch("sentry.sentry_metrics.querying.data.execution.SNUBA_QUERY_LIMIT", 5) @@ -773,7 +774,7 @@ def test_query_with_one_metric_blocked_for_one_project(self): groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == {field: [0.0, 15.0, 0.0]} + assert groups[0]["series"] == {field: [None, 15.0, None]} assert groups[0]["totals"] == {field: 15.0} def test_query_with_one_metric_blocked_for_all_projects(self): @@ -848,5 +849,5 @@ def test_query_with_two_metrics_and_one_blocked_for_a_project(self): groups = results["groups"] assert len(groups) == 1 assert groups[0]["by"] == {} - assert groups[0]["series"] == {field_2: [0.0, 10.0, 0.0]} + assert groups[0]["series"] == {field_2: [None, 10.0, None]} assert groups[0]["totals"] == {field_2: 10.0} From 9551b59401287cbbc7407071c3bf9824a59e3f3f Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Tue, 13 Feb 2024 11:00:01 +0100 Subject: [PATCH 296/357] ref(backpressure): Track reasons for unhealthy consumers and services (#64998) This refactors backpressure management so that when a service exceeds its memory threshold or there's an exception when checking, that information is attached to the Sentry error for that service. Likewise, when a consumer is unhealthy, the error contains the list of unhealthy services. This doesn't change the logic of backpressure management in any way, only the presentation. Unless I've introduced a bug somewhere, which I would obviously never do. 
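To illustrate the new contract, here is a minimal sketch (the service names mirror the ones used in the tests; the memory numbers are made up). Each service now maps to its reasons for being unhealthy: either the exception raised while checking it, or the list of `ServiceMemory` readings that exceeded the high watermark, with an empty list meaning healthy.

    from sentry.processing.backpressure.health import record_consumer_health
    from sentry.processing.backpressure.memory import ServiceMemory

    record_consumer_health(
        {
            # healthy services record no reasons
            "celery": [],
            "attachments-store": [],
            "processing-locks": [],
            "post-process-locks": [],
            # unhealthy: one node above the high watermark (values made up)
            "processing-store": [ServiceMemory("redis-node-1", 950_000_000, 1_000_000_000)],
        }
    )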
--- src/sentry/processing/backpressure/health.py | 42 +++++++++++++----- src/sentry/processing/backpressure/memory.py | 14 +++--- src/sentry/processing/backpressure/monitor.py | 29 ++++++------ .../processing/backpressure/test_checking.py | 20 ++++++--- .../backpressure/test_monitoring.py | 44 +++++++++++-------- 5 files changed, 95 insertions(+), 54 deletions(-) diff --git a/src/sentry/processing/backpressure/health.py b/src/sentry/processing/backpressure/health.py index d8dd387b8b26f7..abc3dd98981ed6 100644 --- a/src/sentry/processing/backpressure/health.py +++ b/src/sentry/processing/backpressure/health.py @@ -1,15 +1,19 @@ import logging from collections.abc import Mapping +from typing import Any import sentry_sdk from django.conf import settings from sentry import options +from sentry.processing.backpressure.memory import ServiceMemory from sentry.processing.backpressure.topology import CONSUMERS from sentry.utils import metrics, redis logger = logging.getLogger(__name__) +UnhealthyReasons = Exception | list[ServiceMemory] + def _prefix_key(key_name: str) -> str: return f"bp1:{key_name}" @@ -89,30 +93,48 @@ def is_consumer_healthy(consumer_name: str = "default") -> bool: return False -def record_consumer_health(service_health: Mapping[str, bool]) -> None: +def record_consumer_health(unhealthy_services: Mapping[str, UnhealthyReasons]) -> None: with service_monitoring_cluster.pipeline() as pipeline: key_ttl = options.get("backpressure.status_ttl") - for name, is_healthy in service_health.items(): - pipeline.set(_service_key(name), "true" if is_healthy else "false", ex=key_ttl) + for name, unhealthy_reasons in unhealthy_services.items(): + pipeline.set(_service_key(name), "false" if unhealthy_reasons else "true", ex=key_ttl) + + extra: dict[str, Any] = {} + if unhealthy_reasons: + if isinstance(unhealthy_reasons, Exception): + extra = {"exception": unhealthy_reasons} + else: + for memory in unhealthy_reasons: + extra[memory.name] = { + "used": memory.used, + "available": memory.available, + "percentage": memory.percentage, + } - if not is_healthy: metrics.incr("backpressure.monitor.service.unhealthy", tags={"service": name}) with sentry_sdk.push_scope(): sentry_sdk.set_tag("service", name) - logger.error("Service `%s` marked as unhealthy", name) + logger.error("Service `%s` marked as unhealthy", name, extra=extra) for name, dependencies in CONSUMERS.items(): - is_healthy = True + unhealthy_dependencies = [] for dependency in dependencies: - is_healthy = is_healthy and service_health[dependency] + if unhealthy_services[dependency]: + unhealthy_dependencies.append(dependency) - pipeline.set(_consumer_key(name), "true" if is_healthy else "false", ex=key_ttl) + pipeline.set( + _consumer_key(name), "false" if unhealthy_dependencies else "true", ex=key_ttl + ) - if not is_healthy: + if unhealthy_dependencies: metrics.incr("backpressure.monitor.consumer.unhealthy", tags={"consumer": name}) with sentry_sdk.push_scope(): sentry_sdk.set_tag("consumer", name) - logger.error("Consumer `%s` marked as unhealthy", name) + logger.error( + "Consumer `%s` marked as unhealthy", + name, + extra={"unhealthy_dependencies": unhealthy_dependencies}, + ) pipeline.execute() diff --git a/src/sentry/processing/backpressure/memory.py b/src/sentry/processing/backpressure/memory.py index 26bc8077042be9..859fc7e3f77fca 100644 --- a/src/sentry/processing/backpressure/memory.py +++ b/src/sentry/processing/backpressure/memory.py @@ -9,11 +9,13 @@ @dataclass class ServiceMemory: + name: str used: int available: int 
percentage: float - def __init__(self, used: int, available: int): + def __init__(self, name: str, used: int, available: int): + self.name = name self.used = used self.available = available self.percentage = used / available @@ -31,7 +33,7 @@ def query_rabbitmq_memory_usage(host: str) -> ServiceMemory: response = requests.get(url, timeout=3) response.raise_for_status() json = response.json() - return ServiceMemory(json[0]["mem_used"], json[0]["mem_limit"]) + return ServiceMemory(host, json[0]["mem_used"], json[0]["mem_limit"]) # Based on configuration, this could be: @@ -41,13 +43,13 @@ def query_rabbitmq_memory_usage(host: str) -> ServiceMemory: Cluster = Union[RedisCluster, rb.Cluster] -def get_memory_usage(info: Mapping[str, Any]) -> ServiceMemory: +def get_memory_usage(node_id: str, info: Mapping[str, Any]) -> ServiceMemory: # or alternatively: `used_memory_rss`? memory_used = info.get("used_memory", 0) # `maxmemory` might be 0 in development memory_available = info.get("maxmemory", 0) or info["total_system_memory"] - return ServiceMemory(memory_used, memory_available) + return ServiceMemory(node_id, memory_used, memory_available) def iter_cluster_memory_usage(cluster: Cluster) -> Generator[ServiceMemory, None, None]: @@ -63,5 +65,5 @@ def iter_cluster_memory_usage(cluster: Cluster) -> Generator[ServiceMemory, None promise = client.info() cluster_info = promise.value - for info in cluster_info.values(): - yield get_memory_usage(info) + for node_id, info in cluster_info.items(): + yield get_memory_usage(node_id, info) diff --git a/src/sentry/processing/backpressure/monitor.py b/src/sentry/processing/backpressure/monitor.py index 0164ed1748c08f..68b2d125d8206d 100644 --- a/src/sentry/processing/backpressure/monitor.py +++ b/src/sentry/processing/backpressure/monitor.py @@ -1,6 +1,6 @@ import logging import time -from collections.abc import Generator, Mapping +from collections.abc import Generator, Mapping, MutableMapping from dataclasses import dataclass from typing import Union @@ -8,7 +8,7 @@ from django.conf import settings from sentry import options -from sentry.processing.backpressure.health import record_consumer_health +from sentry.processing.backpressure.health import UnhealthyReasons, record_consumer_health # from sentry import options from sentry.processing.backpressure.memory import ( @@ -77,19 +77,21 @@ def assert_all_services_defined(services: dict[str, Service]) -> None: ) -def check_service_health(services: Mapping[str, Service]) -> Mapping[str, bool]: - service_health = {} +def check_service_health(services: Mapping[str, Service]) -> MutableMapping[str, UnhealthyReasons]: + unhealthy_services: MutableMapping[str, UnhealthyReasons] = {} for name, service in services.items(): high_watermark = options.get(f"backpressure.high_watermarks.{name}") - is_healthy = True + reasons = [] logger.info("Checking service `%s` (configured high watermark: %s):", name, high_watermark) try: for memory in check_service_memory(service): - is_healthy = is_healthy and memory.percentage < high_watermark + if memory.percentage >= high_watermark: + reasons.append(memory) logger.info( - " used: %s, available: %s, percentage: %s", + " name: %s, used: %s, available: %s, percentage: %s", + memory.name, memory.used, memory.available, memory.percentage, @@ -98,12 +100,13 @@ def check_service_health(services: Mapping[str, Service]) -> Mapping[str, bool]: with sentry_sdk.push_scope() as scope: scope.set_tag("service", name) sentry_sdk.capture_exception(e) - is_healthy = False + unhealthy_services[name] = e 
+ else: + unhealthy_services[name] = reasons - service_health[name] = is_healthy - logger.info(" => healthy: %s", is_healthy) + logger.info(" => healthy: %s", not unhealthy_services[name]) - return service_health + return unhealthy_services def start_service_monitoring() -> None: @@ -117,11 +120,11 @@ def start_service_monitoring() -> None: with sentry_sdk.start_transaction(name="backpressure.monitoring", sampled=True): # first, check each base service and record its health - service_health = check_service_health(services) + unhealthy_services = check_service_health(services) # then, check the derived services and record their health try: - record_consumer_health(service_health) + record_consumer_health(unhealthy_services) except Exception as e: sentry_sdk.capture_exception(e) diff --git a/tests/sentry/processing/backpressure/test_checking.py b/tests/sentry/processing/backpressure/test_checking.py index 290299ee389388..0aead28697a86a 100644 --- a/tests/sentry/processing/backpressure/test_checking.py +++ b/tests/sentry/processing/backpressure/test_checking.py @@ -22,7 +22,15 @@ } ) def test_backpressure_unhealthy(): - record_consumer_health({"celery": False}) + record_consumer_health( + { + "celery": Exception("Couldn't check celery"), + "attachments-store": [], + "processing-store": [], + "processing-locks": [], + "post-process-locks": [], + } + ) with raises(MessageRejected): process_one_message() @@ -52,11 +60,11 @@ def test_bad_config(): def test_backpressure_healthy(process_profile_task): record_consumer_health( { - "celery": True, - "attachments-store": True, - "processing-store": True, - "processing-locks": True, - "post-process-locks": True, + "celery": [], + "attachments-store": [], + "processing-store": [], + "processing-locks": [], + "post-process-locks": [], } ) process_one_message() diff --git a/tests/sentry/processing/backpressure/test_monitoring.py b/tests/sentry/processing/backpressure/test_monitoring.py index 77cc9a68a6148e..a7e2dfc703562e 100644 --- a/tests/sentry/processing/backpressure/test_monitoring.py +++ b/tests/sentry/processing/backpressure/test_monitoring.py @@ -1,7 +1,13 @@ +from collections.abc import MutableMapping + import pytest from django.test.utils import override_settings -from sentry.processing.backpressure.health import is_consumer_healthy, record_consumer_health +from sentry.processing.backpressure.health import ( + UnhealthyReasons, + is_consumer_healthy, + record_consumer_health, +) from sentry.processing.backpressure.monitor import ( Redis, assert_all_services_defined, @@ -40,8 +46,8 @@ def test_check_redis_health() -> None: "backpressure.high_watermarks.redis": 1.0, } ): - service_health = check_service_health(services) - assert service_health["redis"] is True + unhealthy_services = check_service_health(services) + assert not unhealthy_services["redis"] with override_options( { @@ -49,8 +55,8 @@ def test_check_redis_health() -> None: "backpressure.high_watermarks.redis": 0.0, } ): - service_health = check_service_health(services) - assert service_health["redis"] is False + unhealthy_services = check_service_health(services) + assert unhealthy_services["redis"] @override_options( @@ -61,27 +67,27 @@ def test_check_redis_health() -> None: } ) def test_record_consumer_health() -> None: - service_health = { - "celery": True, - "attachments-store": True, - "processing-store": True, - "processing-locks": True, - "post-process-locks": True, + unhealthy_services: MutableMapping[str, UnhealthyReasons] = { + "celery": [], + "attachments-store": [], + 
"processing-store": [], + "processing-locks": [], + "post-process-locks": [], } - record_consumer_health(service_health) + record_consumer_health(unhealthy_services) assert is_consumer_healthy() is True - service_health["celery"] = False - record_consumer_health(service_health) + unhealthy_services["celery"] = Exception("Couldn't check celery") + record_consumer_health(unhealthy_services) assert is_consumer_healthy() is False with pytest.raises(KeyError): record_consumer_health( { - "sellerie": True, # oops - "attachments-store": True, - "processing-store": True, - "processing-locks": True, - "post-process-locks": True, + "sellerie": [], # oops + "attachments-store": [], + "processing-store": [], + "processing-locks": [], + "post-process-locks": [], } ) From 01b34ec177c1c165c7fcef21c8955754f61453f4 Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Tue, 13 Feb 2024 11:16:21 +0100 Subject: [PATCH 297/357] fix(ddm): Fix wrong usage of interval and default to None null series (#65060) --- .../api/endpoints/organization_metrics.py | 2 +- .../querying/data_v2/transformation.py | 8 +-- .../sentry_metrics/querying/data_v2/utils.py | 17 ----- .../querying/data_v2/test_api.py | 64 +++++++++---------- 4 files changed, 35 insertions(+), 56 deletions(-) diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py index b1ebe2c6733bd1..3e0c77af8ab937 100644 --- a/src/sentry/api/endpoints/organization_metrics.py +++ b/src/sentry/api/endpoints/organization_metrics.py @@ -371,7 +371,7 @@ def _interval_from_request(self, request: Request) -> int: """ Extracts the interval of the query from the request payload. """ - interval = parse_stats_period(request.data.get("interval", "1h")) + interval = parse_stats_period(request.GET.get("interval", "1h")) return int(3600 if interval is None else interval.total_seconds()) def _metrics_queries_plan_from_request(self, request: Request) -> MetricsQueriesPlan: diff --git a/src/sentry/sentry_metrics/querying/data_v2/transformation.py b/src/sentry/sentry_metrics/querying/data_v2/transformation.py index a402f119a7fd46..b683519751b03e 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/transformation.py +++ b/src/sentry/sentry_metrics/querying/data_v2/transformation.py @@ -6,7 +6,7 @@ from sentry.search.utils import parse_datetime_string from sentry.sentry_metrics.querying.data_v2.execution import QueryResult -from sentry.sentry_metrics.querying.data_v2.utils import get_identity, nan_to_none +from sentry.sentry_metrics.querying.data_v2.utils import nan_to_none from sentry.sentry_metrics.querying.errors import MetricsQueryExecutionError from sentry.sentry_metrics.querying.types import GroupKey, ResultValue, Series, Totals @@ -199,11 +199,7 @@ def transform(self) -> Mapping[str, Any]: { "by": {name: value for name, value in group_key}, "series": _generate_full_series( - int(start.timestamp()), - len(intervals), - interval, - group_value.series, - get_identity(group_value.totals), + int(start.timestamp()), len(intervals), interval, group_value.series ), "totals": nan_to_none(group_value.totals), } diff --git a/src/sentry/sentry_metrics/querying/data_v2/utils.py b/src/sentry/sentry_metrics/querying/data_v2/utils.py index 4d9b8a3d9f285c..0b367089051f3c 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/utils.py +++ b/src/sentry/sentry_metrics/querying/data_v2/utils.py @@ -3,23 +3,6 @@ from sentry.sentry_metrics.querying.types import ResultValue -def get_identity(value: ResultValue) -> ResultValue: - """ - Computes 
the identity of a value. - - For nan, we want to return None instead of 0.0 but this is just a design decision that conforms - to the previous implementation of the layer. - """ - if value is None: - return None - - if is_nan(value): - return None - - # We might decide in the future to have identity values specific to each aggregate. - return type(value)() - - def nan_to_none(value: ResultValue) -> ResultValue: """ Converts a nan value to None or returns the original value. diff --git a/tests/sentry/sentry_metrics/querying/data_v2/test_api.py b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py index de8ea2e98fbec6..61724b01d244fa 100644 --- a/tests/sentry/sentry_metrics/querying/data_v2/test_api.py +++ b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py @@ -106,7 +106,7 @@ def test_query_with_empty_results(self) -> None: data = results["data"] assert len(data) == 1 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [expected_identity, expected_identity, expected_identity] + assert data[0][0]["series"] == [None, None, None] assert data[0][0]["totals"] == expected_identity def test_query_with_one_aggregation(self) -> None: @@ -126,7 +126,7 @@ def test_query_with_one_aggregation(self) -> None: data = results["data"] assert len(data) == 1 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [0.0, 12.0, 9.0] + assert data[0][0]["series"] == [None, 12.0, 9.0] assert data[0][0]["totals"] == 21.0 def test_query_with_one_aggregation_and_environment(self) -> None: @@ -146,7 +146,7 @@ def test_query_with_one_aggregation_and_environment(self) -> None: data = results["data"] assert len(data) == 1 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [0.0, 6.0, 4.0] + assert data[0][0]["series"] == [None, 6.0, 4.0] assert data[0][0]["totals"] == 10.0 def test_query_with_one_aggregation_and_latest_release(self) -> None: @@ -166,7 +166,7 @@ def test_query_with_one_aggregation_and_latest_release(self) -> None: data = results["data"] assert len(data) == 1 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [0.0, 6.0, 7.0] + assert data[0][0]["series"] == [None, 6.0, 7.0] assert data[0][0]["totals"] == 13.0 def test_query_with_percentile(self) -> None: @@ -186,7 +186,7 @@ def test_query_with_percentile(self) -> None: data = results["data"] assert len(data) == 1 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [0.0, pytest.approx(5.8), 3.8] + assert data[0][0]["series"] == [None, pytest.approx(5.8), 3.8] assert data[0][0]["totals"] == 5.5 # We want to test that the `Array(x)` is stripped away from the `type` of the aggregate. meta = results["meta"] @@ -249,13 +249,13 @@ def test_query_with_group_by(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 3 assert first_query[0]["by"] == {"platform": "android", "transaction": "/hello"} - assert first_query[0]["series"] == [0.0, 1.0, 2.0] + assert first_query[0]["series"] == [None, 1.0, 2.0] assert first_query[0]["totals"] == 3.0 assert first_query[1]["by"] == {"platform": "ios", "transaction": "/hello"} - assert first_query[1]["series"] == [0.0, 6.0, 3.0] + assert first_query[1]["series"] == [None, 6.0, 3.0] assert first_query[1]["totals"] == 9.0 assert first_query[2]["by"] == {"platform": "windows", "transaction": "/world"} - assert first_query[2]["series"] == [0.0, 5.0, 4.0] + assert first_query[2]["series"] == [None, 5.0, 4.0] assert first_query[2]["totals"] == 9.0 # We want to test that the `group_bys` are shown in the meta. 
meta = results["meta"] @@ -330,10 +330,10 @@ def test_query_with_parenthesized_filter(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 2 assert first_query[0]["by"] == {"platform": "android"} - assert first_query[0]["series"] == [0.0, 1.0, 2.0] + assert first_query[0]["series"] == [None, 1.0, 2.0] assert first_query[0]["totals"] == 3.0 assert first_query[1]["by"] == {"platform": "ios"} - assert first_query[1]["series"] == [0.0, 6.0, 3.0] + assert first_query[1]["series"] == [None, 6.0, 3.0] assert first_query[1]["totals"] == 9.0 def test_query_with_and_filter(self) -> None: @@ -357,7 +357,7 @@ def test_query_with_and_filter(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 1 assert first_query[0]["by"] == {"platform": "ios"} - assert first_query[0]["series"] == [0.0, 6.0, 3.0] + assert first_query[0]["series"] == [None, 6.0, 3.0] assert first_query[0]["totals"] == 9.0 def test_query_with_or_filter(self) -> None: @@ -381,10 +381,10 @@ def test_query_with_or_filter(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 2 assert first_query[0]["by"] == {"platform": "android"} - assert first_query[0]["series"] == [0.0, 1.0, 2.0] + assert first_query[0]["series"] == [None, 1.0, 2.0] assert first_query[0]["totals"] == 3.0 assert first_query[1]["by"] == {"platform": "ios"} - assert first_query[1]["series"] == [0.0, 6.0, 3.0] + assert first_query[1]["series"] == [None, 6.0, 3.0] assert first_query[1]["totals"] == 9.0 def test_query_one_negated_filter(self) -> None: @@ -408,7 +408,7 @@ def test_query_one_negated_filter(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 1 assert first_query[0]["by"] == {"platform": "android"} - assert first_query[0]["series"] == [0.0, 1.0, 2.0] + assert first_query[0]["series"] == [None, 1.0, 2.0] assert first_query[0]["totals"] == 3.0 def test_query_one_in_filter(self) -> None: @@ -432,10 +432,10 @@ def test_query_one_in_filter(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 2 assert first_query[0]["by"] == {"platform": "android"} - assert first_query[0]["series"] == [0.0, 1.0, 2.0] + assert first_query[0]["series"] == [None, 1.0, 2.0] assert first_query[0]["totals"] == 3.0 assert first_query[1]["by"] == {"platform": "ios"} - assert first_query[1]["series"] == [0.0, 6.0, 3.0] + assert first_query[1]["series"] == [None, 6.0, 3.0] assert first_query[1]["totals"] == 9.0 def test_query_one_not_in_filter(self) -> None: @@ -459,7 +459,7 @@ def test_query_one_not_in_filter(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 1 assert first_query[0]["by"] == {"platform": "windows"} - assert first_query[0]["series"] == [0.0, 5.0, 4.0] + assert first_query[0]["series"] == [None, 5.0, 4.0] assert first_query[0]["totals"] == 9.0 def test_query_with_multiple_aggregations(self) -> None: @@ -486,10 +486,10 @@ def test_query_with_multiple_aggregations(self) -> None: data = results["data"] assert len(data) == 2 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [0.0, 1.0, 2.0] + assert data[0][0]["series"] == [None, 1.0, 2.0] assert data[0][0]["totals"] == 1.0 assert data[1][0]["by"] == {} - assert data[1][0]["series"] == [0.0, 6.0, 4.0] + assert data[1][0]["series"] == [None, 6.0, 4.0] assert 
data[1][0]["totals"] == 6.0 def test_query_with_multiple_aggregations_and_single_group_by(self) -> None: @@ -518,24 +518,24 @@ def test_query_with_multiple_aggregations_and_single_group_by(self) -> None: first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 3 assert first_query[0]["by"] == {"platform": "android"} - assert first_query[0]["series"] == [0.0, 1.0, 2.0] + assert first_query[0]["series"] == [None, 1.0, 2.0] assert first_query[0]["totals"] == 1.0 assert first_query[1]["by"] == {"platform": "ios"} - assert first_query[1]["series"] == [0.0, 6.0, 3.0] + assert first_query[1]["series"] == [None, 6.0, 3.0] assert first_query[1]["totals"] == 3.0 assert first_query[2]["by"] == {"platform": "windows"} - assert first_query[2]["series"] == [0.0, 5.0, 4.0] + assert first_query[2]["series"] == [None, 5.0, 4.0] assert first_query[2]["totals"] == 4.0 second_query = sorted(data[1], key=lambda value: value["by"]["platform"]) assert len(second_query) == 3 assert second_query[0]["by"] == {"platform": "android"} - assert second_query[0]["series"] == [0.0, 1.0, 2.0] + assert second_query[0]["series"] == [None, 1.0, 2.0] assert second_query[0]["totals"] == 2.0 assert second_query[1]["by"] == {"platform": "ios"} - assert second_query[1]["series"] == [0.0, 6.0, 3.0] + assert second_query[1]["series"] == [None, 6.0, 3.0] assert second_query[1]["totals"] == 6.0 assert second_query[2]["by"] == {"platform": "windows"} - assert second_query[2]["series"] == [0.0, 5.0, 4.0] + assert second_query[2]["series"] == [None, 5.0, 4.0] assert second_query[2]["totals"] == 5.0 def test_query_with_multiple_aggregations_and_single_group_by_and_order_by_with_limit( @@ -566,18 +566,18 @@ def test_query_with_multiple_aggregations_and_single_group_by_and_order_by_with_ first_query = sorted(data[0], key=lambda value: value["by"]["platform"]) assert len(first_query) == 2 assert first_query[0]["by"] == {"platform": "android"} - assert first_query[0]["series"] == [0.0, 1.0, 2.0] + assert first_query[0]["series"] == [None, 1.0, 2.0] assert first_query[0]["totals"] == 1.0 assert first_query[1]["by"] == {"platform": "ios"} - assert first_query[1]["series"] == [0.0, 6.0, 3.0] + assert first_query[1]["series"] == [None, 6.0, 3.0] assert first_query[1]["totals"] == 3.0 second_query = sorted(data[1], key=lambda value: value["by"]["platform"]) assert len(second_query) == 2 assert second_query[0]["by"] == {"platform": "ios"} - assert second_query[0]["series"] == [0.0, 6.0, 3.0] + assert second_query[0]["series"] == [None, 6.0, 3.0] assert second_query[0]["totals"] == 6.0 assert second_query[1]["by"] == {"platform": "windows"} - assert second_query[1]["series"] == [0.0, 5.0, 4.0] + assert second_query[1]["series"] == [None, 5.0, 4.0] assert second_query[1]["totals"] == 5.0 # We want to test that the correct order and limit are in the meta. 
meta = results["meta"] @@ -617,7 +617,7 @@ def test_query_with_custom_set(self): data = results["data"] assert len(data) == 1 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [0, 2, 0] + assert data[0][0]["series"] == [None, 2, None] assert data[0][0]["totals"] == 2 def test_query_with_one_metric_blocked_for_one_project(self): @@ -656,7 +656,7 @@ def test_query_with_one_metric_blocked_for_one_project(self): data = results["data"] assert len(data) == 1 assert data[0][0]["by"] == {} - assert data[0][0]["series"] == [0.0, 15.0, 0.0] + assert data[0][0]["series"] == [None, 15.0, None] assert data[0][0]["totals"] == 15.0 def test_query_with_one_metric_blocked_for_all_projects(self): @@ -740,7 +740,7 @@ def test_query_with_two_metrics_and_one_blocked_for_a_project(self): assert len(data) == 2 assert len(data[0]) == 0 assert data[1][0]["by"] == {} - assert data[1][0]["series"] == [0.0, 10.0, 0.0] + assert data[1][0]["series"] == [None, 10.0, None] assert data[1][0]["totals"] == 10.0 def test_query_with_invalid_syntax( From 8986b16902f99e029f8be11c424e0e0c319c957f Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Tue, 13 Feb 2024 13:08:00 +0100 Subject: [PATCH 298/357] ref(ddm): single query component (#65062) --- static/app/views/ddm/queries.tsx | 121 +++++++++++++++---------- static/app/views/ddm/sampleTable.tsx | 2 +- static/app/views/ddm/widgetDetails.tsx | 29 +++--- 3 files changed, 90 insertions(+), 62 deletions(-) diff --git a/static/app/views/ddm/queries.tsx b/static/app/views/ddm/queries.tsx index d43361487cf847..5cb6c8b75c0e53 100644 --- a/static/app/views/ddm/queries.tsx +++ b/static/app/views/ddm/queries.tsx @@ -39,54 +39,88 @@ export function Queries() { {widgets.map((widget, index) => ( setSelectedWidgetIndex(index)}> - {showQuerySymbols && ( - setSelectedWidgetIndex(index)} - role="button" - aria-label={t('Select query')} - /> - )} - handleChange(index, data)} - metricsQuery={{ - mri: widget.mri, - op: widget.op, - groupBy: widget.groupBy, - query: widget.query, - }} - displayType={widget.displayType} - isEdit projects={selection.projects} + symbol={ + showQuerySymbols && ( + setSelectedWidgetIndex(index)} + role="button" + aria-label={t('Select query')} + /> + ) + } + contextMenu={ + + {({hasFeature}) => ( + + )} + + } /> - - {({hasFeature}) => ( - - )} - ))} ); } +interface Props { + onChange: (data: Partial) => void; + projects: number[]; + widget: MetricWidgetQueryParams; + contextMenu?: React.ReactNode; + symbol?: React.ReactNode; +} + +export function Query({widget, projects, onChange, contextMenu, symbol}: Props) { + return ( + + {symbol} + + {contextMenu} + + ); +} + +const QueryWrapper = styled('div')<{hasSymbol: boolean}>` + display: grid; + gap: ${space(1)}; + padding-bottom: ${space(1)}; + grid-template-columns: 1fr max-content; + ${p => p.hasSymbol && `grid-template-columns: min-content 1fr max-content;`} +`; + const StyledQuerySymbol = styled(QuerySymbol)` margin-top: 10px; cursor: pointer; @@ -94,15 +128,6 @@ const StyledQuerySymbol = styled(QuerySymbol)` const Wrapper = styled('div')<{showQuerySymbols: boolean}>` padding-bottom: ${space(2)}; - display: grid; - grid-template-columns: 1fr max-content; - gap: ${space(1)}; - - ${p => - p.showQuerySymbols && - ` - grid-template-columns: min-content 1fr max-content; - `} `; const Row = styled('div')` diff --git a/static/app/views/ddm/sampleTable.tsx b/static/app/views/ddm/sampleTable.tsx index 55a9614d7302a7..aa758ac06c966f 100644 --- 
a/static/app/views/ddm/sampleTable.tsx +++ b/static/app/views/ddm/sampleTable.tsx @@ -57,7 +57,7 @@ function sortAndLimitSpans(samples?: SpanSummary[], limit: number = 5) { ]); } -interface SamplesTableProps extends SelectionRange { +export interface SamplesTableProps extends SelectionRange { mri?: MRI; onRowHover?: (sampleId?: string) => void; query?: string; diff --git a/static/app/views/ddm/widgetDetails.tsx b/static/app/views/ddm/widgetDetails.tsx index d89375c70db8eb..f0f464b1c2fa26 100644 --- a/static/app/views/ddm/widgetDetails.tsx +++ b/static/app/views/ddm/widgetDetails.tsx @@ -12,6 +12,7 @@ import type {MetricWidgetQueryParams} from 'sentry/utils/metrics/types'; import useOrganization from 'sentry/utils/useOrganization'; import {CodeLocations} from 'sentry/views/ddm/codeLocations'; import {useDDMContext} from 'sentry/views/ddm/context'; +import type {SamplesTableProps} from 'sentry/views/ddm/sampleTable'; import {SampleTable} from 'sentry/views/ddm/sampleTable'; import {getQueryWithFocusedSeries} from 'sentry/views/ddm/utils'; @@ -83,19 +84,12 @@ export function WidgetDetails() { - {organization.features.includes('metrics-samples-list') ? ( - - ) : ( - - )} + @@ -107,6 +101,15 @@ export function WidgetDetails() { ); } +export function MetricSamplesTab({mri, query, onRowHover, ...range}: SamplesTableProps) { + const organization = useOrganization(); + + if (organization.features.includes('metrics-samples-list')) { + return ; + } + return ; +} + const TrayWrapper = styled('div')` padding-top: ${space(4)}; display: grid; From f4e7a9c876b0a1c54df002dc8632fc0bea7e8ccd Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Tue, 13 Feb 2024 13:25:25 +0100 Subject: [PATCH 299/357] fix(ddm): dashboard feature guard (#65057) --- static/app/views/ddm/contextMenu.tsx | 44 +++++++++--------- static/app/views/ddm/layout.tsx | 17 ++----- static/app/views/ddm/pageHeaderActions.tsx | 54 +++++++++------------- static/app/views/ddm/queries.tsx | 35 ++++++-------- 4 files changed, 63 insertions(+), 87 deletions(-) diff --git a/static/app/views/ddm/contextMenu.tsx b/static/app/views/ddm/contextMenu.tsx index 000b259112d0c2..705c96b6b28a15 100644 --- a/static/app/views/ddm/contextMenu.tsx +++ b/static/app/views/ddm/contextMenu.tsx @@ -1,12 +1,12 @@ -import {Fragment, useMemo} from 'react'; +import {useMemo} from 'react'; +import styled from '@emotion/styled'; import * as Sentry from '@sentry/react'; import {openAddToDashboardModal, openModal} from 'sentry/actionCreators/modal'; import {navigateTo} from 'sentry/actionCreators/navigation'; -import FeatureDisabled from 'sentry/components/acl/featureDisabled'; +import Feature from 'sentry/components/acl/feature'; import type {MenuItemProps} from 'sentry/components/dropdownMenu'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; -import {Hovercard} from 'sentry/components/hovercard'; import { IconClose, IconCopy, @@ -35,7 +35,6 @@ import {OrganizationContext} from 'sentry/views/organizationContext'; type ContextMenuProps = { displayType: MetricDisplayType; - hasDashboardFeature: boolean; metricsQuery: MetricsQuery; widgetIndex: number; }; @@ -44,7 +43,6 @@ export function MetricQueryContextMenu({ metricsQuery, displayType, widgetIndex, - hasDashboardFeature, }: ContextMenuProps) { const organization = useOrganization(); const router = useRouter(); @@ -91,24 +89,25 @@ export function MetricQueryContextMenu({ }, { leadingItems: [], - key: 'add-dashoard', - label: hasDashboardFeature ? 
( - {t('Add to Dashboard')} - ) : ( - - } + key: 'add-dashboard', + label: ( + - {t('Add to Dashboard')} - + {({hasFeature}) => ( + + {t('Add to Dashboard')} + + )} + ), - disabled: !hasDashboardFeature || !createDashboardWidget, + disabled: !createDashboardWidget, onAction: () => { + if (!organization.features.includes('dashboards-edit')) { + return; + } trackAnalytics('ddm.add-to-dashboard', { organization, source: 'widget', @@ -147,7 +146,6 @@ export function MetricQueryContextMenu({ }, ], [ - hasDashboardFeature, createAlert, createDashboardWidget, metricsQuery.mri, @@ -232,3 +230,7 @@ export function useCreateDashboardWidget( }); }, [metricsQuery, datetime, displayType, environments, organization, projects, router]); } + +const AddToDashboardItem = styled('div')<{disabled: boolean}>` + color: ${p => (p.disabled ? p.theme.disabled : p.theme.textColor)}; +`; diff --git a/static/app/views/ddm/layout.tsx b/static/app/views/ddm/layout.tsx index 16f4e9a73f4636..ea204f6d1c1610 100644 --- a/static/app/views/ddm/layout.tsx +++ b/static/app/views/ddm/layout.tsx @@ -4,7 +4,6 @@ import * as Sentry from '@sentry/react'; import emptyStateImg from 'sentry-images/spot/custom-metrics-empty-state.svg'; -import Feature from 'sentry/components/acl/feature'; import {Button} from 'sentry/components/button'; import FeatureBadge from 'sentry/components/featureBadge'; import FloatingFeedbackWidget from 'sentry/components/feedback/widget/floatingFeedbackWidget'; @@ -64,18 +63,10 @@ export const DDMLayout = memo(() => { - - {({hasFeature}) => ( - addCustomMetric('header')} - hasDashboardFeature={hasFeature} - /> - )} - + addCustomMetric('header')} + /> diff --git a/static/app/views/ddm/pageHeaderActions.tsx b/static/app/views/ddm/pageHeaderActions.tsx index 1e93932da203cf..4b92a6add4105f 100644 --- a/static/app/views/ddm/pageHeaderActions.tsx +++ b/static/app/views/ddm/pageHeaderActions.tsx @@ -1,12 +1,12 @@ -import {Fragment, useCallback, useMemo} from 'react'; +import {useCallback, useMemo} from 'react'; +import styled from '@emotion/styled'; import * as Sentry from '@sentry/react'; import {navigateTo} from 'sentry/actionCreators/navigation'; -import FeatureDisabled from 'sentry/components/acl/featureDisabled'; +import Feature from 'sentry/components/acl/feature'; import {Button} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; -import {Hovercard} from 'sentry/components/hovercard'; import { IconAdd, IconBookmark, @@ -30,15 +30,10 @@ import {useCreateDashboard} from 'sentry/views/ddm/useCreateDashboard'; interface Props { addCustomMetric: () => void; - hasDashboardFeature: boolean; showCustomMetricButton: boolean; } -export function PageHeaderActions({ - showCustomMetricButton, - addCustomMetric, - hasDashboardFeature, -}: Props) { +export function PageHeaderActions({showCustomMetricButton, addCustomMetric}: Props) { const router = useRouter(); const organization = useOrganization(); const {selection} = usePageFilters(); @@ -87,23 +82,23 @@ export function PageHeaderActions({ { leadingItems: [], key: 'add-dashboard', - label: hasDashboardFeature ? 
( - {t('Add to Dashboard')} - ) : ( - - } + label: ( + - {t('Add to Dashboard')} - + {({hasFeature}) => ( + + {t('Add to Dashboard')} + + )} + ), - disabled: !hasDashboardFeature, onAction: () => { + if (!organization.features.includes('dashboards-edit')) { + return; + } trackAnalytics('ddm.add-to-dashboard', { organization, source: 'global', @@ -118,14 +113,7 @@ export function PageHeaderActions({ onAction: () => navigateTo(`/settings/projects/:projectId/metrics/`, router), }, ], - [ - addWidget, - createDashboard, - hasEmptyWidget, - organization, - router, - hasDashboardFeature, - ] + [addWidget, createDashboard, hasEmptyWidget, organization, router] ); const alertItems = useMemo( @@ -226,3 +214,7 @@ export function PageHeaderActions({ ); } + +const AddToDashboardItem = styled('div')<{disabled: boolean}>` + color: ${p => (p.disabled ? p.theme.disabled : p.theme.textColor)}; +`; diff --git a/static/app/views/ddm/queries.tsx b/static/app/views/ddm/queries.tsx index 5cb6c8b75c0e53..2154149721572f 100644 --- a/static/app/views/ddm/queries.tsx +++ b/static/app/views/ddm/queries.tsx @@ -2,7 +2,6 @@ import {useCallback, useLayoutEffect} from 'react'; import styled from '@emotion/styled'; import * as echarts from 'echarts/core'; -import Feature from 'sentry/components/acl/feature'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {MetricWidgetQueryParams} from 'sentry/utils/metrics/types'; @@ -55,27 +54,19 @@ export function Queries() { ) } contextMenu={ - - {({hasFeature}) => ( - - )} - + } /> From fa45f97cef14426ee0c6fca5ed380fc860089ddd Mon Sep 17 00:00:00 2001 From: Armen Zambrano G <44410+armenzg@users.noreply.github.com> Date: Tue, 13 Feb 2024 08:16:58 -0500 Subject: [PATCH 300/357] ref(metrics_extraction): Test improvements + deprecate constant (#65012) The following changes: * Drop `APDEX_THRESHOLD_DEFAULT` and use `DEFAULT_PROJECT_THRESHOLD` * Clean up some tests for improved readability * Split up `_on_demand_query_check` for single responsibility principle --- src/sentry/constants.py | 1 - src/sentry/snuba/metrics/extraction.py | 7 +- tests/sentry/snuba/metrics/test_extraction.py | 104 +++++------------- .../endpoints/test_organization_events_mep.py | 71 ++++++------ 4 files changed, 71 insertions(+), 112 deletions(-) diff --git a/src/sentry/constants.py b/src/sentry/constants.py index 6bdeedecd11b6f..4b8b675560e2a0 100644 --- a/src/sentry/constants.py +++ b/src/sentry/constants.py @@ -637,7 +637,6 @@ def from_str(cls, string: str) -> int: SCRAPE_JAVASCRIPT_DEFAULT = True TRUSTED_RELAYS_DEFAULT = None JOIN_REQUESTS_DEFAULT = True -APDEX_THRESHOLD_DEFAULT = 300 AI_SUGGESTED_SOLUTION = True GITHUB_COMMENT_BOT_DEFAULT = True diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py index a39188dbf3ff38..8ddcf1743ac372 100644 --- a/src/sentry/snuba/metrics/extraction.py +++ b/src/sentry/snuba/metrics/extraction.py @@ -21,7 +21,7 @@ SearchKey, SearchValue, ) -from sentry.constants import APDEX_THRESHOLD_DEFAULT, DataCategory +from sentry.constants import DataCategory from sentry.discover.arithmetic import is_equation from sentry.exceptions import InvalidSearchQuery from sentry.models.organization import Organization @@ -29,7 +29,7 @@ from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric from sentry.search.events import fields from sentry.search.events.builder import UnresolvedQuery -from sentry.search.events.constants import VITAL_THRESHOLDS +from 
sentry.search.events.constants import DEFAULT_PROJECT_THRESHOLD, VITAL_THRESHOLDS from sentry.snuba.dataset import Dataset from sentry.snuba.metrics.naming_layer.mri import ParsedMRI, parse_mri from sentry.snuba.metrics.utils import MetricOperationType @@ -229,6 +229,7 @@ def get_default_spec_version(cls: Any) -> SpecVersion: } # Maps plain Discover functions to derived metric functions which are understood by the metrics layer. +# XXX: We need to support count_miserable _SEARCH_TO_DERIVED_METRIC_AGGREGATES: dict[str, MetricOperationType] = { "failure_count": "on_demand_failure_count", "failure_rate": "on_demand_failure_rate", @@ -1513,7 +1514,7 @@ def _get_satisfactory_threshold_and_metric(project: Project) -> tuple[int, str]: if len(result) == 0: # We use the default threshold shown in the UI. - threshold = APDEX_THRESHOLD_DEFAULT + threshold = DEFAULT_PROJECT_THRESHOLD metric = TransactionMetric.DURATION.value else: # We technically don't use this threshold since we extract it from the apdex(x) field diff --git a/tests/sentry/snuba/metrics/test_extraction.py b/tests/sentry/snuba/metrics/test_extraction.py index 15cdf50f4849c4..1f6832e958fdd0 100644 --- a/tests/sentry/snuba/metrics/test_extraction.py +++ b/tests/sentry/snuba/metrics/test_extraction.py @@ -34,73 +34,39 @@ def test_equality_of_specs(default_project) -> None: [ ("count()", "release:a", False), # supported by standard metrics ("failure_rate()", "release:a", False), # supported by standard metrics - ("count_unique(geo.city)", "release:a", False), # geo.city not supported by standard metrics, but also not by on demand - ( - "count()", - "transaction.duration:>1", - True, - ), # transaction.duration not supported by standard metrics + ("count_unique(geo.city)", "release:a", False), + # transaction.duration not supported by standard metrics + ("count()", "transaction.duration:>1", True), ("failure_count()", "transaction.duration:>1", True), # supported by on demand ("failure_rate()", "transaction.duration:>1", True), # supported by on demand ("apdex(10)", "", True), # every apdex query is on-demand ("apdex(10)", "transaction.duration:>10", True), # supported by on demand - ( - "count_if(transaction.duration,equals,0)", - "release:a", - False, - ), # count_if supported by standard metrics + # count_if supported by standard metrics + ("count_if(transaction.duration,equals,0)", "release:a", False), ("p75(transaction.duration)", "release:a", False), # supported by standard metrics - ( - "p75(transaction.duration)", - "transaction.duration:>1", - True, - ), # transaction.duration query is on-demand + # transaction.duration query is on-demand + ("p75(transaction.duration)", "transaction.duration:>1", True), ("p90(transaction.duration)", "release:a", False), # supported by standard metrics - ( - "p90(transaction.duration)", - "transaction.duration:>1", - True, - ), # transaction.duration query is on-demand - ( - "percentile(transaction.duration, 0.9)", - "release:a", - False, - ), # supported by standard metrics - ( - "percentile(transaction.duration, 0.9)", - "transaction.duration:>1", - True, - ), # transaction.duration query is on-demand - ( - "percentile(transaction.duration, 0.90)", - "release:a", - False, - ), # supported by standard metrics - ( - "percentile(transaction.duration, 0.90)", - "transaction.duration:>1", - True, - ), + # transaction.duration query is on-demand + ("p90(transaction.duration)", "transaction.duration:>1", True), + # supported by standard metrics + ("percentile(transaction.duration, 0.9)", "release:a", 
False), + # transaction.duration query is on-demand + ("percentile(transaction.duration, 0.9)", "transaction.duration:>1", True), + # supported by standard metrics + ("percentile(transaction.duration, 0.90)", "release:a", False), + ("percentile(transaction.duration, 0.90)", "transaction.duration:>1", True), ("count()", "", False), # Malformed aggregate should return false - ( - "count()", - "event.type:error transaction.duration:>0", - False, - ), # event.type:error not supported by metrics - ( - "count()", - "event.type:default transaction.duration:>0", - False, - ), # event.type:error not supported by metrics - ( - "count()", - "error.handled:true transaction.duration:>0", - False, - ), # error.handled is an error search term + # event.type:error not supported by metrics + ("count()", "event.type:error transaction.duration:>0", False), + # event.type:error not supported by metrics + ("count()", "event.type:default transaction.duration:>0", False), + # error.handled is an error search term + ("count()", "error.handled:true transaction.duration:>0", False), ], ) -def test_should_use_on_demand(agg, query, result) -> None: +def test_should_use_on_demand(agg: str, query: str, result: bool) -> None: assert should_use_on_demand_metrics(Dataset.PerformanceMetrics, agg, query) is result @@ -109,26 +75,10 @@ def test_should_use_on_demand(agg, query, result) -> None: [ ("sum(c:custom/page_load@millisecond)", "release:a", False), ("sum(c:custom/page_load@millisecond)", "transaction.duration:>0", False), - ( - "p75(d:transactions/measurements.fcp@millisecond)", - "release:a", - False, - ), - ( - "p75(d:transactions/measurements.fcp@millisecond)", - "transaction.duration:>0", - False, - ), - ( - "p95(d:spans/duration@millisecond)", - "release:a", - False, - ), - ( - "p95(d:spans/duration@millisecond)", - "transaction.duration:>0", - False, - ), + ("p75(d:transactions/measurements.fcp@millisecond)", "release:a", False), + ("p75(d:transactions/measurements.fcp@millisecond)", "transaction.duration:>0", False), + ("p95(d:spans/duration@millisecond)", "release:a", False), + ("p95(d:spans/duration@millisecond)", "transaction.duration:>0", False), ], ) def test_should_use_on_demand_with_mri(agg, query, result) -> None: diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index d625586db65fc6..16fa644b3d1b3a 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -3137,14 +3137,11 @@ def setUp(self) -> None: self.url = reverse(self.viewname, kwargs={"organization_slug": self.organization.slug}) self.features = {"organizations:on-demand-metrics-extraction-widgets": True} - def _on_demand_query_check( - self, - params: dict[str, Any], - groupbys: list[str] | None = None, - expected_on_demand_query: bool | None = True, - expected_dataset: str | None = "metricsEnhanced", - ) -> Response: - """Do a request to the events endpoint with metrics enhanced and on-demand enabled.""" + def _create_specs( + self, params: dict[str, Any], groupbys: list[str] | None = None + ) -> list[OnDemandMetricSpec]: + """Creates all specs based on the parameters that would be passed to the endpoint.""" + specs = [] for field in params["field"]: spec = OnDemandMetricSpec( field=field, @@ -3153,43 +3150,55 @@ def _on_demand_query_check( groupbys=groupbys, spec_type=MetricSpecType.DYNAMIC_QUERY, ) + specs.append(spec) + return specs + + def _make_on_demand_request(self, params: 
dict[str, Any]) -> Response: + """Ensures that the required parameters for an on-demand request are included.""" # Expected parameters for this helper function params["dataset"] = "metricsEnhanced" params["useOnDemandMetrics"] = "true" params["onDemandType"] = "dynamic_query" + return self.do_request(params) - self.store_on_demand_metric(1, spec=spec) - response = self.do_request(params) - + def _assert_on_demand_response( + self, + response: Response, + expected_on_demand_query: bool | None = True, + expected_dataset: str | None = "metricsEnhanced", + ) -> None: + """Basic assertions for an on-demand request.""" assert response.status_code == 200, response.content meta = response.data["meta"] assert meta.get("isMetricsExtractedData", False) is expected_on_demand_query assert meta["dataset"] == expected_dataset - return response - def test_is_metrics_extracted_data_is_included(self) -> None: - self._on_demand_query_check( - {"field": ["count()"], "query": "transaction.duration:>=91", "yAxis": "count()"} - ) + params = {"field": ["count()"], "query": "transaction.duration:>=91", "yAxis": "count()"} + specs = self._create_specs(params) + for spec in specs: + self.store_on_demand_metric(1, spec=spec) + response = self._make_on_demand_request(params) + self._assert_on_demand_response(response) def test_transaction_user_misery(self) -> None: - resp = self._on_demand_query_check( - { - "field": ["user_misery(300)", "apdex(300)"], - "project": self.project.id, - "query": "", - "sort": "-user_misery(300)", - "per_page": "20", - "referrer": "api.dashboards.tablewidget", - }, - groupbys=["transaction"], - ) - assert resp.data == { - "data": [{"user_misery(300)": 0.0, "apdex(300)": 0.0}], + user_misery_field = "user_misery(300)" + apdex_field = "apdex(300)" + params = { + "field": [user_misery_field, apdex_field], + "project": self.project.id, + "query": "", + } + specs = self._create_specs(params, groupbys=["transaction"]) + for spec in specs: + self.store_on_demand_metric(1, spec=spec) + response = self._make_on_demand_request(params) + self._assert_on_demand_response(response, expected_on_demand_query=True) + assert response.data == { + "data": [{user_misery_field: 0.0, apdex_field: 0.0}], "meta": { - "fields": {"user_misery(300)": "number", "apdex(300)": "number"}, - "units": {"user_misery(300)": None, "apdex(300)": None}, + "fields": {user_misery_field: "number", apdex_field: "number"}, + "units": {user_misery_field: None, apdex_field: None}, "isMetricsData": True, "isMetricsExtractedData": True, "tips": {}, From 293517eba455027b9bce183a77cf55fd397e0005 Mon Sep 17 00:00:00 2001 From: Philipp Hofmann Date: Tue, 13 Feb 2024 15:23:37 +0100 Subject: [PATCH 301/357] feat(sdk-crashes): Add metrics (#65059) Add detecting_sdk_crash and sdk_crash_detected metrics to calculate crash rates per SDK and version. 
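As a sketch of what gets emitted (the tag values below are examples): both counters share the `sdk_name`/`sdk_version` tags, so a per-release crash rate can be derived as `sdk_crash_detected / detecting_sdk_crash`.

    from sentry.utils import metrics

    tags = {"sdk_name": "sentry.cocoa", "sdk_version": "8.2.0"}
    # incremented once an event passes the preconditions (supported SDK, frames present)
    metrics.incr("post_process.sdk_crash_monitoring.detecting_sdk_crash", tags=tags)
    # incremented only when the event is actually classified as an SDK crash
    metrics.incr("post_process.sdk_crash_monitoring.sdk_crash_detected", tags=tags)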
--- .../utils/sdk_crashes/sdk_crash_detection.py | 9 ++ .../sdk_crashes/test_sdk_crash_detection.py | 100 +++++++++++++++++- 2 files changed, 108 insertions(+), 1 deletion(-) diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detection.py b/src/sentry/utils/sdk_crashes/sdk_crash_detection.py index dad2b91ab5cb07..e8885e35d68d6e 100644 --- a/src/sentry/utils/sdk_crashes/sdk_crash_detection.py +++ b/src/sentry/utils/sdk_crashes/sdk_crash_detection.py @@ -6,6 +6,7 @@ from sentry.eventstore.models import Event, GroupEvent from sentry.issues.grouptype import GroupCategory +from sentry.utils import metrics from sentry.utils.safe import get_path, set_path from sentry.utils.sdk_crashes.event_stripper import strip_event_data from sentry.utils.sdk_crashes.sdk_crash_detection_config import SDKCrashDetectionConfig @@ -89,6 +90,12 @@ def detect_sdk_crash( if not frames: return None + # If sdk name or version are None, detector.should_detect_sdk_crash returns False above, and we don't reach this point. + sdk_name = get_path(event.data, "sdk", "name") + sdk_version = get_path(event.data, "sdk", "version") + metric_tags = {"sdk_name": sdk_name, "sdk_version": sdk_version} + metrics.incr("post_process.sdk_crash_monitoring.detecting_sdk_crash", tags=metric_tags) + if sdk_crash_detector.is_sdk_crash(frames): if random.random() >= sample_rate: return None @@ -111,6 +118,8 @@ def detect_sdk_crash( # So Sentry can tell how many projects are impacted by this SDK crash set_path(sdk_crash_event_data, "user", "id", value=event.project.id) + metrics.incr("post_process.sdk_crash_monitoring.sdk_crash_detected", tags=metric_tags) + return self.sdk_crash_reporter.report(sdk_crash_event_data, project_id) return None diff --git a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py index b66e83c8a0205b..2f0133e41e0fb6 100644 --- a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py +++ b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py @@ -1,6 +1,6 @@ import abc from collections.abc import Sequence -from unittest.mock import patch +from unittest.mock import call, patch import pytest @@ -202,3 +202,101 @@ def test_multiple_configs_first_one_picked(mock_sdk_crash_reporter, store_event) assert mock_sdk_crash_reporter.report.call_count == 1 project_id = mock_sdk_crash_reporter.report.call_args.args[1] assert project_id == 1234 + + +@django_db_all +@pytest.mark.snuba +@patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection.sdk_crash_reporter") +@patch("sentry.utils.metrics.incr") +def test_should_increment_counters_for_sdk_crash(incr, sdk_crash_reporter, store_event): + event = store_event(data=get_crash_event()) + + sdk_configs = build_sdk_configs() + configs = [sdk_configs[0], sdk_configs[0]] + + sdk_crash_detection.detect_sdk_crash(event=event, configs=configs) + + incr.assert_has_calls( + [ + call( + "post_process.sdk_crash_monitoring.detecting_sdk_crash", + tags={"sdk_name": "sentry.cocoa", "sdk_version": "8.2.0"}, + ), + call( + "post_process.sdk_crash_monitoring.sdk_crash_detected", + tags={"sdk_name": "sentry.cocoa", "sdk_version": "8.2.0"}, + ), + ], + any_order=True, + ) + + +@django_db_all +@pytest.mark.snuba +@patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection.sdk_crash_reporter") +@patch("sentry.utils.metrics.incr") +def test_should_only_increment_detecting_counter_for_non_crash_event( + incr, sdk_crash_reporter, store_event +): + non_crash_event = 
store_event(data=get_crash_event(function="+[SentrySDK crash]")) + + sdk_configs = build_sdk_configs() + configs = [sdk_configs[0], sdk_configs[0]] + + sdk_crash_detection.detect_sdk_crash(event=non_crash_event, configs=configs) + + incr.assert_called_with( + "post_process.sdk_crash_monitoring.detecting_sdk_crash", + tags={"sdk_name": "sentry.cocoa", "sdk_version": "8.2.0"}, + ) + + # Ensure that the counter sdk_crash_detected is not incremented + for call_args in incr.call_args_list: + assert call_args[0][0] != "post_process.sdk_crash_monitoring.sdk_crash_detected" + + +@django_db_all +@pytest.mark.snuba +@patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection.sdk_crash_reporter") +@patch("sentry.utils.metrics.incr") +def test_should_not_increment_counters_for_not_supported_sdk(incr, sdk_crash_reporter, store_event): + event_data = get_crash_event() + set_path(event_data, "sdk", "name", value="sentry.coco") + crash_event = store_event(data=event_data) + + sdk_configs = build_sdk_configs() + configs = [sdk_configs[0], sdk_configs[0]] + + sdk_crash_detection.detect_sdk_crash(event=crash_event, configs=configs) + + # Ensure that no counter is incremented + for call_args in incr.call_args_list: + assert call_args[0][0] != "post_process.sdk_crash_monitoring.detecting_sdk_crash" + assert call_args[0][0] != "post_process.sdk_crash_monitoring.sdk_crash_detected" + + +@django_db_all +@pytest.mark.snuba +@patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection.sdk_crash_reporter") +@patch("sentry.utils.metrics.incr") +def test_should_not_increment_counters_already_reported_sdk_crash( + incr, sdk_crash_reporter, store_event +): + event_data = get_crash_event() + set_path( + event_data, + "contexts", + "sdk_crash_detection", + value={"original_project_id": 1234, "original_event_id": 1234}, + ) + crash_event = store_event(data=event_data) + + sdk_configs = build_sdk_configs() + configs = [sdk_configs[0], sdk_configs[0]] + + sdk_crash_detection.detect_sdk_crash(event=crash_event, configs=configs) + + # Ensure that no counter is incremented + for call_args in incr.call_args_list: + assert call_args[0][0] != "post_process.sdk_crash_monitoring.detecting_sdk_crash" + assert call_args[0][0] != "post_process.sdk_crash_monitoring.sdk_crash_detected" From 4dc9c8b46538c1c4d8397344c9ccaf6ad8fd8d52 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:23:58 -0500 Subject: [PATCH 302/357] ref: replace deprecated utcfromtimestamp (#65038) warning in python 3.12, fatal in python 3.14 --- src/sentry/api/serializers/models/event.py | 2 +- src/sentry/auth/staff.py | 4 ++-- src/sentry/backup/comparators.py | 2 +- src/sentry/debug_files/artifact_bundle_indexing.py | 4 ++-- .../rules/biases/boost_latest_releases_bias.py | 4 ++-- src/sentry/eventstore/snuba/backend.py | 2 +- src/sentry/integrations/github/client.py | 2 +- src/sentry/integrations/gitlab/utils.py | 2 +- src/sentry/release_health/metrics.py | 2 +- src/sentry/replays/usecases/ingest/__init__.py | 2 +- src/sentry/search/utils.py | 2 +- src/sentry/snuba/sessions.py | 2 +- src/sentry/snuba/sessions_v2.py | 4 ++-- src/sentry/statistical_detectors/detector.py | 4 +--- src/sentry/tasks/auto_resolve_issues.py | 2 +- src/sentry/tasks/store.py | 2 +- src/sentry/testutils/pytest/selenium.py | 2 +- src/sentry/utils/dates.py | 2 +- src/sentry/utils/samples.py | 2 +- tests/snuba/sessions/test_sessions.py | 8 ++++---- 20 files changed, 27 insertions(+), 29 deletions(-) 
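For reference, a hedged before/after sketch of the replacement applied in the hunks that follow (the timestamp value is arbitrary). One caveat worth keeping in mind while reviewing: without an explicit `tz` argument, `datetime.fromtimestamp()` converts to local time, so only call sites that pass `timezone.utc` exactly preserve the old naive-UTC semantics (now as an aware datetime):

```python
from datetime import datetime, timezone

ts = 1_700_000_000.0

# Deprecated since Python 3.12: returns a naive datetime interpreted as UTC.
legacy = datetime.utcfromtimestamp(ts)

# Replacement: an aware UTC datetime, equivalent to the old
# .replace(tzinfo=timezone.utc) pattern seen in the removed lines.
aware = datetime.fromtimestamp(ts, timezone.utc)

assert aware == legacy.replace(tzinfo=timezone.utc)

# Without a tz argument the result is naive *local* time, which matches the
# old utcfromtimestamp() value only when the process runs in UTC.
local_naive = datetime.fromtimestamp(ts)
```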
diff --git a/src/sentry/api/serializers/models/event.py b/src/sentry/api/serializers/models/event.py index a750edc953b9fc..db0f6bc441e7fc 100644 --- a/src/sentry/api/serializers/models/event.py +++ b/src/sentry/api/serializers/models/event.py @@ -238,7 +238,7 @@ def serialize(self, obj, attrs, user): # Sentry at one point attempted to record invalid types here. # Remove after June 2 2016 try: - received = datetime.utcfromtimestamp(received).replace(tzinfo=timezone.utc) + received = datetime.fromtimestamp(received, timezone.utc) except TypeError: received = None diff --git a/src/sentry/auth/staff.py b/src/sentry/auth/staff.py index 2c6fffad00bbd6..81a62758500df5 100644 --- a/src/sentry/auth/staff.py +++ b/src/sentry/auth/staff.py @@ -162,7 +162,7 @@ def get_session_data(self, current_datetime=None): current_datetime = django_timezone.now() try: - data["idl"] = datetime.utcfromtimestamp(float(data["idl"])).replace(tzinfo=timezone.utc) + data["idl"] = datetime.fromtimestamp(float(data["idl"]), timezone.utc) except (TypeError, ValueError): logger.warning( "staff.invalid-idle-expiration", @@ -179,7 +179,7 @@ def get_session_data(self, current_datetime=None): return try: - data["exp"] = datetime.utcfromtimestamp(float(data["exp"])).replace(tzinfo=timezone.utc) + data["exp"] = datetime.fromtimestamp(float(data["exp"]), timezone.utc) except (TypeError, ValueError): logger.warning( "staff.invalid-expiration", diff --git a/src/sentry/backup/comparators.py b/src/sentry/backup/comparators.py index 318f5f053aa9d1..fb33339810dcf8 100644 --- a/src/sentry/backup/comparators.py +++ b/src/sentry/backup/comparators.py @@ -20,7 +20,7 @@ from sentry.backup.helpers import Side from sentry.utils.json import JSONData -UNIX_EPOCH = unix_zero_date = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc).isoformat() +UNIX_EPOCH = unix_zero_date = datetime.fromtimestamp(0, timezone.utc).isoformat() class ScrubbedData: diff --git a/src/sentry/debug_files/artifact_bundle_indexing.py b/src/sentry/debug_files/artifact_bundle_indexing.py index 7afba989dfd750..6dddd421cdad6a 100644 --- a/src/sentry/debug_files/artifact_bundle_indexing.py +++ b/src/sentry/debug_files/artifact_bundle_indexing.py @@ -80,13 +80,13 @@ def from_str(bundle_meta: str) -> FlatFileMeta: @staticmethod def build_none(): - return FlatFileMeta(id=-1, date=datetime.utcfromtimestamp(0)) + return FlatFileMeta(id=-1, date=datetime.fromtimestamp(0)) def to_string(self) -> str: return f"bundle_index/{self.id}/{int(self.date.timestamp() * 1000)}" def is_none(self): - return self.id == -1 and self.date == datetime.utcfromtimestamp(0) + return self.id == -1 and self.date == datetime.fromtimestamp(0) class FlatFileIdentifier(NamedTuple): diff --git a/src/sentry/dynamic_sampling/rules/biases/boost_latest_releases_bias.py b/src/sentry/dynamic_sampling/rules/biases/boost_latest_releases_bias.py index 176a9843ed31e6..e3b2acde36b6e6 100644 --- a/src/sentry/dynamic_sampling/rules/biases/boost_latest_releases_bias.py +++ b/src/sentry/dynamic_sampling/rules/biases/boost_latest_releases_bias.py @@ -51,10 +51,10 @@ def generate_rules(self, project: Project, base_sample_rate: float) -> list[Poly }, "id": RESERVED_IDS[RuleType.BOOST_LATEST_RELEASES_RULE] + idx, "timeRange": { - "start": datetime.utcfromtimestamp(boosted_release.timestamp).strftime( + "start": datetime.fromtimestamp(boosted_release.timestamp).strftime( self.datetime_format ), - "end": datetime.utcfromtimestamp( + "end": datetime.fromtimestamp( boosted_release.timestamp + 
boosted_release.platform.time_to_adoption ).strftime(self.datetime_format), }, diff --git a/src/sentry/eventstore/snuba/backend.py b/src/sentry/eventstore/snuba/backend.py index afe4c42630d4aa..42c26e8adc0474 100644 --- a/src/sentry/eventstore/snuba/backend.py +++ b/src/sentry/eventstore/snuba/backend.py @@ -426,7 +426,7 @@ def get_adjacent_event_ids(self, event, filter): prev_filter = deepcopy(filter) prev_filter.conditions = prev_filter.conditions or [] prev_filter.conditions.extend(get_before_event_condition(event)) - prev_filter.start = datetime.utcfromtimestamp(0) + prev_filter.start = datetime.fromtimestamp(0) # the previous event can have the same timestamp, add 1 second # to the end condition since it uses a less than condition prev_filter.end = event.datetime + timedelta(seconds=1) diff --git a/src/sentry/integrations/github/client.py b/src/sentry/integrations/github/client.py index d7d16890deab06..cbd724d0086237 100644 --- a/src/sentry/integrations/github/client.py +++ b/src/sentry/integrations/github/client.py @@ -51,7 +51,7 @@ def __init__(self, info: dict[str, int]) -> None: self.used = info["used"] def next_window(self) -> str: - return datetime.utcfromtimestamp(self.reset).strftime("%H:%M:%S") + return datetime.fromtimestamp(self.reset).strftime("%H:%M:%S") def __repr__(self) -> str: return f"GithubRateLimitInfo(limit={self.limit},rem={self.remaining},reset={self.reset})" diff --git a/src/sentry/integrations/gitlab/utils.py b/src/sentry/integrations/gitlab/utils.py index 9efc801d92ec29..86743480f81f47 100644 --- a/src/sentry/integrations/gitlab/utils.py +++ b/src/sentry/integrations/gitlab/utils.py @@ -14,7 +14,7 @@ def __init__(self, info: Mapping[str, int]) -> None: self.used = info["used"] def next_window(self) -> str: - return datetime.utcfromtimestamp(self.reset).strftime("%H:%M:%S") + return datetime.fromtimestamp(self.reset).strftime("%H:%M:%S") def __repr__(self) -> str: return f"GitLabRateLimitInfo(limit={self.limit},rem={self.remaining},reset={self.reset}),used={self.used})" diff --git a/src/sentry/release_health/metrics.py b/src/sentry/release_health/metrics.py index ed9e48ee11d962..347f67b5492f01 100644 --- a/src/sentry/release_health/metrics.py +++ b/src/sentry/release_health/metrics.py @@ -493,7 +493,7 @@ def get_release_sessions_time_bounds( def iso_format_snuba_datetime(date: str) -> str: return datetime.strptime(date, "%Y-%m-%dT%H:%M:%S+00:00").isoformat()[:19] + "Z" - formatted_unix_start_time = datetime.utcfromtimestamp(0).strftime("%Y-%m-%dT%H:%M:%S+00:00") + formatted_unix_start_time = datetime.fromtimestamp(0).strftime("%Y-%m-%dT%H:%M:%S+00:00") def clean_date_string(d: str | None) -> str | None: # This check is added because if there are no sessions found, then the diff --git a/src/sentry/replays/usecases/ingest/__init__.py b/src/sentry/replays/usecases/ingest/__init__.py index bdf877a1251ef2..ffc9053421708f 100644 --- a/src/sentry/replays/usecases/ingest/__init__.py +++ b/src/sentry/replays/usecases/ingest/__init__.py @@ -145,7 +145,7 @@ def track_initial_segment_event( key_id=key_id, outcome=Outcome.ACCEPTED, reason=None, - timestamp=datetime.utcfromtimestamp(received).replace(tzinfo=timezone.utc), + timestamp=datetime.fromtimestamp(received, timezone.utc), event_id=replay_id, category=DataCategory.REPLAY, quantity=1, diff --git a/src/sentry/search/utils.py b/src/sentry/search/utils.py index abbffa4f6783aa..fe208a4a061d38 100644 --- a/src/sentry/search/utils.py +++ b/src/sentry/search/utils.py @@ -204,7 +204,7 @@ def parse_datetime_range( def 
parse_unix_timestamp(value: str) -> datetime: - return datetime.utcfromtimestamp(float(value)).replace(tzinfo=timezone.utc) + return datetime.fromtimestamp(float(value), timezone.utc) def parse_iso_timestamp(value: str) -> datetime: diff --git a/src/sentry/snuba/sessions.py b/src/sentry/snuba/sessions.py index 212d5bce5e4a7c..0822423ef1af0b 100644 --- a/src/sentry/snuba/sessions.py +++ b/src/sentry/snuba/sessions.py @@ -716,7 +716,7 @@ def iso_format_snuba_datetime(date): referrer="sessions.release-sessions-time-bounds", )["data"] - formatted_unix_start_time = datetime.utcfromtimestamp(0).strftime("%Y-%m-%dT%H:%M:%S+00:00") + formatted_unix_start_time = datetime.fromtimestamp(0).strftime("%Y-%m-%dT%H:%M:%S+00:00") if rows: rv = rows[0] diff --git a/src/sentry/snuba/sessions_v2.py b/src/sentry/snuba/sessions_v2.py index a26f4233478d01..108ed6937010b9 100644 --- a/src/sentry/snuba/sessions_v2.py +++ b/src/sentry/snuba/sessions_v2.py @@ -737,7 +737,7 @@ def get_category_stats( def isoformat_z(date): - return datetime.utcfromtimestamp(int(to_timestamp(date))).isoformat() + "Z" + return datetime.fromtimestamp(int(to_timestamp(date))).isoformat() + "Z" def get_timestamps(query): @@ -749,7 +749,7 @@ def get_timestamps(query): start = int(to_timestamp(query.start)) end = int(to_timestamp(query.end)) - return [datetime.utcfromtimestamp(ts).isoformat() + "Z" for ts in range(start, end, rollup)] + return [datetime.fromtimestamp(ts).isoformat() + "Z" for ts in range(start, end, rollup)] def _split_rows_groupby(rows, groupby): diff --git a/src/sentry/statistical_detectors/detector.py b/src/sentry/statistical_detectors/detector.py index 49876c2d745257..153dac720a0529 100644 --- a/src/sentry/statistical_detectors/detector.py +++ b/src/sentry/statistical_detectors/detector.py @@ -566,9 +566,7 @@ def save_regressions_with_versions( regression_groups = [] for version, prev_date_regressed, regression in regression_chunk: - date_regressed = datetime.utcfromtimestamp(regression["breakpoint"]).replace( - tzinfo=timezone.utc - ) + date_regressed = datetime.fromtimestamp(regression["breakpoint"], timezone.utc) # enforce a buffer window after the date regressed after which the issue # cannot be changed to regressed again to avoid the issue state changing frequently diff --git a/src/sentry/tasks/auto_resolve_issues.py b/src/sentry/tasks/auto_resolve_issues.py index 384f97c2147c4f..3190b00bd38e7a 100644 --- a/src/sentry/tasks/auto_resolve_issues.py +++ b/src/sentry/tasks/auto_resolve_issues.py @@ -72,7 +72,7 @@ def auto_resolve_project_issues(project_id, cutoff=None, chunk_size=1000, **kwar project.update_option("sentry:_last_auto_resolve", int(time())) if cutoff: - cutoff = datetime.utcfromtimestamp(cutoff).replace(tzinfo=timezone.utc) + cutoff = datetime.fromtimestamp(cutoff, timezone.utc) else: cutoff = django_timezone.now() - timedelta(hours=int(age)) diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index fd3cc986860355..7740ad9a9266c1 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -706,7 +706,7 @@ def create_failed_event( raw_event = RawEvent.objects.create( project_id=project_id, event_id=event_id, - datetime=datetime.utcfromtimestamp(data["timestamp"]).replace(tzinfo=timezone.utc), + datetime=datetime.fromtimestamp(data["timestamp"], timezone.utc), data=data, ) diff --git a/src/sentry/testutils/pytest/selenium.py b/src/sentry/testutils/pytest/selenium.py index 89264ff58b3381..ee2f9f890b9ef1 100644 --- a/src/sentry/testutils/pytest/selenium.py +++ 
b/src/sentry/testutils/pytest/selenium.py @@ -533,7 +533,7 @@ def format_log(log): timestamp_format = "%Y-%m-%d %H:%M:%S.%f" entries = [ "{0} {1[level]} - {1[message]}".format( - datetime.utcfromtimestamp(entry["timestamp"] / 1000.0).strftime(timestamp_format), entry + datetime.fromtimestamp(entry["timestamp"] / 1000.0).strftime(timestamp_format), entry ).rstrip() for entry in log ] diff --git a/src/sentry/utils/dates.py b/src/sentry/utils/dates.py index 6fcb717e6283e5..005554c58bd274 100644 --- a/src/sentry/utils/dates.py +++ b/src/sentry/utils/dates.py @@ -94,7 +94,7 @@ def parse_timestamp(value: Any) -> datetime | None: if isinstance(value, datetime): return value elif isinstance(value, (int, float)): - return datetime.utcfromtimestamp(value).replace(tzinfo=timezone.utc) + return datetime.fromtimestamp(value, timezone.utc) value = (value or "").rstrip("Z").encode("ascii", "replace").split(b".", 1) if not value: return None diff --git a/src/sentry/utils/samples.py b/src/sentry/utils/samples.py index d30421cf40223b..c81a991b3066ea 100644 --- a/src/sentry/utils/samples.py +++ b/src/sentry/utils/samples.py @@ -16,7 +16,7 @@ from sentry.utils.dates import to_timestamp logger = logging.getLogger(__name__) -epoch = datetime.utcfromtimestamp(0) +epoch = datetime.fromtimestamp(0) def random_normal(mu, sigma, minimum, maximum=None): diff --git a/tests/snuba/sessions/test_sessions.py b/tests/snuba/sessions/test_sessions.py index 94ecac085f2cfe..b4cddd0e1b4096 100644 --- a/tests/snuba/sessions/test_sessions.py +++ b/tests/snuba/sessions/test_sessions.py @@ -43,12 +43,12 @@ def parametrize_backend(cls): def format_timestamp(dt): if not isinstance(dt, datetime): - dt = datetime.utcfromtimestamp(dt) + dt = datetime.fromtimestamp(dt) return dt.strftime("%Y-%m-%dT%H:%M:%S+00:00") def make_24h_stats(ts, adjust_start=False): - ret_val = _make_stats(datetime.utcfromtimestamp(ts).replace(tzinfo=timezone.utc), 3600, 24) + ret_val = _make_stats(datetime.fromtimestamp(ts, timezone.utc), 3600, 24) if adjust_start: # HACK this adds another interval at the beginning in accordance with the new way of calculating intervals @@ -447,14 +447,14 @@ def test_fetching_release_sessions_time_bounds_for_different_release(self): ) expected_formatted_lower_bound = ( - datetime.utcfromtimestamp(self.session_started - 3600 * 2) + datetime.fromtimestamp(self.session_started - 3600 * 2) .replace(minute=0) .isoformat()[:19] + "Z" ) expected_formatted_upper_bound = ( - datetime.utcfromtimestamp(self.session_started).replace(minute=0).isoformat()[:19] + "Z" + datetime.fromtimestamp(self.session_started).replace(minute=0).isoformat()[:19] + "Z" ) # Test for self.session_release From 601294f58e300d30b3e40c7b61b53f358551ae53 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:55:04 -0500 Subject: [PATCH 303/357] ref: upgrade typing-extensions (#65037) the current version does not work with python 3.12 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- src/sentry/api/endpoints/actionable_items.py | 3 ++- src/sentry/api/endpoints/organization_details.py | 2 +- src/sentry/api/endpoints/organization_stats_summary.py | 3 +-- src/sentry/api/endpoints/organization_stats_v2.py | 3 +-- src/sentry/api/endpoints/source_map_debug.py | 3 ++- .../api/endpoints/source_map_debug_blue_thunder_edition.py | 3 +-- src/sentry/api/serializers/models/alert_rule.py | 3 +-- src/sentry/api/serializers/models/external_actor.py | 4 +--- 
src/sentry/api/serializers/models/integration.py | 4 +--- src/sentry/api/serializers/models/organization.py | 3 +-- .../api/serializers/models/organization_member/response.py | 3 +-- src/sentry/api/serializers/models/project.py | 3 +-- src/sentry/api/serializers/models/project_key.py | 4 +--- src/sentry/api/serializers/models/projectownership.py | 3 +-- src/sentry/api/serializers/models/relay.py | 2 +- src/sentry/api/serializers/models/role.py | 4 +--- src/sentry/api/serializers/models/rule.py | 2 +- src/sentry/api/serializers/models/team.py | 3 +-- src/sentry/api/serializers/models/user.py | 3 +-- src/sentry/api/serializers/types.py | 3 +-- src/sentry/conf/types/role_dict.py | 4 +--- src/sentry/conf/types/sdk_config.py | 4 +--- src/sentry/replays/post_process.py | 3 +-- src/sentry/scim/endpoints/utils.py | 3 ++- src/sentry/sdk_updates.py | 2 +- src/sentry/search/events/types.py | 3 +-- src/sentry/services/hybrid_cloud/app/model.py | 3 +-- src/sentry/services/hybrid_cloud/identity/model.py | 4 +--- src/sentry/services/hybrid_cloud/organization/model.py | 3 +-- src/sentry/services/hybrid_cloud/organization_actions/impl.py | 2 +- src/sentry/services/hybrid_cloud/project/model.py | 3 ++- src/sentry/services/hybrid_cloud/user/model.py | 3 +-- src/sentry/services/hybrid_cloud/user_option/model.py | 4 +--- src/sentry/services/hybrid_cloud/usersocialauth/model.py | 4 +--- src/sentry/snuba/discover.py | 3 +-- src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py | 2 +- 39 files changed, 43 insertions(+), 74 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 68efd7f49ea300..1f0e319570d494 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -73,7 +73,7 @@ statsd>=3.3 structlog>=22 symbolic==12.8.0 toronado>=0.1.0 -typing-extensions>=4.0.0 +typing-extensions>=4.9.0 ua-parser>=0.10.0 unidiff>=0.7.4 urllib3[brotli]>=2 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index b411e208ca3daf..4ef6bb3a812449 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -218,7 +218,7 @@ types-redis==3.5.18 types-requests==2.31.0.20231231 types-setuptools==65.3.0 types-simplejson==3.17.7.2 -typing-extensions==4.5.0 +typing-extensions==4.9.0 tzdata==2023.3 ua-parser==0.10.0 unidiff==0.7.4 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 48f39e668ac1f7..fc6e1e330ed2f0 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -136,7 +136,7 @@ structlog==22.1.0 symbolic==12.8.0 toronado==0.1.0 tqdm==4.64.1 -typing-extensions==4.5.0 +typing-extensions==4.9.0 tzdata==2023.3 ua-parser==0.10.0 unidiff==0.7.4 diff --git a/src/sentry/api/endpoints/actionable_items.py b/src/sentry/api/endpoints/actionable_items.py index 74497473ed24ac..298a78e6f4535a 100644 --- a/src/sentry/api/endpoints/actionable_items.py +++ b/src/sentry/api/endpoints/actionable_items.py @@ -1,7 +1,8 @@ +from typing import TypedDict + from rest_framework.exceptions import NotFound from rest_framework.request import Request from rest_framework.response import Response -from typing_extensions import TypedDict from sentry import eventstore from sentry.api.api_owners import ApiOwner diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py index 1986365b3785db..cc505b81c3f692 100644 --- a/src/sentry/api/endpoints/organization_details.py +++ b/src/sentry/api/endpoints/organization_details.py @@ -3,13 +3,13 @@ import logging from copy import copy from datetime import datetime, timedelta, 
timezone +from typing import TypedDict from django.db import models, router, transaction from django.db.models.query_utils import DeferredAttribute from django.urls import reverse from django.utils import timezone as django_timezone from rest_framework import serializers, status -from typing_extensions import TypedDict from bitfield.types import BitHandler from sentry import audit_log, roles diff --git a/src/sentry/api/endpoints/organization_stats_summary.py b/src/sentry/api/endpoints/organization_stats_summary.py index 2569c7600c7ab8..9d4a0b4e0ace28 100644 --- a/src/sentry/api/endpoints/organization_stats_summary.py +++ b/src/sentry/api/endpoints/organization_stats_summary.py @@ -1,7 +1,7 @@ import csv from contextlib import contextmanager from io import StringIO -from typing import Any +from typing import Any, TypedDict import sentry_sdk from django.http import HttpResponse @@ -10,7 +10,6 @@ from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response -from typing_extensions import TypedDict from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus diff --git a/src/sentry/api/endpoints/organization_stats_v2.py b/src/sentry/api/endpoints/organization_stats_v2.py index 11f2470b73370d..6977f25b0bc1f9 100644 --- a/src/sentry/api/endpoints/organization_stats_v2.py +++ b/src/sentry/api/endpoints/organization_stats_v2.py @@ -1,5 +1,5 @@ from contextlib import contextmanager -from typing import Any +from typing import Any, TypedDict import sentry_sdk from drf_spectacular.utils import extend_schema @@ -7,7 +7,6 @@ from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response -from typing_extensions import TypedDict from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus diff --git a/src/sentry/api/endpoints/source_map_debug.py b/src/sentry/api/endpoints/source_map_debug.py index f3ce08a48aa103..0d0a5c01b32e69 100644 --- a/src/sentry/api/endpoints/source_map_debug.py +++ b/src/sentry/api/endpoints/source_map_debug.py @@ -1,8 +1,9 @@ +from typing import TypedDict + from drf_spectacular.utils import extend_schema from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response -from typing_extensions import TypedDict from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus diff --git a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py index c8391f2431cd5d..6154e1b3ce7cf0 100644 --- a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py +++ b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Literal, TypedDict import sentry_sdk from django.db.models import QuerySet @@ -8,7 +8,6 @@ from rest_framework.exceptions import NotFound from rest_framework.request import Request from rest_framework.response import Response -from typing_extensions import TypedDict from sentry import eventstore, features from sentry.api.api_owners import ApiOwner diff --git a/src/sentry/api/serializers/models/alert_rule.py b/src/sentry/api/serializers/models/alert_rule.py index 553056a6903d34..06f2293b34ee69 100644 --- a/src/sentry/api/serializers/models/alert_rule.py +++ 
b/src/sentry/api/serializers/models/alert_rule.py @@ -3,11 +3,10 @@ from collections import defaultdict from collections.abc import Mapping, MutableMapping, Sequence from datetime import datetime -from typing import Any +from typing import Any, TypedDict from django.db.models import Max, Q, prefetch_related_objects from drf_spectacular.utils import extend_schema_serializer -from typing_extensions import TypedDict from sentry import features from sentry.api.serializers import Serializer, register, serialize diff --git a/src/sentry/api/serializers/models/external_actor.py b/src/sentry/api/serializers/models/external_actor.py index 7bcff3dd730d25..7712401e54da8b 100644 --- a/src/sentry/api/serializers/models/external_actor.py +++ b/src/sentry/api/serializers/models/external_actor.py @@ -1,7 +1,5 @@ from collections.abc import Mapping, MutableMapping -from typing import Any - -from typing_extensions import TypedDict +from typing import Any, TypedDict from sentry.api.serializers import Serializer, register from sentry.models.integrations.external_actor import ExternalActor diff --git a/src/sentry/api/serializers/models/integration.py b/src/sentry/api/serializers/models/integration.py index c1db05f05cb303..7ae24c3c7df346 100644 --- a/src/sentry/api/serializers/models/integration.py +++ b/src/sentry/api/serializers/models/integration.py @@ -2,9 +2,7 @@ import logging from collections.abc import Mapping, MutableMapping, Sequence -from typing import Any - -from typing_extensions import TypedDict +from typing import Any, TypedDict from sentry.api.serializers import Serializer, register, serialize from sentry.integrations import IntegrationProvider diff --git a/src/sentry/api/serializers/models/organization.py b/src/sentry/api/serializers/models/organization.py index e3d57f23964ba5..91e7dd5c59a644 100644 --- a/src/sentry/api/serializers/models/organization.py +++ b/src/sentry/api/serializers/models/organization.py @@ -2,13 +2,12 @@ from collections.abc import Callable, Mapping, MutableMapping, Sequence from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, Any, TypedDict, cast import sentry_sdk from rest_framework import serializers from sentry_relay.auth import PublicKey from sentry_relay.exceptions import RelayError -from typing_extensions import TypedDict from sentry import features, onboarding_tasks, quotas, roles from sentry.api.fields.sentry_slug import SentrySerializerSlugField diff --git a/src/sentry/api/serializers/models/organization_member/response.py b/src/sentry/api/serializers/models/organization_member/response.py index 20feac3a7242e6..6ed1e3634e8e63 100644 --- a/src/sentry/api/serializers/models/organization_member/response.py +++ b/src/sentry/api/serializers/models/organization_member/response.py @@ -1,6 +1,5 @@ from datetime import datetime - -from typing_extensions import TypedDict +from typing import TypedDict from sentry.api.serializers.models.external_actor import ExternalActorResponse from sentry.api.serializers.models.role import ( diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py index 2718afe2d53ecb..f1332f2f4f868f 100644 --- a/src/sentry/api/serializers/models/project.py +++ b/src/sentry/api/serializers/models/project.py @@ -3,14 +3,13 @@ from collections import defaultdict from collections.abc import Iterable, Mapping, MutableMapping, Sequence from datetime import datetime, timedelta -from typing import Any, Final, cast +from typing import Any, Final, 
TypedDict, cast import sentry_sdk from django.db import connection from django.db.models import prefetch_related_objects from django.db.models.aggregates import Count from django.utils import timezone -from typing_extensions import TypedDict from sentry import features, options, projectoptions, release_health, roles from sentry.api.serializers import Serializer, register, serialize diff --git a/src/sentry/api/serializers/models/project_key.py b/src/sentry/api/serializers/models/project_key.py index 2ec154f8a2dc64..21379e0c990b73 100644 --- a/src/sentry/api/serializers/models/project_key.py +++ b/src/sentry/api/serializers/models/project_key.py @@ -1,8 +1,6 @@ from collections.abc import Mapping from datetime import datetime -from typing import Any - -from typing_extensions import TypedDict +from typing import Any, TypedDict from sentry.api.serializers import Serializer, register from sentry.loader.browsersdkversion import ( diff --git a/src/sentry/api/serializers/models/projectownership.py b/src/sentry/api/serializers/models/projectownership.py index 26e88786a3aaa5..78abdcb6add764 100644 --- a/src/sentry/api/serializers/models/projectownership.py +++ b/src/sentry/api/serializers/models/projectownership.py @@ -1,6 +1,5 @@ from datetime import datetime - -from typing_extensions import TypedDict +from typing import TypedDict from sentry.api.serializers import Serializer, register from sentry.models.projectownership import ProjectOwnership diff --git a/src/sentry/api/serializers/models/relay.py b/src/sentry/api/serializers/models/relay.py index bf2a9658b7d23c..2b3371f915052c 100644 --- a/src/sentry/api/serializers/models/relay.py +++ b/src/sentry/api/serializers/models/relay.py @@ -1,4 +1,4 @@ -from typing_extensions import TypedDict +from typing import TypedDict from sentry.api.serializers import Serializer, register from sentry.models.relay import Relay diff --git a/src/sentry/api/serializers/models/role.py b/src/sentry/api/serializers/models/role.py index bef09279dbb6b3..71a28948ffc37e 100644 --- a/src/sentry/api/serializers/models/role.py +++ b/src/sentry/api/serializers/models/role.py @@ -1,7 +1,5 @@ from collections.abc import Mapping -from typing import Any - -from typing_extensions import TypedDict +from typing import Any, TypedDict from sentry import features from sentry.api.serializers import Serializer diff --git a/src/sentry/api/serializers/models/rule.py b/src/sentry/api/serializers/models/rule.py index 98752c74a91d11..334039eee304b0 100644 --- a/src/sentry/api/serializers/models/rule.py +++ b/src/sentry/api/serializers/models/rule.py @@ -1,8 +1,8 @@ from collections import defaultdict +from typing import TypedDict from django.db.models import Max, Q, prefetch_related_objects from rest_framework import serializers -from typing_extensions import TypedDict from sentry.api.serializers import Serializer, register from sentry.constants import ObjectStatus diff --git a/src/sentry/api/serializers/models/team.py b/src/sentry/api/serializers/models/team.py index 913b3913a491fd..d297d42b80f6ca 100644 --- a/src/sentry/api/serializers/models/team.py +++ b/src/sentry/api/serializers/models/team.py @@ -4,10 +4,9 @@ from collections import defaultdict from collections.abc import Mapping, MutableMapping, MutableSequence, Sequence from datetime import datetime -from typing import TYPE_CHECKING, AbstractSet, Any +from typing import TYPE_CHECKING, AbstractSet, Any, TypedDict from django.db.models import Count -from typing_extensions import TypedDict from sentry import roles from 
sentry.api.serializers import Serializer, register, serialize diff --git a/src/sentry/api/serializers/models/user.py b/src/sentry/api/serializers/models/user.py index ca86194c82c657..93099b4c796f1c 100644 --- a/src/sentry/api/serializers/models/user.py +++ b/src/sentry/api/serializers/models/user.py @@ -5,12 +5,11 @@ from collections import defaultdict from collections.abc import Callable, Mapping, MutableMapping, Sequence from datetime import datetime -from typing import Any, cast +from typing import Any, TypedDict, cast from django.conf import settings from django.contrib.auth.models import AnonymousUser from django.db.models import QuerySet -from typing_extensions import TypedDict from sentry import experiments from sentry.api.serializers import Serializer, register diff --git a/src/sentry/api/serializers/types.py b/src/sentry/api/serializers/types.py index 815184f3dcac84..01fb0850481f81 100644 --- a/src/sentry/api/serializers/types.py +++ b/src/sentry/api/serializers/types.py @@ -1,6 +1,5 @@ from datetime import datetime - -from typing_extensions import TypedDict +from typing import TypedDict class SerializedAvatarFields(TypedDict, total=False): diff --git a/src/sentry/conf/types/role_dict.py b/src/sentry/conf/types/role_dict.py index 09db040be61e20..1d96cfdee751ae 100644 --- a/src/sentry/conf/types/role_dict.py +++ b/src/sentry/conf/types/role_dict.py @@ -1,8 +1,6 @@ from __future__ import annotations -from typing import NotRequired - -from typing_extensions import TypedDict +from typing import NotRequired, TypedDict class RoleDict(TypedDict): diff --git a/src/sentry/conf/types/sdk_config.py b/src/sentry/conf/types/sdk_config.py index bcab60cb70598e..bce2c6fea56574 100644 --- a/src/sentry/conf/types/sdk_config.py +++ b/src/sentry/conf/types/sdk_config.py @@ -1,9 +1,7 @@ from __future__ import annotations from collections.abc import Callable -from typing import Any, Literal, NotRequired - -from typing_extensions import TypedDict +from typing import Any, Literal, NotRequired, TypedDict class SdkConfig(TypedDict): diff --git a/src/sentry/replays/post_process.py b/src/sentry/replays/post_process.py index 6611c888d60c22..2ad4763eb251a3 100644 --- a/src/sentry/replays/post_process.py +++ b/src/sentry/replays/post_process.py @@ -3,10 +3,9 @@ import collections from collections.abc import Generator, Iterable, Iterator, MutableMapping from itertools import zip_longest -from typing import Any +from typing import Any, TypedDict from drf_spectacular.utils import extend_schema_serializer -from typing_extensions import TypedDict from sentry.replays.validators import VALID_FIELD_SET diff --git a/src/sentry/scim/endpoints/utils.py b/src/sentry/scim/endpoints/utils.py index 408f232ade684a..9c0bfce38250c4 100644 --- a/src/sentry/scim/endpoints/utils.py +++ b/src/sentry/scim/endpoints/utils.py @@ -1,10 +1,11 @@ +from typing import TypedDict + import sentry_sdk from drf_spectacular.utils import extend_schema from rest_framework import serializers from rest_framework.exceptions import APIException, ParseError from rest_framework.negotiation import BaseContentNegotiation from rest_framework.request import Request -from typing_extensions import TypedDict from sentry.api.api_owners import ApiOwner from sentry.api.bases.organization import OrganizationEndpoint, OrganizationPermission diff --git a/src/sentry/sdk_updates.py b/src/sentry/sdk_updates.py index a0765018ccda72..2e1fcb38be7f21 100644 --- a/src/sentry/sdk_updates.py +++ b/src/sentry/sdk_updates.py @@ -1,11 +1,11 @@ from __future__ import annotations 
import logging +from typing import TypedDict from django.conf import settings from django.core.cache import cache from packaging.version import Version -from typing_extensions import TypedDict from sentry.tasks.release_registry import SDK_INDEX_CACHE_KEY from sentry.utils.safe import get_path diff --git a/src/sentry/search/events/types.py b/src/sentry/search/events/types.py index 2ee9c8a28f758e..6dd83ad73173b0 100644 --- a/src/sentry/search/events/types.py +++ b/src/sentry/search/events/types.py @@ -2,7 +2,7 @@ from collections.abc import Mapping, Sequence from dataclasses import dataclass from datetime import datetime, timezone -from typing import Any, NotRequired, Optional, Union +from typing import Any, NotRequired, Optional, TypedDict, Union from snuba_sdk.aliased_expression import AliasedExpression from snuba_sdk.column import Column @@ -10,7 +10,6 @@ from snuba_sdk.entity import Entity from snuba_sdk.function import CurriedFunction, Function from snuba_sdk.orderby import OrderBy -from typing_extensions import TypedDict from sentry.models.environment import Environment from sentry.models.organization import Organization diff --git a/src/sentry/services/hybrid_cloud/app/model.py b/src/sentry/services/hybrid_cloud/app/model.py index 9a60e815e92bf3..daf02cb103d791 100644 --- a/src/sentry/services/hybrid_cloud/app/model.py +++ b/src/sentry/services/hybrid_cloud/app/model.py @@ -7,10 +7,9 @@ import hmac from collections.abc import Mapping from hashlib import sha256 -from typing import Any, Protocol +from typing import Any, Protocol, TypedDict from pydantic.fields import Field -from typing_extensions import TypedDict from sentry.constants import SentryAppInstallationStatus from sentry.services.hybrid_cloud import RpcModel, RpcModelProtocolMeta diff --git a/src/sentry/services/hybrid_cloud/identity/model.py b/src/sentry/services/hybrid_cloud/identity/model.py index 22ec0e46bab65c..45e7291ef8af7d 100644 --- a/src/sentry/services/hybrid_cloud/identity/model.py +++ b/src/sentry/services/hybrid_cloud/identity/model.py @@ -2,9 +2,7 @@ # from __future__ import annotations # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. 
-from typing import TYPE_CHECKING, Any - -from typing_extensions import TypedDict +from typing import TYPE_CHECKING, Any, TypedDict from sentry.services.hybrid_cloud import RpcModel diff --git a/src/sentry/services/hybrid_cloud/organization/model.py b/src/sentry/services/hybrid_cloud/organization/model.py index 87eba17dc53498..70c1cb813427b9 100644 --- a/src/sentry/services/hybrid_cloud/organization/model.py +++ b/src/sentry/services/hybrid_cloud/organization/model.py @@ -5,12 +5,11 @@ from collections.abc import Mapping, Sequence from datetime import datetime from enum import IntEnum -from typing import Any +from typing import Any, TypedDict from django.dispatch import Signal from django.utils import timezone from pydantic import Field -from typing_extensions import TypedDict from sentry import roles from sentry.db.models import ValidateFunction, Value diff --git a/src/sentry/services/hybrid_cloud/organization_actions/impl.py b/src/sentry/services/hybrid_cloud/organization_actions/impl.py index 8e0f20cfafead5..d19c69a896465c 100644 --- a/src/sentry/services/hybrid_cloud/organization_actions/impl.py +++ b/src/sentry/services/hybrid_cloud/organization_actions/impl.py @@ -1,10 +1,10 @@ import hashlib +from typing import TypedDict from uuid import uuid4 from django.db import router, transaction from django.db.models.expressions import CombinedExpression from django.utils.text import slugify -from typing_extensions import TypedDict from sentry.models.organization import Organization, OrganizationStatus from sentry.models.outbox import outbox_context diff --git a/src/sentry/services/hybrid_cloud/project/model.py b/src/sentry/services/hybrid_cloud/project/model.py index 829ee15d4234b2..af555dd13c729f 100644 --- a/src/sentry/services/hybrid_cloud/project/model.py +++ b/src/sentry/services/hybrid_cloud/project/model.py @@ -4,8 +4,9 @@ # defined, because we want to reflect on type annotations and avoid forward references. +from typing import TypedDict + from pydantic.fields import Field -from typing_extensions import TypedDict from sentry.constants import ObjectStatus from sentry.db.models import ValidateFunction, Value diff --git a/src/sentry/services/hybrid_cloud/user/model.py b/src/sentry/services/hybrid_cloud/user/model.py index 90c1adb9830f62..df9038b68cc66b 100644 --- a/src/sentry/services/hybrid_cloud/user/model.py +++ b/src/sentry/services/hybrid_cloud/user/model.py @@ -5,10 +5,9 @@ import datetime from enum import IntEnum -from typing import Any +from typing import Any, TypedDict from pydantic.fields import Field -from typing_extensions import TypedDict from sentry.services.hybrid_cloud import DEFAULT_DATE, RpcModel diff --git a/src/sentry/services/hybrid_cloud/user_option/model.py b/src/sentry/services/hybrid_cloud/user_option/model.py index 6f9b0101fdd6cb..dbf5a268581dec 100644 --- a/src/sentry/services/hybrid_cloud/user_option/model.py +++ b/src/sentry/services/hybrid_cloud/user_option/model.py @@ -3,9 +3,7 @@ # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. 
-from typing import Any - -from typing_extensions import TypedDict +from typing import Any, TypedDict from sentry.services.hybrid_cloud import RpcModel diff --git a/src/sentry/services/hybrid_cloud/usersocialauth/model.py b/src/sentry/services/hybrid_cloud/usersocialauth/model.py index e38c14f1fa3462..fb874602211464 100644 --- a/src/sentry/services/hybrid_cloud/usersocialauth/model.py +++ b/src/sentry/services/hybrid_cloud/usersocialauth/model.py @@ -3,9 +3,7 @@ # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. -from typing import Any - -from typing_extensions import TypedDict +from typing import Any, TypedDict from sentry.services.hybrid_cloud import RpcModel from social_auth.utils import expiration_datetime, get_backend, tokens diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index 2c9feb2c7e48c8..8ceb12110f85d5 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -5,12 +5,11 @@ from collections.abc import Sequence from copy import deepcopy from datetime import datetime, timedelta -from typing import Any, NotRequired +from typing import Any, NotRequired, TypedDict import sentry_sdk from sentry_relay.consts import SPAN_STATUS_CODE_TO_NAME from snuba_sdk import Condition, Function, Op -from typing_extensions import TypedDict from sentry.discover.arithmetic import categorize_columns from sentry.exceptions import InvalidSearchQuery diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py b/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py index 1f97cccb60373b..48c19be054c007 100644 --- a/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py +++ b/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py @@ -1,9 +1,9 @@ from collections.abc import Sequence from dataclasses import dataclass from enum import Enum, unique +from typing import TypedDict import sentry_sdk -from typing_extensions import TypedDict from sentry import options from sentry.utils.sdk_crashes.path_replacer import ( From 8664f0d0f524b006cb53db1ad6adacce3f4f51ab Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:59:00 -0500 Subject: [PATCH 304/357] ref(metrics-extraction): Clean up additional n+1 (#65068) ### Summary Found another n+1 due to filters. Also in this PR, removing spec_type since it causes a lot of noise in spans. This should hopefully further reduce [SoftTimelineExceeded](https://sentry.sentry.io/issues/3389239864/?project=1&query=is%3Aunresolved&referrer=issue-stream&statsPeriod=7d&stream_index=21) (indirectly) and the `sentry.on_demand_metrics.get_widget_metric_specs` metric (directly). 
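As a sketch of why the second hunk below swaps `.filter()` for a list comprehension (the model and class names come from this diff, while the exact import paths and the surrounding `prefetch_related` call are assumptions about the calling code): Django serves `related_set.all()` from the prefetch cache, but `related_set.filter(...)` bypasses that cache and issues one extra SQL query per widget, which is the N+1:

```python
from sentry.models.dashboard_widget import DashboardWidgetQuery
from sentry.snuba.metrics.extraction import OnDemandMetricSpecVersioning

version = OnDemandMetricSpecVersioning.get_default_spec_version().version

# Assumed setup: the related on-demand rows are prefetched up front.
widget_queries = DashboardWidgetQuery.objects.prefetch_related(
    "dashboardwidgetqueryondemand_set"
)

for widget_query in widget_queries:
    # Before: .filter() ignores the prefetch cache, costing one SQL query per
    # loop iteration.
    # entries = widget_query.dashboardwidgetqueryondemand_set.filter(spec_version=version)

    # After: filter in Python against the rows prefetch_related already loaded.
    entries = [
        entry
        for entry in widget_query.dashboardwidgetqueryondemand_set.all()
        if entry.spec_version == version
    ]
```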
Related (same fix): https://github.com/getsentry/sentry/pull/64902 --- src/sentry/relay/config/metric_extraction.py | 10 ++++++---- src/sentry/snuba/metrics/extraction.py | 4 ---- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index c90095ca92225d..20720309f8aacb 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -341,7 +341,7 @@ def _update_state_with_spec_limit( widget_queries.setdefault(spec_version.version, set()) widget_queries[spec_version.version].add(widget_query) - for (version, widget_query_set) in widget_queries.items(): + for version, widget_query_set in widget_queries.items(): for widget_query in widget_query_set: widget_query.dashboardwidgetqueryondemand_set.filter(spec_version=version).update( extraction_state=OnDemandExtractionState.DISABLED_SPEC_LIMIT @@ -537,9 +537,11 @@ def _widget_query_stateful_extraction_enabled(widget_query: DashboardWidgetQuery this assumes stateful extraction can be used, and returns the enabled state.""" stateful_extraction_version = OnDemandMetricSpecVersioning.get_default_spec_version().version - on_demand_entries = widget_query.dashboardwidgetqueryondemand_set.filter( - spec_version=stateful_extraction_version - ) + on_demand_entries = [ + entry + for entry in widget_query.dashboardwidgetqueryondemand_set.all() + if entry.spec_version == stateful_extraction_version + ] if len(on_demand_entries) != 1: with sentry_sdk.push_scope() as scope: diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py index 8ddcf1743ac372..666abbfb38312d 100644 --- a/src/sentry/snuba/metrics/extraction.py +++ b/src/sentry/snuba/metrics/extraction.py @@ -1155,10 +1155,6 @@ def _eager_process(self): self._metric_type = metric_type self._arguments = arguments or [] - sentry_sdk.start_span( - op="OnDemandMetricSpec.spec_type", description=self.spec_type - ).finish() - @property def field_to_extract(self): if self.op in ("on_demand_apdex", "on_demand_count_web_vitals"): From d549f7e4ca7cfa23a80ec19c5d95cead6760e2d9 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Tue, 13 Feb 2024 10:00:01 -0500 Subject: [PATCH 305/357] ref(cli): Add env for pushing snuba queries to a file (#65024) ### Summary In local dev for debugging it might be nicer to have snuba logs pushed into a file; easier to copy and devserver is pretty spammy nowadays. This isn't set by default, we can likely leave managing the resulting log file size to the developer. eg. 
usage `SENTRY_SNUBA_INFO_FILE="../snuba.log" devserver` --- src/sentry/utils/snuba.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 8f2a0155b02b4e..2ab31b4fdcb56e 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -68,7 +68,21 @@ } # Show the snuba query params and the corresponding sql or errors in the server logs -SNUBA_INFO = os.environ.get("SENTRY_SNUBA_INFO", "false").lower() in ("true", "1") +SNUBA_INFO_FILE = os.environ.get("SENTRY_SNUBA_INFO_FILE", "") + + +def log_snuba_info(content): + if SNUBA_INFO_FILE: + with open(SNUBA_INFO_FILE, "a") as file: + file.writelines(content) + else: + print(content) # NOQA: only prints when an env variable is set + + +SNUBA_INFO = ( + os.environ.get("SENTRY_SNUBA_INFO", "false").lower() in ("true", "1") or SNUBA_INFO_FILE +) + if SNUBA_INFO: import sqlparse @@ -966,14 +980,14 @@ def _bulk_snuba_query( body = json.loads(response.data, skip_trace=True) if SNUBA_INFO: if "sql" in body: - print( # NOQA: only prints when an env variable is set + log_snuba_info( "{}.sql:\n {}".format( headers.get("referer", ""), sqlparse.format(body["sql"], reindent_aligned=True), ) ) if "error" in body: - print( # NOQA: only prints when an env variable is set + log_snuba_info( "{}.err: {}".format(headers.get("referer", ""), body["error"]) ) except ValueError: @@ -1045,9 +1059,7 @@ def _snuba_query( if SNUBA_INFO: import pprint - print( # NOQA: only prints when an env variable is set - f"{referrer}.body:\n {pprint.pformat(request.to_dict())}" - ) + log_snuba_info(f"{referrer}.body:\n {pprint.pformat(request.to_dict())}") request.flags.debug = True if isinstance(request.query, MetricsQuery): From 36eee7ebfe0dcc41d5b13c2472f5b7aa8165b62b Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Tue, 13 Feb 2024 10:11:46 -0500 Subject: [PATCH 306/357] build: update sentry eslint package to improve perf (#65072) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates internal package that disables certain rules in `eslint-plugin-import`. ### Before ``` ❯ yarn lint:js yarn run v1.22.5 $ eslint static/app tests/js --ext .js,.jsx,.ts,.tsx ✨ Done in 40.17s. ``` ### After ``` ❯ yarn lint:js yarn run v1.22.5 $ eslint static/app tests/js --ext .js,.jsx,.ts,.tsx ✨ Done in 34.74s. 
``` --- package.json | 2 +- yarn.lock | 40 ++++++++++++++++++++-------------------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/package.json b/package.json index 593ed2b628be9e..8e95fba3b2894f 100644 --- a/package.json +++ b/package.json @@ -192,7 +192,7 @@ "babel-plugin-dynamic-import-node": "^2.3.3", "benchmark": "^2.1.4", "eslint": "8.49.0", - "eslint-config-sentry-app": "2.3.0", + "eslint-config-sentry-app": "2.4.0", "html-webpack-plugin": "^5.5.0", "jest": "29.6.2", "jest-canvas-mock": "^2.5.2", diff --git a/yarn.lock b/yarn.lock index 4d5bb9bae88d09..1a2243c092b0c4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6083,40 +6083,40 @@ escodegen@^2.0.0: optionalDependencies: source-map "~0.6.1" -eslint-config-sentry-app@2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/eslint-config-sentry-app/-/eslint-config-sentry-app-2.3.0.tgz#b634a0ab498e4b37127eb68825673473a76b682c" - integrity sha512-A9rjtxM/+UnGn1UKNYbeCjf/dgdHLbZ6bcgZ1OFr8LpYI0yNnLJjbyjSTJsN8RpEIhKKYWbzsw9gvt3b5B1lHA== +eslint-config-sentry-app@2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/eslint-config-sentry-app/-/eslint-config-sentry-app-2.4.0.tgz#471a75c0d95a28a5235806562702c7fc40522b7d" + integrity sha512-ulbj1mEAcwXhq5mqbavzibbBPKXNRHynPeVvwAaw/uobXe4xjh1ocR/FNSoNAVnSaY18lKCk8nahxDlK3J86OQ== dependencies: "@emotion/eslint-plugin" "^11.11.0" "@typescript-eslint/eslint-plugin" "^6.19.0" "@typescript-eslint/parser" "^6.19.0" - eslint-config-sentry "^2.3.0" - eslint-config-sentry-react "^2.3.0" + eslint-config-sentry "^2.4.0" + eslint-config-sentry-react "^2.4.0" eslint-import-resolver-typescript "^2.7.1" eslint-import-resolver-webpack "^0.13.8" eslint-plugin-import "^2.29.1" eslint-plugin-jest "^27.6.3" eslint-plugin-no-lookahead-lookbehind-regexp "0.1.0" eslint-plugin-react "^7.33.2" - eslint-plugin-sentry "^2.3.0" + eslint-plugin-sentry "^2.4.0" eslint-plugin-simple-import-sort "^10.0.0" -eslint-config-sentry-react@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/eslint-config-sentry-react/-/eslint-config-sentry-react-2.3.0.tgz#cfb73157d7e9e6575d360a9122f0e2091e10de86" - integrity sha512-A4LCCArBMpGm7djCbw58uDTn8/FUPz882MYxAXagDpIZadIKi2CC+OTEcHul9HNApTz1JrrJ+hxcJv1dI5W6XA== +eslint-config-sentry-react@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/eslint-config-sentry-react/-/eslint-config-sentry-react-2.4.0.tgz#ef9eb629adfc166e99e8bafea01c51cd458e454e" + integrity sha512-2+7zTcvHhrddOsOTWqUduNdgjOjQwhOgJwmJMG8Phq5zCFLpFzTeXdlXjOsOFxnqE3KFXsGZrASV3/JT8yKFIg== dependencies: - eslint-config-sentry "^2.3.0" + eslint-config-sentry "^2.4.0" eslint-plugin-jest-dom "^5.1.0" eslint-plugin-react-hooks "^4.6.0" eslint-plugin-testing-library "^6.2.0" eslint-plugin-typescript-sort-keys "^3.1.0" -eslint-config-sentry@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/eslint-config-sentry/-/eslint-config-sentry-2.3.0.tgz#df8951e7b645c6a525449e2aae31d45e8eec11d7" - integrity sha512-H4dt4FkJ78DN/MHhxrx3o2BznncXN0/1SQpHQRtEfX2Vv9rGanvDVDH3RG/dreFAP19pqobMHXONLH4cYJAvTw== +eslint-config-sentry@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/eslint-config-sentry/-/eslint-config-sentry-2.4.0.tgz#151ccf4418e62432877ace8dcfc765358b93c166" + integrity sha512-mz7L6g3jJJOTU8XydGSw+RbpUNkAX2diNMNmbK0mk0WQXzUFAPqDOhMsa4v+WvxlLm06aQjdyxKx0/GllRvfvQ== eslint-import-resolver-node@^0.3.9: version "0.3.9" @@ -6235,10 +6235,10 @@ eslint-plugin-react@^7.33.2: semver "^6.3.1" string.prototype.matchall "^4.0.8" 
-eslint-plugin-sentry@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-sentry/-/eslint-plugin-sentry-2.3.0.tgz#ef04352e92bee9e53f6f73f1ecebabef7e01c89e" - integrity sha512-kHcP4GfcLO4SAEMLZ/LsK+7RBC5wZEZIwoExgUXsWbtOQ+VkMjuLDBrDXlLtyte0IjstIuGB4nXJ+SAL9HuHug== +eslint-plugin-sentry@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-sentry/-/eslint-plugin-sentry-2.4.0.tgz#1b3047effee49aa210f4bab7fd69e26f6f38d104" + integrity sha512-U98ISI1g8m1XkmZ16YvQn+Alvvokbg4duopcXvzzEmRPAru//czo7wcMTDIMlkuwrepMiX90f1W5HTULerrAng== dependencies: requireindex "~1.2.0" From 28f839193628488bd8feb63cb0034d87b5f5214d Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 13 Feb 2024 07:31:48 -0800 Subject: [PATCH 307/357] fix(issues): Timeline count w/ zero items, menu padding (#65049) --- .../views/issueDetails/traceTimeline/traceLink.tsx | 11 ++++++++--- .../traceTimeline/traceTimelineTooltip.tsx | 11 +++++------ 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceLink.tsx b/static/app/views/issueDetails/traceTimeline/traceLink.tsx index 4fa32243903d7e..985f646ada2340 100644 --- a/static/app/views/issueDetails/traceTimeline/traceLink.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceLink.tsx @@ -1,3 +1,4 @@ +import {Fragment} from 'react'; import styled from '@emotion/styled'; import Link from 'sentry/components/links/link'; @@ -48,9 +49,13 @@ export function TraceLink({event}: TraceLinkProps) { > {t('View Full Trace')} - {data.length >= 100 - ? t(' (100+ issues)') - : tn(' (%s issue)', ' (%s issues)', data.length)} + {data.length > 0 && ( + + {data.length >= 100 + ? t(' (100+ issues)') + : tn(' (%s issue)', ' (%s issues)', data.length)} + + )} diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx index ac0c4b8611a42e..ddfed423349618 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineTooltip.tsx @@ -36,13 +36,12 @@ export function TraceTimelineTooltip({event, timelineEvents}: TraceTimelineToolt timelineEvent => timelineEvent.id !== event.id ); const displayYouAreHere = filteredTimelineEvents.length !== timelineEvents.length; + const hasTitle = filteredTimelineEvents.length > 1 || displayYouAreHere; return ( {displayYouAreHere && {t('You are here')}} - - {(filteredTimelineEvents.length > 1 || displayYouAreHere) && ( - {t('Around the same time')} - )} + + {hasTitle && {t('Around the same time')}} {filteredTimelineEvents.slice(0, 3).map(timelineEvent => { const project = projects.find(p => p.slug === timelineEvent.project); return ( @@ -115,10 +114,10 @@ const UnstyledUnorderedList = styled('div')` width: 220px; `; -const EventItemsWrapper = styled('div')` +const EventItemsWrapper = styled('div')<{hasTitle: boolean}>` display: flex; flex-direction: column; - padding: ${space(1)} ${space(0.5)} ${space(0.5)} ${space(0.5)}; + padding: ${p => space(p.hasTitle ? 
1 : 0.5)} ${space(0.5)} ${space(0.5)} ${space(0.5)}; `; const EventItemsTitle = styled('div')` From 7c5b7eb62007809739e683b2c2a9129ae824a0be Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 13 Feb 2024 07:32:15 -0800 Subject: [PATCH 308/357] feat(issues): Add description to trace timeline (#65050) --- .../traceTimeline/traceTimeline.tsx | 73 ++++++++++--------- 1 file changed, 40 insertions(+), 33 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index d7b8a0e3efc623..c992edf8616992 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -3,6 +3,8 @@ import styled from '@emotion/styled'; import ErrorBoundary from 'sentry/components/errorBoundary'; import Placeholder from 'sentry/components/placeholder'; +import QuestionTooltip from 'sentry/components/questionTooltip'; +import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types'; import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; @@ -51,24 +53,47 @@ export function TraceTimeline({event}: TraceTimelineProps) { return ( - - {isLoading ? ( - - - - - ) : ( - - - {/* Sets a min width of 200 for testing */} - - - )} - + +
    + {isLoading ? ( + + + + + ) : ( + + + {/* Sets a min width of 200 for testing */} + + + )} +
    + + + +
    ); } +const TimelineWrapper = styled('div')` + display: grid; + grid-template-columns: 1fr auto; + align-items: start; + gap: ${space(2)}; + margin-top: ${space(0.5)}; +`; + +const QuestionTooltipWrapper = styled('div')` + margin-top: ${space(0.25)}; +`; + /** * Displays the container the dots appear inside of */ @@ -83,24 +108,6 @@ const TimelineOutline = styled('div')` background-color: ${p => p.theme.backgroundSecondary}; `; -/** - * Render all child elements directly on top of each other. - * - * This implementation does not remove the stack of elements from the document - * flow, so width/height is reserved. - * - * An alternative would be to use `position:absolute;` in which case the size - * would not be part of document flow and other elements could render behind. - */ -const Stacked = styled('div')` - display: grid; - grid-template: 1fr / 1fr; - > * { - grid-area: 1 / 1; - } - margin-top: ${space(0.5)}; -`; - const TimelineEventsContainer = styled('div')` position: relative; height: 34px; @@ -111,6 +118,6 @@ const LoadingSkeleton = styled('div')` display: flex; flex-direction: column; gap: ${space(0.25)}; - padding: ${space(0.75)} 0 ${space(1)}; + padding: ${space(0.5)} 0 ${space(1)}; height: 34px; `; From 1ee85fda902e94dee20dc85102b72b8450d9dcd9 Mon Sep 17 00:00:00 2001 From: Ogi <86684834+obostjancic@users.noreply.github.com> Date: Tue, 13 Feb 2024 16:56:06 +0100 Subject: [PATCH 309/357] feat(ddm): chart height (#65079) --- static/app/views/ddm/scratchpad.tsx | 2 ++ static/app/views/ddm/widget.tsx | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/static/app/views/ddm/scratchpad.tsx b/static/app/views/ddm/scratchpad.tsx index 50ccaa97c31935..539ce2de7bc6de 100644 --- a/static/app/views/ddm/scratchpad.tsx +++ b/static/app/views/ddm/scratchpad.tsx @@ -4,6 +4,7 @@ import * as echarts from 'echarts/core'; import {space} from 'sentry/styles/space'; import {getMetricsCorrelationSpanUrl} from 'sentry/utils/metrics'; +import {hasDDMExperimentalFeature} from 'sentry/utils/metrics/features'; import type {MetricWidgetQueryParams} from 'sentry/utils/metrics/types'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; @@ -82,6 +83,7 @@ export function MetricScratchpad() { focusArea={focusArea} showQuerySymbols={showQuerySymbols} onSampleClick={handleSampleClick} + chartHeight={hasDDMExperimentalFeature(organization) ? 200 : 300} highlightedSampleId={ selectedWidgetIndex === index ? 
highlightedSampleId : undefined } diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 9ae8e03455fd5e..679be166ac4a30 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -48,6 +48,7 @@ type MetricWidgetProps = { onChange: (index: number, data: Partial) => void; projects: PageFilters['projects']; widget: MetricWidgetQueryParams; + chartHeight?: number; focusArea?: FocusAreaProps; getChartPalette?: (seriesNames: string[]) => Record; hasSiblings?: boolean; @@ -82,6 +83,7 @@ export const MetricWidget = memo( focusArea, onSampleClick, highlightedSampleId, + chartHeight = 300, }: MetricWidgetProps) => { const handleChange = useCallback( (data: Partial) => { @@ -186,7 +188,7 @@ export const MetricWidget = memo( onChange={handleChange} focusArea={focusArea} samples={samples} - chartHeight={300} + chartHeight={chartHeight} chartGroup={DDM_CHART_GROUP} {...widget} /> From bbc1642b11f8d2bdaa4ac0293f0d6dd2f0244293 Mon Sep 17 00:00:00 2001 From: Colton Allen Date: Tue, 13 Feb 2024 10:06:28 -0600 Subject: [PATCH 310/357] fix(replays): Add coverage and update implementation to handle malformed payload submissions (#65032) This pull attempts to add coverage for every message permutation that can break ingestion. It also updates the implementation to satisfy the coverage. --- src/sentry/replays/consumers/recording.py | 23 ++- .../replays/consumers/recording_buffered.py | 22 ++- .../replays/usecases/ingest/__init__.py | 15 +- .../replays/consumers/test_recording.py | 180 ++++++++++++++++++ 4 files changed, 218 insertions(+), 22 deletions(-) diff --git a/src/sentry/replays/consumers/recording.py b/src/sentry/replays/consumers/recording.py index 13865f161f262b..300d2d2324ed18 100644 --- a/src/sentry/replays/consumers/recording.py +++ b/src/sentry/replays/consumers/recording.py @@ -12,6 +12,7 @@ from arroyo.types import Commit, Message, Partition from django.conf import settings from sentry_kafka_schemas import get_codec +from sentry_kafka_schemas.codecs import ValidationError from sentry_kafka_schemas.schema_types.ingest_replay_recordings_v1 import ReplayRecording from sentry_sdk.tracing import Span @@ -25,7 +26,7 @@ @dataclasses.dataclass class MessageContext: - message: ReplayRecording + message: bytes transaction: Span current_hub: sentry_sdk.Hub @@ -110,14 +111,19 @@ def initialize_threaded_context(message: Message[KafkaPayload]) -> MessageContex < getattr(settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0), ) current_hub = sentry_sdk.Hub(sentry_sdk.Hub.current) - message_dict = RECORDINGS_CODEC.decode(message.payload.value) - return MessageContext(message_dict, transaction, current_hub) + return MessageContext(message.payload.value, transaction, current_hub) def process_message_threaded(message: Message[MessageContext]) -> Any: """Move the replay payload to permanent storage.""" context: MessageContext = message.payload - message_dict = context.message + + try: + message_dict: ReplayRecording = RECORDINGS_CODEC.decode(context.message) + except ValidationError: + # TODO: DLQ + logger.exception("Could not decode recording message.") + return None ingest_recording(message_dict, context.transaction, context.current_hub) @@ -131,5 +137,12 @@ def process_message(message: Message[KafkaPayload]) -> Any: < getattr(settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0), ) current_hub = sentry_sdk.Hub(sentry_sdk.Hub.current) - message_dict = RECORDINGS_CODEC.decode(message.payload.value) + + try: + message_dict: ReplayRecording = 
RECORDINGS_CODEC.decode(message.payload.value) + except ValidationError: + # TODO: DLQ + logger.exception("Could not decode recording message.") + return None + ingest_recording(message_dict, transaction, current_hub) diff --git a/src/sentry/replays/consumers/recording_buffered.py b/src/sentry/replays/consumers/recording_buffered.py index 3fa2f6045b47d9..69c9a209aa98aa 100644 --- a/src/sentry/replays/consumers/recording_buffered.py +++ b/src/sentry/replays/consumers/recording_buffered.py @@ -52,15 +52,11 @@ from arroyo.processing.strategies.run_task import RunTask from arroyo.types import BaseValue, Commit, Message, Partition from sentry_kafka_schemas import get_codec +from sentry_kafka_schemas.codecs import ValidationError from sentry_kafka_schemas.schema_types.ingest_replay_recordings_v1 import ReplayRecording from sentry.replays.lib.storage import RecordingSegmentStorageMeta, storage -from sentry.replays.usecases.ingest import ( - MissingRecordingSegmentHeaders, - decompress, - process_headers, - track_initial_segment_event, -) +from sentry.replays.usecases.ingest import decompress, process_headers, track_initial_segment_event from sentry.replays.usecases.ingest.dom_index import ( ReplayActionsEvent, emit_replay_actions, @@ -195,12 +191,20 @@ def new(self) -> RecordingBuffer: def process_message(buffer: RecordingBuffer, message: bytes) -> None: with sentry_sdk.start_span(op="replays.consumer.recording.decode_kafka_message"): - decoded_message: ReplayRecording = RECORDINGS_CODEC.decode(message) + try: + decoded_message: ReplayRecording = RECORDINGS_CODEC.decode(message) + except ValidationError: + # TODO: DLQ + logger.exception("Could not decode recording message.") + return None try: headers, recording_data = process_headers(decoded_message["payload"]) - except MissingRecordingSegmentHeaders: - logger.warning("missing header on %s", decoded_message["replay_id"]) + except Exception: + # TODO: DLQ + logger.exception( + "Recording headers could not be extracted %s", decoded_message["replay_id"] + ) return None # Append an upload event to the state object for later processing. diff --git a/src/sentry/replays/usecases/ingest/__init__.py b/src/sentry/replays/usecases/ingest/__init__.py index ffc9053421708f..d7c34e082fe0f9 100644 --- a/src/sentry/replays/usecases/ingest/__init__.py +++ b/src/sentry/replays/usecases/ingest/__init__.py @@ -85,8 +85,9 @@ def _ingest_recording(message: RecordingIngestMessage, transaction: Span) -> Non try: headers, recording_segment = process_headers(message.payload_with_headers) - except MissingRecordingSegmentHeaders: - logger.warning("missing header on %s", message.replay_id) + except Exception: + # TODO: DLQ + logger.exception("Recording headers could not be extracted %s", message.replay_id) return None # Normalize ingest data into a standardized ingest format. 
@@ -154,12 +155,10 @@ def track_initial_segment_event( @metrics.wraps("replays.usecases.ingest.process_headers") def process_headers(bytes_with_headers: bytes) -> tuple[RecordingSegmentHeaders, bytes]: - try: - recording_headers, recording_segment = bytes_with_headers.split(b"\n", 1) - except ValueError: - raise MissingRecordingSegmentHeaders - else: - return json.loads(recording_headers, use_rapid_json=True), recording_segment + recording_headers_json, recording_segment = bytes_with_headers.split(b"\n", 1) + recording_headers = json.loads(recording_headers_json) + assert isinstance(recording_headers.get("segment_id"), int) + return recording_headers, recording_segment def replay_recording_segment_cache_id(project_id: int, replay_id: str, segment_id: str) -> str: diff --git a/tests/sentry/replays/consumers/test_recording.py b/tests/sentry/replays/consumers/test_recording.py index 3f4d9095bee14f..2cf134e2fdad42 100644 --- a/tests/sentry/replays/consumers/test_recording.py +++ b/tests/sentry/replays/consumers/test_recording.py @@ -213,6 +213,186 @@ def test_invalid_json(self, emit_replay_actions, mock_record, mock_onboarding_ta # No replay actions were emitted because JSON deserialization failed. assert not emit_replay_actions.called + @patch("sentry.models.OrganizationOnboardingTask.objects.record") + @patch("sentry.analytics.record") + @patch("sentry.replays.usecases.ingest.dom_index.emit_replay_actions") + def test_invalid_payload_invalid_headers( + self, emit_replay_actions, mock_record, mock_onboarding_task + ): + """Test missing segment_id key does not break ingestion.""" + segment_id = 0 + + self.submit( + [ + { + "type": "replay_recording_not_chunked", + "replay_id": self.replay_id, + "org_id": self.organization.id, + "key_id": 123, + "project_id": self.project.id, + "received": int(time.time()), + "retention_days": 30, + "payload": b'{"something":"else"}\n' + b'[{"hello":"world"}]', + } + ] + ) + + # Assert the message was totally broken and nothing happened. + bytes = self.get_recording_data(segment_id) + assert bytes is None + self.project.refresh_from_db() + assert not self.project.flags.has_replays + # assert not mock_onboarding_task.called + # assert not mock_record.called + assert not emit_replay_actions.called + + @patch("sentry.models.OrganizationOnboardingTask.objects.record") + @patch("sentry.analytics.record") + @patch("sentry.replays.usecases.ingest.dom_index.emit_replay_actions") + def test_invalid_payload_invalid_unicode_codepoint( + self, emit_replay_actions, mock_record, mock_onboarding_task + ): + """Test invalid unicode codepoint in headers does not break ingestion.""" + segment_id = 0 + + self.submit( + [ + { + "type": "replay_recording_not_chunked", + "replay_id": self.replay_id, + "org_id": self.organization.id, + "key_id": 123, + "project_id": self.project.id, + "received": int(time.time()), + "retention_days": 30, + "payload": '{"segment_id":"\\ud83c"}\n'.encode("utf-16") + + b'[{"hello":"world"}]', + } + ] + ) + + # Assert the message was totally broken and nothing happened. 
+ bytes = self.get_recording_data(segment_id) + assert bytes is None + self.project.refresh_from_db() + assert not self.project.flags.has_replays + # assert not mock_onboarding_task.called + # assert not mock_record.called + assert not emit_replay_actions.called + + @patch("sentry.models.OrganizationOnboardingTask.objects.record") + @patch("sentry.analytics.record") + @patch("sentry.replays.usecases.ingest.dom_index.emit_replay_actions") + def test_invalid_payload_malformed_headers( + self, emit_replay_actions, mock_record, mock_onboarding_task + ): + """Test malformed headers in payload attribute do not break ingestion.""" + segment_id = 0 + + self.submit( + [ + { + "type": "replay_recording_not_chunked", + "replay_id": self.replay_id, + "org_id": self.organization.id, + "key_id": 123, + "project_id": self.project.id, + "received": int(time.time()), + "retention_days": 30, + "payload": b"i am invalid\n" + b'[{"hello":"world"}]', + } + ] + ) + + # Assert the message was totally broken and nothing happened. + bytes = self.get_recording_data(segment_id) + assert bytes is None + self.project.refresh_from_db() + assert not self.project.flags.has_replays + # assert not mock_onboarding_task.called + # assert not mock_record.called + assert not emit_replay_actions.called + + @patch("sentry.models.OrganizationOnboardingTask.objects.record") + @patch("sentry.analytics.record") + @patch("sentry.replays.usecases.ingest.dom_index.emit_replay_actions") + def test_invalid_payload_missing_headers( + self, emit_replay_actions, mock_record, mock_onboarding_task + ): + """Test missing headers in payload attribute does not break ingestion.""" + segment_id = 0 + + self.submit( + [ + { + "type": "replay_recording_not_chunked", + "replay_id": self.replay_id, + "org_id": self.organization.id, + "key_id": 123, + "project_id": self.project.id, + "received": int(time.time()), + "retention_days": 30, + "payload": b"no headers :P", + } + ] + ) + + # Assert the message was totally broken and nothing happened. + bytes = self.get_recording_data(segment_id) + assert bytes is None + self.project.refresh_from_db() + assert not self.project.flags.has_replays + # assert not mock_onboarding_task.called + # assert not mock_record.called + assert not emit_replay_actions.called + + @patch("sentry.models.OrganizationOnboardingTask.objects.record") + @patch("sentry.analytics.record") + @patch("sentry.replays.usecases.ingest.dom_index.emit_replay_actions") + def test_invalid_payload_type(self, emit_replay_actions, mock_record, mock_onboarding_task): + """Test invalid payload types do not break ingestion.""" + segment_id = 0 + + self.submit( + [ + { + "type": "replay_recording_not_chunked", + "replay_id": self.replay_id, + "org_id": self.organization.id, + "key_id": 123, + "project_id": self.project.id, + "received": int(time.time()), + "retention_days": 30, + "payload": "I'm a string!", + } + ] + ) + + # Assert the message was totally broken and nothing happened. 
+ bytes = self.get_recording_data(segment_id) + assert bytes is None + self.project.refresh_from_db() + assert not self.project.flags.has_replays + # assert not mock_onboarding_task.called + # assert not mock_record.called + assert not emit_replay_actions.called + + @patch("sentry.models.OrganizationOnboardingTask.objects.record") + @patch("sentry.analytics.record") + @patch("sentry.replays.usecases.ingest.dom_index.emit_replay_actions") + def test_invalid_message(self, emit_replay_actions, mock_record, mock_onboarding_task): + """Test invalid messages do not break ingestion.""" + self.submit(["i am totally wrong"]) + + # Assert the message was totally broken and nothing happened. + bytes = self.get_recording_data(0) + assert bytes is None + self.project.refresh_from_db() + assert not self.project.flags.has_replays + # assert not mock_onboarding_task.called + # assert not mock_record.called + assert not emit_replay_actions.called + class ThreadedRecordingTestCase(RecordingTestCase): force_synchronous = False From b96b5971ba8bee0d809dfff6fc8a7f139558fa52 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Tue, 13 Feb 2024 11:11:33 -0500 Subject: [PATCH 311/357] feat(hybridcloud) Add tasks for webhookpayload delivery (#64395) We've had some issues with outbox delivery timing out and getting backlogged. Outboxes are well suited to small payloads on low-latency RPC calls which webhooks are not. By splitting storage off from outboxes we can handle updating delivery records in a way that avoids keeping transactions open while network requests are being sent. The separate storage model also allows us to simplify the queries we use to find messages that need to be delivered as we don't have to spend cycles attempting to coalesce records that cannot be coalesced. These changes add the tasks to process webhooks with this new storage model and include an option to control rollout. 
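
To make the moving parts concrete, here is a minimal, self-contained sketch of
the delivery model this patch introduces. It is an illustration only: the
`Payload` dataclass and the `send` callable below are hypothetical stand-ins
for `WebhookPayload` and the region silo client, and the real tasks live in
`sentry.hybridcloud.tasks.deliver_webhooks`.

```python
from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Callable

MAX_MAILBOX_DRAIN = 50  # max messages delivered per drain run
MAX_ATTEMPTS = 10       # discard a message after this many failed deliveries


@dataclass
class Payload:
    """In-memory stand-in for a WebhookPayload row."""

    id: int
    mailbox_name: str
    attempts: int = 0
    schedule_for: datetime = datetime.min


def schedule(payloads: list[Payload], now: datetime) -> list[int]:
    """Return the head (lowest id) of each mailbox that is due for delivery."""
    heads: dict[str, Payload] = {}
    for p in sorted(payloads, key=lambda p: p.id):
        heads.setdefault(p.mailbox_name, p)
    due = [p for p in heads.values() if p.schedule_for <= now]
    # Push each batch into the future so a slow-but-alive drain is not
    # competed with by the next scheduler tick.
    for head in due:
        batch = sorted(
            (p for p in payloads if p.mailbox_name == head.mailbox_name and p.id >= head.id),
            key=lambda p: p.id,
        )[:MAX_MAILBOX_DRAIN]
        for p in batch:
            p.schedule_for = now + timedelta(minutes=1)
    return [p.id for p in due]


def drain_mailbox(payloads: list[Payload], head_id: int, send: Callable[[Payload], bool]) -> None:
    """Deliver messages in id order; stop at the first failure to keep ordering."""
    head = next(p for p in payloads if p.id == head_id)
    batch = sorted(
        (p for p in payloads if p.mailbox_name == head.mailbox_name and p.id >= head.id),
        key=lambda p: p.id,
    )[:MAX_MAILBOX_DRAIN]
    for p in batch:
        if p.attempts >= MAX_ATTEMPTS:
            payloads.remove(p)  # attempts exhausted: discard
            continue
        p.attempts += 1
        if not send(p):
            return  # leave the remainder for a future scheduled drain
        payloads.remove(p)  # delivered: delete the record


def use_webhookpayload(identifier: int, rollout_rate: float) -> bool:
    """Rollout gate used by the parsers: map the shard identifier into [0, 1)."""
    return ((identifier % 100000) / 100000) < rollout_rate
```

Stopping at the first failed delivery preserves per-mailbox ordering, and
because the scheduler only ever looks at mailbox heads, one backlogged
integration cannot starve delivery for the others.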
--- src/sentry/conf/server.py | 11 +- .../hybridcloud/models/webhookpayload.py | 19 +- src/sentry/hybridcloud/tasks/__init__.py | 1 + .../hybridcloud/tasks/deliver_webhooks.py | 265 ++++++++++++++ .../middleware/integrations/parsers/base.py | 38 +- .../middleware/integrations/parsers/slack.py | 2 + src/sentry/options/defaults.py | 6 + src/sentry/testutils/factories.py | 2 +- src/sentry/testutils/outbox.py | 44 +++ tests/sentry/hybridcloud/tasks/__init__.py | 0 .../tasks/test_deliver_webhooks.py | 337 ++++++++++++++++++ .../integrations/parsers/test_bitbucket.py | 24 ++ .../parsers/test_bitbucket_server.py | 41 ++- .../integrations/parsers/test_github.py | 23 ++ .../parsers/test_github_enterprise.py | 26 ++ .../integrations/parsers/test_gitlab.py | 27 ++ .../integrations/parsers/test_jira.py | 27 ++ .../integrations/parsers/test_jira_server.py | 29 ++ .../integrations/parsers/test_msteams.py | 40 +++ .../integrations/parsers/test_plugin.py | 24 ++ .../integrations/parsers/test_vercel.py | 16 +- .../integrations/parsers/test_vsts.py | 40 +++ 22 files changed, 1022 insertions(+), 20 deletions(-) create mode 100644 src/sentry/hybridcloud/tasks/__init__.py create mode 100644 src/sentry/hybridcloud/tasks/deliver_webhooks.py create mode 100644 tests/sentry/hybridcloud/tasks/__init__.py create mode 100644 tests/sentry/hybridcloud/tasks/test_deliver_webhooks.py diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index d787dc50805d3e..ecc66c2d800271 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -722,6 +722,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: CELERY_IMPORTS = ( "sentry.data_export.tasks", "sentry.discover.tasks", + "sentry.hybridcloud.tasks.deliver_webhooks", "sentry.incidents.tasks", "sentry.snuba.tasks", "sentry.replays.tasks", @@ -819,6 +820,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: ), Queue("options.control", routing_key="options.control", exchange=control_exchange), Queue("outbox.control", routing_key="outbox.control", exchange=control_exchange), + Queue("webhook.control", routing_key="webhook.control", exchange=control_exchange), ] CELERY_ISSUE_STATES_QUEUE = Queue( @@ -952,7 +954,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: }, "deliver-from-outbox-control": { "task": "sentry.tasks.enqueue_outbox_jobs_control", - # Run every 10 seconds as integration webhooks are delivered by this task + # Run every 10 seconds to keep consistency times low "schedule": timedelta(seconds=10), "options": {"expires": 60, "queue": "outbox.control"}, }, @@ -978,6 +980,12 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "schedule": crontab_with_minute_jitter(hour="*/6"), "options": {"expires": 60 * 25, "queue": "integrations.control"}, }, + "deliver-webhooks-control": { + "task": "sentry.hybridcloud.tasks.deliver_webhooks.schedule_webhook_delivery", + # Run every 10 seconds as integration webhooks are delivered by this task + "schedule": timedelta(seconds=10), + "options": {"expires": 60, "queue": "webhook.control"}, + }, } # Most tasks run in the regions @@ -1259,6 +1267,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: # We prefer using crontab, as the time for timedelta will reset on each deployment. 
More information: https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html#periodic-tasks TIMEDELTA_ALLOW_LIST = { "deliver-from-outbox-control", + "deliver-webhooks-control", "flush-buffers", "sync-options", "sync-options-control", diff --git a/src/sentry/hybridcloud/models/webhookpayload.py b/src/sentry/hybridcloud/models/webhookpayload.py index 487893577e2e3e..eeb279e52d6a7b 100644 --- a/src/sentry/hybridcloud/models/webhookpayload.py +++ b/src/sentry/hybridcloud/models/webhookpayload.py @@ -1,7 +1,7 @@ from __future__ import annotations import datetime -from typing import Self +from typing import Any, Self from django.db import models from django.http import HttpRequest @@ -58,6 +58,18 @@ class Meta: "request_path", ) + @classmethod + def get_attributes_from_request( + cls, + request: HttpRequest, + ) -> dict[str, Any]: + return dict( + request_method=request.method, + request_path=request.get_full_path(), + request_headers=json.dumps({k: v for k, v in request.headers.items()}), + request_body=request.body.decode(encoding="utf-8"), + ) + @classmethod def create_from_request( cls, @@ -72,10 +84,7 @@ def create_from_request( mailbox_name=f"{provider}:{identifier}", region_name=region, integration_id=integration_id, - request_method=request.method, - request_path=request.get_full_path(), - request_headers=json.dumps({k: v for k, v in request.headers.items()}), - request_body=request.body.decode(encoding="utf-8"), + **cls.get_attributes_from_request(request), ) def schedule_next_attempt(self): diff --git a/src/sentry/hybridcloud/tasks/__init__.py b/src/sentry/hybridcloud/tasks/__init__.py new file mode 100644 index 00000000000000..afdcdfd440b3c8 --- /dev/null +++ b/src/sentry/hybridcloud/tasks/__init__.py @@ -0,0 +1 @@ +from .deliver_webhooks import * # noqa diff --git a/src/sentry/hybridcloud/tasks/deliver_webhooks.py b/src/sentry/hybridcloud/tasks/deliver_webhooks.py new file mode 100644 index 00000000000000..298a4d0646fbc4 --- /dev/null +++ b/src/sentry/hybridcloud/tasks/deliver_webhooks.py @@ -0,0 +1,265 @@ +import datetime +import logging + +import sentry_sdk +from django.db.models import Min, Subquery +from django.utils import timezone +from requests import Response +from requests.models import HTTPError +from rest_framework import status + +from sentry.exceptions import RestrictedIPAddress +from sentry.hybridcloud.models.webhookpayload import BACKOFF_INTERVAL, MAX_ATTEMPTS, WebhookPayload +from sentry.shared_integrations.exceptions import ( + ApiConflictError, + ApiConnectionResetError, + ApiError, + ApiHostError, + ApiTimeoutError, +) +from sentry.silo.base import SiloMode +from sentry.silo.client import RegionSiloClient, SiloClientError +from sentry.tasks.base import instrumented_task +from sentry.types.region import get_region_by_name +from sentry.utils import json, metrics + +logger = logging.getLogger(__name__) + +MAX_MAILBOX_DRAIN = 50 +""" +The maximum number of records that will be delivered in a scheduled delivery + +There is a balance here between clearing big backlogs and having races when +a batch is slow but not timeout slow. +""" + +BATCH_SCHEDULE_OFFSET = datetime.timedelta(minutes=BACKOFF_INTERVAL) +""" +The time that batches are scheduled into the future when work starts. 
+
+Spacing batches out helps minimize competitive races when delivery is slow
+but not at the timeout threshold
+"""
+
+BATCH_SIZE = 1000
+"""The number of mailboxes that will have messages scheduled each cycle"""
+
+
+class DeliveryFailed(Exception):
+    """
+    Used to signal an expected delivery failure.
+    """
+
+    pass
+
+
+@instrumented_task(
+    name="sentry.hybridcloud.tasks.deliver_webhooks.schedule_webhook_delivery",
+    queue="webhook.control",
+    silo_mode=SiloMode.CONTROL,
+)
+def schedule_webhook_delivery(**kwargs) -> None:
+    """
+    Find mailboxes that contain undelivered webhooks that were scheduled
+    to be delivered now or in the past.
+
+    Triggered frequently by celery beat.
+    """
+    # The double call to .values() ensures that the group by includes mailbox_name
+    # but only id_min is selected
+    head_of_line = (
+        WebhookPayload.objects.all()
+        .values("mailbox_name")
+        .annotate(id_min=Min("id"))
+        .values("id_min")
+    )
+    # Get any heads that are scheduled to run
+    scheduled_mailboxes = WebhookPayload.objects.filter(
+        schedule_for__lte=timezone.now(),
+        id__in=Subquery(head_of_line),
+    ).values("id", "mailbox_name")
+
+    metrics.distribution(
+        "hybridcloud.schedule_webhook_delivery.mailbox_count", scheduled_mailboxes.count()
+    )
+    for record in scheduled_mailboxes[:BATCH_SIZE]:
+        # Reschedule the records that we will attempt to deliver next.
+        # We reschedule in an attempt to minimize races for potentially in-flight batches.
+        mailbox_batch = (
+            WebhookPayload.objects.filter(id__gte=record["id"], mailbox_name=record["mailbox_name"])
+            .order_by("id")
+            .values("id")[:MAX_MAILBOX_DRAIN]
+        )
+        WebhookPayload.objects.filter(id__in=Subquery(mailbox_batch)).update(
+            schedule_for=timezone.now() + BATCH_SCHEDULE_OFFSET
+        )
+
+        drain_mailbox.delay(record["id"])
+
+
+@instrumented_task(
+    name="sentry.hybridcloud.tasks.deliver_webhooks.drain_mailbox",
+    queue="webhook.control",
+    silo_mode=SiloMode.CONTROL,
+)
+def drain_mailbox(payload_id: int) -> None:
+    """
+    Attempt to deliver up to 50 webhooks from the mailbox that `payload_id` belongs to.
+
+    Messages will be delivered in order until one fails or 50 are delivered.
+    Once messages have successfully been delivered or discarded, they are deleted.
+    """
+    try:
+        payload = WebhookPayload.objects.get(id=payload_id)
+    except WebhookPayload.DoesNotExist:
+        # We could have hit a race condition. Since we've already lost the race,
+        # return and let the other (or a future) process continue.
+        metrics.incr("hybridcloud.deliver_webhooks.delivery", tags={"outcome": "race"})
+        logger.info(
+            "deliver_webhook.potential_race",
+            extra={
+                "id": payload_id,
+            },
+        )
+        return
+
+    # Drain up to a max number of records. 
This helps ensure that one slow mailbox doesn't + # cause backups for other mailboxes + query = WebhookPayload.objects.filter( + id__gte=payload.id, mailbox_name=payload.mailbox_name + ).order_by("id") + for record in query[:MAX_MAILBOX_DRAIN]: + try: + deliver_message(record) + except DeliveryFailed as err: + metrics.incr("hybridcloud.deliver_webhooks.delivery", tags={"outcome": "retry"}) + logger.info( + "deliver_webhook.delivery_failed", + extra={ + "error": str(err), + "payload_id": payload.id, + "attempts": payload.attempts, + "mailbox_name": payload.mailbox_name, + }, + ) + return + + +def deliver_message(payload: WebhookPayload) -> None: + """Deliver a message if it still has delivery attempts remaining""" + if payload.attempts >= MAX_ATTEMPTS: + payload.delete() + + metrics.incr("hybridcloud.deliver_webhooks.delivery", tags={"outcome": "attempts_exceed"}) + logger.info( + "deliver_webhook.discard", extra={"id": payload.id, "attempts": payload.attempts} + ) + return + + payload.schedule_next_attempt() + perform_request(payload) + payload.delete() + + metrics.incr("hybridcloud.deliver_webhooks.delivery", tags={"outcome": "ok"}) + metrics.distribution("hybridcloud.deliver_webhooks.attempts", payload.attempts) + + +def perform_request(payload: WebhookPayload) -> None: + logging_context: dict[str, str | int] = { + "payload_id": payload.id, + "attempt": payload.attempts, + } + region = get_region_by_name(name=payload.region_name) + + try: + client = RegionSiloClient(region=region) + with metrics.timer( + "hybridcloud.deliver_webhooks.send_request", + tags={"destination_region": region.name}, + ): + logging_context["region"] = region.name + logging_context["request_method"] = payload.request_method + logging_context["request_path"] = payload.request_path + + headers = json.loads(payload.request_headers) + response = client.request( + method=payload.request_method, + path=payload.request_path, + headers=headers, + # We need to send the body as raw bytes to avoid interfering with webhook signatures + data=payload.request_body.encode("utf-8"), + json=False, + ) + logger.info( + "webhook_proxy.complete", + extra={ + "status": getattr( + response, "status_code", 204 + ), # Request returns empty dict instead of a response object when the code is a 204 + **logging_context, + }, + ) + except ApiHostError as err: + metrics.incr( + "hybridcloud.deliver_webhooks.failure", + tags={"reason": "host_error", "destination_region": region.name}, + ) + with sentry_sdk.push_scope() as scope: + scope.set_context( + "region", + { + "name": region.name, + "id": region.category, + "address": region.address, + }, + ) + err_cause = err.__cause__ + if err_cause is not None and isinstance(err_cause, RestrictedIPAddress): + # Region silos that are IP address restricted are actionable. + silo_client_err = SiloClientError("Region silo is IP address restricted") + silo_client_err.__cause__ = err + sentry_sdk.capture_exception(silo_client_err) + raise DeliveryFailed() + + sentry_sdk.capture_exception(err) + raise DeliveryFailed() from err + except ApiConflictError as err: + metrics.incr( + "hybridcloud.deliver_webhooks.failure", + tags={"reason": "conflict", "destination_region": region.name}, + ) + logger.warning( + "hybridcloud.deliver_webhooks.conflict_occurred", + extra={"conflict_text": err.text, **logging_context}, + ) + # We don't retry conflicts as those are explicit failure code to drop webhook. 
+ except (ApiTimeoutError, ApiConnectionResetError) as err: + metrics.incr( + "hybridcloud.deliver_webhooks.failure", + tags={"reason": "timeout_reset", "destination_region": region.name}, + ) + logger.warning("hybridcloud.deliver_webhooks.timeout_error", extra=logging_context) + raise DeliveryFailed() from err + except ApiError as err: + err_cause = err.__cause__ + if err_cause is not None and isinstance(err_cause, HTTPError): + orig_response: Response | None = err_cause.response + if ( + orig_response is not None + and status.HTTP_500_INTERNAL_SERVER_ERROR <= orig_response.status_code < 600 + ): + raise DeliveryFailed() from err + + # For some integrations, we make use of outboxes to handle asynchronous webhook requests. + # There is an edge case where webhook requests eventually become invalid and + # the 3rd-party destination (integration provider) will reject them. + # JWT expirations is one example of causing this issue. Issues like these are no longer salvageable, and we must + # discard these associated webhook outbox messages. If we do not discard them, then these outbox messages + # will be re-processed causing a backlog on the ControlOutbox table. + metrics.incr( + "hybridcloud.deliver_webhooks.failure", + tags={"reason": "discard", "destination_region": region.name}, + ) + logger.warning( + "hybridcloud.deliver_webhooks.api_error", extra={"error": str(err), **logging_context} + ) + raise DeliveryFailed() from err diff --git a/src/sentry/middleware/integrations/parsers/base.py b/src/sentry/middleware/integrations/parsers/base.py index 319afe3d278eaf..e8e5e3848d1703 100644 --- a/src/sentry/middleware/integrations/parsers/base.py +++ b/src/sentry/middleware/integrations/parsers/base.py @@ -11,6 +11,8 @@ from django.urls import ResolverMatch, resolve from rest_framework import status +from sentry import options +from sentry.hybridcloud.models.webhookpayload import WebhookPayload from sentry.models.integrations import Integration from sentry.models.integrations.organization_integration import OrganizationIntegration from sentry.models.outbox import ControlOutbox, WebhookProviderIdentifier @@ -140,11 +142,23 @@ def get_response_from_outbox_creation( Used to create outboxes for provided regions to handle the webhooks asynchronously. Responds to the webhook provider with a 202 Accepted status. """ - if len(regions) > 0: + if len(regions) < 1: + return HttpResponse(status=status.HTTP_202_ACCEPTED) + + # TODO(hybridcloud) Rename/remove this once webhookpayloads are stable. + shard_identifier = shard_identifier_override or self.webhook_identifier.value + rollout_rate = options.get("hybridcloud.webhookpayload.rollout") + if ((shard_identifier % 100000) / 100000) < rollout_rate: + for region in regions: + WebhookPayload.create_from_request( + region=region.name, + provider=self.provider, + identifier=shard_identifier, + request=self.request, + ) + else: for outbox in ControlOutbox.for_webhook_update( - shard_identifier=shard_identifier_override - if shard_identifier_override is not None - else self.webhook_identifier.value, + shard_identifier=shard_identifier, region_names=[region.name for region in regions], request=self.request, ): @@ -159,7 +173,21 @@ def get_response_from_outbox_creation_for_integration( Used to create outboxes for provided regions to handle the webhooks asynchronously. Responds to the webhook provider with a 202 Accepted status. 
""" - if len(regions) > 0: + if not regions: + return HttpResponse(status=status.HTTP_202_ACCEPTED) + + identifier = integration.id + rollout_rate = options.get("hybridcloud.webhookpayload.rollout") + if ((identifier % 100000) / 100000) < rollout_rate: + for region in regions: + WebhookPayload.create_from_request( + region=region.name, + provider=self.provider, + identifier=identifier, + request=self.request, + integration_id=identifier, + ) + else: for outbox in ControlOutbox.for_webhook_update( shard_identifier=integration.id, request=self.request, diff --git a/src/sentry/middleware/integrations/parsers/slack.py b/src/sentry/middleware/integrations/parsers/slack.py index 106e9575ee867f..4272d164e154e1 100644 --- a/src/sentry/middleware/integrations/parsers/slack.py +++ b/src/sentry/middleware/integrations/parsers/slack.py @@ -84,6 +84,8 @@ def get_async_region_response(self, regions: Sequence[Region]) -> HttpResponseBa if self.response_url is None: return self.get_response_from_control_silo() + # TODO(hybridcloud) this isn't using outboxes per-se and we will likely need to keep + # `get_webhook_payload_from_request` around after webhooks are entirely on WebhookPayload webhook_payload = ControlOutbox.get_webhook_payload_from_request(request=self.request) convert_to_async_slack_response.apply_async( kwargs={ diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index b0e4eeb3bab4d6..b7de72ed8f7d4c 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2093,3 +2093,9 @@ default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) +# Rate to move from outbox based webhook delivery to webhookpayload. +register( + "hybridcloud.webhookpayload.rollout", + default=0.0, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py index efbd4d76e9394d..8d790f04ca4a77 100644 --- a/src/sentry/testutils/factories.py +++ b/src/sentry/testutils/factories.py @@ -27,7 +27,7 @@ from sentry.auth.access import RpcBackedAccess from sentry.constants import SentryAppInstallationStatus, SentryAppStatus from sentry.event_manager import EventManager -from sentry.hybridcloud.models import WebhookPayload +from sentry.hybridcloud.models.webhookpayload import WebhookPayload from sentry.incidents.logic import ( create_alert_rule, create_alert_rule_trigger, diff --git a/src/sentry/testutils/outbox.py b/src/sentry/testutils/outbox.py index d4e1f8b2afb252..b16bcc00046fc4 100644 --- a/src/sentry/testutils/outbox.py +++ b/src/sentry/testutils/outbox.py @@ -8,6 +8,7 @@ from django.conf import settings from django.core.handlers.wsgi import WSGIRequest +from sentry.hybridcloud.models.webhookpayload import THE_PAST, WebhookPayload from sentry.models.outbox import ControlOutbox, OutboxBase, OutboxCategory, OutboxScope from sentry.silo import SiloMode from sentry.tasks.deliver_from_outbox import enqueue_outbox_jobs, enqueue_outbox_jobs_control @@ -95,3 +96,46 @@ def assert_webhook_outboxes_with_shard_id( ) if len(region_names_set) != 0: raise Exception(f"ControlOutbox not found for some region_names: {str(region_names_set)}") + + +def assert_no_webhook_payloads(): + messages = WebhookPayload.objects.filter().count() + assert messages == 0, "No webhookpayload messages should be created" + + +def assert_webhook_payloads_for_mailbox( + request: WSGIRequest, + mailbox_name: str, + region_names: list[str], +): + """ + A test method for asserting that a webhook payload is properly queued for + the given request + + :param request: + 
:param mailbox_name: The mailbox name that messages should be found in. + :param region_names: The regions each messages should be queued for + """ + expected_payload = WebhookPayload.get_attributes_from_request(request=request) + region_names_set = set(region_names) + messages = WebhookPayload.objects.filter(mailbox_name=mailbox_name) + message_count = messages.count() + if message_count != len(region_names_set): + raise Exception( + f"Mismatch: Found {message_count} WebhookPayload but {len(region_names_set)} region_names" + ) + for message in messages: + assert message.request_method == expected_payload["request_method"] + assert message.request_path == expected_payload["request_path"] + assert message.request_headers == expected_payload["request_headers"] + assert message.request_body == expected_payload["request_body"] + assert message.schedule_for == THE_PAST + assert message.attempts == 0 + try: + region_names_set.remove(message.region_name) + except KeyError: + raise Exception( + f"Found ControlOutbox for '{message.region_name}', which was not in region_names: {str(region_names_set)}" + ) + if len(region_names_set) != 0: + raise Exception(f"WebhookPayload not found for some region_names: {str(region_names_set)}") diff --git a/tests/sentry/hybridcloud/tasks/__init__.py b/tests/sentry/hybridcloud/tasks/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/hybridcloud/tasks/test_deliver_webhooks.py b/tests/sentry/hybridcloud/tasks/test_deliver_webhooks.py new file mode 100644 index 00000000000000..7ac6c6446bd17e --- /dev/null +++ b/tests/sentry/hybridcloud/tasks/test_deliver_webhooks.py @@ -0,0 +1,337 @@ +from datetime import timedelta +from unittest.mock import patch + +import pytest +import responses +from django.utils import timezone +from requests.exceptions import ConnectionError, ReadTimeout + +from sentry.hybridcloud.models.webhookpayload import MAX_ATTEMPTS, WebhookPayload +from sentry.hybridcloud.tasks.deliver_webhooks import drain_mailbox, schedule_webhook_delivery +from sentry.testutils.cases import TestCase +from sentry.testutils.region import override_regions +from sentry.testutils.silo import control_silo_test +from sentry.types.region import Region, RegionCategory, RegionResolutionError + +region_config = [Region("us", 1, "http://us.testserver", RegionCategory.MULTI_TENANT)] + + +@control_silo_test +class ScheduleWebhooksTest(TestCase): + @patch("sentry.hybridcloud.tasks.deliver_webhooks.drain_mailbox") + def test_schedule_no_records(self, mock_deliver): + schedule_webhook_delivery() + assert mock_deliver.delay.call_count == 0 + + @patch("sentry.hybridcloud.tasks.deliver_webhooks.drain_mailbox") + def test_schedule_multiple_mailboxes(self, mock_deliver): + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + webhook_two = self.create_webhook_payload( + mailbox_name="github:256", + region_name="us", + ) + assert webhook_one.schedule_for < timezone.now() + assert webhook_two.schedule_for < timezone.now() + + schedule_webhook_delivery() + assert mock_deliver.delay.call_count == 2 + + @patch("sentry.hybridcloud.tasks.deliver_webhooks.drain_mailbox") + def test_schedule_one_mailbox_multiple_messages(self, mock_deliver): + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + schedule_webhook_delivery() + assert mock_deliver.delay.call_count == 1 + 
mock_deliver.delay.assert_called_with(webhook_one.id) + + @patch("sentry.hybridcloud.tasks.deliver_webhooks.drain_mailbox") + def test_schedule_mailbox_scheduled_later(self, mock_deliver): + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + self.create_webhook_payload( + mailbox_name="github:256", + region_name="us", + schedule_for=timezone.now() + timedelta(minutes=1), + ) + schedule_webhook_delivery() + assert mock_deliver.delay.call_count == 1 + mock_deliver.delay.assert_called_with(webhook_one.id) + + @patch("sentry.hybridcloud.tasks.deliver_webhooks.drain_mailbox") + def test_schedule_updates_mailbox_attributes(self, mock_deliver): + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + webhook_two = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + schedule_webhook_delivery() + + webhook_one.refresh_from_db() + webhook_two.refresh_from_db() + # Scheduler should move all messages forward + assert webhook_one.attempts == 0 + assert webhook_one.schedule_for > timezone.now() + assert webhook_two.attempts == 0 + assert webhook_two.schedule_for > timezone.now() + + assert mock_deliver.delay.call_count == 1 + mock_deliver.delay.assert_called_with(webhook_one.id) + + @responses.activate + @override_regions(region_config) + def test_schedule_mailbox_with_more_than_batch_size_records(self): + responses.add( + responses.POST, "http://us.testserver/extensions/github/webhook/", body=ReadTimeout() + ) + num_records = 55 + for _ in range(0, num_records): + self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + # Run the task that is spawned to provide some integration test coverage. + with self.tasks(): + schedule_webhook_delivery() + + # First attempt will fail rescheduling messages. + assert len(responses.calls) == 1 + assert WebhookPayload.objects.count() == num_records + head = WebhookPayload.objects.all().order_by("id").first() + assert head + assert head.schedule_for > timezone.now() + + # Do another scheduled run. This should not make any forwarding requests + with self.tasks(): + schedule_webhook_delivery() + assert len(responses.calls) == 1 + # Head doesn't move. 
+ new_head = WebhookPayload.objects.all().order_by("id").first() + assert new_head + assert head.schedule_for == new_head.schedule_for + + # No messages delivered + assert WebhookPayload.objects.count() == num_records + + +@control_silo_test +class DrainMailboxTest(TestCase): + def create_payloads(self, num: int, mailbox: str) -> list[WebhookPayload]: + created = [] + for _ in range(0, num): + hook = self.create_webhook_payload( + mailbox_name=mailbox, + region_name="us", + ) + created.append(hook) + return created + + @responses.activate + def test_drain_missing_payload(self): + drain_mailbox(99) + assert len(responses.calls) == 0 + + @responses.activate + def test_drain_unknown_region(self): + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="lolnope", + ) + with pytest.raises(RegionResolutionError): + drain_mailbox(webhook_one.id) + assert len(responses.calls) == 0 + + @responses.activate + @override_regions(region_config) + def test_drain_success_partial(self): + responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + status=200, + body="", + ) + responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + status=500, + body="", + ) + records = self.create_payloads(5, "github:123") + drain_mailbox(records[0].id) + + # Attempts should stop as soon as the first delivery + # fails. This retains mailbox ordering while yielding this + # worker for new work + assert len(responses.calls) == 2 + + # Mailbox should have 4 records left + assert WebhookPayload.objects.count() == 4 + + # Remaining record should be scheduled to run later. + first = WebhookPayload.objects.order_by("id").first() + assert first + assert first.attempts == 1 + assert first.schedule_for > timezone.now() + + @responses.activate + @override_regions(region_config) + def test_drain_success(self): + responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + status=200, + body="", + ) + records = self.create_payloads(3, "github:123") + drain_mailbox(records[0].id) + + # Mailbox should be empty + assert not WebhookPayload.objects.filter().exists() + + @responses.activate + @override_regions(region_config) + def test_drain_limit_depth(self): + responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + status=200, + body="", + ) + records = self.create_payloads(51, "github:123") + drain_mailbox(records[0].id) + + # Drain removes up to 50 messages. 
+ assert WebhookPayload.objects.count() == 1 + + @responses.activate + @override_regions(region_config) + def test_drain_too_many_attempts(self): + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + attempts=MAX_ATTEMPTS, + ) + drain_mailbox(webhook_one.id) + assert not WebhookPayload.objects.filter(id=webhook_one.id).exists() + assert len(responses.calls) == 0 + + @responses.activate + @override_regions(region_config) + def test_drain_more_than_max_attempts(self): + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + attempts=MAX_ATTEMPTS + 1, + ) + drain_mailbox(webhook_one.id) + assert not WebhookPayload.objects.filter(id=webhook_one.id).exists() + assert len(responses.calls) == 0 + + @responses.activate + @override_regions(region_config) + def test_drain_fatality(self): + responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + # While this specific scenario won't happen, the client libraries could fail + body=ValueError(), + ) + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + with pytest.raises(ValueError): + drain_mailbox(webhook_one.id) + hook = WebhookPayload.objects.filter(id=webhook_one.id).first() + assert hook + assert hook.attempts == 1 + assert hook.schedule_for >= timezone.now() + assert len(responses.calls) == 1 + + @responses.activate + @override_regions(region_config) + def test_drain_host_error(self): + responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + body=ConnectionError(), + ) + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + drain_mailbox(webhook_one.id) + hook = WebhookPayload.objects.filter(id=webhook_one.id).first() + assert hook + assert len(responses.calls) == 1 + + @responses.activate + @override_regions(region_config) + def test_drain_conflict(self): + # Getting a conflict back from the region silo means + # we should drop the hook. 
+ responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + status=409, + body="", + ) + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + drain_mailbox(webhook_one.id) + assert not WebhookPayload.objects.filter(id=webhook_one.id).exists() + assert len(responses.calls) == 1 + + @responses.activate + @override_regions(region_config) + def test_drain_api_error(self): + responses.add( + responses.POST, + "http://us.testserver/extensions/github/webhook/", + status=401, + body="", + ) + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + drain_mailbox(webhook_one.id) + hook = WebhookPayload.objects.filter(id=webhook_one.id).first() + assert hook + assert hook.schedule_for > timezone.now() + assert hook.attempts == 1 + + assert len(responses.calls) == 1 + + @responses.activate + @override_regions(region_config) + def test_drain_timeout(self): + responses.add( + responses.POST, "http://us.testserver/extensions/github/webhook/", body=ReadTimeout() + ) + webhook_one = self.create_webhook_payload( + mailbox_name="github:123", + region_name="us", + ) + drain_mailbox(webhook_one.id) + hook = WebhookPayload.objects.filter(id=webhook_one.id).first() + assert hook + assert hook.schedule_for > timezone.now() + assert hook.attempts == 1 + + assert len(responses.calls) == 1 diff --git a/tests/sentry/middleware/integrations/parsers/test_bitbucket.py b/tests/sentry/middleware/integrations/parsers/test_bitbucket.py index 8e0a77a2e13b51..c5e4c7e6279c7b 100644 --- a/tests/sentry/middleware/integrations/parsers/test_bitbucket.py +++ b/tests/sentry/middleware/integrations/parsers/test_bitbucket.py @@ -7,9 +7,12 @@ from sentry.models.organizationmapping import OrganizationMapping from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, + assert_no_webhook_payloads, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test @@ -101,3 +104,24 @@ def test_routing_webhook_with_regions(self): expected_shard_id=self.organization.id, region_names=[self.region.name], ) + + @override_settings(SILO_MODE=SiloMode.CONTROL) + @override_regions(region_config) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_webhook_outbox_creation_webhookpayload(self): + self.get_integration() + path = reverse( + "sentry-extensions-bitbucket-webhook", kwargs={"organization_id": self.organization.id} + ) + request = self.factory.post(path) + assert_no_webhook_payloads() + parser = BitbucketRequestParser(request=request, response_handler=self.get_response) + + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert response.content == b"" + + assert_webhook_payloads_for_mailbox( + mailbox_name=f"bitbucket:{self.organization.id}", region_names=["us"], request=request + ) diff --git a/tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py b/tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py index defc3ca3a95469..1be0094a8f1e5e 100644 --- a/tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py +++ b/tests/sentry/middleware/integrations/parsers/test_bitbucket_server.py @@ -6,10 +6,17 @@ from django.urls import 
reverse from sentry.middleware.integrations.parsers.bitbucket_server import BitbucketServerRequestParser +from sentry.models.integrations.integration import Integration from sentry.models.organizationmapping import OrganizationMapping from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase -from sentry.testutils.outbox import assert_webhook_outboxes_with_shard_id, outbox_runner +from sentry.testutils.helpers.options import override_options +from sentry.testutils.outbox import ( + assert_no_webhook_payloads, + assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, + outbox_runner, +) from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test from sentry.types.region import Region, RegionCategory @@ -22,12 +29,8 @@ class BitbucketServerRequestParserTest(TestCase): region = Region("us", 1, "https://us.testserver", RegionCategory.MULTI_TENANT) region_config = (region,) - def setUp(self): - super().setUp() - self.path = reverse( - "sentry-extensions-bitbucket-webhook", kwargs={"organization_id": self.organization.id} - ) - self.integration = self.create_integration( + def get_integration(self) -> Integration: + return self.create_integration( organization=self.organization, external_id="bitbucketserver:1", provider="bitbucket_server", @@ -64,3 +67,27 @@ def test_routing_webhook(self): expected_shard_id=self.organization.id, region_names=[self.region.name], ) + + @override_settings(SILO_MODE=SiloMode.CONTROL) + @override_regions(region_config) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_webhook_outbox_creation_webhookpayload(self): + integration = self.get_integration() + region_route = reverse( + "sentry-extensions-bitbucketserver-webhook", + kwargs={"organization_id": self.organization.id, "integration_id": integration.id}, + ) + request = self.factory.post(region_route) + assert_no_webhook_payloads() + parser = BitbucketServerRequestParser(request=request, response_handler=self.get_response) + + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert response.content == b"" + + assert_webhook_payloads_for_mailbox( + mailbox_name=f"bitbucket_server:{self.organization.id}", + region_names=["us"], + request=request, + ) diff --git a/tests/sentry/middleware/integrations/parsers/test_github.py b/tests/sentry/middleware/integrations/parsers/test_github.py index f95734f4bfe1f9..fcd65d3cd70687 100644 --- a/tests/sentry/middleware/integrations/parsers/test_github.py +++ b/tests/sentry/middleware/integrations/parsers/test_github.py @@ -10,9 +10,12 @@ from sentry.models.outbox import ControlOutbox, OutboxCategory, outbox_context from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, + assert_no_webhook_payloads, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test @@ -138,6 +141,26 @@ def test_webhook_outbox_creation(self): region_names=[region.name], ) + @override_settings(SILO_MODE=SiloMode.CONTROL) + @override_regions(region_config) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_webhook_outbox_creation_webhookpayload(self): + integration = self.get_integration() + request = self.factory.post( + 
self.path, data={"installation": {"id": "github:1"}}, content_type="application/json" + ) + assert_no_webhook_payloads() + parser = GithubRequestParser(request=request, response_handler=self.get_response) + + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert response.content == b"" + + assert_webhook_payloads_for_mailbox( + mailbox_name=f"github:{integration.id}", region_names=["us"], request=request + ) + @override_settings(SILO_MODE=SiloMode.CONTROL) @override_regions(region_config) @responses.activate diff --git a/tests/sentry/middleware/integrations/parsers/test_github_enterprise.py b/tests/sentry/middleware/integrations/parsers/test_github_enterprise.py index ca7ae5d45a3a84..a962b9c2dfb509 100644 --- a/tests/sentry/middleware/integrations/parsers/test_github_enterprise.py +++ b/tests/sentry/middleware/integrations/parsers/test_github_enterprise.py @@ -10,9 +10,12 @@ from sentry.models.outbox import ControlOutbox, OutboxCategory, outbox_context from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, + assert_no_webhook_payloads, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test @@ -160,3 +163,26 @@ def test_webhook_outbox_creation(self): expected_shard_id=integration.id, region_names=[region.name], ) + + @override_settings(SILO_MODE=SiloMode.CONTROL) + @override_regions(region_config) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_webhook_outbox_creation_webhookpayload(self): + integration = self.get_integration() + request = self.factory.post( + self.path, + data={"installation": {"id": self.external_identifier}, "action": "opened"}, + content_type="application/json", + HTTP_X_GITHUB_ENTERPRISE_HOST=self.external_host, + ) + assert_no_webhook_payloads() + parser = GithubEnterpriseRequestParser(request=request, response_handler=self.get_response) + + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert response.content == b"" + + assert_webhook_payloads_for_mailbox( + mailbox_name=f"github_enterprise:{integration.id}", region_names=["us"], request=request + ) diff --git a/tests/sentry/middleware/integrations/parsers/test_gitlab.py b/tests/sentry/middleware/integrations/parsers/test_gitlab.py index 5945b0fa255b07..b9660f188407c1 100644 --- a/tests/sentry/middleware/integrations/parsers/test_gitlab.py +++ b/tests/sentry/middleware/integrations/parsers/test_gitlab.py @@ -12,9 +12,12 @@ from sentry.models.outbox import ControlOutbox, OutboxCategory, outbox_context from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, + assert_no_webhook_payloads, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test @@ -198,3 +201,27 @@ def test_webhook_outbox_creation(self): expected_shard_id=integration.id, region_names=[region.name], ) + + @override_settings(SILO_MODE=SiloMode.CONTROL) + @override_regions(region_config) + 
@override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_webhook_outbox_creation_webhookpayload(self): + integration = self.get_integration() + request = self.factory.post( + self.path, + data=PUSH_EVENT, + content_type="application/json", + HTTP_X_GITLAB_TOKEN=WEBHOOK_TOKEN, + HTTP_X_GITLAB_EVENT="Push Hook", + ) + assert_no_webhook_payloads() + parser = GitlabRequestParser(request=request, response_handler=self.get_response) + + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert response.content == b"" + + assert_webhook_payloads_for_mailbox( + mailbox_name=f"gitlab:{integration.id}", region_names=["us"], request=request + ) diff --git a/tests/sentry/middleware/integrations/parsers/test_jira.py b/tests/sentry/middleware/integrations/parsers/test_jira.py index 12959f083edc18..01be55caffe0db 100644 --- a/tests/sentry/middleware/integrations/parsers/test_jira.py +++ b/tests/sentry/middleware/integrations/parsers/test_jira.py @@ -11,9 +11,12 @@ from sentry.models.integrations.integration import Integration from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, + assert_no_webhook_payloads, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test @@ -120,6 +123,30 @@ def test_get_response_routing_to_region_async(self): region_names=[region.name], ) + @responses.activate + @override_settings(SILO_MODE=SiloMode.CONTROL) + @override_regions(region_config) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_get_response_routing_to_region_async_webhookpayload(self): + request = self.factory.post(path=f"{self.path_base}/issue-updated/") + parser = JiraRequestParser(request, self.get_response) + + integration = self.get_integration() + assert_no_webhook_payloads() + with patch.object(parser, "get_integration_from_request") as method: + method.return_value = integration + response = parser.get_response() + + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert response.content == b"" + + assert len(responses.calls) == 0 + + assert_webhook_payloads_for_mailbox( + mailbox_name=f"jira:{integration.id}", region_names=[region.name], request=request + ) + @override_regions(region_config) @override_settings(SILO_MODE=SiloMode.CONTROL) @responses.activate diff --git a/tests/sentry/middleware/integrations/parsers/test_jira_server.py b/tests/sentry/middleware/integrations/parsers/test_jira_server.py index 95f1ac95527ed6..6a41b6c7850efb 100644 --- a/tests/sentry/middleware/integrations/parsers/test_jira_server.py +++ b/tests/sentry/middleware/integrations/parsers/test_jira_server.py @@ -9,9 +9,11 @@ from sentry.models.organizationmapping import OrganizationMapping from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.region import override_regions from sentry.testutils.silo import control_silo_test @@ -82,6 +84,33 @@ def test_routing_endpoint_with_integration(self): region_names=[region.name], ) + 
@override_settings(SILO_MODE=SiloMode.CONTROL) + @override_regions(region_config) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + @responses.activate + def test_routing_endpoint_with_integration_webhookpayload(self): + route = reverse("sentry-extensions-jiraserver-issue-updated", kwargs={"token": "TOKEN"}) + request = self.factory.post(route) + parser = JiraServerRequestParser(request=request, response_handler=self.get_response) + + OrganizationMapping.objects.get(organization_id=self.organization.id).update( + region_name="us" + ) + with mock.patch( + "sentry.middleware.integrations.parsers.jira_server.get_integration_from_token" + ) as mock_get_token: + mock_get_token.return_value = self.integration + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert response.content == b"" + assert len(responses.calls) == 0 + assert_webhook_payloads_for_mailbox( + request=request, + mailbox_name=f"jira_server:{self.integration.id}", + region_names=[region.name], + ) + @responses.activate @override_settings(SILO_MODE=SiloMode.CONTROL) def test_routing_search_endpoint(self): diff --git a/tests/sentry/middleware/integrations/parsers/test_msteams.py b/tests/sentry/middleware/integrations/parsers/test_msteams.py index aba4249fd10213..3a2f30a30562dd 100644 --- a/tests/sentry/middleware/integrations/parsers/test_msteams.py +++ b/tests/sentry/middleware/integrations/parsers/test_msteams.py @@ -9,9 +9,12 @@ from sentry.middleware.integrations.classifications import IntegrationClassification from sentry.middleware.integrations.parsers.msteams import MsTeamsRequestParser from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, + assert_no_webhook_payloads, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.silo import control_silo_test, create_test_regions from tests.sentry.integrations.msteams.test_helpers import ( @@ -99,6 +102,43 @@ def test_routing_events(self): region_names=["us"], ) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + @responses.activate + def test_routing_webhook_payloads(self): + # No regions identified + request = self.factory.post( + self.path, + data=GENERIC_EVENT, + HTTP_AUTHORIZATION=f"Bearer {TOKEN}", + content_type="application/json", + ) + parser = MsTeamsRequestParser(request=request, response_handler=self.get_response) + + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 200 + assert response.content == b"passthrough" + assert len(responses.calls) == 0 + assert_no_webhook_payloads() + + # Regions found + request = self.factory.post( + self.path, + data=self.generate_card_response(self.integration.id), + HTTP_AUTHORIZATION=f"Bearer {TOKEN}", + content_type="application/json", + ) + parser = MsTeamsRequestParser(request=request, response_handler=self.get_response) + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert len(responses.calls) == 0 + assert_webhook_payloads_for_mailbox( + request=request, + mailbox_name=f"msteams:{self.integration.id}", + region_names=["us"], + ) + @responses.activate def test_routing_control_paths(self): requests = [ diff --git a/tests/sentry/middleware/integrations/parsers/test_plugin.py b/tests/sentry/middleware/integrations/parsers/test_plugin.py index 
657e0d0338d7c7..fdb562dbb40c3b 100644 --- a/tests/sentry/middleware/integrations/parsers/test_plugin.py +++ b/tests/sentry/middleware/integrations/parsers/test_plugin.py @@ -3,13 +3,16 @@ from django.test import RequestFactory from django.urls import reverse +from sentry.hybridcloud.models.webhookpayload import WebhookPayload from sentry.middleware.integrations.parsers.plugin import PluginRequestParser from sentry.models.organizationmapping import OrganizationMapping from sentry.models.outbox import ControlOutbox from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.silo import control_silo_test, create_test_regions @@ -62,6 +65,27 @@ def test_routing_webhooks_with_region(self): # Purge outboxes after checking each route ControlOutbox.objects.all().delete() + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_routing_webhooks_with_region_webhookpayload(self): + routes = [ + reverse("sentry-plugins-github-webhook", args=[self.organization.id]), + reverse("sentry-plugins-bitbucket-webhook", args=[self.organization.id]), + ] + OrganizationMapping.objects.get(organization_id=self.organization.id).update( + region_name="us" + ) + for route in routes: + request = self.factory.post(route) + parser = PluginRequestParser(request=request, response_handler=self.get_response) + parser.get_response() + assert_webhook_payloads_for_mailbox( + request=request, + mailbox_name=f"plugins:{self.organization.id}", + region_names=["us"], + ) + # Purge outboxes after checking each route + WebhookPayload.objects.all().delete() + def test_routing_for_missing_organization(self): # Delete the mapping to simulate an org being deleted. 
OrganizationMapping.objects.filter(organization_id=self.organization.id).delete() diff --git a/tests/sentry/middleware/integrations/parsers/test_vercel.py b/tests/sentry/middleware/integrations/parsers/test_vercel.py index 59496368318f25..fc8cf07376729b 100644 --- a/tests/sentry/middleware/integrations/parsers/test_vercel.py +++ b/tests/sentry/middleware/integrations/parsers/test_vercel.py @@ -5,7 +5,8 @@ from sentry.middleware.integrations.parsers.vercel import VercelRequestParser from sentry.testutils.cases import TestCase -from sentry.testutils.outbox import assert_no_webhook_outboxes +from sentry.testutils.helpers.options import override_options +from sentry.testutils.outbox import assert_no_webhook_outboxes, assert_no_webhook_payloads from sentry.testutils.silo import control_silo_test @@ -32,3 +33,16 @@ def test_routing_all_to_control(self): assert response.content == b"passthrough" assert len(responses.calls) == 0 assert_no_webhook_outboxes() + + @responses.activate + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + def test_routing_all_to_control_webhookpayload(self): + for request in self.vercel_dummy_requests: + parser = VercelRequestParser(request=request, response_handler=self.get_response) + assert parser.get_integration_from_request() is None + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 200 + assert response.content == b"passthrough" + assert len(responses.calls) == 0 + assert_no_webhook_payloads() diff --git a/tests/sentry/middleware/integrations/parsers/test_vsts.py b/tests/sentry/middleware/integrations/parsers/test_vsts.py index 5aae5242a26d39..5c64f13ce7f75b 100644 --- a/tests/sentry/middleware/integrations/parsers/test_vsts.py +++ b/tests/sentry/middleware/integrations/parsers/test_vsts.py @@ -9,9 +9,12 @@ from sentry.middleware.integrations.classifications import IntegrationClassification from sentry.middleware.integrations.parsers.vsts import VstsRequestParser from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import ( assert_no_webhook_outboxes, + assert_no_webhook_payloads, assert_webhook_outboxes_with_shard_id, + assert_webhook_payloads_for_mailbox, ) from sentry.testutils.silo import control_silo_test, create_test_regions @@ -77,6 +80,43 @@ def test_routing_work_item_webhook(self): region_names=["us"], ) + @override_options({"hybridcloud.webhookpayload.rollout": 1.0}) + @responses.activate + def test_routing_work_item_webhookpayload(self): + # No integration found for request... 
+ data = deepcopy(WORK_ITEM_UPDATED) + data["resourceContainers"]["collection"]["id"] = "non-existant" + request = self.factory.post( + self.path, + data=data, + content_type="application/json", + HTTP_SHARED_SECRET=self.shared_secret, + ) + parser = VstsRequestParser(request=request, response_handler=self.get_response) + + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 400 + assert len(responses.calls) == 0 + assert_no_webhook_payloads() + + # Regions found + request = self.factory.post( + self.path, + data=WORK_ITEM_UPDATED, + content_type="application/json", + HTTP_SHARED_SECRET=self.shared_secret, + ) + parser = VstsRequestParser(request=request, response_handler=self.get_response) + response = parser.get_response() + assert isinstance(response, HttpResponse) + assert response.status_code == 202 + assert_webhook_payloads_for_mailbox( + request=request, + mailbox_name=f"vsts:{self.integration.id}", + region_names=["us"], + ) + @responses.activate def test_routing_control_paths(self): config_request = self.factory.get( From 64d95ae762c6b795a987712ce4f31cf639dc58d4 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 13 Feb 2024 08:20:19 -0800 Subject: [PATCH 312/357] ref(grouping): Remove hierarchical code from `find_existing_grouphash_new` (#64970) This PR is a follow-up to https://github.com/getsentry/sentry/pull/64858, which removed all hierarchical grouping code from `_save_aggregate_new`. This does the same for `find_existing_grouphash_new`. In both cases, we're able to do this because these functions will only be called for events not using the mobile config (and therefore not ever producing hierarchical hashes). This is part of an effort to clean up and simplify these functions as much as possible before changing their logic, so that change can be as safe as possible. --- src/sentry/event_manager.py | 8 +- src/sentry/grouping/ingest.py | 73 ++----------------- .../grouping/test_assign_to_group.py | 6 +- 3 files changed, 14 insertions(+), 73 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index edfb6362d462b6..f5bf6bbf5916a6 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -1607,9 +1607,7 @@ def _save_aggregate_new( GroupHash.objects.get_or_create(project=project, hash=hash)[0] for hash in hashes.hashes ] - existing_grouphash, _ = find_existing_grouphash_new( - project, grouphashes, hashes.hierarchical_hashes - ) + existing_grouphash = find_existing_grouphash_new(grouphashes) # In principle the group gets the same metadata as the event, so common # attributes can be defined in eventtypes. 
@@ -1649,9 +1647,7 @@ def _save_aggregate_new( ).select_for_update() ) - existing_grouphash, _ = find_existing_grouphash_new( - project, grouphashes, hashes.hierarchical_hashes - ) + existing_grouphash = find_existing_grouphash_new(grouphashes) if existing_grouphash is None: group = _create_group(project, event, **group_creation_kwargs) diff --git a/src/sentry/grouping/ingest.py b/src/sentry/grouping/ingest.py index 268556df7e5da6..3486ddc21c8256 100644 --- a/src/sentry/grouping/ingest.py +++ b/src/sentry/grouping/ingest.py @@ -304,75 +304,16 @@ def find_existing_grouphash( def find_existing_grouphash_new( - project: Project, - flat_grouphashes: Sequence[GroupHash], - hierarchical_hashes: Sequence[str] | None, -) -> tuple[GroupHash | None, str | None]: - all_grouphashes = [] - root_hierarchical_hash = None - - found_split = False - - if hierarchical_hashes: - hierarchical_grouphashes = { - h.hash: h - for h in GroupHash.objects.filter(project=project, hash__in=hierarchical_hashes) - } - - # Look for splits: - # 1. If we find a hash with SPLIT state at `n`, we want to use - # `n + 1` as the root hash. - # 2. If we find a hash associated to a group that is more specific - # than the primary hash, we want to use that hash as root hash. - for hash in reversed(hierarchical_hashes): - group_hash = hierarchical_grouphashes.get(hash) - - if group_hash is not None and group_hash.state == GroupHash.State.SPLIT: - found_split = True - break - - root_hierarchical_hash = hash - - if group_hash is not None: - all_grouphashes.append(group_hash) - - if group_hash.group_id is not None: - # Even if we did not find a hash with SPLIT state, we want to use - # the most specific hierarchical hash as root hash if it was already - # associated to a group. - # See `move_all_events` test case - break - - if root_hierarchical_hash is None: - # All hashes were split, so we group by most specific hash. This is - # a legitimate usecase when there are events whose stacktraces are - # suffixes of other event's stacktraces. - root_hierarchical_hash = hierarchical_hashes[-1] - group_hash = hierarchical_grouphashes.get(root_hierarchical_hash) - - if group_hash is not None: - all_grouphashes.append(group_hash) - - if not found_split: - # In case of a split we want to avoid accidentally finding the split-up - # group again via flat hashes, which are very likely associated with - # whichever group is attached to the split hash. This distinction will - # become irrelevant once we start moving existing events into child - # groups and delete the parent group. - all_grouphashes.extend(flat_grouphashes) - - for group_hash in all_grouphashes: + grouphashes: Sequence[GroupHash], +) -> GroupHash | None: + for group_hash in grouphashes: if group_hash.group_id is not None: - return group_hash, root_hierarchical_hash + return group_hash - # When refactoring for hierarchical grouping, we noticed that a + # TODO: When refactoring for hierarchical grouping, we noticed that a # tombstone may get ignored entirely if there is another hash *before* # that happens to have a group_id. This bug may not have been noticed - # for a long time because most events only ever have 1-2 hashes. It - # will definitely get more noticeable with hierarchical grouping and - # it's not clear what good behavior would look like. Do people want to - # be able to tombstone `hierarchical_hashes[4]` while still having a - # group attached to `hierarchical_hashes[0]`? Maybe. + # for a long time because most events only ever have 1-2 hashes. 
if group_hash.group_tombstone_id is not None: raise HashDiscarded( "Matches group tombstone %s" % group_hash.group_tombstone_id, @@ -380,7 +321,7 @@ def find_existing_grouphash_new( tombstone_id=group_hash.group_tombstone_id, ) - return None, root_hierarchical_hash + return None def get_hash_values( diff --git a/tests/sentry/event_manager/grouping/test_assign_to_group.py b/tests/sentry/event_manager/grouping/test_assign_to_group.py index ab0e6ef4a2eeac..69f0d4527d4e0a 100644 --- a/tests/sentry/event_manager/grouping/test_assign_to_group.py +++ b/tests/sentry/event_manager/grouping/test_assign_to_group.py @@ -171,11 +171,15 @@ def get_results_from_saving_event( ) new_event = save_new_event(event_data, project) - hash_search_result = return_values[find_existing_grouphash_fn][0][0] post_save_grouphashes = { gh.hash: gh.group_id for gh in GroupHash.objects.filter(project_id=project.id) } + hash_search_result = return_values[find_existing_grouphash_fn][0] + # The current logic wraps the search result in an extra layer which we need to unwrap + if not new_logic_enabled: + hash_search_result = hash_search_result[0] + # We should never call any of these more than once, regardless of the test assert calculate_primary_hash_spy.call_count <= 1 assert calculate_secondary_hash_spy.call_count <= 1 From ee681aec5d50da852c1995571718191a0cd03fdb Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Tue, 13 Feb 2024 08:39:44 -0800 Subject: [PATCH 313/357] chore(issues): Remove usage of streamline-targeting-context flag from the backend (#64876) Follow up PR to #64819 to remove the feature flag, associated tests and references to it. --- .../api/endpoints/codeowners/__init__.py | 18 +- src/sentry/api/endpoints/codeowners/index.py | 66 +++---- src/sentry/api/endpoints/project_ownership.py | 52 ++--- .../serializers/models/projectownership.py | 16 +- .../apidocs/examples/project_examples.py | 1 - src/sentry/conf/server.py | 3 - src/sentry/features/__init__.py | 1 - .../slack/message_builder/issues.py | 25 ++- .../notifications/utils/participants.py | 28 +-- .../api/endpoints/test_project_codeowners.py | 185 ++++++++---------- .../api/endpoints/test_project_ownership.py | 128 ++++++------ .../slack/test_message_builder.py | 3 +- .../notifications/utils/test_participants.py | 6 - 13 files changed, 224 insertions(+), 308 deletions(-) diff --git a/src/sentry/api/endpoints/codeowners/__init__.py b/src/sentry/api/endpoints/codeowners/__init__.py index fa7dac3df8ad7e..adbd5ffef1f52d 100644 --- a/src/sentry/api/endpoints/codeowners/__init__.py +++ b/src/sentry/api/endpoints/codeowners/__init__.py @@ -69,20 +69,12 @@ def validate(self, attrs: Mapping[str, Any]) -> Mapping[str, Any]: ) # Convert IssueOwner syntax into schema syntax - has_targeting_context = features.has( - "organizations:streamline-targeting-context", self.context["project"].organization - ) try: - if has_targeting_context: - validated_data = create_schema_from_issue_owners( - issue_owners=issue_owner_rules, - project_id=self.context["project"].id, - add_owner_ids=True, - ) - else: - validated_data = create_schema_from_issue_owners( - issue_owners=issue_owner_rules, project_id=self.context["project"].id - ) + validated_data = create_schema_from_issue_owners( + issue_owners=issue_owner_rules, + project_id=self.context["project"].id, + add_owner_ids=True, + ) return { **attrs, "schema": validated_data, diff --git a/src/sentry/api/endpoints/codeowners/index.py b/src/sentry/api/endpoints/codeowners/index.py index 73960c7ef3bdf5..a280d5e332f765 100644 --- 
a/src/sentry/api/endpoints/codeowners/index.py +++ b/src/sentry/api/endpoints/codeowners/index.py @@ -3,7 +3,7 @@ from rest_framework.request import Request from rest_framework.response import Response -from sentry import analytics, features +from sentry import analytics from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint @@ -26,28 +26,33 @@ class ProjectCodeOwnersEndpoint(ProjectEndpoint, ProjectCodeOwnersMixin): "POST": ApiPublishStatus.PRIVATE, } - def add_owner_id_to_schema(self, codeowner: ProjectCodeOwners, project: Project) -> None: - if not hasattr(codeowner, "schema") or ( - codeowner.schema - and codeowner.schema.get("rules") - and "id" not in codeowner.schema["rules"][0]["owners"][0].keys() + def refresh_codeowners_schema(self, codeowner: ProjectCodeOwners, project: Project) -> None: + if ( + not hasattr(codeowner, "schema") + or codeowner.schema is None + or codeowner.schema.get("rules") is None ): - # Convert raw to issue owners syntax so that the schema can be created - raw = codeowner.raw - associations, _ = validate_codeowners_associations(codeowner.raw, project) - codeowner.raw = convert_codeowners_syntax( - codeowner.raw, - associations, - codeowner.repository_project_path_config, - ) - codeowner.schema = create_schema_from_issue_owners( - codeowner.raw, project.id, add_owner_ids=True, remove_deleted_owners=True - ) + return + + # Convert raw to issue owners syntax so that the schema can be created + raw = codeowner.raw + associations, _ = validate_codeowners_associations(codeowner.raw, project) + codeowner.raw = convert_codeowners_syntax( + codeowner.raw, + associations, + codeowner.repository_project_path_config, + ) + codeowner.schema = create_schema_from_issue_owners( + codeowner.raw, + project.id, + add_owner_ids=True, + remove_deleted_owners=True, + ) - # Convert raw back to codeowner type to be saved - codeowner.raw = raw + # Convert raw back to codeowner type to be saved + codeowner.raw = raw - codeowner.save() + codeowner.save() def get(self, request: Request, project: Project) -> Response: """ @@ -65,17 +70,11 @@ def get(self, request: Request, project: Project) -> Response: expand = request.GET.getlist("expand", []) expand.append("errors") - has_targeting_context = features.has( - "organizations:streamline-targeting-context", project.organization - ) - codeowners = list(ProjectCodeOwners.objects.filter(project=project).order_by("-date_added")) - - if has_targeting_context and codeowners: - for codeowner in codeowners: - self.add_owner_id_to_schema(codeowner, project) - expand.append("renameIdentifier") - expand.append("hasTargetingContext") + for codeowner in codeowners: + self.refresh_codeowners_schema(codeowner, project) + expand.append("renameIdentifier") + expand.append("hasTargetingContext") return Response( serialize( @@ -115,12 +114,7 @@ def post(self, request: Request, project: Project) -> Response: codeowners_id=project_codeowners.id, ) - expand = ["ownershipSyntax", "errors"] - has_targeting_context = features.has( - "organizations:streamline-targeting-context", project.organization - ) - if has_targeting_context: - expand.append("hasTargetingContext") + expand = ["ownershipSyntax", "errors", "hasTargetingContext"] return Response( serialize( diff --git a/src/sentry/api/endpoints/project_ownership.py b/src/sentry/api/endpoints/project_ownership.py index 71affd9fbb0d6b..08f62a677ba160 100644 --- a/src/sentry/api/endpoints/project_ownership.py +++ 
b/src/sentry/api/endpoints/project_ownership.py @@ -92,17 +92,9 @@ def validate(self, attrs): {"raw": f"Raw needs to be <= {max_length} characters in length"} ) - if features.has( - "organizations:streamline-targeting-context", - self.context["ownership"].project.organization, - ): - schema = create_schema_from_issue_owners( - attrs["raw"], self.context["ownership"].project_id, add_owner_ids=True - ) - else: - schema = create_schema_from_issue_owners( - attrs["raw"], self.context["ownership"].project_id - ) + schema = create_schema_from_issue_owners( + attrs["raw"], self.context["ownership"].project_id, add_owner_ids=True + ) self._validate_no_codeowners(schema["rules"]) @@ -202,16 +194,18 @@ def get_ownership(self, project): last_updated=None, ) - def add_owner_id_to_schema(self, ownership: ProjectOwnership, project: Project) -> None: - if not hasattr(ownership, "schema") or ( - ownership.schema - and ownership.schema.get("rules") - and "id" not in ownership.schema["rules"][0]["owners"][0].keys() + def refresh_ownership_schema(self, ownership: ProjectOwnership, project: Project) -> None: + if ( + not hasattr(ownership, "schema") + or ownership.schema is None + or ownership.schema.get("rules") is None ): - ownership.schema = create_schema_from_issue_owners( - ownership.raw, project.id, add_owner_ids=True, remove_deleted_owners=True - ) - ownership.save() + return + + ownership.schema = create_schema_from_issue_owners( + ownership.raw, project.id, add_owner_ids=True, remove_deleted_owners=True + ) + ownership.save() def rename_schema_identifier_for_parsing(self, ownership: ProjectOwnership) -> None: """ @@ -240,17 +234,12 @@ def get(self, request: Request, project) -> Response: Returns details on a project's ownership configuration. """ ownership = self.get_ownership(project) - should_return_schema = features.has( - "organizations:streamline-targeting-context", project.organization - ) - if should_return_schema and ownership: - self.add_owner_id_to_schema(ownership, project) + if ownership: + self.refresh_ownership_schema(ownership, project) self.rename_schema_identifier_for_parsing(ownership) - return Response( - serialize(ownership, request.user, should_return_schema=should_return_schema) - ) + return Response(serialize(ownership, request.user)) @extend_schema( operation_id="Update Ownership Configuration for a Project", @@ -280,9 +269,6 @@ def put(self, request: Request, project) -> Response: if list(request.data) != ["raw"] and not has_project_write: raise PermissionDenied - should_return_schema = features.has( - "organizations:streamline-targeting-context", project.organization - ) serializer = ProjectOwnershipRequestSerializer( data=request.data, partial=True, context={"ownership": self.get_ownership(project)} ) @@ -305,7 +291,5 @@ def put(self, request: Request, project) -> Response: data={**change_data, **project.get_audit_log_data()}, ) ownership_rule_created.send_robust(project=project, sender=self.__class__) - return Response( - serialize(ownership, request.user, should_return_schema=should_return_schema) - ) + return Response(serialize(ownership, request.user)) return Response(serializer.errors, status=400) diff --git a/src/sentry/api/serializers/models/projectownership.py b/src/sentry/api/serializers/models/projectownership.py index 78abdcb6add764..ea993e006124d0 100644 --- a/src/sentry/api/serializers/models/projectownership.py +++ b/src/sentry/api/serializers/models/projectownership.py @@ -23,15 +23,15 @@ class ProjectOwnershipResponse(ProjectOwnershipResponseOptional): 
@register(ProjectOwnership) class ProjectOwnershipSerializer(Serializer): - def serialize( - self, obj, attrs, user, should_return_schema=False, **kwargs - ) -> ProjectOwnershipResponse: + def serialize(self, obj, attrs, user, **kwargs) -> ProjectOwnershipResponse: assignment = ( "Auto Assign to Suspect Commits" if obj.auto_assignment and obj.suspect_committer_auto_assignment - else "Auto Assign to Issue Owner" - if obj.auto_assignment and not obj.suspect_committer_auto_assignment - else "Turn off Auto-Assignment" + else ( + "Auto Assign to Issue Owner" + if obj.auto_assignment and not obj.suspect_committer_auto_assignment + else "Turn off Auto-Assignment" + ) ) project_ownership_data: ProjectOwnershipResponse = { @@ -43,8 +43,6 @@ def serialize( "autoAssignment": assignment, "codeownersAutoSync": obj.codeowners_auto_sync, } - - if should_return_schema: - project_ownership_data["schema"] = obj.schema + project_ownership_data["schema"] = obj.schema return project_ownership_data diff --git a/src/sentry/apidocs/examples/project_examples.py b/src/sentry/apidocs/examples/project_examples.py index 29f736a5216c83..6de74a070b5480 100644 --- a/src/sentry/apidocs/examples/project_examples.py +++ b/src/sentry/apidocs/examples/project_examples.py @@ -213,7 +213,6 @@ "performance-metrics-backed-transaction-summary", "performance-db-main-thread-detector", "issue-platform", - "streamline-targeting-context", "performance-consecutive-db-issue", "performance-consecutive-http-post-process-group", "performance-n-plus-one-api-calls-detector", diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index ecc66c2d800271..010d50bc31b393 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1436,7 +1436,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:source-maps-debugger-blue-thunder-edition": "Enable source maps debugger", "organizations:sourcemaps-bundle-flat-file-indexing": "Enable the new flat file indexing system for sourcemaps.", "organizations:sourcemaps-upload-release-as-artifact-bundle": "Upload release bundles as artifact bundles", - "organizations:streamline-targeting-context": "Enable the new suggested assignees feature", "organizations:user-feedback-ui": "Enable User Feedback v2 UI", } @@ -1909,8 +1908,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:starfish-view": False, # Enable starfish dropdown on the webservice view for switching chart visualization "organizations:starfish-wsv-chart-dropdown": False, - # Enable the new suggested assignees feature - "organizations:streamline-targeting-context": False, # Enable the new suspect commits calculation that uses all frames in the stack trace "organizations:suspect-commits-all-frames": False, # Allow organizations to configure all symbol sources. 
diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 842d3b71b2df34..966c50092c0bfb 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -271,7 +271,6 @@ default_manager.add("organizations:starfish-test-endpoint", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:starfish-view", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:starfish-wsv-chart-dropdown", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:streamline-targeting-context", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:suspect-commits-all-frames", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:symbol-sources", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:team-workflow-notifications", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index ba351081e23607..58f6b8076e1f8a 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -306,16 +306,13 @@ def get_suggested_assignees( ): # we don't want every user in the project to be a suggested assignee resolved_owners = ActorTuple.resolve_many(issue_owners) suggested_assignees = RpcActor.many_from_object(resolved_owners) - if features.has("organizations:streamline-targeting-context", project.organization): - try: - suspect_commit_users = RpcActor.many_from_object( - get_suspect_commit_users(project, event) - ) - suggested_assignees.extend(suspect_commit_users) - except (Release.DoesNotExist, Commit.DoesNotExist): - logger.info("Skipping suspect committers because release does not exist.") - except Exception: - logger.exception("Could not get suspect committers. Continuing execution.") + try: + suspect_commit_users = RpcActor.many_from_object(get_suspect_commit_users(project, event)) + suggested_assignees.extend(suspect_commit_users) + except (Release.DoesNotExist, Commit.DoesNotExist): + logger.info("Skipping suspect committers because release does not exist.") + except Exception: + logger.exception("Could not get suspect committers. 
Continuing execution.") if suggested_assignees: suggested_assignees = dedupe_suggested_assignees(suggested_assignees) assignee_texts = [] @@ -475,9 +472,11 @@ def _assign_button(use_block_kit) -> MessageAction: label="Select Assignee...", type="select", selected_options=format_actor_options([assignee]) if assignee else [], - option_groups=get_option_groups(group) - if not use_block_kit - else get_option_groups_block_kit(group), + option_groups=( + get_option_groups(group) + if not use_block_kit + else get_option_groups_block_kit(group) + ), ) return assign_button diff --git a/src/sentry/notifications/utils/participants.py b/src/sentry/notifications/utils/participants.py index d6e1ef8e38432c..d92aa4ed83e7f2 100644 --- a/src/sentry/notifications/utils/participants.py +++ b/src/sentry/notifications/utils/participants.py @@ -338,23 +338,25 @@ def determine_eligible_recipients( suggested_assignees.append(assignee_actor) suspect_commit_users = None - if features.has("organizations:streamline-targeting-context", project.organization): - try: - suspect_commit_users = RpcActor.many_from_object( - get_suspect_commit_users(project, event) - ) - suggested_assignees.extend(suspect_commit_users) - except (Release.DoesNotExist, Commit.DoesNotExist): - logger.info("Skipping suspect committers because release does not exist.") - except Exception: - logger.exception("Could not get suspect committers. Continuing execution.") + + try: + suspect_commit_users = RpcActor.many_from_object( + get_suspect_commit_users(project, event) + ) + suggested_assignees.extend(suspect_commit_users) + except (Release.DoesNotExist, Commit.DoesNotExist): + logger.info("Skipping suspect committers because release does not exist.") + except Exception: + logger.exception("Could not get suspect committers. 
Continuing execution.") metrics.incr( "features.owners.send_to", tags={ - "outcome": outcome - if outcome == "match" or fallthrough_choice is None - else fallthrough_choice.value, + "outcome": ( + outcome + if outcome == "match" or fallthrough_choice is None + else fallthrough_choice.value + ), "hasSuspectCommitters": str(bool(suspect_commit_users)), }, ) diff --git a/tests/sentry/api/endpoints/test_project_codeowners.py b/tests/sentry/api/endpoints/test_project_codeowners.py index e60afd81be384f..5e978d86540ec1 100644 --- a/tests/sentry/api/endpoints/test_project_codeowners.py +++ b/tests/sentry/api/endpoints/test_project_codeowners.py @@ -257,8 +257,8 @@ def test_schema_is_correct(self, get_codeowner_mock_file): { "matcher": {"pattern": "docs/*", "type": "codeowners"}, "owners": [ - {"identifier": self.user.email, "type": "user"}, - {"identifier": self.team.slug, "type": "team"}, + {"id": self.user.id, "identifier": self.user.email, "type": "user"}, + {"id": self.team.id, "identifier": self.team.slug, "type": "team"}, ], } ], @@ -281,8 +281,8 @@ def test_schema_preserves_comments(self, get_codeowner_mock_file): { "matcher": {"pattern": "docs/*", "type": "codeowners"}, "owners": [ - {"identifier": self.user.email, "type": "user"}, - {"identifier": self.team.slug, "type": "team"}, + {"id": self.user.id, "identifier": self.user.email, "type": "user"}, + {"id": self.team.id, "identifier": self.team.slug, "type": "team"}, ], } ], @@ -305,8 +305,8 @@ def test_raw_email_correct_schema(self, get_codeowner_mock_file): { "matcher": {"pattern": "docs/*", "type": "codeowners"}, "owners": [ - {"identifier": self.user.email, "type": "user"}, - {"identifier": self.team.slug, "type": "team"}, + {"id": self.user.id, "identifier": self.user.email, "type": "user"}, + {"id": self.team.id, "identifier": self.team.slug, "type": "team"}, ], } ], @@ -381,10 +381,9 @@ def test_users_without_access(self, get_codeowner_mock_file): "sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_post_with_streamline_targeting(self, get_codeowner_mock_file): + def test_post_with_schema(self, get_codeowner_mock_file): with self.feature({"organizations:integrations-codeowners": True}): - with self.feature({"organizations:streamline-targeting-context": True}): - response = self.client.post(self.url, self.data) + response = self.client.post(self.url, self.data) assert response.status_code == 201 assert response.data["raw"] == "docs/* @NisanthanNanthakumar @getsentry/ecosystem" assert response.data["codeMappingId"] == str(self.code_mapping.id) @@ -406,58 +405,48 @@ def test_post_with_streamline_targeting(self, get_codeowner_mock_file): return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) def test_get(self, get_codeowner_mock_file): - # Test post + get without the streamline-targeting-context flag - with self.feature({"organizations:integrations-codeowners": True}): - self.client.post(self.url, self.data) - response_no_schema = self.client.get(self.url) - assert "schema" not in response_no_schema.data[0].keys() - assert "codeOwnersUrl" not in response_no_schema.data[0].keys() - - # Test get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - self.client.get(self.url) - response = self.client.get(self.url) - response_data = response.data[0] - assert response.status_code == 200 - assert ( - response_data["raw"] == "docs/* @NisanthanNanthakumar 
@getsentry/ecosystem" - ) - assert response_data["codeMappingId"] == str(self.code_mapping.id) - assert response_data["schema"] == { - "$version": 1, - "rules": [ + self.client.post(self.url, self.data) + response = self.client.get(self.url) + + response_data = response.data[0] + assert response.status_code == 200 + assert response_data["raw"] == "docs/* @NisanthanNanthakumar @getsentry/ecosystem" + assert response_data["codeMappingId"] == str(self.code_mapping.id) + assert response_data["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "codeowners", "pattern": "docs/*"}, + "owners": [ { - "matcher": {"type": "codeowners", "pattern": "docs/*"}, - "owners": [ - { - "type": "user", - "id": self.user.id, - "name": "admin@sentry.io", - }, - {"type": "team", "id": self.team.id, "name": "tiger-team"}, - ], - } + "type": "user", + "id": self.user.id, + "name": "admin@sentry.io", + }, + {"type": "team", "id": self.team.id, "name": "tiger-team"}, ], } - assert response_data["codeOwnersUrl"] == "https://github.com/test/CODEOWNERS" - - # Assert that "identifier" is not renamed to "name" in the backend - ownership = ProjectCodeOwners.objects.get(project=self.project) - assert ownership.schema["rules"] == [ - { - "matcher": {"type": "codeowners", "pattern": "docs/*"}, - "owners": [ - {"type": "user", "identifier": "admin@sentry.io", "id": self.user.id}, - {"type": "team", "identifier": "tiger-team", "id": self.team.id}, - ], - } - ] + ], + } + assert response_data["codeOwnersUrl"] == "https://github.com/test/CODEOWNERS" + + # Assert that "identifier" is not renamed to "name" in the backend + ownership = ProjectCodeOwners.objects.get(project=self.project) + assert ownership.schema["rules"] == [ + { + "matcher": {"type": "codeowners", "pattern": "docs/*"}, + "owners": [ + {"type": "user", "identifier": "admin@sentry.io", "id": self.user.id}, + {"type": "team", "identifier": "tiger-team", "id": self.team.id}, + ], + } + ] @patch( "sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_get_rule_one_deleted_owner_with_streamline_targeting(self, get_codeowner_mock_file): + def test_get_rule_one_deleted_owner(self, get_codeowner_mock_file): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -470,29 +459,25 @@ def test_get_rule_one_deleted_owner_with_streamline_targeting(self, get_codeowne ) self.data["raw"] = "docs/* @delete @getsentry/ecosystem" - # Post without the streamline-targeting-context flag with self.feature({"organizations:integrations-codeowners": True}): self.client.post(self.url, self.data) - - # Test get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - self.external_delete_user.delete() - response = self.client.get(self.url) - assert response.data[0]["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "codeowners", "pattern": "docs/*"}, - "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], - } - ], - } + self.external_delete_user.delete() + response = self.client.get(self.url) + assert response.data[0]["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "codeowners", "pattern": "docs/*"}, + "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], + } + ], + } @patch( 
"sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_get_no_rule_deleted_owner_with_streamline_targeting(self, get_codeowner_mock_file): + def test_get_no_rule_deleted_owner(self, get_codeowner_mock_file): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -505,23 +490,17 @@ def test_get_no_rule_deleted_owner_with_streamline_targeting(self, get_codeowner ) self.data["raw"] = "docs/* @delete" - # Post without the streamline-targeting-context flag with self.feature({"organizations:integrations-codeowners": True}): self.client.post(self.url, self.data) - - # Test get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - self.external_delete_user.delete() - response = self.client.get(self.url) - assert response.data[0]["schema"] == {"$version": 1, "rules": []} + self.external_delete_user.delete() + response = self.client.get(self.url) + assert response.data[0]["schema"] == {"$version": 1, "rules": []} @patch( "sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_get_multiple_rules_deleted_owners_with_streamline_targeting( - self, get_codeowner_mock_file - ): + def test_get_multiple_rules_deleted_owners(self, get_codeowner_mock_file): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -546,31 +525,27 @@ def test_get_multiple_rules_deleted_owners_with_streamline_targeting( "raw" ] = "docs/* @delete\n*.py @getsentry/ecosystem @delete\n*.css @delete2\n*.rb @NisanthanNanthakumar" - # Post without the streamline-targeting-context flag with self.feature({"organizations:integrations-codeowners": True}): self.client.post(self.url, self.data) - - # Test get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - self.external_delete_user.delete() - self.external_delete_user2.delete() - response = self.client.get(self.url) - assert response.data[0]["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "codeowners", "pattern": "*.py"}, - "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], - }, - { - "matcher": {"type": "codeowners", "pattern": "*.rb"}, - "owners": [ - { - "type": "user", - "name": "admin@sentry.io", - "id": self.user.id, - } - ], - }, - ], - } + self.external_delete_user.delete() + self.external_delete_user2.delete() + response = self.client.get(self.url) + assert response.data[0]["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "codeowners", "pattern": "*.py"}, + "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], + }, + { + "matcher": {"type": "codeowners", "pattern": "*.rb"}, + "owners": [ + { + "type": "user", + "name": "admin@sentry.io", + "id": self.user.id, + } + ], + }, + ], + } diff --git a/tests/sentry/api/endpoints/test_project_ownership.py b/tests/sentry/api/endpoints/test_project_ownership.py index 7e434d46e876ec..1c423afba61092 100644 --- a/tests/sentry/api/endpoints/test_project_ownership.py +++ b/tests/sentry/api/endpoints/test_project_ownership.py @@ -14,7 +14,6 @@ from sentry.silo import SiloMode from sentry.testutils.cases import APITestCase from 
sentry.testutils.helpers.datetime import before_now, iso_format -from sentry.testutils.helpers.features import with_feature from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba @@ -80,6 +79,7 @@ def test_empty_state(self): "dateCreated": None, "lastUpdated": None, "codeownersAutoSync": True, + "schema": None, } def test_update(self): @@ -91,7 +91,6 @@ def test_update(self): assert resp.data["dateCreated"] is not None assert resp.data["lastUpdated"] is not None assert resp.data["codeownersAutoSync"] is True - assert "schema" not in resp.data.keys() resp = self.client.put(self.path, {"fallthrough": False}) assert resp.status_code == 200 @@ -187,8 +186,7 @@ def test_audit_log_ownership_change(self): assert len(auditlog) == 1 assert "modified" in auditlog[0].data["ownership_rules"] - @with_feature("organizations:streamline-targeting-context") - def test_update_with_streamline_targeting(self): + def test_update_schema(self): resp = self.client.put(self.path, {"raw": "*.js admin@localhost #tiger-team"}) assert resp.data["schema"] == { "$version": 1, @@ -204,41 +202,35 @@ def test_update_with_streamline_targeting(self): } def test_get(self): - # Test put + get without the streamline-targeting-context flag self.client.put(self.path, {"raw": "*.js admin@localhost #tiger-team"}) - resp_no_schema = self.client.get(self.path) - assert "schema" not in resp_no_schema.data.keys() - - # Test get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - resp = self.client.get(self.path) - assert resp.data["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "path", "pattern": "*.js"}, - "owners": [ - {"type": "user", "id": self.user.id, "name": "admin@localhost"}, - {"type": "team", "id": self.team.id, "name": "tiger-team"}, - ], - } - ], - } - - # Assert that "identifier" is not renamed to "name" in the backend - ownership = ProjectOwnership.objects.get(project=self.project) - assert ownership.schema["rules"] == [ + resp = self.client.get(self.path) + assert "schema" in resp.data.keys() + assert resp.data["schema"] == { + "$version": 1, + "rules": [ { "matcher": {"type": "path", "pattern": "*.js"}, "owners": [ - {"type": "user", "identifier": "admin@localhost", "id": self.user.id}, - {"type": "team", "identifier": "tiger-team", "id": self.team.id}, + {"type": "user", "id": self.user.id, "name": "admin@localhost"}, + {"type": "team", "id": self.team.id, "name": "tiger-team"}, ], } - ] + ], + } + + # Assert that "identifier" is not renamed to "name" in the backend + ownership = ProjectOwnership.objects.get(project=self.project) + assert ownership.schema["rules"] == [ + { + "matcher": {"type": "path", "pattern": "*.js"}, + "owners": [ + {"type": "user", "identifier": "admin@localhost", "id": self.user.id}, + {"type": "team", "identifier": "tiger-team", "id": self.team.id}, + ], + } + ] - @with_feature("organizations:streamline-targeting-context") - def test_get_empty_with_streamline_targeting(self): + def test_get_empty_schema(self): resp = self.client.get(self.path) assert resp.status_code == 200 assert resp.data == { @@ -252,7 +244,7 @@ def test_get_empty_with_streamline_targeting(self): "schema": None, } - def test_get_rule_deleted_owner_with_streamline_targeting(self): + def test_get_rule_deleted_owner(self): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) 
self.create_member( user=self.member_user_delete, @@ -260,26 +252,22 @@ def test_get_rule_deleted_owner_with_streamline_targeting(self): role="member", teams=[self.team], ) - # Put without the streamline-targeting-context flag self.client.put(self.path, {"raw": "*.js member_delete@localhost #tiger-team"}) with assume_test_silo_mode(SiloMode.CONTROL): self.member_user_delete.delete() + resp = self.client.get(self.path) + assert resp.data["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "path", "pattern": "*.js"}, + "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], + } + ], + } - # Get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - resp = self.client.get(self.path) - assert resp.data["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "path", "pattern": "*.js"}, - "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], - } - ], - } - - def test_get_no_rule_deleted_owner_with_streamline_targeting(self): + def test_get_no_rule_deleted_owner(self): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -287,18 +275,15 @@ def test_get_no_rule_deleted_owner_with_streamline_targeting(self): role="member", teams=[self.team], ) - # Put without the streamline-targeting-context flag self.client.put(self.path, {"raw": "*.js member_delete@localhost"}) with assume_test_silo_mode(SiloMode.CONTROL): self.member_user_delete.delete() - # Get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - resp = self.client.get(self.path) - assert resp.data["schema"] == {"$version": 1, "rules": []} + resp = self.client.get(self.path) + assert resp.data["schema"] == {"$version": 1, "rules": []} - def test_get_multiple_rules_deleted_owners_with_streamline_targeting(self): + def test_get_multiple_rules_deleted_owners(self): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -313,7 +298,6 @@ def test_get_multiple_rules_deleted_owners_with_streamline_targeting(self): role="member", teams=[self.team], ) - # Put without the streamline-targeting-context flag self.client.put( self.path, { @@ -325,24 +309,22 @@ def test_get_multiple_rules_deleted_owners_with_streamline_targeting(self): self.member_user_delete.delete() self.member_user_delete2.delete() - # Get after with the streamline-targeting-context flag - with self.feature({"organizations:streamline-targeting-context": True}): - resp = self.client.get(self.path) - assert resp.data["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"pattern": "*.py", "type": "path"}, - "owners": [{"id": self.team.id, "name": "tiger-team", "type": "team"}], - }, - { - "matcher": {"pattern": "*.rb", "type": "path"}, - "owners": [ - {"id": self.member_user.id, "name": "member@localhost", "type": "user"} - ], - }, - ], - } + resp = self.client.get(self.path) + assert resp.data["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"pattern": "*.py", "type": "path"}, + "owners": [{"id": self.team.id, "name": "tiger-team", "type": "team"}], + }, + { + "matcher": {"pattern": "*.rb", "type": "path"}, + "owners": [ + {"id": self.member_user.id, "name": "member@localhost", "type": "user"} + ], + }, + ], + } def test_invalid_email(self): resp = self.client.put(self.path, {"raw": 
"*.js idont@exist.com #tiger-team"}) diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index e42d4419247144..c596dd8081849d 100644 --- a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -605,6 +605,7 @@ def test_issue_alert_with_suspect_commits(self, mock_external_url): group=group, event=event, suspect_commit_text=suspect_commit_text, + suggested_assignees=commit_author.email, ) @patch( @@ -676,10 +677,10 @@ def test_issue_alert_with_suspect_commits_unknown_provider(self, mock_external_u group=group, event=event, suspect_commit_text=suspect_commit_text, + suggested_assignees=commit_author.email, ) @with_feature("organizations:slack-block-kit") - @with_feature("organizations:streamline-targeting-context") def test_issue_alert_with_suggested_assignees(self): self.project.flags.has_releases = True self.project.save(update_fields=["flags"]) diff --git a/tests/sentry/notifications/utils/test_participants.py b/tests/sentry/notifications/utils/test_participants.py index c7e286ec986686..d71a17da068505 100644 --- a/tests/sentry/notifications/utils/test_participants.py +++ b/tests/sentry/notifications/utils/test_participants.py @@ -487,7 +487,6 @@ def test_send_to_current_assignee_and_owners(self): slack=[self.user.id, self.user2.id, member.id], ) - @with_feature("organizations:streamline-targeting-context") def test_send_to_suspect_committers(self): """ Test suspect committer is added as suggested assignee, where "organizations:commit-context" @@ -528,7 +527,6 @@ def test_send_to_suspect_committers(self): slack=[self.user_suspect_committer.id, self.user.id], ) - @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_with_commit_context_feature_flag(self): """ @@ -557,7 +555,6 @@ def test_send_to_suspect_committers_with_commit_context_feature_flag(self): slack=[self.user_suspect_committer.id, self.user.id], ) - @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_no_owners_with_commit_context_feature_flag(self): """ @@ -612,7 +609,6 @@ def test_send_to_suspect_committers_no_owners_with_commit_context_feature_flag(s slack=[self.user_suspect_committer.id], ) - @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_dupe_with_commit_context_feature_flag(self): """ @@ -639,7 +635,6 @@ def test_send_to_suspect_committers_dupe_with_commit_context_feature_flag(self): self.get_send_to_owners(event), email=[self.user.id], slack=[self.user.id] ) - @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_exception_with_commit_context_feature_flag(self): """ @@ -666,7 +661,6 @@ def test_send_to_suspect_committers_exception_with_commit_context_feature_flag(s self.get_send_to_owners(event), email=[self.user.id], slack=[self.user.id] ) - @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_not_project_member_commit_context_feature_flag(self): """ From f326f7f41d315a089a61c456d6d7e602209865e2 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Tue, 13 Feb 2024 10:46:42 -0600 Subject: [PATCH 314/357] fix(metrics): Add entity keys for spans 
(#65082) We need to specify the entity keys for spans to correctly find all span metrics. --- src/sentry/snuba/metrics/utils.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/sentry/snuba/metrics/utils.py b/src/sentry/snuba/metrics/utils.py index 79c2e3eb58a635..861edab7f665fe 100644 --- a/src/sentry/snuba/metrics/utils.py +++ b/src/sentry/snuba/metrics/utils.py @@ -188,6 +188,11 @@ EntityKey.MetricsSets, EntityKey.MetricsDistributions, }, + UseCaseID.SPANS: { + EntityKey.GenericMetricsCounters, + EntityKey.GenericMetricsSets, + EntityKey.GenericMetricsDistributions, + }, UseCaseID.TRANSACTIONS: { EntityKey.GenericMetricsCounters, EntityKey.GenericMetricsSets, From 326ea9420067536987cca1354e3abf156ccf7aa7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 13 Feb 2024 08:58:24 -0800 Subject: [PATCH 315/357] ref(grouping): Pull code from `_save_aggregate_new` into helper functions (#64974) As part of the on-going work cleaning up `_save_aggreagte_new`, in order to make the upcoming logic changes as easy to reason about and therefore as safe as possible, this pulls code not directly tied to the process of finding and creating grouphashes and assigning group ids to them into the following helper functions: - `record_new_group_metrics` - There's currently a lot of logic to collect these metrics, and likely to be even more in the future. - `_get_group_processing_kwargs` - This is the result of pulling all of the metadata-related code out of `_save_aggregate_new` and combining it with the logic in `get_group_creation_kwargs`. Before the removal of the hierarchical code this wasn't possible, as it was interleaved with the metadata logic. Now, however, the metadata-gathering code is independent of the rest and can be factored out. Also, I noticed that the `group_creation_kwargs` in `_save_aggregate` are in fact used not only when creating a group but also when updating it, hence the substitution of `processing` for `creation`. - `check_for_group_creation_load_shed` - Quick killswitch check. - `add_group_id_to_grouphashes` - With the hierarchical logic gone, the only difference between the new group branch and the existing group branch was that the latter filtered out any grouphashes which already had a group assigned. Though we know that in the new group branch that filter won't find anything - both grouphashes are new - it's safe to apply it in both branches. - `check_for_category_mismatch` - Though I couldn't find any instances of it happening in our logs, apparently it's possible for an error event's hash to match a non-error-type group. 
--- src/sentry/event_manager.py | 170 +++++++++++++++------------------- src/sentry/grouping/ingest.py | 76 +++++++++++++++ 2 files changed, 153 insertions(+), 93 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index f5bf6bbf5916a6..f52f310d44a1bd 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -47,9 +47,13 @@ from sentry.exceptions import HashDiscarded from sentry.grouping.api import GroupingConfig, get_grouping_config_dict_for_project from sentry.grouping.ingest import ( + add_group_id_to_grouphashes, + check_for_category_mismatch, + check_for_group_creation_load_shed, find_existing_grouphash, find_existing_grouphash_new, get_hash_values, + record_new_group_metrics, update_grouping_config_if_needed, ) from sentry.ingest.inbound_filters import FilterStatKeys @@ -907,6 +911,8 @@ def _materialize_metadata_many(jobs: Sequence[Job]) -> None: job["culprit"] = data["culprit"] +# TODO: This is only called in `_save_aggregate`, so when that goes, so can this (it's been +# supplanted by `_get_group_processing_kwargs` below) def _get_group_creation_kwargs(job: Job | PerformanceJob) -> dict[str, Any]: kwargs = { "platform": job["platform"], @@ -925,6 +931,43 @@ def _get_group_creation_kwargs(job: Job | PerformanceJob) -> dict[str, Any]: return kwargs +def _get_group_processing_kwargs(job: Job) -> dict[str, Any]: + """ + Pull together all the metadata used when creating a group or updating a group's metadata based + on a new event. + """ + _materialize_metadata_many([job]) + + event_data = job["event"].data + event_metadata = job["event_metadata"] + + group_metadata = materialize_metadata( + event_data, + # In principle the group gets the same metadata as the event, so common + # attributes can be defined in eventtypes. + get_event_type(event_data), + event_metadata, + ) + group_metadata["last_received"] = job["received_timestamp"] + + kwargs = { + "data": group_metadata, + "platform": job["platform"], + "message": job["event"].search_message, + "logger": job["logger_name"], + "level": LOG_LEVELS_MAP.get(job["level"]), + "last_seen": job["event"].datetime, + "first_seen": job["event"].datetime, + "active_at": job["event"].datetime, + "culprit": job["culprit"], + } + + if job["release"]: + kwargs["first_release"] = job["release"] + + return kwargs + + @metrics.wraps("save_event.get_or_create_environment_many") def _get_or_create_environment_many(jobs: Sequence[Job], projects: ProjectsMapping) -> None: for job in jobs: @@ -1336,7 +1379,6 @@ def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> G event=event, job=job, release=job["release"], - received_timestamp=job["received_timestamp"], metric_tags=metric_tags, ) else: @@ -1585,11 +1627,12 @@ def _save_aggregate_new( event: Event, job: Job, release: Release | None, - received_timestamp: int | float, metric_tags: MutableTags, ) -> GroupInfo | None: project = event.project + group_processing_kwargs = _get_group_processing_kwargs(job) + _, _, hashes = get_hash_values(project, job, metric_tags) # Now that we've used the current and possibly secondary grouping config(s) to calculate the @@ -1598,38 +1641,14 @@ def _save_aggregate_new( # erroneously create new groups. 
update_grouping_config_if_needed(project) - _materialize_metadata_many([job]) - metadata = dict(job["event_metadata"]) - - group_creation_kwargs = _get_group_creation_kwargs(job) - grouphashes = [ GroupHash.objects.get_or_create(project=project, hash=hash)[0] for hash in hashes.hashes ] existing_grouphash = find_existing_grouphash_new(grouphashes) - # In principle the group gets the same metadata as the event, so common - # attributes can be defined in eventtypes. - # - # Additionally the `last_received` key is set for group metadata, later in - # _save_aggregate - group_creation_kwargs["data"] = materialize_metadata( - event.data, - get_event_type(event.data), - metadata, - ) - group_creation_kwargs["data"]["last_received"] = received_timestamp - if existing_grouphash is None: - if killswitch_matches_context( - "store.load-shed-group-creation-projects", - { - "project_id": project.id, - "platform": event.platform, - }, - ): - raise HashDiscarded("Load shedding group creation", reason="load_shed") + check_for_group_creation_load_shed(project, event) with sentry_sdk.start_span( op="event_manager.create_group_transaction" @@ -1650,7 +1669,7 @@ def _save_aggregate_new( existing_grouphash = find_existing_grouphash_new(grouphashes) if existing_grouphash is None: - group = _create_group(project, event, **group_creation_kwargs) + group = _create_group(project, event, **group_processing_kwargs) if ( features.has("projects:first-event-severity-calculation", event.project) @@ -1664,11 +1683,7 @@ def _save_aggregate_new( }, ) - new_hashes = list(grouphashes) - - GroupHash.objects.filter(id__in=[h.id for h in new_hashes]).exclude( - state=GroupHash.State.LOCKED_IN_MIGRATION - ).update(group=group) + add_group_id_to_grouphashes(group, grouphashes) is_new = True is_regression = False @@ -1676,78 +1691,47 @@ def _save_aggregate_new( span.set_tag("create_group_transaction.outcome", "new_group") metric_tags["create_group_transaction.outcome"] = "new_group" - metrics.incr( - "group.created", - skip_internal=True, - tags={ - "platform": event.platform or "unknown", - "sdk": normalized_sdk_tag_from_event(event), - }, - ) - - # This only applies to events with stacktraces - frame_mix = event.get_event_metadata().get("in_app_frame_mix") - if frame_mix: - metrics.incr( - "grouping.in_app_frame_mix", - sample_rate=1.0, - tags={ - "platform": event.platform or "unknown", - "sdk": normalized_sdk_tag_from_event(event), - "frame_mix": frame_mix, - }, - ) + record_new_group_metrics(event) return GroupInfo(group, is_new, is_regression) group = Group.objects.get(id=existing_grouphash.group_id) - if group.issue_category != GroupCategory.ERROR: - logger.info( - "event_manager.category_mismatch", - extra={ - "issue_category": group.issue_category, - "event_type": "error", - }, - ) + + if check_for_category_mismatch(group): return None is_new = False - new_hashes = [h for h in grouphashes if h.group_id is None] - - if new_hashes: - # There may still be secondary hashes that we did not use to find an - # existing group. A classic example is when grouping makes changes to - # the app-hash (changes to in_app logic), but the system hash stays - # stable and is used to find an existing group. Associate any new - # hashes with the group such that event saving continues to be - # resilient against grouping algorithm changes. - # - # There is a race condition here where two processes could "steal" - # hashes from each other. In practice this should not be user-visible - # as group creation is synchronized. 
Meaning the only way hashes could - # jump between groups is if there were two processes that: - # - # 1) have BOTH found an existing group - # (otherwise at least one of them would be in the group creation - # codepath which has transaction isolation/acquires row locks) - # 2) AND are looking at the same set, or an overlapping set of hashes - # (otherwise they would not operate on the same rows) - # 3) yet somehow also sort their event into two different groups each - # (otherwise the update would not change anything) - # - # We think this is a very unlikely situation. A previous version of - # _save_aggregate had races around group creation which made this race - # more user visible. For more context, see 84c6f75a and d0e22787, as - # well as GH-5085. - GroupHash.objects.filter(id__in=[h.id for h in new_hashes]).exclude( - state=GroupHash.State.LOCKED_IN_MIGRATION - ).update(group=group) + # There may still be secondary hashes that we did not use to find an + # existing group. A classic example is when grouping makes changes to + # the app-hash (changes to in_app logic), but the system hash stays + # stable and is used to find an existing group. Associate any new + # hashes with the group such that event saving continues to be + # resilient against grouping algorithm changes. + # + # There is a race condition here where two processes could "steal" + # hashes from each other. In practice this should not be user-visible + # as group creation is synchronized, meaning the only way hashes could + # jump between groups is if there were two processes that: + # + # 1) have BOTH found an existing group + # (otherwise at least one of them would be in the group creation + # codepath which has transaction isolation/acquires row locks) + # 2) AND are looking at the same set, or an overlapping set of hashes + # (otherwise they would not operate on the same rows) + # 3) yet somehow also sort their respective events into two different groups + # (otherwise the update would not change anything) + # + # We think this is a very unlikely situation. A previous version of + # _save_aggregate had races around group creation which made this race + # more user visible. For more context, see 84c6f75a and d0e22787, as + # well as GH-5085. 
+ add_group_id_to_grouphashes(group, grouphashes) is_regression = _process_existing_aggregate( group=group, event=event, - incoming_group_values=group_creation_kwargs, + incoming_group_values=group_processing_kwargs, release=release, ) diff --git a/src/sentry/grouping/ingest.py b/src/sentry/grouping/ingest.py index 3486ddc21c8256..d36c25c29b9d31 100644 --- a/src/sentry/grouping/ingest.py +++ b/src/sentry/grouping/ingest.py @@ -1,6 +1,7 @@ from __future__ import annotations import copy +import logging import random import time from collections.abc import MutableMapping, Sequence @@ -25,7 +26,10 @@ load_grouping_config, ) from sentry.grouping.result import CalculatedHashes +from sentry.issues.grouptype import GroupCategory +from sentry.killswitches import killswitch_matches_context from sentry.locks import locks +from sentry.models.group import Group from sentry.models.grouphash import GroupHash from sentry.models.project import Project from sentry.projectoptions.defaults import BETA_GROUPING_CONFIG, DEFAULT_GROUPING_CONFIG @@ -37,6 +41,8 @@ if TYPE_CHECKING: from sentry.eventstore.models import Event +logger = logging.getLogger("sentry.events") + Job = MutableMapping[str, Any] @@ -403,3 +409,73 @@ def get_hash_values( job["finest_tree_label"] = all_hashes.finest_tree_label return (primary_hashes, secondary_hashes, all_hashes) + + +def record_new_group_metrics(event: Event): + metrics.incr( + "group.created", + skip_internal=True, + tags={ + "platform": event.platform or "unknown", + "sdk": normalized_sdk_tag_from_event(event), + }, + ) + + # This only applies to events with stacktraces + frame_mix = event.get_event_metadata().get("in_app_frame_mix") + if frame_mix: + metrics.incr( + "grouping.in_app_frame_mix", + sample_rate=1.0, + tags={ + "platform": event.platform or "unknown", + "sdk": normalized_sdk_tag_from_event(event), + "frame_mix": frame_mix, + }, + ) + + +def check_for_group_creation_load_shed(project: Project, event: Event): + """ + Raise a `HashDiscarded` error if the load-shed killswitch is enabled + """ + if killswitch_matches_context( + "store.load-shed-group-creation-projects", + { + "project_id": project.id, + "platform": event.platform, + }, + ): + raise HashDiscarded("Load shedding group creation", reason="load_shed") + + +def add_group_id_to_grouphashes( + group: Group, + grouphashes: list[GroupHash], +) -> None: + """ + Link the given group to any grouphash which doesn't yet have a group assigned. + """ + + new_grouphash_ids = [gh.id for gh in grouphashes if gh.group_id is None] + + GroupHash.objects.filter(id__in=new_grouphash_ids).exclude( + state=GroupHash.State.LOCKED_IN_MIGRATION + ).update(group=group) + + +def check_for_category_mismatch(group: Group) -> bool: + """ + Make sure an error event hasn't hashed to a value assigned to a non-error-type group + """ + if group.issue_category != GroupCategory.ERROR: + logger.info( + "event_manager.category_mismatch", + extra={ + "issue_category": group.issue_category, + "event_type": "error", + }, + ) + return True + + return False From 16e2f857776203077b76cda8de6b68efe0eb6353 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Tue, 13 Feb 2024 09:15:51 -0800 Subject: [PATCH 316/357] ref(weekly reports): Add spans to a couple functions (#65045) Add instrumentation to the 2 functions missing it in the weekly report task. 
This task is [very slow](https://sentry.sentry.io/performance/summary/?end=2024-02-12T09%3A59%3A47&project=1&query=&referrer=performance-transaction-summary&start=2024-02-11T21%3A02%3A11&transaction=sentry.tasks.weekly_reports.prepare_organization_report&unselectedSeries=p100%28%29&unselectedSeries=avg%28%29) and it'd help us understand exactly what is slow better if we had everything instrumented. --- src/sentry/tasks/weekly_reports.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/sentry/tasks/weekly_reports.py b/src/sentry/tasks/weekly_reports.py index 1d530b7d2239b1..6b9555bf131cbf 100644 --- a/src/sentry/tasks/weekly_reports.py +++ b/src/sentry/tasks/weekly_reports.py @@ -190,7 +190,8 @@ def prepare_organization_report( with sentry_sdk.start_span(op="weekly_reports.project_event_counts_for_organization"): project_event_counts_for_organization(ctx) - organization_project_issue_substatus_summaries(ctx) + with sentry_sdk.start_span(op="weekly_reports.organization_project_issue_substatus_summaries"): + organization_project_issue_substatus_summaries(ctx) with sentry_sdk.start_span(op="weekly_reports.project_passes"): # Run project passes @@ -204,7 +205,8 @@ def prepare_organization_report( with sentry_sdk.start_span(op="weekly_reports.fetch_key_performance_issue_groups"): fetch_key_performance_issue_groups(ctx) - report_is_available = not check_if_ctx_is_empty(ctx) + with sentry_sdk.start_span(op="weekly_reports.check_if_ctx_is_empty"): + report_is_available = not check_if_ctx_is_empty(ctx) set_tag("report.available", report_is_available) if not report_is_available: From cf5fed05c040a6b8a4766cc319fb73b418391c5a Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Tue, 13 Feb 2024 12:22:12 -0500 Subject: [PATCH 317/357] ref: upgrade virtualenv (#65077) the current version cannot create working python 3.12 environments --- requirements-dev-frozen.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 4ef6bb3a812449..c7189aec51b6fa 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -37,7 +37,7 @@ cryptography==41.0.7 cssselect==1.0.3 cssutils==2.9.0 datadog==0.44.0 -distlib==0.3.4 +distlib==0.3.8 distro==1.8.0 django==5.0.2 django-crispy-forms==1.14.0 @@ -51,7 +51,7 @@ email-reply-parser==0.5.12 execnet==1.9.0 fastjsonschema==2.16.2 fido2==0.9.2 -filelock==3.7.0 +filelock==3.13.1 flake8==7.0.0 flake8-bugbear==22.10.27 flake8-logging==1.5.0 @@ -120,7 +120,7 @@ phabricator==0.7.0 phonenumberslite==8.12.55 pillow==10.2.0 pip-tools==7.1.0 -platformdirs==2.5.2 +platformdirs==4.2.0 pluggy==1.4.0 pre-commit==3.3.2 progressbar2==3.41.0 @@ -225,7 +225,7 @@ unidiff==0.7.4 uritemplate==4.1.1 urllib3==2.0.7 vine==5.1.0 -virtualenv==20.14.1 +virtualenv==20.25.0 wcwidth==0.2.10 websocket-client==1.3.2 werkzeug==3.0.1 From 94e74f773be88305db17ec73b72f2c3eeee202bd Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Tue, 13 Feb 2024 12:36:12 -0500 Subject: [PATCH 318/357] build: enable more biome rules (#65091) Enables Eslint equivalent Biome rules. 
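As a concrete illustration of what one of the newly enabled rules enforces,
Biome's `useShorthandFunctionType` rewrites a call-signature-only interface
into a function type alias, which is the same change the `api.tsx` hunk below
makes (types are simplified here for brevity; `RequestOptions` stands in for
the real `ApiNamespace.RequestOptions`):

```ts
// Stand-in for ApiNamespace.RequestOptions from the real file.
type RequestOptions = Record<string, unknown>;

// Before: a call-signature-only interface, flagged by useShorthandFunctionType.
interface MatchCallable {
  (url: string, options: RequestOptions): boolean;
}

// After: the shorthand function type the rule prefers.
type MatchCallableShorthand = (url: string, options: RequestOptions) => boolean;
```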
--- .eslintrc.js | 29 +++++++++++++++++--- biome.json | 14 +++++++++- static/app/__mocks__/api.tsx | 4 +-- static/app/actionCreators/account.tsx | 2 +- static/app/utils/metrics/dashboardImport.tsx | 13 +++++---- 5 files changed, 47 insertions(+), 15 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index 97e5ca127ffb2e..8fb794516665c1 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -31,6 +31,18 @@ module.exports = { {additionalHooks: ADDITIONAL_HOOKS_TO_CHECK_DEPS_FOR}, ], ...(!isRelaxed && !isCi ? strictRulesNotCi : {}), + + // TODO(@anonrig): Remove this from eslint-sentry-config + 'space-infix-ops': 'off', + 'object-shorthand': 'off', + 'object-curly-spacing': 'off', + 'import/no-amd': 'off', + 'no-danger-with-children': 'off', + 'no-fallthrough': 'off', + 'no-obj-calls': 'off', + 'array-bracket-spacing': 'off', + 'computed-property-spacing': 'off', + 'react/no-danger-with-children': 'off', }, // JSON file formatting is handled by Biome. ESLint should not be linting // and formatting these files. @@ -39,10 +51,19 @@ module.exports = { { files: ['tests/js/**/*.{ts,js}'], extends: ['plugin:testing-library/react', 'sentry-app/strict'], - }, - { - files: ['*.ts', '*.tsx'], - rules: {}, + rules: { + // TODO(@anonrig): Remove this from eslint-sentry-config + 'space-infix-ops': 'off', + 'object-shorthand': 'off', + 'object-curly-spacing': 'off', + 'import/no-amd': 'off', + 'no-danger-with-children': 'off', + 'no-fallthrough': 'off', + 'no-obj-calls': 'off', + 'array-bracket-spacing': 'off', + 'computed-property-spacing': 'off', + 'react/no-danger-with-children': 'off', + }, }, { // We specify rules explicitly for the sdk-loader here so we do not have diff --git a/biome.json b/biome.json index 06e4f6ea962d29..9ce566a9eac1e7 100644 --- a/biome.json +++ b/biome.json @@ -17,6 +17,7 @@ "noBlankTarget": "error" }, "correctness": { + "noGlobalObjectCalls": "error", "noUnreachable": "error", "useHookAtTopLevel": "error", "useIsNan": "error" @@ -28,7 +29,11 @@ "noDuplicateJsonKeys": "error", "noNodejsModules": "error", "useExportType": "error", - "useImportType": "error" + "useImportType": "error", + "useShorthandFunctionType": "error" + }, + "security": { + "noDangerouslySetInnerHtmlWithChildren": "error" }, "suspicious": { "noDebugger": "error", @@ -37,10 +42,17 @@ "noDuplicateObjectKeys": "error", "noDuplicateParameters": "error", "noDuplicateCase": "error", + "noFallthroughSwitchClause": "error", "noRedeclare": "error", + "noSparseArray": "error", + "noUnsafeDeclarationMerging": "error", + "noUnsafeNegation": "error", "useIsArray": "error" }, "style": { + "noCommaOperator": "error", + "noShoutyConstants": "error", + "noParameterProperties": "error", "noVar": "error", "useConst": "error" } diff --git a/static/app/__mocks__/api.tsx b/static/app/__mocks__/api.tsx index fdde0509573d70..f9eabc04d00400 100644 --- a/static/app/__mocks__/api.tsx +++ b/static/app/__mocks__/api.tsx @@ -27,9 +27,7 @@ type FunctionCallback = (...args: Args) => void; /** * Callables for matching requests based on arbitrary conditions. 
*/ -interface MatchCallable { - (url: string, options: ApiNamespace.RequestOptions): boolean; -} +type MatchCallable = (url: string, options: ApiNamespace.RequestOptions) => boolean; type AsyncDelay = undefined | number; interface ResponseType extends ApiNamespace.ResponseMeta { diff --git a/static/app/actionCreators/account.tsx b/static/app/actionCreators/account.tsx index df60ebe8bfde6e..ff288f6d5e7af1 100644 --- a/static/app/actionCreators/account.tsx +++ b/static/app/actionCreators/account.tsx @@ -6,7 +6,7 @@ import type {ChangeAvatarUser} from 'sentry/views/settings/account/accountDetail export async function disconnectIdentity( identity: UserIdentityConfig, - onSuccess: {(): void} + onSuccess: () => void ) { const api = new Client(); diff --git a/static/app/utils/metrics/dashboardImport.tsx b/static/app/utils/metrics/dashboardImport.tsx index fc3187157e82ae..a34d5e53eddc90 100644 --- a/static/app/utils/metrics/dashboardImport.tsx +++ b/static/app/utils/metrics/dashboardImport.tsx @@ -1,9 +1,8 @@ import {Client} from 'sentry/api'; +import type {MetricMeta, MRI} from 'sentry/types'; import type {MetricsQuery} from 'sentry/utils/metrics/types'; import {MetricDisplayType} from 'sentry/utils/metrics/types'; -import type {MetricMeta, MRI} from '../../types/metrics'; - // import types export type ImportDashboard = { description: string; @@ -115,11 +114,13 @@ const METRIC_SUFFIX_TO_OP = { export class WidgetParser { private errors: string[] = []; private api = new Client(); + private importedWidget: ImportWidget; + private availableMetrics: MetricMeta[]; - constructor( - private importedWidget: ImportWidget, - private availableMetrics: MetricMeta[] - ) {} + constructor(importedWidget: ImportWidget, availableMetrics: MetricMeta[]) { + this.importedWidget = importedWidget; + this.availableMetrics = availableMetrics; + } // Parsing functions public async parse() { From cb4318cf372ff5e0492e1ec2222964bf8b3def6c Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Tue, 13 Feb 2024 09:49:05 -0800 Subject: [PATCH 319/357] feat(replays): add feature flag for combined envelope items in relay (#64948) Adds a feature flag for combining envelope items in relay -- will be used in https://github.com/getsentry/relay/pull/3035. getsentry handler PR will be linked below. 
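For reference, a minimal sketch (hypothetical usage, not part of this diff) of
how server-side code could consult the new flag once it is registered below:

```python
from sentry import features


def combined_envelope_items_enabled(organization) -> bool:
    # Hypothetical helper: the flag defaults to False in conf/server.py, is
    # registered with an INTERNAL handler strategy in features/__init__.py,
    # and is forwarded to Relay via the exposable-features list in
    # relay/config/__init__.py (all shown in the diff below).
    return features.has(
        "organizations:session-replay-combined-envelope-items", organization
    )
```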
--- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + src/sentry/relay/config/__init__.py | 1 + 3 files changed, 4 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 010d50bc31b393..833415a2624c3b 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1835,6 +1835,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:session-replay-a11y-tab": False, # Enable the accessibility issues endpoint "organizations:session-replay-accessibility-issues": False, + # Enable combined envelope Kafka items in Relay + "organizations:session-replay-combined-envelope-items": False, # Enable core Session Replay SDK for recording onError events on sentry.io "organizations:session-replay-count-query-optimize": False, # Enable canvas recording diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 966c50092c0bfb..d680f681f84039 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -237,6 +237,7 @@ default_manager.add("organizations:sentry-pride-logo-footer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-a11y-tab", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-accessibility-issues", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:session-replay-combined-envelope-items", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:session-replay-count-query-optimize", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-enable-canvas-replayer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-enable-canvas", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/relay/config/__init__.py b/src/sentry/relay/config/__init__.py index 6f1dd36f191609..070d33c3bb8e62 100644 --- a/src/sentry/relay/config/__init__.py +++ b/src/sentry/relay/config/__init__.py @@ -52,6 +52,7 @@ "organizations:transaction-name-normalize", "organizations:profiling", "organizations:session-replay", + "organizations:session-replay-combined-envelope-items", "organizations:user-feedback-ingest", "organizations:session-replay-recording-scrubbing", "organizations:device-class-synthesis", From f6d209a46f1631862da5c787bc0c7454992d1006 Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Tue, 13 Feb 2024 10:03:49 -0800 Subject: [PATCH 320/357] chore(issues): Remove usage of issue-alert-fallback-targeting flag from the backend (#64971) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Requires https://github.com/getsentry/sentry/pull/64965 I believe this will disable the fallthrough for all users, ignoring it even if it was set in the past. 
Since it's GA there should be no risk, though I believe that means there may be more I can delete 🕵️ --- .../apidocs/examples/project_examples.py | 1 - src/sentry/conf/server.py | 2 - src/sentry/features/__init__.py | 1 - src/sentry/models/projectownership.py | 38 ++-- .../notifications/utils/participants.py | 7 - .../api/endpoints/test_team_projects.py | 13 -- tests/sentry/digests/test_utilities.py | 8 +- .../msteams/notifications/test_issue_alert.py | 3 +- .../slack/notifications/test_issue_alert.py | 55 ++--- tests/sentry/mail/test_actions.py | 4 - tests/sentry/mail/test_adapter.py | 190 ++++++++++++++---- tests/sentry/models/test_projectownership.py | 16 +- .../notifications/test_digests.py | 6 +- .../notifications/utils/test_participants.py | 41 +--- tests/sentry/tasks/test_digests.py | 15 +- 15 files changed, 224 insertions(+), 176 deletions(-) diff --git a/src/sentry/apidocs/examples/project_examples.py b/src/sentry/apidocs/examples/project_examples.py index 6de74a070b5480..749c5b3c026d21 100644 --- a/src/sentry/apidocs/examples/project_examples.py +++ b/src/sentry/apidocs/examples/project_examples.py @@ -287,7 +287,6 @@ "profile-file-io-main-thread-ingest", "customer-domains", "performance-file-io-main-thread-post-process-group", - "issue-alert-fallback-targeting", "performance-render-blocking-asset-span-visible", "ds-sliding-window-org", "performance-consecutive-http-ingest", diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 833415a2624c3b..8e51105b4bf7f7 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1616,8 +1616,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:invite-members": True, # Enable rate limits for inviting members. "organizations:invite-members-rate-limits": True, - # Enable new issue alert "issue owners" fallback - "organizations:issue-alert-fallback-targeting": False, # Enables the inline replay viewer on the issue details page "organizations:issue-details-inline-replay-viewer": False, # Enables a toggle for entering the new issue details UI diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index d680f681f84039..7926169f440e74 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -121,7 +121,6 @@ default_manager.add("organizations:investigation-bias", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:invite-members-rate-limits", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:invite-members", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:issue-alert-fallback-targeting", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-details-inline-replay-viewer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-details-new-experience-toggle", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-details-stacktrace-link-in-frame", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/models/projectownership.py b/src/sentry/models/projectownership.py index 5f37f40a3ac71e..6856e3d9c24b9c 100644 --- a/src/sentry/models/projectownership.py +++ b/src/sentry/models/projectownership.py @@ -9,14 +9,12 @@ from django.db.models.signals import post_delete, post_save from django.utils import timezone -from sentry import features from sentry.backup.scopes import RelocationScope from 
sentry.db.models import Model, region_silo_only_model, sane_repr from sentry.db.models.fields import FlexibleForeignKey, JSONField from sentry.models.activity import Activity from sentry.models.actor import ActorTuple from sentry.models.groupowner import OwnerRuleType -from sentry.models.project import Project from sentry.ownership.grammar import Rule, load_schema, resolve_actors from sentry.types.activity import ActivityType from sentry.utils import metrics @@ -133,13 +131,7 @@ def get_owners( rules = cls._matching_ownership_rules(ownership, data) if not rules: - project = Project.objects.get(id=project_id) - if features.has( - "organizations:issue-alert-fallback-targeting", project.organization, actor=None - ): - return [], None - - return cls.Everyone if ownership.fallthrough else [], None + return [], None owners = {o for rule in rules for o in rule.owners} owners_to_actors = resolve_actors(owners, project_id) @@ -287,15 +279,17 @@ def handle_auto_assignment(cls, project_id, event=None, group=None): details = ( {"integration": ActivityIntegration.SUSPECT_COMMITTER.value} if issue_owner.type == GroupOwnerType.SUSPECT_COMMIT.value - else { - "integration": ActivityIntegration.PROJECT_OWNERSHIP.value, - "rule": (issue_owner.context or {}).get("rule", ""), - } - if issue_owner.type == GroupOwnerType.OWNERSHIP_RULE.value - else { - "integration": ActivityIntegration.CODEOWNERS.value, - "rule": (issue_owner.context or {}).get("rule", ""), - } + else ( + { + "integration": ActivityIntegration.PROJECT_OWNERSHIP.value, + "rule": (issue_owner.context or {}).get("rule", ""), + } + if issue_owner.type == GroupOwnerType.OWNERSHIP_RULE.value + else { + "integration": ActivityIntegration.CODEOWNERS.value, + "rule": (issue_owner.context or {}).get("rule", ""), + } + ) ) activity = Activity.objects.filter( group=group, type=ActivityType.ASSIGNED.value @@ -331,9 +325,11 @@ def handle_auto_assignment(cls, project_id, event=None, group=None): if assignment["new_assignment"] or assignment["updated_assignment"]: analytics.record( - "codeowners.assignment" - if details.get("integration") == ActivityIntegration.CODEOWNERS.value - else "issueowners.assignment", + ( + "codeowners.assignment" + if details.get("integration") == ActivityIntegration.CODEOWNERS.value + else "issueowners.assignment" + ), organization_id=ownership.project.organization_id, project_id=project_id, group_id=group.id, diff --git a/src/sentry/notifications/utils/participants.py b/src/sentry/notifications/utils/participants.py index d92aa4ed83e7f2..042fb36db96b4f 100644 --- a/src/sentry/notifications/utils/participants.py +++ b/src/sentry/notifications/utils/participants.py @@ -409,13 +409,6 @@ def get_send_to( def get_fallthrough_recipients( project: Project, fallthrough_choice: FallthroughChoiceType | None ) -> Iterable[RpcUser]: - if not features.has( - "organizations:issue-alert-fallback-targeting", - project.organization, - actor=None, - ): - return [] - if not fallthrough_choice: logger.warning("Missing fallthrough type in project: %s", project) return [] diff --git a/tests/sentry/api/endpoints/test_team_projects.py b/tests/sentry/api/endpoints/test_team_projects.py index feeca69436b96f..a8c5380e32b59f 100644 --- a/tests/sentry/api/endpoints/test_team_projects.py +++ b/tests/sentry/api/endpoints/test_team_projects.py @@ -115,19 +115,6 @@ def test_default_rules(self): status_code=201, ) - project = Project.objects.get(id=response.data["id"]) - assert Rule.objects.filter(project=project).exists() - - 
@with_feature("organizations:issue-alert-fallback-targeting") - def test_default_rule_fallback_targeting(self): - response = self.get_success_response( - self.organization.slug, - self.team.slug, - **self.data, - default_rules=True, - status_code=201, - ) - project = Project.objects.get(id=response.data["id"]) rule = Rule.objects.filter(project=project).first() assert ( diff --git a/tests/sentry/digests/test_utilities.py b/tests/sentry/digests/test_utilities.py index 4c6a4f49fdae5b..4d95e28f756e20 100644 --- a/tests/sentry/digests/test_utilities.py +++ b/tests/sentry/digests/test_utilities.py @@ -13,7 +13,7 @@ from sentry.eventstore.models import Event from sentry.models.project import Project from sentry.models.projectownership import ProjectOwnership -from sentry.notifications.types import ActionTargetType +from sentry.notifications.types import ActionTargetType, FallthroughChoiceType from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema from sentry.services.hybrid_cloud.actor import ActorType from sentry.testutils.cases import SnubaTestCase, TestCase @@ -79,7 +79,11 @@ def assert_get_personalized_digests( ): result_user_ids = [] participants_by_provider_by_event = get_participants_by_event( - digest, project, target_type, target_identifier + digest, + project, + target_type, + target_identifier, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, ) personalized_digests = get_personalized_digests(digest, participants_by_provider_by_event) for actor, user_digest in personalized_digests.items(): diff --git a/tests/sentry/integrations/msteams/notifications/test_issue_alert.py b/tests/sentry/integrations/msteams/notifications/test_issue_alert.py index 91863e744bc95b..5cb5471ceb4ec4 100644 --- a/tests/sentry/integrations/msteams/notifications/test_issue_alert.py +++ b/tests/sentry/integrations/msteams/notifications/test_issue_alert.py @@ -4,7 +4,7 @@ from sentry.models.projectownership import ProjectOwnership from sentry.models.rule import Rule from sentry.notifications.notifications.rules import AlertRuleNotification -from sentry.notifications.types import ActionTargetType +from sentry.notifications.types import ActionTargetType, FallthroughChoiceType from sentry.plugins.base import Notification from sentry.testutils.cases import MSTeamsActivityNotificationTest from sentry.testutils.silo import region_silo_test @@ -98,6 +98,7 @@ def test_issue_alert_owners(self, mock_send_card: MagicMock): ActionTargetType.ISSUE_OWNERS, self.user.id, notification_uuid=notification_uuid, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, ) with self.tasks(): diff --git a/tests/sentry/integrations/slack/notifications/test_issue_alert.py b/tests/sentry/integrations/slack/notifications/test_issue_alert.py index a85bb2848debd2..e835715ce30104 100644 --- a/tests/sentry/integrations/slack/notifications/test_issue_alert.py +++ b/tests/sentry/integrations/slack/notifications/test_issue_alert.py @@ -21,7 +21,7 @@ from sentry.models.projectownership import ProjectOwnership from sentry.models.rule import Rule from sentry.notifications.notifications.rules import AlertRuleNotification -from sentry.notifications.types import ActionTargetType +from sentry.notifications.types import ActionTargetType, FallthroughChoiceType from sentry.ownership.grammar import Matcher, Owner from sentry.ownership.grammar import Rule as GrammarRule from sentry.ownership.grammar import dump_schema @@ -344,10 +344,13 @@ def test_issue_alert_issue_owners(self): "actions": [action_data], }, ) - 
ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) + ProjectOwnership.objects.create(project_id=self.project.id) notification = AlertRuleNotification( - Notification(event=event, rule=rule), ActionTargetType.ISSUE_OWNERS, self.user.id + Notification(event=event, rule=rule), + ActionTargetType.ISSUE_OWNERS, + self.user.id, + FallthroughChoiceType.ACTIVE_MEMBERS, ) with self.tasks(): @@ -390,10 +393,13 @@ def test_issue_alert_issue_owners_block(self): "actions": [action_data], }, ) - ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) + ProjectOwnership.objects.create(project_id=self.project.id) notification = AlertRuleNotification( - Notification(event=event, rule=rule), ActionTargetType.ISSUE_OWNERS, self.user.id + Notification(event=event, rule=rule), + ActionTargetType.ISSUE_OWNERS, + self.user.id, + FallthroughChoiceType.ACTIVE_MEMBERS, ) with self.tasks(): @@ -444,10 +450,13 @@ def test_issue_alert_issue_owners_environment(self): name="ja rule", environment_id=environment.id, ) - ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) + ProjectOwnership.objects.create(project_id=self.project.id) notification = AlertRuleNotification( - Notification(event=event, rule=rule), ActionTargetType.ISSUE_OWNERS, self.user.id + Notification(event=event, rule=rule), + ActionTargetType.ISSUE_OWNERS, + self.user.id, + FallthroughChoiceType.ACTIVE_MEMBERS, ) with self.tasks(): @@ -498,10 +507,13 @@ def test_issue_alert_issue_owners_environment_block(self): name="ja rule", environment_id=environment.id, ) - ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) + ProjectOwnership.objects.create(project_id=self.project.id) notification = AlertRuleNotification( - Notification(event=event, rule=rule), ActionTargetType.ISSUE_OWNERS, self.user.id + Notification(event=event, rule=rule), + ActionTargetType.ISSUE_OWNERS, + self.user.id, + FallthroughChoiceType.ACTIVE_MEMBERS, ) with self.tasks(): @@ -562,9 +574,7 @@ def test_issue_alert_team_issue_owners(self): ) rule = GrammarRule(Matcher("path", "*"), [Owner("team", self.team.slug)]) - ProjectOwnership.objects.create( - project_id=self.project.id, schema=dump_schema([rule]), fallthrough=True - ) + ProjectOwnership.objects.create(project_id=self.project.id, schema=dump_schema([rule])) event = self.store_event( data={ @@ -655,9 +665,7 @@ def test_issue_alert_team_issue_owners_block(self): ) g_rule = GrammarRule(Matcher("path", "*"), [Owner("team", self.team.slug)]) - ProjectOwnership.objects.create( - project_id=self.project.id, schema=dump_schema([g_rule]), fallthrough=True - ) + ProjectOwnership.objects.create(project_id=self.project.id, schema=dump_schema([g_rule])) event = self.store_event( data={ @@ -736,9 +744,7 @@ def test_disabled_org_integration_for_team(self): ) rule = GrammarRule(Matcher("path", "*"), [Owner("team", self.team.slug)]) - ProjectOwnership.objects.create( - project_id=self.project.id, schema=dump_schema([rule]), fallthrough=True - ) + ProjectOwnership.objects.create(project_id=self.project.id, schema=dump_schema([rule])) event = self.store_event( data={ @@ -827,9 +833,7 @@ def test_issue_alert_team_issue_owners_user_settings_off_digests(self, digests): ) g_rule = GrammarRule(Matcher("path", "*"), [Owner("team", self.team.slug)]) - ProjectOwnership.objects.create( - project_id=self.project.id, schema=dump_schema([g_rule]), fallthrough=True - ) + ProjectOwnership.objects.create(project_id=self.project.id, schema=dump_schema([g_rule])) event 
= self.store_event( data={ @@ -1255,11 +1259,11 @@ def test_digest_enabled(self, digests): digests.enabled.return_value = True rule = Rule.objects.create(project=self.project, label="my rule") - ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) + ProjectOwnership.objects.create(project_id=self.project.id) event = self.store_event( data={"message": "Hello world", "level": "error"}, project_id=self.project.id ) - key = f"mail:p:{self.project.id}" + key = f"mail:p:{self.project.id}:IssueOwners::AllMembers" backend.add(key, event_to_record(event, [rule]), increment_delay=0, maximum_delay=0) with self.tasks(): @@ -1284,11 +1288,12 @@ def test_digest_enabled_block(self, digests): digests.enabled.return_value = True rule = Rule.objects.create(project=self.project, label="my rule") - ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) + ProjectOwnership.objects.create(project_id=self.project.id) event = self.store_event( data={"message": "Hello world", "level": "error"}, project_id=self.project.id ) - key = f"mail:p:{self.project.id}" + + key = f"mail:p:{self.project.id}:IssueOwners::AllMembers" backend.add(key, event_to_record(event, [rule]), increment_delay=0, maximum_delay=0) with self.tasks(): diff --git a/tests/sentry/mail/test_actions.py b/tests/sentry/mail/test_actions.py index f5664619962086..35360e1edd83c9 100644 --- a/tests/sentry/mail/test_actions.py +++ b/tests/sentry/mail/test_actions.py @@ -10,7 +10,6 @@ from sentry.notifications.types import ActionTargetType, FallthroughChoiceType from sentry.tasks.post_process import post_process_group from sentry.testutils.cases import PerformanceIssueTestCase, RuleTestCase, TestCase -from sentry.testutils.helpers import with_feature from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.helpers.eventprocessing import write_event_to_cache from sentry.testutils.skips import requires_snuba @@ -170,7 +169,6 @@ def test_full_integration(self): assert sent.to == [self.user.email] assert "uh oh" in sent.subject - @with_feature("organizations:issue-alert-fallback-targeting") def test_full_integration_fallthrough(self): one_min_ago = iso_format(before_now(minutes=1)) event = self.store_event( @@ -208,7 +206,6 @@ def test_full_integration_fallthrough(self): assert sent.to == [self.user.email] assert "uh oh" in sent.subject - @with_feature("organizations:issue-alert-fallback-targeting") def test_full_integration_fallthrough_not_provided(self): one_min_ago = iso_format(before_now(minutes=1)) event = self.store_event( @@ -337,7 +334,6 @@ def test_hack_mail_workflow(self): for x in [out.subject for out in mail.outbox]: assert "uh oh" in x - @with_feature("organizations:issue-alert-fallback-targeting") def test_render_label_fallback_none(self): # Check that the label defaults to ActiveMembers rule = self.get_rule(data={"targetType": ActionTargetType.ISSUE_OWNERS.value}) diff --git a/tests/sentry/mail/test_adapter.py b/tests/sentry/mail/test_adapter.py index 989e255568e656..03742799910f13 100644 --- a/tests/sentry/mail/test_adapter.py +++ b/tests/sentry/mail/test_adapter.py @@ -36,7 +36,7 @@ from sentry.models.useremail import UserEmail from sentry.models.userreport import UserReport from sentry.notifications.notifications.rules import AlertRuleNotification -from sentry.notifications.types import ActionTargetType +from sentry.notifications.types import ActionTargetType, FallthroughChoiceType from sentry.notifications.utils.digest import get_digest_subject from 
sentry.ownership import grammar from sentry.ownership.grammar import Matcher, Owner, dump_schema @@ -170,7 +170,11 @@ def test_simple_notification(self, mock_record): notification = Notification(event=event, rule=rule) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) msg = mail.outbox[0] assert isinstance(msg, EmailMultiAlternatives) @@ -213,12 +217,16 @@ def test_notification_with_environment(self): rule = Rule.objects.create( project=self.project, label="my rule", environment_id=environment.id ) - ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) + ProjectOwnership.objects.create(project_id=self.project.id) notification = Notification(event=event, rule=rule) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) msg = mail.outbox[0] assert isinstance(msg, EmailMultiAlternatives) @@ -276,7 +284,11 @@ def test_someone_else_snoozes_themself(self): notification = Notification(event=event, rule=rule) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) == 1 msg = mail.outbox[0] @@ -336,7 +348,11 @@ def test_simple_notification_generic(self): notification = Notification(event=event, rule=rule) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) msg = mail.outbox[0] assert isinstance(msg, EmailMultiAlternatives) @@ -388,7 +404,11 @@ def test_simple_notification_generic_no_evidence(self): notification = Notification(event=event, rule=rule) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) msg = mail.outbox[0] assert isinstance(msg, EmailMultiAlternatives) @@ -404,7 +424,11 @@ def test_simple_notification_perf(self): notification = Notification(event=event, rule=rule) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) msg = mail.outbox[0] assert isinstance(msg, EmailMultiAlternatives) @@ -440,7 +464,11 @@ def test_notify_users_renders_interfaces_with_utf8(self, _to_email_html, _get_ti ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) with self.options({"system.url-prefix": "http://example.com"}): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + 
fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) _get_title.assert_called_once_with() _to_email_html.assert_called_once_with(event) @@ -448,8 +476,7 @@ def test_notify_users_renders_interfaces_with_utf8(self, _to_email_html, _get_ti @mock_notify @mock.patch("sentry.notifications.notifications.rules.logger") def test_notify_users_does_email(self, mock_logger, mock_func): - with assume_test_silo_mode(SiloMode.CONTROL): - UserOption.objects.create(user=self.user, key="timezone", value="Europe/Vienna") + self.create_user_option(user=self.user, key="timezone", value="Europe/Vienna") event_manager = EventManager({"message": "hello world", "level": "error"}) event_manager.normalize() event_data = event_manager.get_data() @@ -460,19 +487,22 @@ def test_notify_users_does_email(self, mock_logger, mock_func): event = event_manager.save(self.project.id) group = event.group - with assume_test_silo_mode(SiloMode.CONTROL): - NotificationSettingProvider.objects.create( - user_id=self.user.id, - scope_type="user", - scope_identifier=self.user.id, - provider="slack", - type="alerts", - value="never", - ) + self.create_notification_settings_provider( + user_id=self.user.id, + scope_type="user", + scope_identifier=self.user.id, + provider="slack", + type="alerts", + value="never", + ) ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) with self.tasks(): - AlertRuleNotification(Notification(event=event), ActionTargetType.ISSUE_OWNERS).send() + AlertRuleNotification( + Notification(event=event), + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ).send() assert mock_func.call_count == 1 @@ -497,7 +527,7 @@ def test_notify_users_does_email(self, mock_logger, mock_func): "group": group.id, "project_id": group.project.id, "organization": group.organization.id, - "fallthrough_choice": None, + "fallthrough_choice": "ActiveMembers", "notification_uuid": mock.ANY, }, ) @@ -553,7 +583,11 @@ def test_email_notification_is_not_sent_to_deleted_email(self, mock_func): event = event_manager.save(self.project.id) with self.tasks(): - AlertRuleNotification(Notification(event=event), ActionTargetType.ISSUE_OWNERS).send() + AlertRuleNotification( + Notification(event=event), + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ).send() assert mock_func.call_count == 1 @@ -591,7 +625,11 @@ def test_multiline_error(self, mock_func): event = event_manager.save(self.project.id) with self.tasks(): - AlertRuleNotification(Notification(event=event), ActionTargetType.ISSUE_OWNERS).send() + AlertRuleNotification( + Notification(event=event), + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ).send() assert mock_func.call_count == 1 args, kwargs = mock_func.call_args @@ -607,7 +645,11 @@ def test_notify_users_with_utf8_subject(self): ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) == 1 msg = mail.outbox[0] @@ -635,7 +677,11 @@ def test_notify_users_with_their_timezones(self): ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - 
self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) == 1 msg = mail.outbox[0] @@ -720,7 +766,11 @@ def test_notify_with_suspect_commits(self): with self.tasks(): notification = Notification(event=event) - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) >= 1 @@ -750,7 +800,11 @@ def test_notify_with_replay_id(self): with self.feature(features): with self.tasks(): notification = Notification(event=event) - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) >= 1 @@ -770,7 +824,11 @@ def test_slack_link(self): with self.tasks(): notification = Notification(event=event) - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) >= 1 @@ -795,7 +853,11 @@ def test_slack_link_with_integration(self): with self.tasks(): notification = Notification(event=event) - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) >= 1 @@ -818,7 +880,11 @@ def test_slack_link_with_plugin(self): with self.tasks(): notification = Notification(event=event) - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) >= 1 @@ -1271,7 +1337,11 @@ def test_group_substatus_header(self): notification = Notification(event=event, rule=rule) with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): - self.adapter.notify(notification, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify( + notification, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) msg = mail.outbox[0] assert isinstance(msg, EmailMultiAlternatives) @@ -1315,7 +1385,12 @@ def test_notify_digest(self, notify): )[0] with self.tasks(): - self.adapter.notify_digest(project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert notify.call_count == 0 assert len(mail.outbox) == 1 @@ -1367,7 +1442,12 @@ def test_notify_digest_replay_id(self, notify): features = ["organizations:session-replay", "organizations:session-replay-issue-emails"] with self.feature(features), self.tasks(): - self.adapter.notify_digest(project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert notify.call_count == 0 assert len(mail.outbox) == 1 @@ -1400,7 +1480,12 @@ def test_dont_notify_digest_snoozed(self, notify): )[0] with self.tasks(): - self.adapter.notify_digest(project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + 
project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert notify.call_count == 0 assert len(mail.outbox) == 0 @@ -1432,7 +1517,12 @@ def test_notify_digest_snooze_one_rule(self, notify): )[0] with self.tasks(): - self.adapter.notify_digest(project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert notify.call_count == 0 assert len(mail.outbox) == 2 # we send it to 2 users @@ -1477,7 +1567,12 @@ def test_dont_notify_digest_snoozed_multiple_rules(self, notify): )[0] with self.tasks(): - self.adapter.notify_digest(project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert notify.call_count == 0 assert len(mail.outbox) == 1 # we send it to only 1 user @@ -1516,7 +1611,12 @@ def test_dont_notify_digest_snoozed_multiple_rules_global_snooze(self, notify): )[0] with self.tasks(): - self.adapter.notify_digest(project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert notify.call_count == 0 assert len(mail.outbox) == 1 # we send it to only 1 user @@ -1533,7 +1633,12 @@ def test_notify_digest_single_record(self, send_async, notify): rule = self.project.rule_set.all()[0] ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) digest = build_digest(self.project, (event_to_record(event, (rule,)),))[0] - self.adapter.notify_digest(self.project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + self.project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert send_async.call_count == 1 assert notify.call_count == 1 @@ -1559,7 +1664,12 @@ def test_notify_digest_subject_prefix(self): )[0] with self.tasks(): - self.adapter.notify_digest(self.project, digest, ActionTargetType.ISSUE_OWNERS) + self.adapter.notify_digest( + self.project, + digest, + ActionTargetType.ISSUE_OWNERS, + fallthrough_choice=FallthroughChoiceType.ACTIVE_MEMBERS, + ) assert len(mail.outbox) == 1 diff --git a/tests/sentry/models/test_projectownership.py b/tests/sentry/models/test_projectownership.py index ee3d42114a98b9..89c74bed6e6062 100644 --- a/tests/sentry/models/test_projectownership.py +++ b/tests/sentry/models/test_projectownership.py @@ -83,12 +83,12 @@ def assert_ownership_equals(self, o1, o2): def test_get_owners_default(self): ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) - assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None) + assert ProjectOwnership.get_owners(self.project.id, {}) == ([], None) def test_get_owners_no_record(self): + assert ProjectOwnership.get_owners(self.project.id, {}) == ([], None) ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) - assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None) - assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None) + assert ProjectOwnership.get_owners(self.project.id, {}) == ([], None) def test_get_owners_basic(self): rule_a = Rule(Matcher("path", "*.py"), [Owner("team", self.team.slug)]) @@ -99,7 +99,7 @@ def 
test_get_owners_basic(self): ) # No data matches - assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None) + assert ProjectOwnership.get_owners(self.project.id, {}) == ([], None) # Match only rule_a self.assert_ownership_equals( @@ -125,11 +125,7 @@ def test_get_owners_basic(self): ([ActorTuple(self.team.id, Team), ActorTuple(self.user.id, User)], [rule_a, rule_b]), ) - assert ProjectOwnership.get_owners( - self.project.id, {"stacktrace": {"frames": [{"filename": "xxxx"}]}} - ) == (ProjectOwnership.Everyone, None) - - # When fallthrough = False, we don't implicitly assign to Everyone + # We should be ignoring the fallthrough flag owner = ProjectOwnership.objects.get(project_id=self.project.id) owner.fallthrough = False owner.save() @@ -602,7 +598,7 @@ def test_saves_without_either_auto_assignment_option(self): suspect_committer_auto_assignment=False, auto_assignment=False, ) - assert ProjectOwnership.get_owners(self.project.id, {}) == (ProjectOwnership.Everyone, None) + assert ProjectOwnership.get_owners(self.project.id, {}) == ([], None) def test_force_handle_auto_assignment(self): # Run auto-assignment first diff --git a/tests/sentry/notifications/notifications/test_digests.py b/tests/sentry/notifications/notifications/test_digests.py index d973f867136037..44683930e96505 100644 --- a/tests/sentry/notifications/notifications/test_digests.py +++ b/tests/sentry/notifications/notifications/test_digests.py @@ -84,7 +84,7 @@ def run_test( def setUp(self): super().setUp() self.rule = Rule.objects.create(project=self.project, label="Test Rule", data={}) - self.key = f"mail:p:{self.project.id}" + self.key = f"mail:p:{self.project.id}:IssueOwners::AllMembers" ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) for i in range(USER_COUNT - 1): self.create_member( @@ -171,7 +171,7 @@ def test_slack_digest_notification(self, digests): timestamp_raw = before_now(days=1) timestamp_secs = int(timestamp_raw.timestamp()) timestamp = iso_format(timestamp_raw) - key = f"slack:p:{self.project.id}" + key = f"slack:p:{self.project.id}:IssueOwners::AllMembers" rule = Rule.objects.create(project=self.project, label="my rule") event = self.store_event( data={ @@ -236,7 +236,7 @@ def test_slack_digest_notification_block(self, digests): timestamp_raw = before_now(days=1) timestamp_secs = int(timestamp_raw.timestamp()) timestamp = iso_format(timestamp_raw) - key = f"slack:p:{self.project.id}" + key = f"slack:p:{self.project.id}:IssueOwners::AllMembers" rule = Rule.objects.create(project=self.project, label="my rule") event1 = self.store_event( data={ diff --git a/tests/sentry/notifications/utils/test_participants.py b/tests/sentry/notifications/utils/test_participants.py index d71a17da068505..ca22217529f48c 100644 --- a/tests/sentry/notifications/utils/test_participants.py +++ b/tests/sentry/notifications/utils/test_participants.py @@ -400,14 +400,10 @@ def test_disable_alerts_multiple_scopes(self): self.get_send_to_owners(event), email=[self.user.id], slack=[self.user.id] ) - def test_fallthrough(self): + def test_no_fallthrough(self): event = self.store_event_owners("no_rule.cpp") - self.assert_recipients_are( - self.get_send_to_owners(event), - email=[self.user.id, self.user2.id, self.user_suspect_committer.id], - slack=[self.user.id, self.user2.id, self.user_suspect_committer.id], - ) + self.assert_recipients_are(self.get_send_to_owners(event), email=[], slack=[]) def test_without_fallthrough(self): 
ProjectOwnership.objects.get(project_id=self.project.id).update(fallthrough=False) @@ -760,15 +756,13 @@ def test_get_owners_empty(self): self.assert_recipients(expected=[], received=recipients) assert outcome == "empty" - # If no match, and fallthrough is enabled - def test_get_owners_everyone(self): + # If no match, and fallthrough is still ignored + def test_get_owners_fallthrough_ignored(self): self.create_ownership(self.project, [], True) event = self.create_event(self.project) recipients, outcome = get_owners(project=self.project, event=event) - self.assert_recipients( - expected=[self.user_1, self.user_2, self.user_3], received=recipients - ) - assert outcome == "everyone" + self.assert_recipients(expected=[], received=recipients) + assert outcome == "empty" # If matched, and all-recipients flag def test_get_owners_match(self): @@ -905,25 +899,11 @@ def get_send_to_fallthrough( def store_event(self, filename: str, project: Project) -> Event: return super().store_event(data=make_event_data(filename), project_id=project.id) - def test_feature_off_no_owner(self): - event = self.store_event("empty.lol", self.project) - assert get_fallthrough_recipients(self.project, FallthroughChoiceType.ACTIVE_MEMBERS) == [] - assert self.get_send_to_fallthrough(event, self.project, None) == {} - - def test_feature_off_with_owner(self): - event = self.store_event("empty.py", self.project) - self.assert_recipients_are( - self.get_send_to_fallthrough(event, self.project, None), - email=[self.user.id, self.user2.id], - ) - - @with_feature("organizations:issue-alert-fallback-targeting") def test_invalid_fallthrough_choice(self): with pytest.raises(NotImplementedError) as e: get_fallthrough_recipients(self.project, "invalid") # type: ignore[arg-type] assert str(e.value).startswith("Unknown fallthrough choice: invalid") - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_setting_on(self): """ Test that the new fallthrough choice takes precedence even if the fallthrough setting is on. @@ -939,14 +919,12 @@ def test_fallthrough_setting_on(self): event = self.store_event("empty.lol", self.project) assert self.get_send_to_fallthrough(event, self.project, FallthroughChoiceType.NO_ONE) == {} - @with_feature("organizations:issue-alert-fallback-targeting") def test_no_fallthrough(self): """ Test the new fallthrough choice when no fallthrough choice is provided.""" event = self.store_event("none.lol", self.project) assert self.get_send_to_fallthrough(event, self.project, fallthrough_choice=None) == {} - @with_feature("organizations:issue-alert-fallback-targeting") def test_no_owners(self): """ Test the fallthrough when there is no ProjectOwnership set. 
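Roughly, the recipient resolution these tests encode looks like this (a sketch reconstructed from the assertions in this file; `recently_active_members` is a hypothetical stand-in for the real member query):

    def get_fallthrough_recipients(project, fallthrough_choice):
        # Sketch: the implicit "notify everyone" fallback is gone, so recipients
        # come only from an explicit FallthroughChoiceType.
        if fallthrough_choice is None or fallthrough_choice == FallthroughChoiceType.NO_ONE:
            return []
        if fallthrough_choice == FallthroughChoiceType.ALL_MEMBERS:
            return list(project.member_set.all())
        if fallthrough_choice == FallthroughChoiceType.ACTIVE_MEMBERS:
            # admins plus recently active members, capped at the notification limit
            return recently_active_members(project)[:FALLTHROUGH_NOTIFICATION_LIMIT]
        raise NotImplementedError(f"Unknown fallthrough choice: {fallthrough_choice}")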
@@ -960,12 +938,10 @@ def test_no_owners(self): ) assert ret == {} - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_no_one(self): event = self.store_event("empty.lol", self.project) assert self.get_send_to_fallthrough(event, self.project, FallthroughChoiceType.NO_ONE) == {} - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_all_members_no_owner(self): empty_project = self.create_project(organization=self.organization) ProjectOwnership.objects.create( @@ -984,7 +960,6 @@ def test_fallthrough_all_members_no_owner(self): email=[self.user.id, self.user2.id], ) - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_all_members_multiple_teams(self): team3 = self.create_team(organization=self.organization, members=[self.user2, self.user3]) self.project.add_team(team3) @@ -995,7 +970,6 @@ def test_fallthrough_all_members_multiple_teams(self): email=[self.user.id, self.user2.id, self.user3.id], ) - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_admin_or_recent_inactive_users(self): notified_users = [self.user, self.user2] for i in range(2): @@ -1022,7 +996,6 @@ def test_fallthrough_admin_or_recent_inactive_users(self): email=[user.id for user in [self.user, self.user2]], ) - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_admin_or_recent_under_20(self): notifiable_users = [self.user, self.user2] for i in range(10): @@ -1052,7 +1025,6 @@ def test_fallthrough_admin_or_recent_under_20(self): assert len(notified_users) == 12 assert notified_users == expected_notified_users - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_admin_or_recent_over_20(self): notifiable_users = [self.user, self.user2] for i in range(FALLTHROUGH_NOTIFICATION_LIMIT + 5): @@ -1082,7 +1054,6 @@ def test_fallthrough_admin_or_recent_over_20(self): assert len(notified_users) == FALLTHROUGH_NOTIFICATION_LIMIT assert notified_users.issubset(expected_notified_users) - @with_feature("organizations:issue-alert-fallback-targeting") def test_fallthrough_recipients_active_member_ordering(self): present = timezone.now() diff --git a/tests/sentry/tasks/test_digests.py b/tests/sentry/tasks/test_digests.py index 4ce4d04d1fadad..93212ca72f27de 100644 --- a/tests/sentry/tasks/test_digests.py +++ b/tests/sentry/tasks/test_digests.py @@ -12,7 +12,6 @@ from sentry.tasks.digests import deliver_digest from sentry.testutils.cases import TestCase from sentry.testutils.helpers.datetime import before_now, iso_format -from sentry.testutils.helpers.features import with_feature from sentry.testutils.skips import requires_snuba pytestmark = [requires_snuba] @@ -50,25 +49,18 @@ def run_test(self, key: str) -> None: ) with self.tasks(): deliver_digest(key) - assert "2 new alerts since" in mail.outbox[0].subject def test_old_key(self): self.run_test(f"mail:p:{self.project.id}") - message = mail.outbox[0] - assert isinstance(message, EmailMultiAlternatives) - assert isinstance(message.alternatives[0][0], str) - assert "notification_uuid" in message.alternatives[0][0] + assert len(mail.outbox) == 0 def test_new_key(self): self.run_test(f"mail:p:{self.project.id}:IssueOwners:") - message = mail.outbox[0] - assert isinstance(message, EmailMultiAlternatives) - assert isinstance(message.alternatives[0][0], str) - assert "notification_uuid" in message.alternatives[0][0] + assert len(mail.outbox) == 0 - @with_feature("organizations:issue-alert-fallback-targeting") def 
test_fallthrough_choice_key(self): self.run_test(f"mail:p:{self.project.id}:IssueOwners::AllMembers") + assert "2 new alerts since" in mail.outbox[0].subject message = mail.outbox[0] assert isinstance(message, EmailMultiAlternatives) assert isinstance(message.alternatives[0][0], str) @@ -76,6 +68,7 @@ def test_fallthrough_choice_key(self): def test_member_key(self): self.run_test(f"mail:p:{self.project.id}:Member:{self.user.id}") + assert "2 new alerts since" in mail.outbox[0].subject message = mail.outbox[0] assert isinstance(message, EmailMultiAlternatives) assert isinstance(message.alternatives[0][0], str) From 8a47e214e713b2a8aad83c752140958848252910 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Tue, 13 Feb 2024 13:08:20 -0500 Subject: [PATCH 321/357] ref(similarity-embeddings): Add logs (#65025) Add logs for similar issues embeddings parameters --- .../group_similar_issues_embeddings.py | 2 ++ .../test_group_similar_issues_embeddings.py | 27 +++++++++++-------- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index ca80a5deebd41d..f71800a4bd4b25 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -136,6 +136,8 @@ def get(self, request: Request, group) -> Response: if request.GET.get("threshold"): similar_issues_params.update({"threshold": float(request.GET["threshold"])}) + logger.info("Similar issues embeddings parameters", extra=similar_issues_params) + results = get_similar_issues_embeddings(similar_issues_params) analytics.record( diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index c1b6314ace394f..c0a2c2f0cbb811 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -230,7 +230,8 @@ def test_no_feature_flag(self): @with_feature("projects:similarity-embeddings") @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") - def test_simple(self, mock_seer_request): + @mock.patch("sentry.api.endpoints.group_similar_issues_embeddings.logger") + def test_simple(self, mock_logger, mock_seer_request): seer_return_value: SimilarIssuesEmbeddingsResponse = { "responses": [ { @@ -252,22 +253,26 @@ def test_simple(self, mock_seer_request): [self.similar_group.id], [0.95], [0.99], ["Yes"] ) + expected_seer_request_params = { + "group_id": self.group.id, + "project_id": self.project.id, + "stacktrace": EXPECTED_STACKTRACE_STRING, + "message": self.group.message, + "k": 1, + "threshold": 0.98, + } + mock_seer_request.assert_called_with( "POST", "/v0/issues/similar-issues", - body=json.dumps( - { - "group_id": self.group.id, - "project_id": self.project.id, - "stacktrace": EXPECTED_STACKTRACE_STRING, - "message": self.group.message, - "k": 1, - "threshold": 0.98, - }, - ), + body=json.dumps(expected_seer_request_params), headers={"Content-Type": "application/json;charset=utf-8"}, ) + mock_logger.info.assert_called_with( + "Similar issues embeddings parameters", extra=expected_seer_request_params + ) + @with_feature("projects:similarity-embeddings") @mock.patch("sentry.analytics.record") @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") From 723f5c9fd1dc87879b14479d4d0eccac6d37d44f Mon Sep 17 
00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Tue, 13 Feb 2024 13:10:23 -0500 Subject: [PATCH 322/357] fix(copy): Change subtitle on widget charts (#65094) ### Summary This changes the subtitle on widget charts to be a bit more descriptive --- .../performance/landing/widgets/widgets/lineChartListWidget.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx b/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx index 9787f0e7adedf6..87c409708315e3 100644 --- a/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx +++ b/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx @@ -654,7 +654,7 @@ export function LineChartListWidget(props: PerformanceWidgetProps) { {...props} location={location} Subtitle={() => ( - {props.subTitle ?? t('Suggested transactions')} + {props.subTitle ?? t('Found in the following transactions')} )} HeaderActions={provided => getContainerActions(provided)} InteractiveTitle={ From af122bc3f1f151298caf035c01ec0ca673928a76 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Tue, 13 Feb 2024 10:24:24 -0800 Subject: [PATCH 323/357] ref(typing): type issues.ingest (#65053) - [x] fix typing issues in issues.ingest - [x] remove from exclusion list --- pyproject.toml | 1 - src/sentry/event_manager.py | 9 ++++++--- src/sentry/issues/ingest.py | 5 +++-- tests/sentry/issues/test_ingest.py | 4 ++-- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index dd9ebd701dbe12..54b921907c8996 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -372,7 +372,6 @@ module = [ "sentry.integrations.vsts.repository", "sentry.integrations.vsts_extension.integration", "sentry.issues.escalating_group_forecast", - "sentry.issues.ingest", "sentry.issues.issue_occurrence", "sentry.issues.occurrence_consumer", "sentry.issues.search", diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index f52f310d44a1bd..3cda576eb94fb1 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -188,7 +188,7 @@ def sdk_metadata_from_event(event: Event) -> Mapping[str, Any]: return {} -def plugin_is_regression(group: Group, event: Event) -> bool: +def plugin_is_regression(group: Group, event: BaseEvent) -> bool: project = event.project for plugin in plugins.for_project(project): result = safe_execute( @@ -1781,7 +1781,7 @@ def _create_group(project: Project, event: Event, **kwargs: Any) -> Group: ) -def _handle_regression(group: Group, event: Event, release: Release | None) -> bool | None: +def _handle_regression(group: Group, event: BaseEvent, release: Release | None) -> bool | None: if not group.is_resolved(): return None @@ -1924,7 +1924,10 @@ def _handle_regression(group: Group, event: Event, release: Release | None) -> b def _process_existing_aggregate( - group: Group, event: Event, incoming_group_values: Mapping[str, Any], release: Release | None + group: Group, + event: BaseEvent, + incoming_group_values: Mapping[str, Any], + release: Release | None, ) -> bool: last_seen = max(event.datetime, group.last_seen) updated_group_values: dict[str, Any] = {"last_seen": last_seen} diff --git a/src/sentry/issues/ingest.py b/src/sentry/issues/ingest.py index 80a88832fdf35d..86478e41dbd4b7 100644 --- a/src/sentry/issues/ingest.py +++ b/src/sentry/issues/ingest.py @@ -67,7 +67,7 @@ def save_issue_occurrence( return occurrence, group_info -def 
process_occurrence_data(data: Mapping[str, Any]) -> None: +def process_occurrence_data(data: dict[str, Any]) -> None: if "fingerprint" not in data: return @@ -118,6 +118,7 @@ class OccurrenceMetadata(TypedDict): title: str location: str | None last_received: str + initial_priority: int | None def materialize_metadata(occurrence: IssueOccurrence, event: Event) -> OccurrenceMetadata: @@ -260,7 +261,7 @@ def send_issue_occurrence_to_eventstream( group_event = event.for_group(group_info.group) group_event.occurrence = occurrence - eventstream.insert( + eventstream.backend.insert( event=group_event, is_new=group_info.is_new, is_regression=group_info.is_regression, diff --git a/tests/sentry/issues/test_ingest.py b/tests/sentry/issues/test_ingest.py index 352967552ce405..49f39e7f18237f 100644 --- a/tests/sentry/issues/test_ingest.py +++ b/tests/sentry/issues/test_ingest.py @@ -95,7 +95,7 @@ def test_new_group_release_env(self) -> None: release_project_env.refresh_from_db() assert release_project_env.new_issues_count == 1 assert GroupRelease.objects.filter(group_id=group.id, release_id=release.id).exists() - eventstream.insert.assert_called_once_with( + eventstream.backend.insert.assert_called_once_with( event=event.for_group(group_info.group), is_new=True, is_regression=False, @@ -399,7 +399,7 @@ def test(self) -> None: event, "for_group", return_value=group_event ): send_issue_occurrence_to_eventstream(event, occurrence, group_info) - eventstream.insert.assert_called_once_with( + eventstream.backend.insert.assert_called_once_with( event=group_event, is_new=group_info.is_new, is_regression=group_info.is_regression, From abf6b145f2f9470ebb1f8c88c9eb2e95ea6c8981 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 13 Feb 2024 10:33:30 -0800 Subject: [PATCH 324/357] ref(tsc): Convert GroupingInfo to FC and useApiQuery (#65089) --- .../events/groupingInfo/index.spec.tsx | 60 +++ .../components/events/groupingInfo/index.tsx | 341 ++++++++---------- 2 files changed, 218 insertions(+), 183 deletions(-) create mode 100644 static/app/components/events/groupingInfo/index.spec.tsx diff --git a/static/app/components/events/groupingInfo/index.spec.tsx b/static/app/components/events/groupingInfo/index.spec.tsx new file mode 100644 index 00000000000000..20c177b9f1771e --- /dev/null +++ b/static/app/components/events/groupingInfo/index.spec.tsx @@ -0,0 +1,60 @@ +import {EventFixture} from 'sentry-fixture/event'; +import {GroupFixture} from 'sentry-fixture/group'; + +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; + +import {EventGroupingInfo} from 'sentry/components/events/groupingInfo'; +import {EventGroupVariantType, IssueCategory} from 'sentry/types'; + +describe('EventGroupingInfo', function () { + const group = GroupFixture(); + const event = EventFixture(); + + const defaultProps = { + event, + projectSlug: 'project-slug', + showGroupingConfig: true, + group, + }; + + it('fetches and renders grouping info for errors', async function () { + MockApiClient.addMockResponse({ + url: `/projects/org-slug/project-slug/events/${event.id}/grouping-info/`, + body: { + app: { + description: 'variant description', + hash: '123', + hasMismatch: false, + key: 'key', + type: EventGroupVariantType.CHECKSUM, + }, + }, + }); + + render(); + + await screen.findByText('variant description'); + + // Hash should not be visible until toggling open + expect(screen.queryByText('123')).not.toBeInTheDocument(); + await userEvent.click(screen.getByRole('button', {name: 'Show Details'})); + 
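    // (For performance issues the grouping info is derived client-side from
    // event.occurrence rather than fetched, so the hash only renders on expand.)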
expect(screen.getByText('123')).toBeInTheDocument(); + }); + + it('gets performance grouping info from group/event data', async function () { + const perfEvent = EventFixture({ + type: 'transaction', + occurrence: {fingerprint: ['123'], evidenceData: {op: 'bad-op'}}, + }); + const perfGroup = GroupFixture({issueCategory: IssueCategory.PERFORMANCE}); + + render(); + + expect(screen.getByText('performance problem')).toBeInTheDocument(); + + // Hash should not be visible until toggling open + expect(screen.queryByText('123')).not.toBeInTheDocument(); + await userEvent.click(screen.getByRole('button', {name: 'Show Details'})); + expect(screen.getByText('123')).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/events/groupingInfo/index.tsx b/static/app/components/events/groupingInfo/index.tsx index f405270e56c18e..6700b382a39a32 100644 --- a/static/app/components/events/groupingInfo/index.tsx +++ b/static/app/components/events/groupingInfo/index.tsx @@ -1,16 +1,17 @@ -import {Fragment} from 'react'; +import {Fragment, useState} from 'react'; import styled from '@emotion/styled'; -import DeprecatedAsyncComponent from 'sentry/components/deprecatedAsyncComponent'; import {EventDataSection} from 'sentry/components/events/eventDataSection'; import {FeatureFeedback} from 'sentry/components/featureFeedback'; +import LoadingError from 'sentry/components/loadingError'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {EventGroupInfo, Group, Organization} from 'sentry/types'; -import {IssueCategory} from 'sentry/types'; -import type {Event, EventOccurrence} from 'sentry/types/event'; -import withOrganization from 'sentry/utils/withOrganization'; +import type {Group} from 'sentry/types'; +import {EventGroupVariantType, IssueCategory} from 'sentry/types'; +import type {Event, EventGroupVariant} from 'sentry/types/event'; +import {useApiQuery} from 'sentry/utils/queryClient'; +import useOrganization from 'sentry/utils/useOrganization'; import SectionToggleButton from 'sentry/views/issueDetails/sectionToggleButton'; import GroupingConfigSelect from './groupingConfigSelect'; @@ -22,199 +23,175 @@ const groupingFeedbackTypes = [ t('Other grouping issue'), ]; -type Props = DeprecatedAsyncComponent['props'] & { +type GroupingInfoProps = { event: Event; - organization: Organization; projectSlug: string; showGroupingConfig: boolean; group?: Group; }; -type State = DeprecatedAsyncComponent['state'] & { - configOverride: string | null; - groupInfo: EventGroupInfo; - isOpen: boolean; +type EventGroupingInfoResponse = { + [variant: string]: EventGroupVariant; }; -class GroupingInfo extends DeprecatedAsyncComponent { - getEndpoints(): ReturnType { - const {organization, event, projectSlug, group} = this.props; - - if ( - event.occurrence && - group?.issueCategory === IssueCategory.PERFORMANCE && - event.type === 'transaction' - ) { - return []; - } - - let path = `/projects/${organization.slug}/${projectSlug}/events/${event.id}/grouping-info/`; - if (this.state?.configOverride) { - path = `${path}?config=${this.state.configOverride}`; - } - - return [['groupInfo', path]]; - } - - getDefaultState() { - return { - ...super.getDefaultState(), - isOpen: false, - configOverride: null, - }; +function generatePerformanceGroupInfo({ + event, + group, +}: {event: Event; group: Group}): EventGroupingInfoResponse | null { + if (!event.occurrence) { + return null; } - toggle = () => { - this.setState(state => 
({ - isOpen: !state.isOpen, - configOverride: state.isOpen ? null : state.configOverride, - })); - }; - - handleConfigSelect = selection => { - this.setState({configOverride: selection.value}, () => this.reloadData()); - }; - - generatePerformanceGroupInfo() { - const {group, event} = this.props; - const {occurrence} = event; - const {evidenceData} = occurrence as EventOccurrence; - - const variant = group - ? { - [group.issueType]: { - description: t('performance problem'), - hash: occurrence?.fingerprint[0] || '', - hasMismatch: false, - key: group.issueType, - type: 'performance-problem', - evidence: { - op: evidenceData?.op, - parent_span_ids: evidenceData?.parentSpanIds, - cause_span_ids: evidenceData?.causeSpanIds, - offender_span_ids: evidenceData?.offenderSpanIds, - }, + const {evidenceData} = event.occurrence; + + const hash = event.occurrence?.fingerprint[0] || ''; + + return group + ? { + [group.issueType]: { + description: t('performance problem'), + hash: event.occurrence?.fingerprint[0] || '', + hashMismatch: false, + key: group.issueType, + type: EventGroupVariantType.PERFORMANCE_PROBLEM, + evidence: { + op: evidenceData?.op, + parent_span_ids: evidenceData?.parentSpanIds, + cause_span_ids: evidenceData?.causeSpanIds, + offender_span_ids: evidenceData?.offenderSpanIds, + desc: t('performance problem'), + fingerprint: hash, }, - } - : null; + }, + } + : null; +} - return variant; - } +function GroupConfigSelect({ + event, + configOverride, + setConfigOverride, +}: { + configOverride: string | null; + event: Event; + setConfigOverride: (value: string) => void; +}) { + const organization = useOrganization(); - renderGroupInfoSummary() { - const {groupInfo: _groupInfo} = this.state; - const {group, event} = this.props; - - const groupInfo = - group?.issueCategory === IssueCategory.PERFORMANCE && - event.occurrence && - event.type === 'transaction' - ? // performance issue grouping details are generated clint-side - this.generatePerformanceGroupInfo() - : _groupInfo; - - const groupedBy = groupInfo - ? Object.values(groupInfo) - .filter(variant => variant.hash !== null && variant.description !== null) - .map(variant => variant.description) - .sort((a, b) => a!.toLowerCase().localeCompare(b!.toLowerCase())) - .join(', ') - : t('nothing'); - - return ( -

    - {t('Grouped by:')} {groupedBy} -

    - ); + if (!event.groupingConfig) { + return null; } - renderGroupConfigSelect() { - const {configOverride} = this.state; - const {event, organization} = this.props; + const configId = configOverride ?? event.groupingConfig?.id; - if (!event.groupingConfig) { - return null; - } - - const configId = configOverride ?? event.groupingConfig?.id; - - return ( - - ); - } - - renderGroupInfo() { - const {groupInfo: _groupInfo, loading} = this.state; - const {event, showGroupingConfig, group} = this.props; - - const groupInfo = - group?.issueCategory === IssueCategory.PERFORMANCE && - event.occurrence && - event.type === 'transaction' - ? this.generatePerformanceGroupInfo() - : _groupInfo; - - const variants = groupInfo - ? Object.values(groupInfo).sort((a, b) => - a.hash && !b.hash - ? -1 - : a.description - ?.toLowerCase() - .localeCompare(b.description?.toLowerCase() ?? '') ?? 1 - ) - : []; - - return ( - - -
    {showGroupingConfig && this.renderGroupConfigSelect()}
    - -
    - - {loading ? ( - - ) : ( - variants.map((variant, index) => ( - - - {index < variants.length - 1 && } - - )) - )} -
    - ); - } + return ( + setConfigOverride(selection.value)} + /> + ); +} - renderLoading() { - return this.renderBody(); - } +function GroupInfoSummary({groupInfo}: {groupInfo: EventGroupingInfoResponse | null}) { + const groupedBy = groupInfo + ? Object.values(groupInfo) + .filter(variant => variant.hash !== null && variant.description !== null) + .map(variant => variant.description) + .sort((a, b) => a!.toLowerCase().localeCompare(b!.toLowerCase())) + .join(', ') + : t('nothing'); + + return ( +

    + {t('Grouped by:')} {groupedBy} +

    + ); +} - renderBody() { - const {isOpen} = this.state; - - return ( - } - > - {isOpen ? this.renderGroupInfo() : this.renderGroupInfoSummary()} - - ); - } +export function EventGroupingInfo({ + event, + projectSlug, + showGroupingConfig, + group, +}: GroupingInfoProps) { + const organization = useOrganization(); + const [isOpen, setIsOpen] = useState(false); + const [configOverride, setConfigOverride] = useState(null); + + const hasPerformanceGrouping = + event.occurrence && + group?.issueCategory === IssueCategory.PERFORMANCE && + event.type === 'transaction'; + + const {data, isLoading, isError, isSuccess} = useApiQuery( + [ + `/projects/${organization.slug}/${projectSlug}/events/${event.id}/grouping-info/`, + {query: configOverride ? {config: configOverride} : {}}, + ], + {enabled: !hasPerformanceGrouping, staleTime: Infinity} + ); + + const groupInfo = hasPerformanceGrouping + ? generatePerformanceGroupInfo({group, event}) + : data ?? null; + + const variants = groupInfo + ? Object.values(groupInfo).sort((a, b) => + a.hash && !b.hash + ? -1 + : a.description + ?.toLowerCase() + .localeCompare(b.description?.toLowerCase() ?? '') ?? 1 + ) + : []; + + return ( + } + > + {!isOpen ? : null} + {isOpen ? ( + + +
    + {showGroupingConfig && ( + + )} +
    + +
    + {isError ? ( + + ) : null} + {isLoading && !hasPerformanceGrouping ? : null} + {hasPerformanceGrouping || isSuccess + ? variants.map((variant, index) => ( + + + {index < variants.length - 1 && } + + )) + : null} +
    + ) : null} +
    + ); } const ConfigHeader = styled('div')` @@ -239,5 +216,3 @@ const VariantDivider = styled('hr')` padding-top: ${space(1)}; border-top: 1px solid ${p => p.theme.border}; `; - -export const EventGroupingInfo = withOrganization(GroupingInfo); From 7f01bd006cff7e1af56dd932016e3b5996e4279b Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Tue, 13 Feb 2024 10:51:48 -0800 Subject: [PATCH 325/357] ref(feedback/issues): refactor linked comment (#65095) - fix a bug in which i converted the comment into a tuple, but it should be a string - refactor the linked comment string to be more readable - also change some instances from `Sentry issue` -> `Sentry Issue` for consistency --- src/sentry/integrations/github/issues.py | 21 +++++++++---------- src/sentry/integrations/gitlab/issues.py | 2 +- src/sentry_plugins/github/plugin.py | 2 +- src/sentry_plugins/phabricator/plugin.py | 2 +- .../sentry/integrations/github/test_issues.py | 4 ++-- .../sentry/integrations/gitlab/test_issues.py | 2 +- 6 files changed, 16 insertions(+), 17 deletions(-) diff --git a/src/sentry/integrations/github/issues.py b/src/sentry/integrations/github/issues.py index de92e51fa2e4b2..4172ab37d311c5 100644 --- a/src/sentry/integrations/github/issues.py +++ b/src/sentry/integrations/github/issues.py @@ -208,9 +208,16 @@ def get_link_issue_config(self, group: Group, **kwargs: Any) -> list[dict[str, A def get_linked_issue_comment_prefix(group: Group) -> str: if group.issue_category == GroupCategory.FEEDBACK: - return "Sentry feedback" + return "Sentry Feedback" else: - return "Sentry issue" + return "Sentry Issue" + + def get_default_comment(group: Group) -> str: + prefix = get_linked_issue_comment_prefix(group) + url = group.get_absolute_url(params={"referrer": "github_integration"}) + issue_short_id = group.qualified_short_id + + return f"{prefix}: [{issue_short_id}]({absolute_uri(url)})" return [ { @@ -235,15 +242,7 @@ def get_linked_issue_comment_prefix(group: Group) -> str: { "name": "comment", "label": "Comment", - "default": ( - get_linked_issue_comment_prefix(group) - + ": [{issue_id}]({url})".format( - url=absolute_uri( - group.get_absolute_url(params={"referrer": "github_integration"}) - ), - issue_id=group.qualified_short_id, - ), - ), + "default": get_default_comment(group), "type": "textarea", "required": False, "autosize": True, diff --git a/src/sentry/integrations/gitlab/issues.py b/src/sentry/integrations/gitlab/issues.py index 62dc4eec5f0165..043f178636204a 100644 --- a/src/sentry/integrations/gitlab/issues.py +++ b/src/sentry/integrations/gitlab/issues.py @@ -152,7 +152,7 @@ def get_link_issue_config(self, group: Group, **kwargs) -> list[dict[str, Any]]: { "name": "comment", "label": "Comment", - "default": "Sentry issue: [{issue_id}]({url})".format( + "default": "Sentry Issue: [{issue_id}]({url})".format( url=absolute_uri( group.get_absolute_url(params={"referrer": "gitlab_integration"}) ), diff --git a/src/sentry_plugins/github/plugin.py b/src/sentry_plugins/github/plugin.py index bf29211f239edc..535a00c193b561 100644 --- a/src/sentry_plugins/github/plugin.py +++ b/src/sentry_plugins/github/plugin.py @@ -143,7 +143,7 @@ def get_link_existing_issue_fields(self, request: Request, group, event, **kwarg { "name": "comment", "label": "Comment", - "default": "Sentry issue: [{issue_id}]({url})".format( + "default": "Sentry Issue: [{issue_id}]({url})".format( url=absolute_uri(group.get_absolute_url(params={"referrer": "github_plugin"})), 
issue_id=group.qualified_short_id, ), diff --git a/src/sentry_plugins/phabricator/plugin.py b/src/sentry_plugins/phabricator/plugin.py index 514111308e68b4..6905dc42b90356 100644 --- a/src/sentry_plugins/phabricator/plugin.py +++ b/src/sentry_plugins/phabricator/plugin.py @@ -139,7 +139,7 @@ def get_link_existing_issue_fields(self, request: Request, group, event, **kwarg { "name": "comment", "label": "Comment", - "default": "Sentry issue: [{issue_id}]({url})".format( + "default": "Sentry Issue: [{issue_id}]({url})".format( url=absolute_uri( group.get_absolute_url(params={"referrer": "phabricator_plugin"}) ), diff --git a/tests/sentry/integrations/github/test_issues.py b/tests/sentry/integrations/github/test_issues.py index e7a3978fcbe2d1..4e8d1bfdeb2de5 100644 --- a/tests/sentry/integrations/github/test_issues.py +++ b/tests/sentry/integrations/github/test_issues.py @@ -422,12 +422,12 @@ def test_linked_issue_comment(self): assert issue_event.group is not None resp = self.install.get_link_issue_config(group=issue_event.group, **data) # assert comment wording for linked issue is correct - assert "Sentry issue" in resp[2]["default"][0] + assert "Sentry Issue" in resp[2]["default"] # link a feedback issue resp = self.install.get_link_issue_config(group=feedback_issue, **data) # assert comment wording for linked feedback is correct - assert "Sentry feedback" in resp[2]["default"][0] + assert "Sentry Feedback" in resp[2]["default"] @responses.activate def after_link_issue(self): diff --git a/tests/sentry/integrations/gitlab/test_issues.py b/tests/sentry/integrations/gitlab/test_issues.py index fc4b7e44f596cb..0d1a0e05ce086d 100644 --- a/tests/sentry/integrations/gitlab/test_issues.py +++ b/tests/sentry/integrations/gitlab/test_issues.py @@ -126,7 +126,7 @@ def test_get_link_issue_config(self): { "name": "comment", "label": "Comment", - "default": "Sentry issue: [{issue_id}]({url})".format( + "default": "Sentry Issue: [{issue_id}]({url})".format( url=absolute_uri( self.group.get_absolute_url(params={"referrer": "gitlab_integration"}) ), From b89a2a025542f38247e642ad437432659683d49d Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Tue, 13 Feb 2024 10:56:56 -0800 Subject: [PATCH 326/357] feat(issue-priority): Add migration to backfill group priority on all rows (#63733) This PR adds the migration to backfill the group priority column on all existing rows in the Group table. We should also set the "initial_priority" in the group metadata. The migration shouldn't be merged until the priority logic is in place to increment/decrement the priority for escalating issues. 
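Condensed, the mapping implemented by `_get_priority_level` in the migration below is:

    # Replay / Feedback issues            -> MEDIUM (even while escalating)
    # any other escalating issue          -> HIGH
    # Errors:  INFO/DEBUG -> LOW, WARNING -> MEDIUM, ERROR/FATAL -> HIGH,
    #          unrecognized level -> MEDIUM (with a warning logged)
    # Crons:   WARNING -> MEDIUM, any other level -> HIGH
    # Performance/Profile: statistical-detector types -> MEDIUM, otherwise LOW
    # anything else                       -> MEDIUM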
FIXES https://github.com/getsentry/sentry/issues/63283

---------

Co-authored-by: Mark Story
---
 migrations_lockfile.txt                       |   2 +-
 .../0644_backfill_priority_for_groups.py      | 178 +++++++++++++
 .../test_0644_backfill_priority_for_groups.py | 243 ++++++++++++++++++
 3 files changed, 422 insertions(+), 1 deletion(-)
 create mode 100644 src/sentry/migrations/0644_backfill_priority_for_groups.py
 create mode 100644 tests/sentry/migrations/test_0644_backfill_priority_for_groups.py

diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index a1c1e509c99e21..9c95b7c1dda008 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -9,5 +9,5 @@ feedback: 0004_index_together
 hybridcloud: 0011_add_hybridcloudapitoken_index
 nodestore: 0002_nodestore_no_dictfield
 replays: 0004_index_together
-sentry: 0643_add_date_modified_col_dashboard_widget_query
+sentry: 0644_backfill_priority_for_groups
 social_auth: 0002_default_auto_field

diff --git a/src/sentry/migrations/0644_backfill_priority_for_groups.py b/src/sentry/migrations/0644_backfill_priority_for_groups.py
new file mode 100644
index 00000000000000..93185685b52304
--- /dev/null
+++ b/src/sentry/migrations/0644_backfill_priority_for_groups.py
@@ -0,0 +1,178 @@
+# Generated by Django 3.2.23 on 2024-01-23 23:32
+
+import logging
+from enum import Enum
+
+from django.conf import settings
+from django.db import connection, migrations
+from psycopg2.extras import execute_values
+
+from sentry.issues.grouptype import get_group_type_by_type_id
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.utils import json, redis
+from sentry.utils.query import RangeQuerySetWrapperWithProgressBarApprox
+
+# copied to ensure migrations work if the enums change #
+
+logger = logging.getLogger(__name__)
+
+
+class GroupSubStatus:
+    # GroupStatus.IGNORED
+    UNTIL_ESCALATING = 1
+    # Group is ignored/archived for a count/user count/duration
+    UNTIL_CONDITION_MET = 4
+    # Group is ignored/archived forever
+    FOREVER = 5
+
+    # GroupStatus.UNRESOLVED
+    ESCALATING = 2
+    ONGOING = 3
+    REGRESSED = 6
+    NEW = 7
+
+
+class PriorityLevel:
+    LOW = 25
+    MEDIUM = 50
+    HIGH = 75
+
+
+class GroupCategory(Enum):
+    ERROR = 1
+    PERFORMANCE = 2
+    PROFILE = 3  # deprecated, merging with PERFORMANCE
+    CRON = 4
+    REPLAY = 5
+    FEEDBACK = 6
+
+
+PERFORMANCE_P95_ENDPOINT_REGRESSION_GROUPTYPE_ID = 1018
+PROFILE_FUNCTION_REGRESSION_TYPE_ID = 2011
+
+
+# end copy #
+
+BATCH_SIZE = 100
+
+UPDATE_QUERY = """
+    UPDATE sentry_groupedmessage
+    SET priority = new_data.priority,
+        data = new_data.data::text
+    FROM (VALUES %s) AS new_data(id, priority, data)
+    WHERE sentry_groupedmessage.id = new_data.id AND sentry_groupedmessage.priority IS NULL
+"""
+
+REDIS_KEY = "priority_backfill.last_processed_id"
+
+
+def _get_priority_level(group_id, level, type_id, substatus):
+    group_type = get_group_type_by_type_id(type_id)
+
+    # Replay and Feedback issues are medium priority
+    if group_type.category in [GroupCategory.REPLAY.value, GroupCategory.FEEDBACK.value]:
+        return PriorityLevel.MEDIUM
+
+    # All escalating issues are high priority for all other issue categories
+    if substatus == GroupSubStatus.ESCALATING:
+        return PriorityLevel.HIGH
+
+    if group_type.category == GroupCategory.ERROR.value:
+        if level in [logging.INFO, logging.DEBUG]:
+            return PriorityLevel.LOW
+        elif level == logging.WARNING:
+            return PriorityLevel.MEDIUM
+        elif level in [logging.ERROR, logging.FATAL]:
+            return PriorityLevel.HIGH
+
+        logging.warning('Unknown log level "%s" for group %s', level, group_id)
+        return PriorityLevel.MEDIUM
+
+    if group_type.category == GroupCategory.CRON.value:
+        if level == logging.WARNING:
+            return PriorityLevel.MEDIUM
+
+        return PriorityLevel.HIGH
+
+    # Profiling issues should be treated the same as Performance issues since they are merging
+    if group_type.category in [GroupCategory.PERFORMANCE.value, GroupCategory.PROFILE.value]:
+        # Statistical detectors are medium priority
+        if type_id in [
+            PROFILE_FUNCTION_REGRESSION_TYPE_ID,
+            PERFORMANCE_P95_ENDPOINT_REGRESSION_GROUPTYPE_ID,
+        ]:
+            return PriorityLevel.MEDIUM
+        return PriorityLevel.LOW
+
+    # All other issues are the default medium priority
+    return PriorityLevel.MEDIUM
+
+
+def update_group_priority(apps, schema_editor):
+    Group = apps.get_model("sentry", "Group")
+
+    redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER)
+    cursor = connection.cursor()
+    batch = []
+
+    last_processed_id = int(redis_client.get(REDIS_KEY) or 0)
+    logger.info("Starting group priority backfill from id %s", last_processed_id)
+    for (
+        group_id,
+        data,
+        level,
+        group_type,
+        substatus,
+        priority,
+    ) in RangeQuerySetWrapperWithProgressBarApprox(
+        Group.objects.filter(id__gt=last_processed_id).values_list(
+            "id", "data", "level", "type", "substatus", "priority"
+        ),
+        result_value_getter=lambda item: item[0],
+    ):
+        if priority is not None:
+            continue
+
+        priority = _get_priority_level(group_id, level, group_type, substatus)
+        data.get("metadata", {})["initial_priority"] = priority
+        data = json.dumps(data)
+        batch.append((group_id, priority, data))
+
+        if len(batch) >= BATCH_SIZE:
+            logger.info(
+                "Processing batch for group priority backfill with %s items",
+                BATCH_SIZE,
+                extra={"group_id": group_id},
+            )
+            execute_values(cursor, UPDATE_QUERY, batch, page_size=BATCH_SIZE)
+            redis_client.set(REDIS_KEY, group_id, ex=60 * 60 * 24 * 7)
+            batch = []
+
+    if batch:
+        execute_values(cursor, UPDATE_QUERY, batch, page_size=BATCH_SIZE)
+
+
+class Migration(CheckedMigration):
+    # This flag is used to mark that a migration shouldn't be automatically run in production. For
+    # the most part, this should only be used for operations where it's safe to run the migration
+    # after your code has deployed. So this should not be used for most operations that alter the
+    # schema of a table.
+    # Here are some things that make sense to mark as dangerous:
+    # - Large data migrations. Typically we want these to be run manually by ops so that they can
+    #   be monitored and not block the deploy for a long period of time while they run.
+    # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+    #   have ops run this and not block the deploy. Note that while adding an index is a schema
+    #   change, it's completely safe to run the operation after the code has deployed.
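+    # (Both caveats above apply here: this is a large data migration over every
+    # Group row, and the Redis checkpoint in update_group_priority lets a
+    # restarted run resume from the last processed id.)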
+ is_dangerous = True + + dependencies = [ + ("sentry", "0643_add_date_modified_col_dashboard_widget_query"), + ] + + operations = [ + migrations.RunPython( + update_group_priority, + reverse_code=migrations.RunPython.noop, + hints={"tables": ["sentry_groupedmessage"]}, + ), + ] diff --git a/tests/sentry/migrations/test_0644_backfill_priority_for_groups.py b/tests/sentry/migrations/test_0644_backfill_priority_for_groups.py new file mode 100644 index 00000000000000..565bfd3bdc4350 --- /dev/null +++ b/tests/sentry/migrations/test_0644_backfill_priority_for_groups.py @@ -0,0 +1,243 @@ +import logging + +from django.conf import settings + +from sentry.issues.grouptype import ( + ErrorGroupType, + FeedbackGroup, + MonitorCheckInFailure, + PerformanceConsecutiveHTTPQueriesGroupType, + PerformanceP95EndpointRegressionGroupType, + ReplayDeadClickType, +) +from sentry.models.group import GroupStatus +from sentry.models.project import Project +from sentry.testutils.cases import TestMigrations +from sentry.types.group import GroupSubStatus +from sentry.utils import redis + + +class PriorityLevel: + LOW = 25 + MEDIUM = 50 + HIGH = 75 + + +class BackfillGroupPriority(TestMigrations): + migrate_from = "0643_add_date_modified_col_dashboard_widget_query" + migrate_to = "0644_backfill_priority_for_groups" + + def setup_initial_state(self): + self._create_groups_to_backfill(self.project) + redis_cluster = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) + redis_cluster.set("priority_backfill.last_processed_id", self.cache_group_id) + + def test(self): + for groups, expected_priority in ( + (self.high_priority_groups, PriorityLevel.HIGH), + (self.medium_priority_groups, PriorityLevel.MEDIUM), + (self.low_priority_groups, PriorityLevel.LOW), + ): + for desc, group in groups: + group.refresh_from_db() + if desc == "skip me": + # these groups should not have been backfilled because the group id is less than the redis cached ID + assert not group.priority + continue + + assert group.priority == expected_priority, desc + if not desc.startswith("existing"): + assert group.data.get("metadata")["initial_priority"] == expected_priority + + def _create_groups_to_backfill(self, project: Project) -> None: + skipped_group_count = 3 + data = [ + # three groups to skip to test the redis cache + ( + "skip me", + {"type": FeedbackGroup.type_id}, + PriorityLevel.MEDIUM, + ), + ( + "skip me", + {"type": FeedbackGroup.type_id}, + PriorityLevel.MEDIUM, + ), + ( + "skip me", + {"type": FeedbackGroup.type_id}, + PriorityLevel.MEDIUM, + ), + # groups with priority remain unchanged, even if escalating. 
+ ( + "existing low priority", + { + "priority": PriorityLevel.LOW, + "data": {"metadata": {"initial_priority": PriorityLevel.LOW}}, + }, + PriorityLevel.LOW, + ), + ( + "existing low priority with escalation", + { + "priority": PriorityLevel.LOW, + "status": GroupStatus.UNRESOLVED, + "substatus": GroupSubStatus.ESCALATING, + "data": {"metadata": {"initial_priority": PriorityLevel.LOW}}, + }, + PriorityLevel.LOW, + ), + # escalating groups are high priority, except for Replay and Feedback issues + ( + "escalating error group", + { + "status": GroupStatus.UNRESOLVED, + "substatus": GroupSubStatus.ESCALATING, + "type": ErrorGroupType.type_id, + "level": logging.INFO, # this level should not matter + }, + PriorityLevel.HIGH, + ), + ( + "escalating performance group", + { + "status": GroupStatus.UNRESOLVED, + "substatus": GroupSubStatus.ESCALATING, + "type": PerformanceConsecutiveHTTPQueriesGroupType.type_id, + }, + PriorityLevel.HIGH, + ), + ( + "escalating cron group", + { + "status": GroupStatus.UNRESOLVED, + "substatus": GroupSubStatus.ESCALATING, + "type": MonitorCheckInFailure.type_id, + }, + PriorityLevel.HIGH, + ), + ( + "escalating replay group", + { + "status": GroupStatus.UNRESOLVED, + "substatus": GroupSubStatus.ESCALATING, + "type": ReplayDeadClickType.type_id, + }, + PriorityLevel.MEDIUM, + ), + ( + "escalating feedback group", + { + "status": GroupStatus.UNRESOLVED, + "substatus": GroupSubStatus.ESCALATING, + "type": FeedbackGroup.type_id, + }, + PriorityLevel.MEDIUM, + ), + # error groups respect log levels if present + ( + "error group with log level INFO", + { + "type": ErrorGroupType.type_id, + "level": logging.INFO, + }, + PriorityLevel.LOW, + ), + ( + "error group with log level DEBUG", + { + "type": ErrorGroupType.type_id, + "level": logging.DEBUG, + }, + PriorityLevel.LOW, + ), + ( + "error group with log level WARNING", + { + "type": ErrorGroupType.type_id, + "level": logging.WARNING, + }, + PriorityLevel.MEDIUM, + ), + ( + "error group with log level ERROR", + { + "type": ErrorGroupType.type_id, + "level": logging.ERROR, + }, + PriorityLevel.HIGH, + ), + ( + "error group with log level FATAL", + { + "type": ErrorGroupType.type_id, + "level": logging.FATAL, + }, + PriorityLevel.HIGH, + ), + # cron groups are medium priority if they are warnings, high priority otherwise + ( + "cron group with log level WARNING", + { + "type": MonitorCheckInFailure.type_id, + "level": logging.WARNING, + }, + PriorityLevel.MEDIUM, + ), + ( + "cron group with log level ERROR", + { + "substatus": GroupSubStatus.ONGOING, + "type": MonitorCheckInFailure.type_id, + "level": logging.ERROR, + }, + PriorityLevel.HIGH, + ), + ( + "cron group with log level DEBUG", + { + "type": MonitorCheckInFailure.type_id, + "level": logging.DEBUG, + }, + PriorityLevel.HIGH, + ), + # statistical detectors are medium priority + ( + "statistical detector group", + { + "level": logging.ERROR, + "type": PerformanceP95EndpointRegressionGroupType.type_id, + }, + PriorityLevel.MEDIUM, + ), + # performance issues are otherwise low priority + ( + "performance group", + { + "level": logging.ERROR, + "type": PerformanceConsecutiveHTTPQueriesGroupType.type_id, + }, + PriorityLevel.LOW, + ), + ] + + self.low_priority_groups = [] + self.medium_priority_groups = [] + self.high_priority_groups = [] + + for desc, group_data, expected_priority in data: + group = self.create_group(project, **group_data) # type: ignore + + if desc == "skip me": + skipped_group_count -= 1 + if skipped_group_count == 0: + self.cache_group_id = 
group.id + + if expected_priority == PriorityLevel.LOW: + self.low_priority_groups.append((desc, group)) + + elif expected_priority == PriorityLevel.MEDIUM: + self.medium_priority_groups.append((desc, group)) + + elif expected_priority == PriorityLevel.HIGH: + self.high_priority_groups.append((desc, group)) From b1f0652c47ddbe63148bae9fa7dd1b2eb278390f Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 13 Feb 2024 11:10:28 -0800 Subject: [PATCH 327/357] ref(suspect-commits): GA suspect-commits-all-frames (#65043) This feature flag has been enabled in SaaS for some time now. This PR removes the flag and all now-unused logic. Most of the tests were already re-written for the new methods, so there are a lot of deleted tests which should not decrease coverage. Any cases that didn't look covered were modified to use the new methods. --- src/sentry/conf/server.py | 2 - src/sentry/features/__init__.py | 1 - src/sentry/integrations/github/client.py | 65 --- src/sentry/integrations/github/integration.py | 47 -- .../github_enterprise/integration.py | 48 -- src/sentry/integrations/gitlab/client.py | 12 - src/sentry/integrations/gitlab/integration.py | 44 -- .../integrations/mixins/commit_context.py | 33 -- .../integrations/utils/commit_context.py | 114 ---- src/sentry/tasks/commit_context.py | 268 ++------- src/sentry/tasks/post_process.py | 13 +- .../sentry/integrations/github/test_client.py | 420 ++++++-------- .../integrations/github/test_integration.py | 118 ++-- .../github_enterprise/test_integration.py | 108 +--- .../integrations/gitlab/test_integration.py | 119 ---- tests/sentry/tasks/test_commit_context.py | 536 ++---------------- tests/sentry/tasks/test_post_process.py | 38 +- 17 files changed, 325 insertions(+), 1661 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 8e51105b4bf7f7..f0e2321e1bd590 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1908,8 +1908,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:starfish-view": False, # Enable starfish dropdown on the webservice view for switching chart visualization "organizations:starfish-wsv-chart-dropdown": False, - # Enable the new suspect commits calculation that uses all frames in the stack trace - "organizations:suspect-commits-all-frames": False, # Allow organizations to configure all symbol sources. 
"organizations:symbol-sources": True, # Enable team insights page diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 7926169f440e74..a3d561cff3b891 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -271,7 +271,6 @@ default_manager.add("organizations:starfish-test-endpoint", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:starfish-view", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:starfish-wsv-chart-dropdown", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:suspect-commits-all-frames", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:symbol-sources", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:team-workflow-notifications", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:trace-view-load-more", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/integrations/github/client.py b/src/sentry/integrations/github/client.py index cbd724d0086237..2d3da2aa7f9cac 100644 --- a/src/sentry/integrations/github/client.py +++ b/src/sentry/integrations/github/client.py @@ -640,71 +640,6 @@ def get_file(self, repo: Repository, path: str, ref: str, codeowners: bool = Fal ) return result - def get_blame_for_file( - self, repo: Repository, path: str, ref: str, lineno: int - ) -> Sequence[Mapping[str, Any]]: - [owner, name] = repo.name.split("/") - query = f"""query {{ - repository(name: "{name}", owner: "{owner}") {{ - ref(qualifiedName: "{ref}") {{ - target {{ - ... on Commit {{ - blame(path: "{path}") {{ - ranges {{ - commit {{ - oid - author {{ - name - email - }} - message - committedDate - }} - startingLine - endingLine - age - }} - }} - }} - }} - }} - }} - }}""" - - try: - contents = self.post( - path="/graphql", - data={"query": query}, - allow_text=False, - ) - except ValueError as e: - sentry_sdk.capture_exception(e) - return [] - - errors = contents.get("errors", []) - if len(errors) > 0: - if any([error for error in errors if error.get("type") == "RATE_LIMITED"]): - raise ApiRateLimitedError("GitHub rate limit exceeded") - - # When data is present, it means that the query was at least partially successful, - # usually a missing repo/branch/file which is expected with wrong configurations. - # If data is not present, the query may be formed incorrectly, so raise an error. 
- if not contents.get("data"): - err_message = ", ".join([error.get("message", "") for error in errors]) - raise ApiError(err_message) - - response_data = contents.get("data") - if not isinstance(response_data, dict): - raise ApiError("GitHub returned no data.", 404) - response_repo = response_data.get("repository") - if not isinstance(response_repo, dict): - raise ApiError("Repository does not exist in GitHub.", 404) - response_ref = response_repo.get("ref") - if not isinstance(response_ref, dict): - raise ApiError("Branch does not exist in GitHub.", 404) - - return response_ref.get("target", {}).get("blame", {}).get("ranges", []) - def get_blame_for_files( self, files: Sequence[SourceLineInfo], extra: Mapping[str, Any] ) -> Sequence[FileBlameInfo]: diff --git a/src/sentry/integrations/github/integration.py b/src/sentry/integrations/github/integration.py index 871322d3f5fb20..2b7247c1f06b3a 100644 --- a/src/sentry/integrations/github/integration.py +++ b/src/sentry/integrations/github/integration.py @@ -3,13 +3,11 @@ import logging import re from collections.abc import Collection, Mapping, Sequence -from datetime import timezone from typing import Any from django.http import HttpResponse from django.utils.text import slugify from django.utils.translation import gettext_lazy as _ -from isodate import parse_datetime from rest_framework.request import Request from sentry import features, options @@ -249,51 +247,6 @@ def has_repo_access(self, repo: RpcRepository) -> bool: return False return True - def get_commit_context( - self, repo: Repository, filepath: str, ref: str, event_frame: Mapping[str, Any] - ) -> Mapping[str, str] | None: - lineno = event_frame.get("lineno", 0) - if not lineno: - return None - blame_range: Sequence[Mapping[str, Any]] | None = self.get_blame_for_file( - repo, filepath, ref, lineno - ) - - if blame_range is None: - return None - - try: - commit: Mapping[str, Any] = max( - ( - blame - for blame in blame_range - if blame.get("startingLine", 0) <= lineno <= blame.get("endingLine", 0) - and blame.get("commit", {}).get("committedDate") - ), - key=lambda blame: parse_datetime(blame.get("commit", {}).get("committedDate")), - default={}, - ) - if not commit: - return None - except (ValueError, IndexError): - return None - - commitInfo = commit.get("commit") - if not commitInfo: - return None - else: - committed_date = parse_datetime(commitInfo.get("committedDate")).astimezone( - timezone.utc - ) - - return { - "commitId": commitInfo.get("oid"), - "committedDate": committed_date, - "commitMessage": commitInfo.get("message"), - "commitAuthorName": commitInfo.get("author", {}).get("name"), - "commitAuthorEmail": commitInfo.get("author", {}).get("email"), - } - class GitHubIntegrationProvider(IntegrationProvider): key = "github" diff --git a/src/sentry/integrations/github_enterprise/integration.py b/src/sentry/integrations/github_enterprise/integration.py index 4d66aea0f139a1..fc07dfc3e781a1 100644 --- a/src/sentry/integrations/github_enterprise/integration.py +++ b/src/sentry/integrations/github_enterprise/integration.py @@ -1,14 +1,11 @@ from __future__ import annotations -from collections.abc import Mapping, Sequence -from datetime import timezone from typing import Any from urllib.parse import urlparse from django import forms from django.http import HttpResponse from django.utils.translation import gettext_lazy as _ -from isodate import parse_datetime from rest_framework.request import Request from sentry import http @@ -189,51 +186,6 @@ def format_source_url(self, repo: 
Repository, filepath: str, branch: str) -> str # "https://github.example.org/octokit/octokit.rb/blob/master/README.md" return f"{repo.url}/blob/{branch}/{filepath}" - def get_commit_context( - self, repo: Repository, filepath: str, ref: str, event_frame: Mapping[str, Any] - ) -> Mapping[str, Any] | None: - lineno = event_frame.get("lineno", 0) - if not lineno: - return None - blame_range: Sequence[Mapping[str, Any]] | None = self.get_blame_for_file( - repo, filepath, ref, lineno - ) - - if blame_range is None: - return None - - try: - commit: Mapping[str, Any] = max( - ( - blame - for blame in blame_range - if blame.get("startingLine", 0) <= lineno <= blame.get("endingLine", 0) - and blame.get("commit", {}).get("committedDate") - ), - key=lambda blame: parse_datetime(blame.get("commit", {}).get("committedDate")), - default={}, - ) - if not commit: - return None - except (ValueError, IndexError): - return None - - commitInfo = commit.get("commit") - if not commitInfo: - return None - else: - committed_date = parse_datetime(commitInfo.get("committedDate")).astimezone( - timezone.utc - ) - - return { - "commitId": commitInfo.get("oid"), - "committedDate": committed_date, - "commitMessage": commitInfo.get("message"), - "commitAuthorName": commitInfo.get("author", {}).get("name"), - "commitAuthorEmail": commitInfo.get("author", {}).get("email"), - } - class InstallationForm(forms.Form): url = forms.CharField( diff --git a/src/sentry/integrations/gitlab/client.py b/src/sentry/integrations/gitlab/client.py index 45c2c849205929..21566c270d8f4d 100644 --- a/src/sentry/integrations/gitlab/client.py +++ b/src/sentry/integrations/gitlab/client.py @@ -356,18 +356,6 @@ def get_file(self, repo: Repository, path: str, ref: str, codeowners: bool = Fal ) return result - def get_blame_for_file( - self, repo: Repository, path: str, ref: str, lineno: int - ) -> Sequence[Mapping[str, Any]]: - project_id = repo.config["project_id"] - encoded_path = quote(path, safe="") - request_path = GitLabApiClientPath.blame.format(project=project_id, path=encoded_path) - contents = self.get( - request_path, params={"ref": ref, "range[start]": lineno, "range[end]": lineno} - ) - - return contents or [] - def get_blame_for_files( self, files: Sequence[SourceLineInfo], extra: Mapping[str, Any] ) -> list[FileBlameInfo]: diff --git a/src/sentry/integrations/gitlab/integration.py b/src/sentry/integrations/gitlab/integration.py index fc7338af2ceca6..84a9ebb658b8cc 100644 --- a/src/sentry/integrations/gitlab/integration.py +++ b/src/sentry/integrations/gitlab/integration.py @@ -1,14 +1,10 @@ from __future__ import annotations -from collections.abc import Mapping, Sequence -from datetime import timezone -from typing import Any from urllib.parse import urlparse from django import forms from django.http import HttpResponse from django.utils.translation import gettext_lazy as _ -from isodate import parse_datetime from rest_framework.request import Request from sentry.identity.gitlab import get_oauth_data, get_user_info @@ -165,46 +161,6 @@ def error_message_from_json(self, data): if "error" in data: return data["error"] - def get_commit_context( - self, repo: Repository, filepath: str, ref: str, event_frame: Mapping[str, Any] - ) -> Mapping[str, Any] | None: - """ - Returns the latest commit that altered the line from the event frame if it exists. 
- """ - lineno = event_frame.get("lineno", 0) - if not lineno: - return None - blame_range: Sequence[Mapping[str, Any]] | None = self.get_blame_for_file( - repo, filepath, ref, lineno - ) - if blame_range is None: - return None - - try: - commit = max( - (blame for blame in blame_range if blame.get("commit", {}).get("committed_date")), - key=lambda blame: parse_datetime(blame.get("commit", {}).get("committed_date")), - ) - except (ValueError, IndexError): - return None - - commitInfo = commit.get("commit") - if not commitInfo: - return None - else: - # TODO(nisanthan): Use dateutil.parser.isoparse once on python 3.11 - committed_date = parse_datetime(commitInfo.get("committed_date")).astimezone( - timezone.utc - ) - - return { - "commitId": commitInfo.get("id"), - "committedDate": committed_date, - "commitMessage": commitInfo.get("message"), - "commitAuthorName": commitInfo.get("committer_name"), - "commitAuthorEmail": commitInfo.get("committer_email"), - } - class InstallationForm(forms.Form): url = forms.CharField( diff --git a/src/sentry/integrations/mixins/commit_context.py b/src/sentry/integrations/mixins/commit_context.py index 8e01f5957993b0..1e4215d4f890d9 100644 --- a/src/sentry/integrations/mixins/commit_context.py +++ b/src/sentry/integrations/mixins/commit_context.py @@ -35,11 +35,6 @@ class FileBlameInfo(SourceLineInfo): class GetBlameForFile(Protocol): - def get_blame_for_file( - self, repo: Repository, filepath: str, ref: str, lineno: int - ) -> list[dict[str, Any]] | None: - ... - def get_blame_for_files( self, files: Sequence[SourceLineInfo], extra: Mapping[str, Any] ) -> list[FileBlameInfo]: @@ -56,28 +51,6 @@ class CommitContextMixin(GetClient): # dynamically given a search query repo_search = False - def get_blame_for_file( - self, repo: Repository, filepath: str, ref: str, lineno: int - ) -> Sequence[Mapping[str, Any]] | None: - """ - Calls the client's `get_blame_for_file` method to see if the file has a blame list. - - repo: Repository (object) - filepath: filepath of the source code. (string) - ref: commitsha or default_branch (string) - """ - filepath = filepath.lstrip("/") - try: - client = self.get_client() - except Identity.DoesNotExist: - return None - try: - response = client.get_blame_for_file(repo, filepath, ref, lineno) - except IdentityNotValid: - return None - - return response - def get_blame_for_files( self, files: Sequence[SourceLineInfo], extra: Mapping[str, Any] ) -> list[FileBlameInfo]: @@ -104,9 +77,3 @@ def get_commit_context_all_frames( Given a list of source files and line numbers,returns the commit info for the most recent commit. 
""" return self.get_blame_for_files(files, extra) - - def get_commit_context( - self, repo: Repository, filepath: str, branch: str, event_frame: Mapping[str, Any] - ) -> Mapping[str, Any] | None: - """Formats the source code url used for stack trace linking.""" - raise NotImplementedError diff --git a/src/sentry/integrations/utils/commit_context.py b/src/sentry/integrations/utils/commit_context.py index 601d288f7894ed..8f3b9bb2a3041b 100644 --- a/src/sentry/integrations/utils/commit_context.py +++ b/src/sentry/integrations/utils/commit_context.py @@ -100,120 +100,6 @@ def find_commit_context_for_event_all_frames( return (selected_blame, selected_install) -def find_commit_context_for_event( - code_mappings: Sequence[RepositoryProjectPathConfig], - frame: Mapping[str, Any], - platform: str, - sdk_name: str | None, - extra: Mapping[str, Any], -) -> tuple[list[tuple[Mapping[str, Any], RepositoryProjectPathConfig]], IntegrationInstallation]: - """ - - Get all the Commit Context for an event frame using a source code integration for all the matching code mappings - code_mappings: List of RepositoryProjectPathConfig - frame: Event frame - """ - result = [] - installation = None - for code_mapping in code_mappings: - if not code_mapping.organization_integration_id: - logger.info( - "process_commit_context.no_integration", - extra={ - **extra, - "code_mapping_id": code_mapping.id, - }, - ) - continue - - stacktrace_path = get_stacktrace_path_from_event_frame(frame) - - if not stacktrace_path: - logger.info( - "process_commit_context.no_stacktrace_path", - extra={ - **extra, - "code_mapping_id": code_mapping.id, - }, - ) - continue - - src_path = convert_stacktrace_frame_path_to_source_path( - frame=EventFrame.from_dict(frame), - code_mapping=code_mapping, - platform=platform, - sdk_name=sdk_name, - ) - - # src_path can be none if the stacktrace_path is an invalid filepath - if not src_path: - logger.info( - "process_commit_context.no_src_path", - extra={ - **extra, - "code_mapping_id": code_mapping.id, - "stacktrace_path": stacktrace_path, - }, - ) - continue - - log_info = { - **extra, - "code_mapping_id": code_mapping.id, - "stacktrace_path": stacktrace_path, - "src_path": src_path, - } - logger.info( - "process_commit_context.found_stacktrace_and_src_paths", - extra=log_info, - ) - integration = integration_service.get_integration( - organization_integration_id=code_mapping.organization_integration_id - ) - install = integration.get_installation(organization_id=code_mapping.organization_id) - if installation is None and install is not None: - installation = install - with metrics.timer( - "tasks.process_commit_context.get_commit_context", - tags={"provider": integration.provider}, - ): - try: - commit_context = install.get_commit_context( - code_mapping.repository, src_path, code_mapping.default_branch, frame - ) - except ApiError as e: - metrics.incr("tasks.process_commit_context.api_error", tags={"status": e.code}) - commit_context = None - - if e.code in (401, 403, 404, 429): - logger.warning( - "process_commit_context.failed_to_fetch_commit_context.api_error", - extra={**log_info, "code": e.code, "error_message": e.text}, - ) - # Only create Sentry errors for status codes that aren't expected - else: - logger.exception( - "process_commit_context.failed_to_fetch_commit_context.api_error", - extra={**log_info, "code": e.code, "error_message": e.text}, - ) - - analytics.record( - "integrations.failed_to_fetch_commit_context", - organization_id=code_mapping.organization_id, - 
project_id=code_mapping.project.id, - group_id=extra["group"], - code_mapping_id=code_mapping.id, - provider=integration.provider, - error_message=e.text, - ) - - # Only return suspect commits that are less than a year old - if commit_context and is_date_less_than_year(commit_context["committedDate"]): - result.append((commit_context, code_mapping)) - - return result, installation - - def is_date_less_than_year(date: datetime) -> bool: return date > datetime.now(tz=timezone.utc) - timedelta(days=365) diff --git a/src/sentry/tasks/commit_context.py b/src/sentry/tasks/commit_context.py index e67844be8e718b..3ca8c212cdcb0a 100644 --- a/src/sentry/tasks/commit_context.py +++ b/src/sentry/tasks/commit_context.py @@ -10,12 +10,11 @@ from django.utils import timezone as django_timezone from sentry_sdk import set_tag -from sentry import analytics, features +from sentry import analytics from sentry.api.serializers.models.release import get_users_for_authors from sentry.integrations.base import IntegrationInstallation from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs from sentry.integrations.utils.commit_context import ( - find_commit_context_for_event, find_commit_context_for_event_all_frames, get_or_create_commit_from_blame, ) @@ -33,13 +32,9 @@ from sentry.tasks.groupowner import process_suspect_commits from sentry.utils import metrics from sentry.utils.cache import cache -from sentry.utils.event_frames import munged_filename_and_frames from sentry.utils.locking import UnableToAcquireLock from sentry.utils.sdk import set_current_event_project -PREFERRED_GROUP_OWNERS = 1 -PREFERRED_GROUP_OWNER_AGE = timedelta(days=7) -DEBOUNCE_CACHE_KEY = lambda group_id: f"process-commit-context-{group_id}" DEBOUNCE_PR_COMMENT_CACHE_KEY = lambda pullrequest_id: f"pr-comment-{pullrequest_id}" DEBOUNCE_PR_COMMENT_LOCK_KEY = lambda pullrequest_id: f"queue_comment_task:{pullrequest_id}" PR_COMMENT_TASK_TTL = timedelta(minutes=5).total_seconds() @@ -155,76 +150,25 @@ def process_commit_context( with lock.acquire(): metrics.incr("sentry.tasks.process_commit_context.start") - cache_key = DEBOUNCE_CACHE_KEY(group_id) - set_current_event_project(project_id) project = Project.objects.get_from_cache(id=project_id) set_tag("organization.slug", project.organization.slug) - owners = GroupOwner.objects.filter( - group_id=group_id, - project=project, - organization_id=project.organization_id, - type=GroupOwnerType.SUSPECT_COMMIT.value, - ) basic_logging_details = { "event": event_id, "group": group_id, "organization": project.organization_id, } - # Delete old owners - to_be_deleted = owners.filter( - date_added__lte=django_timezone.now() - PREFERRED_GROUP_OWNER_AGE - ) - - if len(to_be_deleted): - for record in to_be_deleted: - record.delete() - - current_owners = owners.filter( - date_added__gte=django_timezone.now() - PREFERRED_GROUP_OWNER_AGE - ).order_by("-date_added") - - if len(current_owners) >= PREFERRED_GROUP_OWNERS: - # When there exists a Suspect Committer, we want to debounce this task until that Suspect Committer hits the TTL of PREFERRED_GROUP_OWNER_AGE - cache_duration = django_timezone.now() - current_owners[0].date_added - cache_duration = ( - cache_duration - if cache_duration < PREFERRED_GROUP_OWNER_AGE - else PREFERRED_GROUP_OWNER_AGE - ) - cache.set(cache_key, True, cache_duration.total_seconds()) - metrics.incr( - "sentry.tasks.process_commit_context.aborted", - tags={ - "detail": "maxed_owners_none_old", - }, - ) - logger.info( - "process_commit_context.maxed_owners", - 
extra={ - **basic_logging_details, - "reason": "maxed_owners_none_old", - }, - ) - return code_mappings = get_sorted_code_mapping_configs(project) frames = event_frames or [] - munged = munged_filename_and_frames(event_platform, frames, "munged_filename", sdk_name) - if munged: - frames = munged[1] - in_app_frames = [f for f in frames if f and f.get("in_app", False)][::-1] # First frame in the stacktrace that is "in_app" frame = next(iter(in_app_frames), None) if not frame: - # When we could not find the in_app frame for the event, we will debounce the task for 1 day. - # New events can be unrelated to the original event and may have an "in_app" frame. - cache.set(cache_key, True, timedelta(days=1).total_seconds()) metrics.incr( "sentry.tasks.process_commit_context.aborted", tags={ @@ -247,171 +191,57 @@ def process_commit_context( project_id=project_id, sdk_name=sdk_name, ) - if features.has("organizations:suspect-commits-all-frames", project.organization): - analytics.record( - "integrations.failed_to_fetch_commit_context_all_frames", - organization_id=project.organization_id, - project_id=project_id, - group_id=basic_logging_details["group"], - event_id=basic_logging_details["event"], - num_frames=0, - num_successfully_mapped_frames=0, - reason="could_not_find_in_app_stacktrace_frame", - ) + analytics.record( + "integrations.failed_to_fetch_commit_context_all_frames", + organization_id=project.organization_id, + project_id=project_id, + group_id=basic_logging_details["group"], + event_id=basic_logging_details["event"], + num_frames=0, + num_successfully_mapped_frames=0, + reason="could_not_find_in_app_stacktrace_frame", + ) return - if features.has("organizations:suspect-commits-all-frames", project.organization): - metrics.incr("tasks.process_commit_context_all_frames.start") - blame = None - installation = None - try: - blame, installation = find_commit_context_for_event_all_frames( - code_mappings=code_mappings, - frames=in_app_frames, - organization_id=project.organization_id, - project_id=project_id, - platform=event_platform, - sdk_name=sdk_name, - extra=basic_logging_details, - ) - except ApiError: - logger.info( - "process_commit_context_all_frames.retry", - extra={**basic_logging_details, "retry_count": self.request.retries}, - ) - metrics.incr("tasks.process_commit_context_all_frames.retry") - self.retry() - - if not blame or not installation: - # Fall back to the release logic if we can't find a commit for any of the frames - process_suspect_commits.delay( - event_id=event_id, - event_platform=event_platform, - event_frames=event_frames, - group_id=group_id, - project_id=project_id, - sdk_name=sdk_name, - ) - return - - selected_code_mapping = blame.code_mapping - - commit = get_or_create_commit_from_blame( - blame, organization_id=project.organization_id, extra=basic_logging_details - ) - else: - found_contexts, installation = find_commit_context_for_event( + metrics.incr("tasks.process_commit_context_all_frames.start") + blame = None + installation = None + try: + blame, installation = find_commit_context_for_event_all_frames( code_mappings=code_mappings, - frame=frame, + frames=in_app_frames, + organization_id=project.organization_id, + project_id=project_id, platform=event_platform, sdk_name=sdk_name, - extra={ - **basic_logging_details, - }, + extra=basic_logging_details, + ) + except ApiError: + logger.info( + "process_commit_context_all_frames.retry", + extra={**basic_logging_details, "retry_count": self.request.retries}, + ) + 
metrics.incr("tasks.process_commit_context_all_frames.retry") + self.retry() + + if not blame or not installation: + # Fall back to the release logic if we can't find a commit for any of the frames + process_suspect_commits.delay( + event_id=event_id, + event_platform=event_platform, + event_frames=event_frames, + group_id=group_id, + project_id=project_id, + sdk_name=sdk_name, ) + return - if not len(found_contexts): - # Couldn't find the blame with any of the code mappings, so we will debounce the task for PREFERRED_GROUP_OWNER_AGE. - # We will clear the debounce cache when the org adds new code mappings for the project of this group. - cache.set(cache_key, True, PREFERRED_GROUP_OWNER_AGE.total_seconds()) + selected_code_mapping = blame.code_mapping - metrics.incr( - "sentry.tasks.process_commit_context.aborted", - tags={ - "detail": "could_not_fetch_commit_context", - }, - ) - logger.info( - "process_commit_context.find_commit_context", - extra={ - **basic_logging_details, - "reason": "could_not_fetch_commit_context", - "code_mappings_count": len(code_mappings), - "fallback": True, - }, - ) - process_suspect_commits.delay( - event_id=event_id, - event_platform=event_platform, - event_frames=event_frames, - group_id=group_id, - project_id=project_id, - sdk_name=sdk_name, - ) - return - - commit = None - new_commit = None - selected_code_mapping = None - for commit_context, code_mapping in found_contexts: - try: - # Find commit and break - commit = Commit.objects.get( - repository_id=code_mapping.repository_id, - key=commit_context.get("commitId"), - ) - assert commit is not None - if commit.message == "": - commit.message = commit_context.get("commitMessage") - commit.save() - selected_code_mapping = code_mapping - break - except Commit.DoesNotExist: - # If the commit has no date, we will not add it to avoid breaking other commit ordered-based logic. 
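# The branch just below keeps the first dated candidate. Restated as a
# standalone function (plain dicts and a key set standing in for the Django
# model lookups), the whole removed loop does: prefer a commit Sentry already
# knows about, otherwise remember the first dated context for later creation.
def resolve_commit(found_contexts, known_keys):
    candidate = None
    for commit_context, code_mapping in found_contexts:
        key = commit_context.get("commitId")
        if key in known_keys:
            return ("existing", key, code_mapping)  # stop at the first known commit
        if candidate is None and commit_context.get("committedDate"):
            candidate = ("create", commit_context, code_mapping)
    return candidate  # None when no context carried a committedDate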
- if not new_commit and commit_context.get("committedDate"): - new_commit = { - "context": commit_context, - "repository_id": code_mapping.repository_id, - "code_mapping_id": code_mapping.id, - } - - logger.info( - "process_commit_context.no_commit_in_sentry", - extra={ - **basic_logging_details, - "sha": commit_context.get("commitId"), - "repository_id": code_mapping.repository_id, - "code_mapping_id": code_mapping.id, - "reason": "commit_sha_does_not_exist_in_sentry", - }, - ) - - if not commit: - if new_commit: - context = new_commit["context"] - # If none of the commits exist in sentry_commit, we add the first commit we found - commit_author, _ = CommitAuthor.objects.get_or_create( - organization_id=project.organization_id, - email=context.get("commitAuthorEmail"), - defaults={"name": context.get("commitAuthorName")}, - ) - commit = Commit.objects.create( - organization_id=project.organization_id, - repository_id=new_commit["repository_id"], - key=context.get("commitId"), - date_added=context.get("committedDate"), - author=commit_author, - message=context.get("commitMessage"), - ) - - logger.info( - "process_commit_context.added_commit_to_sentry_commit", - extra={ - **basic_logging_details, - "sha": new_commit.get("commitId"), - "repository_id": new_commit["repository_id"], - "code_mapping_id": new_commit["code_mapping_id"], - "reason": "commit_sha_does_not_exist_in_sentry_for_all_code_mappings", - }, - ) - else: - metrics.incr( - "sentry.tasks.process_commit_context.aborted", - tags={ - "detail": "commit_sha_does_not_exist_in_sentry", - }, - ) + commit = get_or_create_commit_from_blame( + blame, organization_id=project.organization_id, extra=basic_logging_details + ) assert isinstance(commit, Commit) authors = list(CommitAuthor.objects.get_many_from_cache([commit.author_id])) @@ -452,18 +282,6 @@ def process_commit_context( extra={"organization_id": project.organization_id}, ) - if created: - # If owners exceeds the limit, delete the oldest one. - if len(current_owners) + 1 > PREFERRED_GROUP_OWNERS: - try: - owner = current_owners[0] - except IndexError: - pass - else: - owner.delete() - - # Success. 
We will debounce this task until this Suspect Committer hits the TTL of PREFERRED_GROUP_OWNER_AGE - cache.set(cache_key, True, PREFERRED_GROUP_OWNER_AGE.total_seconds()) logger.info( "process_commit_context.success", extra={ diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index 8c78df6396deae..583bd4fc77e1bb 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -1130,7 +1130,7 @@ def process_commits(job: PostProcessJob) -> None: return from sentry.models.commit import Commit - from sentry.tasks.commit_context import DEBOUNCE_CACHE_KEY, process_commit_context + from sentry.tasks.commit_context import process_commit_context from sentry.tasks.groupowner import DEBOUNCE_CACHE_KEY as SUSPECT_COMMITS_DEBOUNCE_CACHE_KEY from sentry.tasks.groupowner import process_suspect_commits @@ -1176,18 +1176,9 @@ def process_commits(job: PostProcessJob) -> None: features.has("organizations:commit-context", event.project.organization) and has_integrations ): - if ( - features.has( - "organizations:suspect-commits-all-frames", event.project.organization - ) - and not job["group_state"]["is_new"] - ): + if not job["group_state"]["is_new"]: return - cache_key = DEBOUNCE_CACHE_KEY(event.group_id) - if cache.get(cache_key): - metrics.incr("sentry.tasks.process_commit_context.debounce") - return process_commit_context.delay( event_id=event.event_id, event_platform=event.platform, diff --git a/tests/sentry/integrations/github/test_client.py b/tests/sentry/integrations/github/test_client.py index f613fa5632dce1..15d87cdc7ea5b9 100644 --- a/tests/sentry/integrations/github/test_client.py +++ b/tests/sentry/integrations/github/test_client.py @@ -224,127 +224,6 @@ def test_get_codeowner_file(self, mock_jwt, mock_check_file): ) assert result == GITHUB_CODEOWNERS - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") - @responses.activate - def test_get_blame_for_file(self, get_jwt): - path = "src/sentry/integrations/github/client.py" - ref = "master" - query = f"""query {{ - repository(name: "foo", owner: "Test-Organization") {{ - ref(qualifiedName: "{ref}") {{ - target {{ - ... on Commit {{ - blame(path: "{path}") {{ - ranges {{ - commit {{ - oid - author {{ - name - email - }} - message - committedDate - }} - startingLine - endingLine - age - }} - }} - }} - }} - }} - }} - }}""" - responses.add( - method=responses.POST, - url="https://api.github.com/graphql", - json={"query": query, "data": {"repository": {"ref": {"target": {}}}}}, - content_type="application/json", - ) - resp = self.github_client.get_blame_for_file(self.repo, path, ref, 1) - assert ( - responses.calls[0].request.body - == b'{"query": "query {\\n repository(name: \\"foo\\", owner: \\"Test-Organization\\") {\\n ref(qualifiedName: \\"master\\") {\\n target {\\n ... 
on Commit {\\n blame(path: \\"src/sentry/integrations/github/client.py\\") {\\n ranges {\\n commit {\\n oid\\n author {\\n name\\n email\\n }\\n message\\n committedDate\\n }\\n startingLine\\n endingLine\\n age\\n }\\n }\\n }\\n }\\n }\\n }\\n }"}' - ) - - assert resp == [] - - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") - @responses.activate - def test_get_blame_for_file_errors_no_data(self, get_jwt): - responses.add( - method=responses.POST, - url="https://api.github.com/graphql", - json={"errors": [{"message": "something"}, {"message": "went wrong"}]}, - content_type="application/json", - ) - with pytest.raises(ApiError) as excinfo: - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - (msg,) = excinfo.value.args - assert msg == "something, went wrong" - - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") - @responses.activate - def test_get_blame_for_file_missing_repo(self, get_jwt): - responses.add( - method=responses.POST, - url="https://api.github.com/graphql", - json={ - "data": {"repository": None}, - "errors": [{"message": "something"}, {"message": "went wrong"}], - }, - content_type="application/json", - ) - with pytest.raises(ApiError) as excinfo: - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - assert excinfo.value.code == 404 - assert excinfo.value.text == "Repository does not exist in GitHub." - - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") - @responses.activate - def test_get_blame_for_file_missing_branch(self, get_jwt): - responses.add( - method=responses.POST, - url="https://api.github.com/graphql", - json={ - "data": {"repository": {"ref": None}}, - "errors": [{"message": "something"}, {"message": "went wrong"}], - }, - content_type="application/json", - ) - with pytest.raises(ApiError) as excinfo: - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - assert excinfo.value.code == 404 - assert excinfo.value.text == "Branch does not exist in GitHub." - - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") - @responses.activate - def test_get_blame_for_file_rate_limited(self, get_jwt): - responses.add( - method=responses.POST, - url="https://api.github.com/graphql", - json={ - "errors": [{"message": "something", "type": "RATE_LIMITED"}], - }, - content_type="application/json", - ) - with pytest.raises(ApiRateLimitedError): - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") - @responses.activate - def test_get_blame_for_file_graphql_no_data(self, get_jwt): - responses.add( - method=responses.POST, - url="https://api.github.com/graphql", - json={}, - content_type="application/json", - ) - with pytest.raises(ApiError) as excinfo: - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - assert excinfo.value.code == 404 - assert excinfo.value.text == "GitHub returned no data." - @responses.activate def test_get_cached_repo_files_caching_functionality(self): """Fetch files for repo. 
Test caching logic.""" @@ -452,32 +331,6 @@ def test_get_comment_reactions(self, get_jwt): del stored_reactions["url"] assert reactions == stored_reactions - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=ApiError) - @responses.activate - def test_fatal_and_disable_integration(self, get_jwt): - """ - fatal fast shut off with disable flag on, integration should be broken and disabled - """ - responses.add( - responses.POST, - status=403, - url="https://api.github.com/graphql", - json={ - "message": "This installation has been suspended", - "documentation_url": "https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app", - }, - ) - - self.github_client.integration = None - with pytest.raises(Exception): - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - - buffer = IntegrationRequestBuffer(self.github_client._get_redis_key()) - self.integration.refresh_from_db() - assert self.integration.status == ObjectStatus.DISABLED - assert [len(item) == 0 for item in buffer._get_broken_range_from_buffer()] - assert len(buffer._get_all_from_buffer()) == 0 - @responses.activate def test_disable_email(self): with self.tasks(): @@ -500,112 +353,6 @@ def test_disable_email(self): in msg.body ) - @mock.patch("sentry.integrations.github.client.get_jwt", return_value=ApiError) - @responses.activate - def test_fatal_integration(self, get_jwt): - """ - fatal fast shut off with disable flag on, integration should be broken and disabled - """ - responses.add( - responses.POST, - status=403, - url="https://api.github.com/graphql", - json={ - "message": "This installation has been suspended", - "documentation_url": "https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app", - }, - ) - - self.github_client.integration = None - with pytest.raises(Exception): - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - self.integration.refresh_from_db() - assert self.integration.status == ObjectStatus.DISABLED - - @responses.activate - def test_error_integration(self): - """ - recieve two errors and errors are recorded, integration is not broken yet so no disable - """ - responses.add( - responses.POST, - status=404, - url="https://api.github.com/graphql", - json={ - "message": "Not found", - }, - ) - responses.add( - responses.POST, - status=404, - url="https://api.github.com/graphql", - json={ - "message": "Not found", - }, - ) - self.github_client.integration = None - with pytest.raises(Exception): - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - with pytest.raises(Exception): - self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1) - buffer = IntegrationRequestBuffer(self.github_client._get_redis_key()) - assert int(buffer._get_all_from_buffer()[0]["error_count"]) == 2 - assert buffer.is_integration_broken() is False - - @responses.activate - @freeze_time("2022-01-01 03:30:00") - def test_slow_integration_is_not_broken_or_disabled(self): - """ - slow test with disable flag on - put errors and success in buffer for 10 days, assert integration is not broken or disabled - """ - - responses.add( - responses.POST, - status=404, - url="https://api.github.com/graphql", - json={ - "message": "Not found", - }, - ) - buffer = IntegrationRequestBuffer(self.github_client._get_redis_key()) - now = datetime.now() - timedelta(hours=1) - for i in reversed(range(10)): - with freeze_time(now - timedelta(days=i)): - buffer.record_error() - buffer.record_success() - 
self.github_client.integration = None
- with pytest.raises(Exception):
- self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1)
- assert buffer.is_integration_broken() is False
- self.integration.refresh_from_db()
- assert self.integration.status == ObjectStatus.ACTIVE
-
- @responses.activate
- @freeze_time("2022-01-01 03:30:00")
- def test_a_slow_integration_is_broken(self):
- """
- slow shut off with disable flag on
- put errors in buffer for 10 days, assert integration is broken and disabled
- """
- responses.add(
- responses.POST,
- status=404,
- url="https://api.github.com/graphql",
- json={"message": "Not found"},
- )
- buffer = IntegrationRequestBuffer(self.github_client._get_redis_key())
- now = datetime.now() - timedelta(hours=1)
- for i in reversed(range(10)):
- with freeze_time(now - timedelta(days=i)):
- buffer.record_error()
- self.github_client.integration = None
- assert self.integration.status == ObjectStatus.ACTIVE
- with pytest.raises(Exception):
- self.github_client.get_blame_for_file(self.repo, "foo.py", "main", 1)
- self.integration.refresh_from_db()
- assert self.integration.status == ObjectStatus.DISABLED
-
 @control_silo_test
 class GithubProxyClientTest(TestCase):
@@ -867,6 +614,173 @@ def setUp(self, get_jwt):
 )
 
 
+@region_silo_test
+class GitHubClientFileBlameIntegrationDisableTest(TestCase):
+ @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
+ def setUp(self, get_jwt):
+ ten_days = datetime.utcnow() + timedelta(days=10)
+ self.integration = self.create_integration(
+ organization=self.organization,
+ provider="github",
+ name="Github Test Org",
+ external_id="1",
+ metadata={
+ "access_token": "12345token",
+ "expires_at": ten_days.strftime("%Y-%m-%dT%H:%M:%S"),
+ },
+ )
+ self.repo = Repository.objects.create(
+ organization_id=self.organization.id,
+ name="Test-Organization/foo",
+ url="https://github.com/Test-Organization/foo",
+ provider="integrations:github",
+ external_id=123,
+ integration_id=self.integration.id,
+ )
+ install = self.integration.get_installation(organization_id=self.organization.id)
+ assert isinstance(install, GitHubIntegration)
+ self.install = install
+ self.github_client = self.install.get_client()
+ self.file = SourceLineInfo(
+ path="src/sentry/integrations/github/client_1.py",
+ lineno=10,
+ ref="master",
+ repo=self.repo,
+ code_mapping=None, # type: ignore
+ )
+
+ @mock.patch("sentry.integrations.github.client.get_jwt", return_value=ApiError)
+ @responses.activate
+ def test_fatal_and_disable_integration(self, get_jwt):
+ """
+ fatal fast shut off with disable flag on, integration should be broken and disabled
+ """
+ responses.add(
+ responses.POST,
+ status=403,
+ url="https://api.github.com/graphql",
+ json={
+ "message": "This installation has been suspended",
+ "documentation_url": "https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app",
+ },
+ )
+
+ self.github_client.integration = None
+ with pytest.raises(Exception):
+ self.github_client.get_blame_for_files([self.file], extra={})
+
+ buffer = IntegrationRequestBuffer(self.github_client._get_redis_key())
+ self.integration.refresh_from_db()
+ assert self.integration.status == ObjectStatus.DISABLED
+ assert [len(item) == 0 for item in buffer._get_broken_range_from_buffer()]
+ assert len(buffer._get_all_from_buffer()) == 0
+
+ @responses.activate
+ def test_error_integration(self):
+ """
+ receive two errors and errors are recorded, integration is not broken yet so no disable
+ """
+ 
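# The error-count comment below suggests get_blame_for_files performs a
# pre-flight check against GitHub's /rate_limit endpoint (its "graphql"
# resource) before issuing blame queries, which is why that stub is registered
# first. A hedged sketch of such a check; graphql_budget_ok and its
# min_remaining threshold are hypothetical names, not Sentry's API:
import requests

def graphql_budget_ok(session: requests.Session, min_remaining: int = 100) -> bool:
    resp = session.get("https://api.github.com/rate_limit")
    graphql = resp.json().get("resources", {}).get("graphql", {})
    # Only proceed with blame queries while enough GraphQL budget remains.
    return graphql.get("remaining", 0) > min_remaining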
responses.add(
+ method=responses.GET,
+ url="https://api.github.com/rate_limit",
+ body=json.dumps(
+ {
+ "resources": {
+ "graphql": {
+ "limit": 5000,
+ "used": 1,
+ "remaining": 4999,
+ "reset": 1613064000,
+ }
+ }
+ }
+ ),
+ status=200,
+ content_type="application/json",
+ )
+
+ responses.add(
+ responses.POST,
+ status=404,
+ url="https://api.github.com/graphql",
+ json={
+ "message": "Not found",
+ },
+ )
+ responses.add(
+ responses.POST,
+ status=404,
+ url="https://api.github.com/graphql",
+ json={
+ "message": "Not found",
+ },
+ )
+ self.github_client.integration = None
+ with pytest.raises(Exception):
+ self.github_client.get_blame_for_files([self.file], extra={})
+ with pytest.raises(Exception):
+ self.github_client.get_blame_for_files([self.file], extra={})
+ buffer = IntegrationRequestBuffer(self.github_client._get_redis_key())
+ assert (
+ int(buffer._get_all_from_buffer()[0]["error_count"]) == 2
+ ) # 2 from graphql, 2 from rate_limit check
+ assert buffer.is_integration_broken() is False
+
+ @responses.activate
+ @freeze_time("2022-01-01 03:30:00")
+ def test_slow_integration_is_not_broken_or_disabled(self):
+ """
+ slow test with disable flag on
+ put errors and success in buffer for 10 days, assert integration is not broken or disabled
+ """
+
+ responses.add(
+ responses.POST,
+ status=404,
+ url="https://api.github.com/graphql",
+ json={
+ "message": "Not found",
+ },
+ )
+ buffer = IntegrationRequestBuffer(self.github_client._get_redis_key())
+ now = datetime.now() - timedelta(hours=1)
+ for i in reversed(range(10)):
+ with freeze_time(now - timedelta(days=i)):
+ buffer.record_error()
+ buffer.record_success()
+ self.github_client.integration = None
+ with pytest.raises(Exception):
+ self.github_client.get_blame_for_files([self.file], extra={})
+ assert buffer.is_integration_broken() is False
+ self.integration.refresh_from_db()
+ assert self.integration.status == ObjectStatus.ACTIVE
+
+ @responses.activate
+ @freeze_time("2022-01-01 03:30:00")
+ def test_a_slow_integration_is_broken(self):
+ """
+ slow shut off with disable flag on
+ put errors in buffer for 10 days, assert integration is broken and disabled
+ """
+ responses.add(
+ responses.POST,
+ status=404,
+ url="https://api.github.com/graphql",
+ json={"message": "Not found"},
+ )
+ buffer = IntegrationRequestBuffer(self.github_client._get_redis_key())
+ now = datetime.now() - timedelta(hours=1)
+ for i in reversed(range(10)):
+ with freeze_time(now - timedelta(days=i)):
+ buffer.record_error()
+ self.github_client.integration = None
+ assert self.integration.status == ObjectStatus.ACTIVE
+ with pytest.raises(Exception):
+ self.github_client.get_blame_for_files([self.file], extra={})
+ self.integration.refresh_from_db()
+ assert self.integration.status == ObjectStatus.DISABLED
+
+
 @region_silo_test
 class GitHubClientFileBlameQueryBuilderTest(GitHubClientFileBlameBase):
 """
diff --git a/tests/sentry/integrations/github/test_integration.py b/tests/sentry/integrations/github/test_integration.py
index 7c93a0f9e5fc04..af8652d897c9ae 100644
--- a/tests/sentry/integrations/github/test_integration.py
+++ b/tests/sentry/integrations/github/test_integration.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
-from datetime import datetime, timedelta, timezone
+from dataclasses import asdict
+from datetime import datetime, timezone
 from typing import Any
 from unittest import mock
 from unittest.mock import patch
@@ -9,7 +10,6 @@
 import pytest
 import responses
 from django.urls import reverse
-from isodate import 
parse_datetime import sentry from sentry.api.utils import generate_organization_url @@ -20,6 +20,7 @@ GitHubIntegrationProvider, client, ) +from sentry.integrations.mixins.commit_context import CommitInfo, FileBlameInfo, SourceLineInfo from sentry.integrations.utils.code_mapping import Repo, RepoTree from sentry.models.integrations.integration import Integration from sentry.models.integrations.organization_integration import OrganizationIntegration @@ -672,7 +673,13 @@ def set_rate_limit( if status != 200 else { "resources": { - "core": {"limit": limit, "remaining": remaining, "used": "foo", "reset": 123} + "core": {"limit": limit, "remaining": remaining, "used": "foo", "reset": 123}, + "graphql": { + "limit": limit, + "remaining": remaining, + "used": "foo", + "reset": 123, + }, } } ) @@ -887,7 +894,7 @@ def test_get_trees_for_org_falls_back_to_cache_once_MAX_CONNECTION_ERRORS_is_hit ) @responses.activate - def test_get_commit_context(self): + def test_get_commit_context_all_frames(self): self.assert_setup_flow() integration = Integration.objects.get(provider=self.provider.key) with assume_test_silo_mode(SiloMode.REGION): @@ -901,93 +908,66 @@ def test_get_commit_context(self): integration_id=integration.id, ) + self.set_rate_limit() installation = integration.get_installation(self.organization.id) - filepath = "sentry/tasks.py" - event_frame = { - "function": "handle_set_commits", - "abs_path": "/usr/src/sentry/src/sentry/tasks.py", - "module": "sentry.tasks", - "in_app": True, - "lineno": 30, - "filename": "sentry/tasks.py", - } - ref = "master" - query = f"""query {{ - repository(name: "foo", owner: "Test-Organization") {{ - ref(qualifiedName: "{ref}") {{ - target {{ - ... on Commit {{ - blame(path: "{filepath}") {{ - ranges {{ - commit {{ - oid - author {{ - name - email - }} - message - committedDate - }} - startingLine - endingLine - age - }} - }} - }} - }} - }} - }} - }}""" - commit_date = (datetime.now(tz=timezone.utc) - timedelta(days=4)).strftime( - "%Y-%m-%dT%H:%M:%SZ" + file = SourceLineInfo( + path="src/github.py", + lineno=10, + ref="master", + repo=repo, + code_mapping=None, # type: ignore ) + responses.add( - method=responses.POST, + responses.POST, url="https://api.github.com/graphql", json={ - "query": query, "data": { - "repository": { - "ref": { + "repository0": { + "ref0": { "target": { - "blame": { + "blame0": { "ranges": [ { "commit": { - "oid": "d42409d56517157c48bf3bd97d3f75974dde19fb", + "oid": "123", "author": { - "date": commit_date, - "email": "nisanthan.nanthakumar@sentry.io", - "name": "Nisanthan Nanthakumar", + "name": "Foo", + "email": "foo@example.com", }, - "message": "Add installation instructions", - "committedDate": commit_date, + "message": "hello", + "committedDate": "2023-01-01T00:00:00Z", }, - "startingLine": 30, - "endingLine": 30, - "age": 3, - } + "startingLine": 10, + "endingLine": 15, + "age": 0, + }, ] - } + }, } } } - }, + } }, content_type="application/json", + status=200, ) - commit_context = installation.get_commit_context(repo, filepath, ref, event_frame) - - commit_context_expected = { - "commitId": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "committedDate": parse_datetime(commit_date), - "commitMessage": "Add installation instructions", - "commitAuthorName": "Nisanthan Nanthakumar", - "commitAuthorEmail": "nisanthan.nanthakumar@sentry.io", - } - assert commit_context == commit_context_expected + response = installation.get_commit_context_all_frames([file], extra={}) + + assert response == [ + FileBlameInfo( + **asdict(file), + 
commit=CommitInfo( + commitId="123", + commitMessage="hello", + committedDate=datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc), + commitAuthorEmail="foo@example.com", + commitAuthorName="Foo", + ), + ) + ] @responses.activate def test_source_url_matches(self): diff --git a/tests/sentry/integrations/github_enterprise/test_integration.py b/tests/sentry/integrations/github_enterprise/test_integration.py index fa9edb9383c7ea..b48088d477b647 100644 --- a/tests/sentry/integrations/github_enterprise/test_integration.py +++ b/tests/sentry/integrations/github_enterprise/test_integration.py @@ -1,10 +1,9 @@ from dataclasses import asdict -from datetime import datetime, timedelta, timezone +from datetime import datetime, timezone from unittest.mock import patch from urllib.parse import parse_qs, urlencode, urlparse import responses -from isodate import parse_datetime from sentry.integrations.github_enterprise import GitHubEnterpriseIntegrationProvider from sentry.integrations.mixins.commit_context import CommitInfo, FileBlameInfo, SourceLineInfo @@ -334,111 +333,6 @@ def test_get_stacktrace_link_use_default_if_version_404(self, get_jwt, _): assert result == "https://github.example.org/Test-Organization/foo/blob/master/README.md" - @patch("sentry.integrations.github_enterprise.integration.get_jwt", return_value="jwt_token_1") - @patch("sentry.integrations.github_enterprise.client.get_jwt", return_value="jwt_token_1") - @responses.activate - def test_get_commit_context(self, get_jwt, _): - self.assert_setup_flow() - integration = Integration.objects.get(provider=self.provider.key) - with assume_test_silo_mode(SiloMode.REGION): - repo = Repository.objects.create( - organization_id=self.organization.id, - name="Test-Organization/foo", - url="https://github.example.org/Test-Organization/foo", - provider="integrations:github_enterprise", - external_id=123, - config={"name": "Test-Organization/foo"}, - integration_id=integration.id, - ) - - installation = integration.get_installation(self.organization.id) - - filepath = "sentry/tasks.py" - event_frame = { - "function": "handle_set_commits", - "abs_path": "/usr/src/sentry/src/sentry/tasks.py", - "module": "sentry.tasks", - "in_app": True, - "lineno": 30, - "filename": "sentry/tasks.py", - } - ref = "master" - query = f"""query {{ - repository(name: "foo", owner: "Test-Organization") {{ - ref(qualifiedName: "{ref}") {{ - target {{ - ... 
on Commit {{ - blame(path: "{filepath}") {{ - ranges {{ - commit {{ - oid - author {{ - name - email - }} - message - committedDate - }} - startingLine - endingLine - age - }} - }} - }} - }} - }} - }} - }}""" - commit_date = (datetime.now(tz=timezone.utc) - timedelta(days=4)).strftime( - "%Y-%m-%dT%H:%M:%SZ" - ) - responses.add( - method=responses.POST, - url="https://github.example.org/api/graphql", - json={ - "query": query, - "data": { - "repository": { - "ref": { - "target": { - "blame": { - "ranges": [ - { - "commit": { - "oid": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "author": { - "date": commit_date, - "email": "nisanthan.nanthakumar@sentry.io", - "name": "Nisanthan Nanthakumar", - }, - "message": "Add installation instructions", - "committedDate": commit_date, - }, - "startingLine": 30, - "endingLine": 30, - "age": 3, - } - ] - } - } - } - } - }, - }, - content_type="application/json", - ) - commit_context = installation.get_commit_context(repo, filepath, ref, event_frame) - - commit_context_expected = { - "commitId": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "committedDate": parse_datetime(commit_date), - "commitMessage": "Add installation instructions", - "commitAuthorName": "Nisanthan Nanthakumar", - "commitAuthorEmail": "nisanthan.nanthakumar@sentry.io", - } - - assert commit_context == commit_context_expected - @patch("sentry.integrations.github_enterprise.integration.get_jwt", return_value="jwt_token_1") @patch("sentry.integrations.github_enterprise.client.get_jwt", return_value="jwt_token_1") @responses.activate diff --git a/tests/sentry/integrations/gitlab/test_integration.py b/tests/sentry/integrations/gitlab/test_integration.py index e5050f7cb53403..a437eb28831885 100644 --- a/tests/sentry/integrations/gitlab/test_integration.py +++ b/tests/sentry/integrations/gitlab/test_integration.py @@ -7,7 +7,6 @@ import responses from django.core.cache import cache from django.test import override_settings -from isodate import parse_datetime from fixtures.gitlab import GET_COMMIT_RESPONSE, GitLabTestCase from sentry.integrations.gitlab import GitlabIntegrationProvider @@ -350,124 +349,6 @@ def test_get_stacktrace_link_use_default_if_version_404(self): source_url == "https://gitlab.example.com/getsentry/example-repo/blob/master/README.md" ) - @responses.activate - def test_get_commit_context(self): - self.assert_setup_flow() - external_id = 4 - integration = Integration.objects.get(provider=self.provider.key) - instance = integration.metadata["instance"] - with assume_test_silo_mode(SiloMode.REGION): - repo = Repository.objects.create( - organization_id=self.organization.id, - name="Get Sentry / Example Repo", - external_id=f"{instance}:{external_id}", - url="https://gitlab.example.com/getsentry/projects/example-repo", - config={"project_id": external_id, "path": "getsentry/example-repo"}, - provider="integrations:gitlab", - integration_id=integration.id, - ) - installation = integration.get_installation(self.organization.id) - - filepath = "sentry/tasks.py" - encoded_filepath = quote(filepath, safe="") - ref = "master" - event_frame = { - "function": "handle_set_commits", - "abs_path": "/usr/src/sentry/src/sentry/tasks.py", - "module": "sentry.tasks", - "in_app": True, - "lineno": 30, - "filename": "sentry/tasks.py", - } - url = "https://gitlab.example.com/api/v4/projects/{id}/repository/files/{path}/blame?ref={ref}&range%5Bstart%5D={line}&range%5Bend%5D={line}" - - responses.add( - responses.GET, - url.format(id=external_id, path=encoded_filepath, ref=ref, 
line=event_frame["lineno"]), - json=[ - { - "commit": { - "id": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "message": "Rename title", - "parent_ids": ["cc6e14f9328fa6d7b5a0d3c30dc2002a3f2a3822"], - "authored_date": "2015-11-14T10:12:32.000Z", - "author_name": "Nisanthan Nanthakumar", - "author_email": "nisanthan.nanthakumar@sentry.io", - "committed_date": "2015-11-14T10:12:32.000Z", - "committer_name": "Nisanthan Nanthakumar", - "committer_email": "nisanthan.nanthakumar@sentry.io", - }, - "lines": ["## Installation Docs"], - }, - { - "commit": { - "id": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "message": "Add installation instructions", - "parent_ids": ["cc6e14f9328fa6d7b5a0d3c30dc2002a3f2a3822"], - "authored_date": "2015-12-18T08:12:22.000Z", - "author_name": "Nisanthan Nanthakumar", - "author_email": "nisanthan.nanthakumar@sentry.io", - "committed_date": "2015-12-18T08:12:22.000Z", - "committer_name": "Nisanthan Nanthakumar", - "committer_email": "nisanthan.nanthakumar@sentry.io", - }, - "lines": ["## Docs"], - }, - { - "commit": { - "id": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "message": "Create docs", - "parent_ids": ["cc6e14f9328fa6d7b5a0d3c30dc2002a3f2a3822"], - "authored_date": "2015-10-03T09:34:32.000Z", - "author_name": "Nisanthan Nanthakumar", - "author_email": "nisanthan.nanthakumar@sentry.io", - "committed_date": "2015-10-03T09:34:32.000Z", - "committer_name": "Nisanthan Nanthakumar", - "committer_email": "nisanthan.nanthakumar@sentry.io", - }, - "lines": ["## New"], - }, - ], - ) - commit_context = installation.get_commit_context(repo, filepath, ref, event_frame) - - commit_context_expected = { - "commitId": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "committedDate": parse_datetime("2015-12-18T08:12:22.000Z"), - "commitMessage": "Add installation instructions", - "commitAuthorName": "Nisanthan Nanthakumar", - "commitAuthorEmail": "nisanthan.nanthakumar@sentry.io", - } - - assert commit_context == commit_context_expected - - # We are now going to test the case where the Gitlab instance will return a non-UTC committed_data - # This 2015-12-18T11:12:22.000+03:00 vs 2015-10-03T09:34:32.000Z - event_frame["lineno"] = 31 - responses.add( - responses.GET, - url.format(id=external_id, path=encoded_filepath, ref=ref, line=event_frame["lineno"]), - json=[ - { - "commit": { - "id": "d42409d56517157c48bf3bd97d3f75974dde19fb", - "message": "Add installation instructions", - "parent_ids": ["cc6e14f9328fa6d7b5a0d3c30dc2002a3f2a3822"], - "authored_date": "2015-12-18T11:12:22.000+03:00", - "author_name": "Nisanthan Nanthakumar", - "author_email": "nisanthan.nanthakumar@sentry.io", - "committed_date": "2015-12-18T11:12:22.000+03:00", - "committer_name": "Nisanthan Nanthakumar", - "committer_email": "nisanthan.nanthakumar@sentry.io", - }, - "lines": ["## Docs"], - }, - ], - ) - commit_context = installation.get_commit_context(repo, filepath, ref, event_frame) - # The returned commit context has converted the timezone to UTC (000Z) - assert commit_context == commit_context_expected - @responses.activate def test_get_commit_context_all_frames(self): self.assert_setup_flow() diff --git a/tests/sentry/tasks/test_commit_context.py b/tests/sentry/tasks/test_commit_context.py index 92fecf0f88d17a..233d234a66e655 100644 --- a/tests/sentry/tasks/test_commit_context.py +++ b/tests/sentry/tasks/test_commit_context.py @@ -1,10 +1,10 @@ from datetime import datetime, timedelta from datetime import timezone as datetime_timezone -from unittest.mock import Mock, patch +from unittest.mock 
import patch import pytest import responses -from celery.exceptions import MaxRetriesExceededError, Retry +from celery.exceptions import Retry from django.utils import timezone from sentry.integrations.github.integration import GitHubIntegrationProvider @@ -16,7 +16,7 @@ from sentry.models.pullrequest import PullRequest, PullRequestComment, PullRequestCommit from sentry.models.repository import Repository from sentry.services.hybrid_cloud.integration import integration_service -from sentry.shared_integrations.exceptions import ApiError, ApiRateLimitedError +from sentry.shared_integrations.exceptions import ApiError from sentry.tasks.commit_context import ( PR_COMMENT_WINDOW, process_commit_context, @@ -24,7 +24,6 @@ ) from sentry.testutils.cases import IntegrationTestCase, TestCase from sentry.testutils.helpers.datetime import before_now, iso_format -from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba from sentry.utils.committers import get_frame_paths @@ -91,443 +90,6 @@ def setUp(self): ) -@region_silo_test -class TestCommitContext(TestCommitContextMixin): - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value={ - "commitId": "asdfwreqr", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "admin@localhost", - }, - ) - def test_simple(self, mock_get_commit_context): - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert GroupOwner.objects.get( - group=self.event.group, - project=self.event.project, - organization=self.event.project.organization, - type=GroupOwnerType.SUSPECT_COMMIT.value, - ) - - assert GroupOwner.objects.get( - group=self.event.group, - project=self.event.project, - organization=self.event.project.organization, - type=GroupOwnerType.SUSPECT_COMMIT.value, - ).context == {"commitId": self.commit.id} - - @patch("sentry.integrations.utils.commit_context.logger.exception") - @patch("sentry.analytics.record") - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - side_effect=ApiError(text="integration_failed"), - ) - def test_failed_to_fetch_commit_context_apierror( - self, mock_get_commit_context, mock_record, mock_logger_exception - ): - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - - assert mock_logger_exception.call_count == 1 - mock_record.assert_called_with( - "integrations.failed_to_fetch_commit_context", - organization_id=self.organization.id, - project_id=self.project.id, - code_mapping_id=self.code_mapping.id, - group_id=self.event.group_id, - provider="github", - error_message="integration_failed", - ) - - @patch("sentry.integrations.utils.commit_context.logger.exception") - @patch("sentry.analytics.record") - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - 
side_effect=ApiRateLimitedError("exceeded rate limit"), - ) - def test_failed_to_fetch_commit_context_rate_limit( - self, mock_get_commit_context, mock_record, mock_logger_exception - ): - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - - assert not mock_logger_exception.called - mock_record.assert_called_with( - "integrations.failed_to_fetch_commit_context", - organization_id=self.organization.id, - project_id=self.project.id, - code_mapping_id=self.code_mapping.id, - group_id=self.event.group_id, - provider="github", - error_message="exceeded rate limit", - ) - - @patch("sentry.analytics.record") - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - side_effect=ApiError(text="integration_failed"), - ) - def test_failed_to_fetch_commit_context_record(self, mock_get_commit_context, mock_record): - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - - mock_record.assert_called_with( - "integrations.failed_to_fetch_commit_context", - organization_id=self.organization.id, - project_id=self.project.id, - code_mapping_id=self.code_mapping.id, - group_id=self.event.group_id, - provider="github", - error_message="integration_failed", - ) - - @patch("sentry.tasks.commit_context.logger") - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value={ - "commitId": "asdfasdf", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=370)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "admin@localhost", - }, - ) - def test_found_commit_is_too_old(self, mock_get_commit_context, mock_logger): - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - - assert mock_logger.info.call_count == 1 - mock_logger.info.assert_called_with( - "process_commit_context.find_commit_context", - extra={ - "event": self.event.event_id, - "group": self.event.group_id, - "organization": self.event.group.project.organization_id, - "reason": "could_not_fetch_commit_context", - "code_mappings_count": 1, - "fallback": True, - }, - ) - - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value={ - "commitId": "asdfasdf", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "admin@localhost", - }, - ) - def test_no_matching_commit_in_db(self, mock_get_commit_context): - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - assert not Commit.objects.filter(key="asdfasdf").exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - 
event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert Commit.objects.filter(key="asdfasdf").exists() - assert GroupOwner.objects.filter(group=self.event.group).exists() - - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value={ - "commitId": "asdfwreqr", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "admin@localhost", - }, - ) - def test_delete_old_entries(self, mock_get_commit_context): - # As new events come in associated with new owners, we should delete old ones. - user_2 = self.create_user("another@user.com", is_superuser=True) - self.create_member(teams=[self.team], user=user_2, organization=self.organization) - owner = GroupOwner.objects.create( - group=self.event.group, - user_id=user_2.id, - project=self.project, - organization=self.organization, - type=GroupOwnerType.SUSPECT_COMMIT.value, - date_added=timezone.now() - timedelta(days=8), - ) - with self.tasks(): - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert not GroupOwner.objects.filter(id=owner.id).exists() - assert GroupOwner.objects.filter(group=self.event.group).count() == 1 - assert GroupOwner.objects.filter(group=self.event.group, user_id=self.user.id).exists() - - @patch("sentry.tasks.groupowner.process_suspect_commits.delay") - def test_no_inapp_frame_in_stacktrace(self, mock_process_suspect_commits): - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - self.event_2 = self.store_event( - data={ - "message": "Kaboom!", - "platform": "python", - "timestamp": iso_format(before_now(seconds=10)), - "stacktrace": { - "frames": [ - { - "function": "handle_set_commits", - "abs_path": "/usr/src/sentry/src/sentry/tasks.py", - "module": "sentry.tasks", - "in_app": False, - "lineno": 30, - "filename": "sentry/tasks.py", - }, - { - "function": "set_commits", - "abs_path": "/usr/src/sentry/src/sentry/models/release.py", - "module": "sentry.models.release", - "in_app": False, - "lineno": 39, - "filename": "sentry/models/release.py", - }, - ] - }, - "tags": {"sentry:release": self.release.version}, - "fingerprint": ["put-me-in-the-control-group"], - }, - project_id=self.project.id, - ) - event_frames = get_frame_paths(self.event_2) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert mock_process_suspect_commits.call_count == 1 - assert not GroupOwner.objects.filter( - group=self.event.group, - project=self.event.project, - organization=self.event.project.organization, - type=GroupOwnerType.SUSPECT_COMMIT.value, - ).exists() - - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value={ - "commitId": "somekey", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "randomuser@sentry.io", - }, - ) - def test_commit_author_not_in_sentry(self, mock_get_commit_context): - self.commit_author_2 = 
self.create_commit_author( - project=self.project, - ) - self.commit_2 = self.create_commit( - project=self.project, - repo=self.repo, - author=self.commit_author_2, - key="somekey", - message="placeholder commit message", - ) - - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert GroupOwner.objects.filter(group=self.event.group).exists() - assert len(GroupOwner.objects.filter(group=self.event.group)) == 1 - owner = GroupOwner.objects.get(group=self.event.group) - assert owner.type == GroupOwnerType.SUSPECT_COMMIT.value - assert owner.user_id is None - assert owner.team is None - assert owner.context == {"commitId": self.commit_2.id} - - @patch("sentry.tasks.commit_context.get_users_for_authors", return_value={}) - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value={ - "commitId": "somekey", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "randomuser@sentry.io", - }, - ) - def test_commit_author_no_user(self, mock_get_commit_context, mock_get_users_for_author): - self.commit_author_2 = self.create_commit_author( - project=self.project, - ) - self.commit_2 = self.create_commit( - project=self.project, - repo=self.repo, - author=self.commit_author_2, - key="somekey", - message="placeholder commit message", - ) - - with self.tasks(), patch( - "sentry.tasks.commit_context.get_users_for_authors", return_value={} - ): - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert GroupOwner.objects.filter(group=self.event.group).exists() - assert len(GroupOwner.objects.filter(group=self.event.group)) == 1 - owner = GroupOwner.objects.get(group=self.event.group) - assert owner.type == GroupOwnerType.SUSPECT_COMMIT.value - assert owner.user_id is None - assert owner.team is None - assert owner.context == {"commitId": self.commit_2.id} - - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value={ - "commitId": "somekey", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "randomuser@sentry.io", - }, - ) - def test_multiple_matching_code_mappings_but_only_1_repository_has_the_commit_in_db( - self, mock_get_commit_context - ): - self.integration_2 = self.create_integration( - organization=self.organization, - provider="github", - name="GitHub", - external_id="github:2", - ) - - self.repo_2 = Repository.objects.create( - organization_id=self.organization.id, - name="another/example", - integration_id=self.integration_2.id, - ) - self.code_mapping_2 = self.create_code_mapping( - repo=self.repo_2, project=self.project, stack_root="sentry", source_root="sentry" - ) - - self.commit_author_2 = self.create_commit_author( - project=self.project, - ) - self.commit_2 = self.create_commit( - project=self.project, - repo=self.repo_2, - author=self.commit_author_2, - key="somekey", - message="placeholder commit 
message", - ) - - with self.tasks(): - assert not GroupOwner.objects.filter(group=self.event.group).exists() - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert GroupOwner.objects.filter(group=self.event.group).exists() - assert len(GroupOwner.objects.filter(group=self.event.group)) == 1 - owner = GroupOwner.objects.get(group=self.event.group) - assert owner.type == GroupOwnerType.SUSPECT_COMMIT.value - assert owner.user_id is None - assert owner.team is None - assert owner.context == {"commitId": self.commit_2.id} - - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - side_effect=ApiError(text="integration_failed"), - ) - @patch("sentry.tasks.groupowner.process_suspect_commits.delay") - def test_fallback_if_max_retries_exceeded(self, mock_suspect_commits, mock_get_commit_context): - def after_return(self, status, retval, task_id, args, kwargs, einfo): - raise MaxRetriesExceededError() - - with self.tasks() and pytest.raises(MaxRetriesExceededError): - with patch("celery.app.task.Task.after_return", after_return): - process_commit_context.apply( - kwargs={ - "event_id": self.event.event_id, - "event_platform": self.event.platform, - "event_frames": get_frame_paths(self.event), - "group_id": self.event.group_id, - "project_id": self.event.project_id, - }, - retries=1, - ) - - assert mock_suspect_commits.called - - @region_silo_test class TestCommitContextAllFrames(TestCommitContextMixin): def setUp(self): @@ -593,7 +155,6 @@ def setUp(self): @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_success_existing_commit(self, mock_get_commit_context, mock_record): """ Tests a simple successful case, where get_commit_context_all_frames returns @@ -656,7 +217,6 @@ def test_success_existing_commit(self, mock_get_commit_context, mock_record): @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_success_create_commit(self, mock_get_commit_context, mock_record): """ A simple success case where a new commit needs to be created. @@ -702,7 +262,6 @@ def test_success_create_commit(self, mock_get_commit_context, mock_record): @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_success_multiple_blames(self, mock_get_commit_context, mock_record): """ A simple success case where multiple blames are returned. 
@@ -739,7 +298,6 @@ def test_success_multiple_blames(self, mock_get_commit_context, mock_record): @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_maps_correct_files(self, mock_get_commit_context, mock_record): """ Tests that the get_commit_context_all_frames function is called with the correct @@ -811,7 +369,6 @@ def test_maps_correct_files(self, mock_get_commit_context, mock_record): @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_failure_no_inapp_frames( self, mock_get_commit_context, mock_record, mock_process_suspect_commits ): @@ -890,7 +447,6 @@ def test_failure_no_inapp_frames( @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_failure_no_blames( self, mock_get_commit_context, mock_record, mock_process_suspect_commits, mock_logger_info ): @@ -950,7 +506,6 @@ def test_failure_no_blames( @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_failure_old_blame( self, mock_get_commit_context, mock_record, mock_process_suspect_commits, mock_logger_info ): @@ -1009,7 +564,6 @@ def test_failure_old_blame( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", side_effect=ApiError("Unknown API error"), ) - @with_feature("organizations:suspect-commits-all-frames") def test_retry_on_bad_api_error(self, mock_get_commit_context, mock_process_suspect_commits): """ A failure case where the integration hits an unknown API error. @@ -1036,7 +590,6 @@ def test_retry_on_bad_api_error(self, mock_get_commit_context, mock_process_susp "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", side_effect=ApiError("File not found", code=404), ) - @with_feature("organizations:suspect-commits-all-frames") def test_no_retry_on_expected_api_error( self, mock_get_commit_context, mock_process_suspect_commits ): @@ -1065,7 +618,6 @@ def test_no_retry_on_expected_api_error( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", side_effect=ApiError("Unknown API error"), ) - @with_feature("organizations:suspect-commits-all-frames") def test_falls_back_on_max_retries( self, mock_get_commit_context, mock_process_suspect_commits, mock_request ): @@ -1098,7 +650,6 @@ def test_falls_back_on_max_retries( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", side_effect=Exception("some other error"), ) - @with_feature("organizations:suspect-commits-all-frames") def test_failure_unknown( self, mock_get_commit_context, @@ -1147,7 +698,6 @@ def test_failure_unknown( @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) - @with_feature("organizations:suspect-commits-all-frames") def test_filters_invalid_and_dedupes_frames(self, mock_get_commit_context, mock_record): """ Tests that invalid frames are filtered out and that duplicate frames are deduped. 
@@ -1233,16 +783,7 @@ def test_filters_invalid_and_dedupes_frames(self, mock_get_commit_context, mock_ @region_silo_test @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - Mock( - return_value={ - "commitId": "asdfwreqr", - "committedDate": (datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), - "commitMessage": "placeholder commit message", - "commitAuthorName": "", - "commitAuthorEmail": "admin@localhost", - } - ), + "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", return_value=[] ) @patch("sentry.tasks.integrations.github.pr_comment.github_comment_workflow.delay") class TestGHCommentQueuing(IntegrationTestCase, TestCommitContextMixin): @@ -1270,6 +811,20 @@ def setUp(self): updated_at=iso_format(before_now(days=1)), group_ids=[], ) + self.blame = FileBlameInfo( + repo=self.repo, + path="sentry/models/release.py", + ref="master", + code_mapping=self.code_mapping, + lineno=39, + commit=CommitInfo( + commitId="asdfwreqr", + committedDate=(datetime.now(tz=datetime_timezone.utc) - timedelta(days=7)), + commitMessage="placeholder commit message", + commitAuthorName="", + commitAuthorEmail="admin@localhost", + ), + ) def add_responses(self): responses.add( @@ -1279,8 +834,9 @@ def add_responses(self): json=[{"merge_commit_sha": self.pull_request.merge_commit_sha}], ) - def test_gh_comment_not_github(self, mock_comment_workflow): + def test_gh_comment_not_github(self, mock_comment_workflow, mock_get_commit_context): """Non github repos shouldn't be commented on""" + mock_get_commit_context.return_value = [self.blame] self.repo.provider = "integrations:gitlab" self.repo.save() with self.tasks(): @@ -1294,8 +850,9 @@ def test_gh_comment_not_github(self, mock_comment_workflow): ) assert not mock_comment_workflow.called - def test_gh_comment_org_option(self, mock_comment_workflow): + def test_gh_comment_org_option(self, mock_comment_workflow, mock_get_commit_context): """No comments on org with organization option disabled""" + mock_get_commit_context.return_value = [self.blame] OrganizationOption.objects.set_value( organization=self.project.organization, key="sentry:github_pr_bot", value=False ) @@ -1313,8 +870,11 @@ def test_gh_comment_org_option(self, mock_comment_workflow): @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_no_pr_from_api(self, get_jwt, mock_comment_workflow): + def test_gh_comment_no_pr_from_api( + self, get_jwt, mock_comment_workflow, mock_get_commit_context + ): """No comments on suspect commit with no pr returned from API response""" + mock_get_commit_context.return_value = [self.blame] self.pull_request.delete() responses.add( @@ -1338,9 +898,11 @@ def test_gh_comment_no_pr_from_api(self, get_jwt, mock_comment_workflow): @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @patch("sentry_sdk.capture_exception") @responses.activate - def test_gh_comment_api_error(self, mock_capture_exception, get_jwt, mock_comment_workflow): + def test_gh_comment_api_error( + self, mock_capture_exception, get_jwt, mock_comment_workflow, mock_get_commit_context + ): """Captures exception if Github API call errors""" - + mock_get_commit_context.return_value = [self.blame] responses.add( responses.GET, self.base_url + f"/repos/example/commits/{self.commit.key}/pulls", @@ -1362,9 +924,11 @@ def test_gh_comment_api_error(self, mock_capture_exception, get_jwt, mock_commen @patch("sentry.integrations.github.client.get_jwt", 
return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_commit_not_in_default_branch(self, get_jwt, mock_comment_workflow): + def test_gh_comment_commit_not_in_default_branch( + self, get_jwt, mock_comment_workflow, mock_get_commit_context + ): """No comments on commit not in default branch""" - + mock_get_commit_context.return_value = [self.blame] responses.add( responses.GET, self.base_url + f"/repos/example/commits/{self.commit.key}/pulls", @@ -1385,8 +949,11 @@ def test_gh_comment_commit_not_in_default_branch(self, get_jwt, mock_comment_wor @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_no_pr_from_query(self, get_jwt, mock_comment_workflow): + def test_gh_comment_no_pr_from_query( + self, get_jwt, mock_comment_workflow, mock_get_commit_context + ): """No comments on suspect commit with no pr row in table""" + mock_get_commit_context.return_value = [self.blame] self.pull_request.delete() self.add_responses() @@ -1404,8 +971,9 @@ def test_gh_comment_no_pr_from_query(self, get_jwt, mock_comment_workflow): @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_pr_too_old(self, get_jwt, mock_comment_workflow): + def test_gh_comment_pr_too_old(self, get_jwt, mock_comment_workflow, mock_get_commit_context): """No comment on pr that's older than PR_COMMENT_WINDOW""" + mock_get_commit_context.return_value = [self.blame] self.pull_request.date_added = iso_format(before_now(days=PR_COMMENT_WINDOW + 1)) self.pull_request.save() @@ -1425,8 +993,9 @@ def test_gh_comment_pr_too_old(self, get_jwt, mock_comment_workflow): @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_repeat_issue(self, get_jwt, mock_comment_workflow): + def test_gh_comment_repeat_issue(self, get_jwt, mock_comment_workflow, mock_get_commit_context): """No comment on a pr that has a comment with the issue in the same pr list""" + mock_get_commit_context.return_value = [self.blame] self.pull_request_comment.group_ids.append(self.event.group_id) self.pull_request_comment.save() @@ -1446,8 +1015,11 @@ def test_gh_comment_repeat_issue(self, get_jwt, mock_comment_workflow): @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_create_queued(self, get_jwt, mock_comment_workflow): + def test_gh_comment_create_queued( + self, get_jwt, mock_comment_workflow, mock_get_commit_context + ): """Task queued if no prior comment exists""" + mock_get_commit_context.return_value = [self.blame] self.pull_request_comment.delete() self.add_responses() @@ -1469,8 +1041,11 @@ def test_gh_comment_create_queued(self, get_jwt, mock_comment_workflow): @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_create_queued_existing_pr_commit(self, get_jwt, mock_comment_workflow): + def test_gh_comment_create_queued_existing_pr_commit( + self, get_jwt, mock_comment_workflow, mock_get_commit_context + ): """Task queued if no prior comment exists""" + mock_get_commit_context.return_value = [self.blame] pr_commit = PullRequestCommit.objects.create( commit=self.commit, pull_request=self.pull_request ) @@ -1495,9 +1070,9 @@ def test_gh_comment_create_queued_existing_pr_commit(self, get_jwt, mock_comment @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def 
test_gh_comment_update_queue(self, get_jwt, mock_comment_workflow): + def test_gh_comment_update_queue(self, get_jwt, mock_comment_workflow, mock_get_commit_context): """Task queued if new issue for prior comment""" - + mock_get_commit_context.return_value = [self.blame] self.add_responses() with self.tasks(): @@ -1516,8 +1091,9 @@ def test_gh_comment_update_queue(self, get_jwt, mock_comment_workflow): assert len(pr_commits) == 1 assert pr_commits[0].commit == self.commit - def test_gh_comment_no_repo(self, mock_comment_workflow): + def test_gh_comment_no_repo(self, mock_comment_workflow, mock_get_commit_context): """No comments on suspect commit if no repo row exists""" + mock_get_commit_context.return_value = [self.blame] self.repo.delete() with self.tasks(): event_frames = get_frame_paths(self.event) @@ -1533,9 +1109,9 @@ def test_gh_comment_no_repo(self, mock_comment_workflow): @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate - def test_gh_comment_debounces(self, get_jwt, mock_comment_workflow): + def test_gh_comment_debounces(self, get_jwt, mock_comment_workflow, mock_get_commit_context): + mock_get_commit_context.return_value = [self.blame] self.add_responses() - assert not GroupOwner.objects.filter(group=self.event.group).exists() groupowner = GroupOwner.objects.create( diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index ce586fb926d858..23e6b743b19fc9 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -1427,28 +1427,7 @@ def setUp(self): @with_feature("organizations:commit-context") @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", - return_value=github_blame_return_value, - ) - def test_debounce_cache_is_set(self, mock_get_commit_context): - with self.tasks(): - self.call_post_process_group( - is_new=True, - is_regression=False, - is_new_group_environment=True, - event=self.created_event, - ) - assert GroupOwner.objects.get( - group=self.created_event.group, - project=self.created_event.project, - organization=self.created_event.project.organization, - type=GroupOwnerType.SUSPECT_COMMIT.value, - ) - assert cache.has_key(f"process-commit-context-{self.created_event.group_id}") - - @with_feature("organizations:commit-context") - @patch( - "sentry.integrations.github.GitHubIntegration.get_commit_context", + "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", return_value=github_blame_return_value, ) def test_logic_fallback_no_scm(self, mock_get_commit_context): @@ -1465,14 +1444,15 @@ def test_logic_fallback_no_scm(self, mock_get_commit_context): is_new_group_environment=True, event=self.created_event, ) - assert not cache.has_key(f"process-commit-context-{self.created_event.group_id}") + + assert not mock_get_commit_context.called @with_feature("organizations:commit-context") @patch( - "sentry.integrations.github_enterprise.GitHubEnterpriseIntegration.get_commit_context", - return_value=github_blame_return_value, + "sentry.integrations.github_enterprise.GitHubEnterpriseIntegration.get_commit_context_all_frames", ) def test_github_enterprise(self, mock_get_commit_context): + mock_get_commit_context.return_value = self.github_blame_all_files_return_value with assume_test_silo_mode(SiloMode.CONTROL): with unguarded_write(using=router.db_for_write(Integration)): Integration.objects.all().delete() @@ -1504,12 +1484,10 @@ def test_github_enterprise(self, mock_get_commit_context): ) 
@with_feature("organizations:commit-context") - @with_feature("organizations:suspect-commits-all-frames") @patch("sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames") def test_skip_when_not_is_new(self, mock_get_commit_context): """ - Tests that when the organizations:suspect-commits-all-frames feature is enabled, - and the group is not new, that we do not process commit context. + Tests that we do not process commit context if the group isn't new. """ with self.tasks(): self.call_post_process_group( @@ -1527,14 +1505,12 @@ def test_skip_when_not_is_new(self, mock_get_commit_context): ).exists() @with_feature("organizations:commit-context") - @with_feature("organizations:suspect-commits-all-frames") @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) def test_does_not_skip_when_is_new(self, mock_get_commit_context): """ - Tests that when the organizations:suspect-commits-all-frames feature is enabled, - and the group is new, the commit context should be processed. + Tests that the commit context should be processed when the group is new. """ mock_get_commit_context.return_value = self.github_blame_all_files_return_value with self.tasks(): From 3ba1cf59398d819ce35b87b90987f9fc331f8dbb Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Tue, 13 Feb 2024 11:15:06 -0800 Subject: [PATCH 328/357] ref(rules): Remove unused endpoint (#65044) A follow up to https://github.com/getsentry/sentry/pull/59557 to remove the `ProjectCombinedRuleIndexEndpoint`. It is completely unused in the app, we've informed the 2 customers using it repeatedly since November about its removal, and there are no [recent logs in GCP](https://console.cloud.google.com/logs/query;query=resource.type%20%3D%20k8s_container%0Aresource.labels.namespace_name%20%3D%20default%0Aresource.labels.container_name%20%3D%20sentry%0Alabels.name%20%3D%20sentry.access.api%0AjsonPayload.view%3D~%22ProjectCombinedRuleIndexEndpoint%22;summaryFields=:true:32:beginning;lfeCustomFields=jsonPayload%252Fview;cursorTimestamp=2023-11-20T16:33:08.603699123Z;duration=PT30M?project=internal-sentry&pli=1&rapt=AEjHL4Ob28iCogAq6wlg8sKW8yfVAXlQ_eyRUgwpKwLActhCkhdsMKJgynyVGs_PYjB3SdoJzgWwxn0VdLJHFA5mSuusIS_WkymoZccwO2xjUASEkI8kVdg). 
--- src/sentry/api/urls.py | 10 +- .../endpoints/project_alert_rule_index.py | 47 ----- .../test_project_alert_rule_index.py | 197 +----------------- 3 files changed, 2 insertions(+), 252 deletions(-) diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index c284cdb2919f0a..3cefdd198f58fd 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -90,10 +90,7 @@ OrganizationIncidentSubscriptionIndexEndpoint, ) from sentry.incidents.endpoints.project_alert_rule_details import ProjectAlertRuleDetailsEndpoint -from sentry.incidents.endpoints.project_alert_rule_index import ( - ProjectAlertRuleIndexEndpoint, - ProjectCombinedRuleIndexEndpoint, -) +from sentry.incidents.endpoints.project_alert_rule_index import ProjectAlertRuleIndexEndpoint from sentry.incidents.endpoints.project_alert_rule_task_details import ( ProjectAlertRuleTaskDetailsEndpoint, ) @@ -2097,11 +2094,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ProjectAlertRuleTaskDetailsEndpoint.as_view(), name="sentry-api-0-project-alert-rule-task-details", ), - re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/combined-rules/$", - ProjectCombinedRuleIndexEndpoint.as_view(), - name="sentry-api-0-project-combined-rules", - ), re_path( r"^(?P[^\/]+)/(?P[^\/]+)/create-sample/$", ProjectCreateSampleEndpoint.as_view(), diff --git a/src/sentry/incidents/endpoints/project_alert_rule_index.py b/src/sentry/incidents/endpoints/project_alert_rule_index.py index 9cb90581563428..520ee9ca73b9f0 100644 --- a/src/sentry/incidents/endpoints/project_alert_rule_index.py +++ b/src/sentry/incidents/endpoints/project_alert_rule_index.py @@ -1,60 +1,13 @@ from __future__ import annotations -from datetime import datetime - from rest_framework.request import Request from rest_framework.response import Response -from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectAlertRulePermission, ProjectEndpoint -from sentry.api.helpers.deprecation import deprecated -from sentry.api.paginator import CombinedQuerysetIntermediary, CombinedQuerysetPaginator -from sentry.api.serializers import CombinedRuleSerializer, serialize -from sentry.constants import ObjectStatus from sentry.incidents.endpoints.organization_alert_rule_index import AlertRuleIndexMixin -from sentry.incidents.models import AlertRule -from sentry.models.rule import Rule -from sentry.snuba.dataset import Dataset - - -@region_silo_endpoint -class ProjectCombinedRuleIndexEndpoint(ProjectEndpoint): - owner = ApiOwner.ISSUES - DEPRECATION_DATE = datetime.fromisoformat("2024-02-07T00:00:00+00:00:00") - publish_status = { - "GET": ApiPublishStatus.PRIVATE, - } - - @deprecated(DEPRECATION_DATE, "sentry-api-0-organization-combined-rules") - def get(self, request: Request, project) -> Response: - """ - Fetches alert rules and legacy rules for a project. @deprecated. Use OrganizationCombinedRuleIndexEndpoint instead. 
- """ - alert_rules = AlertRule.objects.fetch_for_project(project) - if not features.has("organizations:performance-view", project.organization): - # Filter to only error alert rules - alert_rules = alert_rules.filter(snuba_query__dataset=Dataset.Events.value) - - alert_rule_intermediary = CombinedQuerysetIntermediary(alert_rules, ["date_added"]) - rule_intermediary = CombinedQuerysetIntermediary( - Rule.objects.filter( - project=project, - status=ObjectStatus.ACTIVE, - ), - ["date_added"], - ) - - return self.paginate( - request, - paginator_cls=CombinedQuerysetPaginator, - on_results=lambda x: serialize(x, request.user, CombinedRuleSerializer()), - default_per_page=25, - intermediaries=[alert_rule_intermediary, rule_intermediary], - desc=True, - ) @region_silo_endpoint diff --git a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py index 07292ad092066f..e65b5e05b67166 100644 --- a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py +++ b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py @@ -1,7 +1,4 @@ from copy import deepcopy -from datetime import timezone - -import requests from sentry import audit_log from sentry.api.serializers import serialize @@ -10,12 +7,10 @@ from sentry.silo import SiloMode from sentry.snuba.dataset import Dataset from sentry.testutils.cases import APITestCase -from sentry.testutils.helpers.datetime import before_now, freeze_time +from sentry.testutils.helpers.datetime import freeze_time from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba -from sentry.utils import json -from tests.sentry.api.serializers.test_alert_rule import BaseAlertRuleSerializerTest pytestmark = [requires_snuba] @@ -171,193 +166,3 @@ def test_project_not_in_request(self): resp.renderer_context["request"].META["REMOTE_ADDR"] == list(audit_log_entry)[0].ip_address ) - - -@region_silo_test -class ProjectCombinedRuleIndexEndpointTest(BaseAlertRuleSerializerTest, APITestCase): - endpoint = "sentry-api-0-project-combined-rules" - - def test_no_perf_alerts(self): - self.create_team(organization=self.organization, members=[self.user]) - self.create_alert_rule() - perf_alert_rule = self.create_alert_rule(query="p95", dataset=Dataset.Transactions) - self.login_as(self.user) - with self.feature("organizations:incidents"): - resp = self.get_success_response(self.organization.slug, self.project.slug) - assert perf_alert_rule.id not in [x["id"] for x in list(resp.data)] - - with self.feature(["organizations:incidents", "organizations:performance-view"]): - resp = self.get_success_response(self.organization.slug, self.project.slug) - assert perf_alert_rule.id in [int(x["id"]) for x in list(resp.data)] - - def setup_project_and_rules(self): - self.org = self.create_organization(owner=self.user, name="Rowdy Tiger") - self.team = self.create_team(organization=self.org, name="Mariachi Band") - self.project = self.create_project(organization=self.org, teams=[self.team], name="Bengal") - self.login_as(self.user) - self.projects = [self.project, self.create_project()] - self.alert_rule = self.create_alert_rule( - projects=self.projects, date_added=before_now(minutes=6).replace(tzinfo=timezone.utc) - ) - self.other_alert_rule = self.create_alert_rule( - projects=self.projects, date_added=before_now(minutes=5).replace(tzinfo=timezone.utc) - ) - self.issue_rule = 
self.create_issue_alert_rule( - data={ - "project": self.project, - "name": "Issue Rule Test", - "conditions": [], - "actions": [], - "actionMatch": "all", - "date_added": before_now(minutes=4).replace(tzinfo=timezone.utc), - } - ) - self.yet_another_alert_rule = self.create_alert_rule( - projects=self.projects, date_added=before_now(minutes=3).replace(tzinfo=timezone.utc) - ) - self.combined_rules_url = ( - f"/api/0/projects/{self.org.slug}/{self.project.slug}/combined-rules/" - ) - - def test_invalid_limit(self): - self.setup_project_and_rules() - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"per_page": "notaninteger"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 400 - - def test_limit_higher_than_results_no_cursor(self): - self.setup_project_and_rules() - # Test limit above result count (which is 4), no cursor. - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"per_page": "5"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - result = json.loads(response.content) - assert len(result) == 4 - self.assert_alert_rule_serialized(self.yet_another_alert_rule, result[0], skip_dates=True) - assert result[1]["id"] == str(self.issue_rule.id) - assert result[1]["type"] == "rule" - self.assert_alert_rule_serialized(self.other_alert_rule, result[2], skip_dates=True) - self.assert_alert_rule_serialized(self.alert_rule, result[3], skip_dates=True) - - def test_limit_as_1_with_paging(self): - self.setup_project_and_rules() - - # Test Limit as 1, no cursor: - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"per_page": "1"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - - result = json.loads(response.content) - assert len(result) == 1 - self.assert_alert_rule_serialized(self.yet_another_alert_rule, result[0], skip_dates=True) - - links = requests.utils.parse_header_links(response["link"].rstrip(">").replace(">,<", ",<")) - next_cursor = links[1]["cursor"] - - # Test Limit as 1, next page of previous request: - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"cursor": next_cursor, "per_page": "1"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - result = json.loads(response.content) - assert len(result) == 1 - assert result[0]["id"] == str(self.issue_rule.id) - assert result[0]["type"] == "rule" - - def test_limit_as_2_with_paging(self): - self.setup_project_and_rules() - - # Test Limit as 2, no cursor: - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"per_page": "2"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - - result = json.loads(response.content) - assert len(result) == 2 - self.assert_alert_rule_serialized(self.yet_another_alert_rule, result[0], skip_dates=True) - assert result[1]["id"] == str(self.issue_rule.id) - assert result[1]["type"] == "rule" - - links = 
requests.utils.parse_header_links(response["link"].rstrip(">").replace(">,<", ",<")) - next_cursor = links[1]["cursor"] - # Test Limit 2, next page of previous request: - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"cursor": next_cursor, "per_page": "2"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - - result = json.loads(response.content) - assert len(result) == 2 - self.assert_alert_rule_serialized(self.other_alert_rule, result[0], skip_dates=True) - self.assert_alert_rule_serialized(self.alert_rule, result[1], skip_dates=True) - - links = requests.utils.parse_header_links(response["link"].rstrip(">").replace(">,<", ",<")) - next_cursor = links[1]["cursor"] - - # Test Limit 2, next page of previous request - should get no results since there are only 4 total: - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"cursor": next_cursor, "per_page": "2"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - - result = json.loads(response.content) - assert len(result) == 0 - - def test_offset_pagination(self): - self.setup_project_and_rules() - - date_added = before_now(minutes=1) - self.one_alert_rule = self.create_alert_rule( - projects=self.projects, date_added=date_added.replace(tzinfo=timezone.utc) - ) - self.two_alert_rule = self.create_alert_rule( - projects=self.projects, date_added=date_added.replace(tzinfo=timezone.utc) - ) - self.three_alert_rule = self.create_alert_rule(projects=self.projects) - - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"per_page": "2"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - - result = json.loads(response.content) - assert len(result) == 2 - self.assert_alert_rule_serialized(self.three_alert_rule, result[0], skip_dates=True) - self.assert_alert_rule_serialized(self.one_alert_rule, result[1], skip_dates=True) - - links = requests.utils.parse_header_links(response["link"].rstrip(">").replace(">,<", ",<")) - next_cursor = links[1]["cursor"] - assert next_cursor.split(":")[1] == "1" # Assert offset is properly calculated. - - with self.feature(["organizations:incidents", "organizations:performance-view"]): - request_data = {"cursor": next_cursor, "per_page": "2"} - response = self.client.get( - path=self.combined_rules_url, data=request_data, content_type="application/json" - ) - assert response.status_code == 200 - - result = json.loads(response.content) - assert len(result) == 2 - - self.assert_alert_rule_serialized(self.two_alert_rule, result[0], skip_dates=True) - self.assert_alert_rule_serialized(self.yet_another_alert_rule, result[1], skip_dates=True) From e0ac90d8017641e09e43e2a077e34e372bdb8abf Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 13 Feb 2024 11:25:41 -0800 Subject: [PATCH 329/357] ref(grouping): Clean up `event_manager_grouping` tests (#65019) This does a small amount of cleanup of the tests in `test_event_manager_grouping.py`. Specifically: - Use constants for grouping config names. - Split updating group metadata into a separate test. - Simplify test showing transaction events ignore manual fingerprinting. 
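The constant-hoisting change is small but worth spelling out, since the same config strings were previously retyped in every test. The pattern in miniature, using the constants this patch introduces (`configure_grouping` is a hypothetical helper, not code from the diff):

```python
# Module-level constants replace raw strings scattered across the tests,
# so renaming a grouping config becomes a one-line change.
LEGACY_CONFIG = "legacy:2019-03-12"
NEWSTYLE_CONFIG = "newstyle:2023-01-11"


def configure_grouping(project) -> None:
    # Illustrative helper: the real tests set these project options inline.
    project.update_option("sentry:grouping_config", NEWSTYLE_CONFIG)
    project.update_option("sentry:secondary_grouping_config", LEGACY_CONFIG)
```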
--- .../test_event_manager_grouping.py | 79 +++++++++---------- 1 file changed, 38 insertions(+), 41 deletions(-) diff --git a/tests/sentry/event_manager/test_event_manager_grouping.py b/tests/sentry/event_manager/test_event_manager_grouping.py index 8c0adfeba82231..198a33609fd35d 100644 --- a/tests/sentry/event_manager/test_event_manager_grouping.py +++ b/tests/sentry/event_manager/test_event_manager_grouping.py @@ -4,19 +4,20 @@ from unittest import mock from unittest.mock import MagicMock -from sentry import tsdb from sentry.grouping.result import CalculatedHashes from sentry.models.group import Group from sentry.testutils.cases import TestCase -from sentry.testutils.helpers.datetime import freeze_time from sentry.testutils.helpers.eventprocessing import save_new_event from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba -from sentry.tsdb.base import TSDBModel pytestmark = [requires_snuba] +LEGACY_CONFIG = "legacy:2019-03-12" +NEWSTYLE_CONFIG = "newstyle:2023-01-11" + + def get_relevant_metrics_calls(mock_fn: MagicMock, key: str) -> list[mock._Call]: return [call for call in mock_fn.call_args_list if call.args[0] == key] @@ -35,12 +36,6 @@ def test_puts_events_with_matching_fingerprints_in_same_group(self): assert event.group_id == event2.group_id - group = Group.objects.get(id=event.group_id) - - assert group.times_seen == 2 - assert group.last_seen == event2.datetime - assert group.message == event2.message - def test_puts_events_with_different_fingerprints_in_different_groups(self): event = save_new_event( {"message": "Dogs are great!", "fingerprint": ["maisey"]}, self.project @@ -58,12 +53,11 @@ def test_adds_default_fingerprint_if_none_in_event(self): assert event.data.get("fingerprint") == ["{{ default }}"] - @freeze_time() def test_ignores_fingerprint_on_transaction_event(self): - event1 = save_new_event( + error_event = save_new_event( {"message": "Dogs are great!", "fingerprint": ["charlie"]}, self.project ) - event2 = save_new_event( + transaction_event = save_new_event( { "transaction": "dogpark", "fingerprint": ["charlie"], @@ -82,30 +76,8 @@ def test_ignores_fingerprint_on_transaction_event(self): self.project, ) - assert event1.group is not None - assert event2.group is None - assert event1.group_id != event2.group_id - assert ( - tsdb.backend.get_sums( - TSDBModel.project, - [self.project.id], - event1.datetime, - event1.datetime, - tenant_ids={"organization_id": 123, "referrer": "r"}, - )[self.project.id] - == 1 - ) - - assert ( - tsdb.backend.get_sums( - TSDBModel.group, - [event1.group.id], - event1.datetime, - event1.datetime, - tenant_ids={"organization_id": 123, "referrer": "r"}, - )[event1.group.id] - == 1 - ) + # Events are assigned to different groups even though they had identical fingerprints + assert error_event.group_id != transaction_event.group_id def test_none_exception(self): """Test that when the exception is None, the group is still formed.""" @@ -113,13 +85,38 @@ def test_none_exception(self): assert event.group + def test_updates_group_metadata(self): + event1 = save_new_event( + {"message": "Dogs are great!", "fingerprint": ["maisey"]}, self.project + ) + + group = Group.objects.get(id=event1.group_id) + + assert group.times_seen == 1 + assert group.last_seen == event1.datetime + assert group.message == event1.message + + # Normally this should go into a different group, since the messages don't match, but the + # fingerprint takes precedence. 
(We need to make the messages different in order to show + # that the group's message gets updated.) + event2 = save_new_event( + {"message": "Adopt don't shop", "fingerprint": ["maisey"]}, self.project + ) + + assert event1.group_id == event2.group_id + group = Group.objects.get(id=event2.group_id) + + assert group.times_seen == 2 + assert group.last_seen == event2.datetime + assert group.message == event2.message + @region_silo_test class EventManagerGroupingMetricsTest(TestCase): @mock.patch("sentry.event_manager.metrics.incr") def test_records_num_calculations(self, mock_metrics_incr: MagicMock): project = self.project - project.update_option("sentry:grouping_config", "legacy:2019-03-12") + project.update_option("sentry:grouping_config", LEGACY_CONFIG) project.update_option("sentry:secondary_grouping_config", None) save_new_event({"message": "Dogs are great!"}, self.project) @@ -130,8 +127,8 @@ def test_records_num_calculations(self, mock_metrics_incr: MagicMock): assert len(hashes_calculated_calls) == 1 assert hashes_calculated_calls[0].kwargs["amount"] == 1 - project.update_option("sentry:grouping_config", "newstyle:2023-01-11") - project.update_option("sentry:secondary_grouping_config", "legacy:2019-03-12") + project.update_option("sentry:grouping_config", NEWSTYLE_CONFIG) + project.update_option("sentry:secondary_grouping_config", LEGACY_CONFIG) project.update_option("sentry:secondary_grouping_expiry", time() + 3600) save_new_event({"message": "Dogs are great!"}, self.project) @@ -146,8 +143,8 @@ def test_records_num_calculations(self, mock_metrics_incr: MagicMock): @mock.patch("sentry.grouping.ingest._should_run_secondary_grouping", return_value=True) def test_records_hash_comparison(self, _, mock_metrics_incr: MagicMock): project = self.project - project.update_option("sentry:grouping_config", "newstyle:2023-01-11") - project.update_option("sentry:secondary_grouping_config", "legacy:2019-03-12") + project.update_option("sentry:grouping_config", NEWSTYLE_CONFIG) + project.update_option("sentry:secondary_grouping_config", LEGACY_CONFIG) cases = [ # primary_hashes, secondary_hashes, expected_tag From b58652a49b3419330c957c943045c0f1288be948 Mon Sep 17 00:00:00 2001 From: Alexander Tarasov Date: Tue, 13 Feb 2024 20:33:06 +0100 Subject: [PATCH 330/357] chore: upgrade marked (#65084) * Fixes dependabot alerts. * TypeScript support (https://github.com/markedjs/marked/releases/tag/v6.0.0) so we no longer need `@types/marked`. 
--------- Co-authored-by: Yagiz Nizipli --- .github/dependabot.yml | 2 -- package.json | 3 +-- static/app/utils/marked.tsx | 12 +++++++----- yarn.lock | 13 ++++--------- 4 files changed, 12 insertions(+), 18 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 4227278496cd9f..c8f8de9a951e8d 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -33,14 +33,12 @@ updates: # We ignore everything that hasn't yet been upgrade, this way we will # only get the _freshest_ of new packages to consider upgrading - - dependency-name: '@types/marked' - dependency-name: '@types/react-router' - dependency-name: '@types/react-select' - dependency-name: '@types/reflux' - dependency-name: 'babel-jest' - dependency-name: 'gettext-parser' - dependency-name: 'jest-junit' - - dependency-name: 'marked' - dependency-name: 'react-lazyload' - dependency-name: 'react-refresh' - dependency-name: 'react-router' diff --git a/package.json b/package.json index 8e95fba3b2894f..70beecc4d26093 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,6 @@ "@types/js-beautify": "^1.13.3", "@types/js-cookie": "^3.0.2", "@types/lodash": "^4.14.182", - "@types/marked": "^0.7.2", "@types/mini-css-extract-plugin": "^2.5.1", "@types/papaparse": "^5.3.5", "@types/pegjs": "^0.10.3", @@ -129,7 +128,7 @@ "less-loader": "^11.1.0", "lightningcss": "^1.22.1", "lodash": "^4.17.19", - "marked": "0.7.0", + "marked": "6.0.0", "mini-css-extract-plugin": "^2.7.2", "mobx": "^6.8.0", "mobx-react": "~7.6.0", diff --git a/static/app/utils/marked.tsx b/static/app/utils/marked.tsx index 4ed7f675b62d50..e71dc8400f33e3 100644 --- a/static/app/utils/marked.tsx +++ b/static/app/utils/marked.tsx @@ -1,5 +1,6 @@ import dompurify from 'dompurify'; -import marked from 'marked'; // eslint-disable-line no-restricted-imports +import type {MarkedOptions} from 'marked'; // eslint-disable-line no-restricted-imports +import {marked} from 'marked'; // eslint-disable-line no-restricted-imports import Prism from 'prismjs'; import {NODE_ENV} from 'sentry/constants'; @@ -62,7 +63,7 @@ marked.setOptions({ } loadPrismLanguage(lang, { - onLoad: () => callback?.(null, Prism.highlight(code, Prism.languages[lang], lang)), + onLoad: () => callback?.(null!, Prism.highlight(code, Prism.languages[lang], lang)), onError: error => callback?.(error, code), suppressExistenceWarning: true, }); @@ -80,10 +81,11 @@ marked.setOptions({ silent: NODE_ENV === 'test', }); -const sanitizedMarked = (...args: Parameters) => - dompurify.sanitize(marked(...args)); +const sanitizedMarked = (src: string, options?: MarkedOptions) => { + return dompurify.sanitize(marked(src, options)); +}; -const singleLineRenderer = (text: string, options: marked.MarkedOptions = {}) => +const singleLineRenderer = (text: string, options: MarkedOptions = {}) => sanitizedMarked(text, {...options, renderer: new NoParagraphRenderer()}); export {singleLineRenderer}; diff --git a/yarn.lock b/yarn.lock index 1a2243c092b0c4..44dd5aa2f3d6da 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3369,11 +3369,6 @@ resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.182.tgz#05301a4d5e62963227eaafe0ce04dd77c54ea5c2" integrity sha512-/THyiqyQAP9AfARo4pF+aCGcyiQ94tX/Is2I7HofNRqoYLgN1PBoOWu2/zTA5zMxzP5EFutMtWtGAFRKUe961Q== -"@types/marked@^0.7.2": - version "0.7.2" - resolved "https://registry.yarnpkg.com/@types/marked/-/marked-0.7.2.tgz#1393f076773b55cc7078c0fbeb86a497c69db97e" - integrity sha512-A3EDyNaq6OCcpaOia2HQ/tu2QYt8DKuj4ExP21VU3cU3HTo2FLslvbqa2T1vux910RHvuSVqpwKnnykSFcRWOA== - 
"@types/mime@^1": version "1.3.2" resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" @@ -8663,10 +8658,10 @@ map-obj@^4.1.0: resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a" integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ== -marked@0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/marked/-/marked-0.7.0.tgz#b64201f051d271b1edc10a04d1ae9b74bb8e5c0e" - integrity sha512-c+yYdCZJQrsRjTPhUx7VKkApw9bwDkNbHUKo1ovgcfDjb2kc8rLuRbIFyXL5WOEUwzSSKo3IXpph2K6DqB/KZg== +marked@6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/marked/-/marked-6.0.0.tgz#80cd7f51145437cffe9f541a318b9882f75601df" + integrity sha512-7E3m/xIlymrFL5gWswIT4CheIE3fDeh51NV09M4x8iOc7NDYlyERcQMLAIHcSlrvwliwbPQ4OGD+MpPSYiQcqw== mathml-tag-names@^2.1.3: version "2.1.3" From ee379a750efcc0bd68f335cf7f92610b1f0e6742 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Tue, 13 Feb 2024 11:39:47 -0800 Subject: [PATCH 331/357] ref(typing): type issues.issue_occurence (#65054) - [x] type sentry.issues.issue_occurrence - [x] remove it from exclusion list --- pyproject.toml | 1 - src/sentry/issues/issue_occurrence.py | 10 ++++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 54b921907c8996..502d5afde0de95 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -372,7 +372,6 @@ module = [ "sentry.integrations.vsts.repository", "sentry.integrations.vsts_extension.integration", "sentry.issues.escalating_group_forecast", - "sentry.issues.issue_occurrence", "sentry.issues.occurrence_consumer", "sentry.issues.search", "sentry.issues.status_change", diff --git a/src/sentry/issues/issue_occurrence.py b/src/sentry/issues/issue_occurrence.py index 69b8c692da24c8..02c09800d1a8bb 100644 --- a/src/sentry/issues/issue_occurrence.py +++ b/src/sentry/issues/issue_occurrence.py @@ -35,7 +35,7 @@ class IssueOccurrenceData(TypedDict): detection_time: float level: str | None culprit: str | None - initial_issue_priority: NotRequired[int] + initial_issue_priority: NotRequired[int | None] @dataclass(frozen=True) @@ -168,11 +168,13 @@ def build_storage_identifier(cls, id_: str, project_id: int) -> str: return f"i-o:{identifier}" def save(self) -> None: - nodestore.set(self.build_storage_identifier(self.id, self.project_id), self.to_dict()) + nodestore.backend.set( + self.build_storage_identifier(self.id, self.project_id), self.to_dict() + ) @classmethod def fetch(cls, id_: str, project_id: int) -> IssueOccurrence | None: - results = nodestore.get(cls.build_storage_identifier(id_, project_id)) + results = nodestore.backend.get(cls.build_storage_identifier(id_, project_id)) if results: return IssueOccurrence.from_dict(results) return None @@ -180,7 +182,7 @@ def fetch(cls, id_: str, project_id: int) -> IssueOccurrence | None: @classmethod def fetch_multi(cls, ids: Sequence[str], project_id: int) -> Sequence[IssueOccurrence | None]: ids = [cls.build_storage_identifier(id, project_id) for id in ids] - results = nodestore.get_multi(ids) + results = nodestore.backend.get_multi(ids) return [ IssueOccurrence.from_dict(results[_id]) if results.get(_id) else None for _id in ids ] From adb2588fc573cf65e868c378f48daaa98603cfb7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 13 Feb 2024 11:43:38 -0800 Subject: [PATCH 332/357] ref(grouping): Do final `_save_aggregate_new` cleanup tasks (#64975) This does a last batch of 
cleanup in `_save_aggregate_new` before the new logic is added. Notes: - `release` is only used once, and can be pulled straight out of `job`, so it doesn't need to be passed separately. - The logging about missing severity scores is leftover from the initial POC back in September, mostly just me being paranoid. We don't need it anymore. - I nearly took the second check of the double-check lock out before I realized what it was doing there. Fortunately Markus name-dropped double-entrant locking in his [PR description](https://github.com/getsentry/sentry/pull/23577) when he put it in. Since it's not immediately obvious why it's there, I added some explanation. - The `metrics.timer` context manager's `metric_tags` was shadowing the `metric_tags` being passed in. Fixing that will let us use the latter tags in `record_new_group_metrics` if we ever want to. --- src/sentry/event_manager.py | 57 +++++++++++++++++-------------------- 1 file changed, 26 insertions(+), 31 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 3cda576eb94fb1..7980e15da0e101 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -1378,7 +1378,6 @@ def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> G group_info = _save_aggregate_new( event=event, job=job, - release=job["release"], metric_tags=metric_tags, ) else: @@ -1626,7 +1625,6 @@ def _save_aggregate( def _save_aggregate_new( event: Event, job: Job, - release: Release | None, metric_tags: MutableTags, ) -> GroupInfo | None: project = event.project @@ -1650,58 +1648,55 @@ def _save_aggregate_new( if existing_grouphash is None: check_for_group_creation_load_shed(project, event) - with sentry_sdk.start_span( - op="event_manager.create_group_transaction" - ) as span, metrics.timer( - "event_manager.create_group_transaction" - ) as metric_tags, transaction.atomic( - router.db_for_write(GroupHash) + with ( + sentry_sdk.start_span(op="event_manager.create_group_transaction") as span, + metrics.timer("event_manager.create_group_transaction") as metrics_timer_tags, + transaction.atomic(router.db_for_write(GroupHash)), ): span.set_tag("create_group_transaction.outcome", "no_group") - metric_tags["create_group_transaction.outcome"] = "no_group" - + metrics_timer_tags["create_group_transaction.outcome"] = "no_group" + + # If we're in this branch, we checked our grouphashes and didn't find one with a group + # attached. We thus want to create a new group, but we need to guard against another + # event with the same hash coming in before we're done here and also thinking it needs + # to create a new group. To prevent this, we're using double-checked locking + # (https://en.wikipedia.org/wiki/Double-checked_locking). + + # First, try to lock the relevant rows in the `GroupHash` table. If another (identically + # hashed) event is also in the process of creating a group and has grabbed the lock + # before us, we'll block here until it's done. If not, we've now got the lock and other + # identically-hashed events will have to wait for us. grouphashes = list( GroupHash.objects.filter( id__in=[h.id for h in grouphashes], ).select_for_update() ) + # Now check again to see if any of our grouphashes have a group. In the first race + # condition scenario above, we'll have been blocked long enough for the other event to + # have created the group and updated our grouphashes with a group id, which means this + # time, we'll find something. 
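+            # (A hedged aside: the double-checked locking shape, with made-up
+            # helper names rather than the real functions in this module:
+            #
+            #     if find_group(hashes) is None:           # first, unlocked check
+            #         with lock_rows(hashes):              # serialize identical hashes
+            #             if find_group(hashes) is None:   # re-check under the lock
+            #                 create_group()               # exactly one creator wins
+            #
+            # The second check is the whole point: without it, two events that
+            # both passed the first check would each create a group.)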
existing_grouphash = find_existing_grouphash_new(grouphashes) + # If we still haven't found a matching grouphash, we're now safe to go ahead and create + # the group. if existing_grouphash is None: group = _create_group(project, event, **group_processing_kwargs) - if ( - features.has("projects:first-event-severity-calculation", event.project) - and group.data.get("metadata", {}).get("severity") is None - ): - logger.error( - "Group created without severity score", - extra={ - "event_id": event.data["event_id"], - "group_id": group.id, - }, - ) - add_group_id_to_grouphashes(group, grouphashes) - is_new = True - is_regression = False - span.set_tag("create_group_transaction.outcome", "new_group") - metric_tags["create_group_transaction.outcome"] = "new_group" + metrics_timer_tags["create_group_transaction.outcome"] = "new_group" record_new_group_metrics(event) - return GroupInfo(group, is_new, is_regression) + return GroupInfo(group=group, is_new=True, is_regression=False) group = Group.objects.get(id=existing_grouphash.group_id) if check_for_category_mismatch(group): return None - is_new = False - # There may still be secondary hashes that we did not use to find an # existing group. A classic example is when grouping makes changes to # the app-hash (changes to in_app logic), but the system hash stays @@ -1732,10 +1727,10 @@ def _save_aggregate_new( group=group, event=event, incoming_group_values=group_processing_kwargs, - release=release, + release=job["release"], ) - return GroupInfo(group, is_new, is_regression) + return GroupInfo(group=group, is_new=False, is_regression=is_regression) def _create_group(project: Project, event: Event, **kwargs: Any) -> Group: From 592725dad033b2655b688b5f763c4360f483451d Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Tue, 13 Feb 2024 12:00:56 -0800 Subject: [PATCH 333/357] fix(issues): Fix typo in codeowner errors message (#65090) --- .../views/settings/project/projectOwnership/codeownerErrors.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/views/settings/project/projectOwnership/codeownerErrors.tsx b/static/app/views/settings/project/projectOwnership/codeownerErrors.tsx index 0fe80c6221e1ac..2a5c0502b2d47e 100644 --- a/static/app/views/settings/project/projectOwnership/codeownerErrors.tsx +++ b/static/app/views/settings/project/projectOwnership/codeownerErrors.tsx @@ -124,7 +124,7 @@ export function CodeOwnerErrors({ case 'teams_without_access': return ( `/settings/${orgSlug}/teams/${value.slice(1)}/projects/` From 2657fb9813633421e7db4e09e0d7cb6a478e1187 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Tue, 13 Feb 2024 12:10:25 -0800 Subject: [PATCH 334/357] fix(issues): add regression test for linked comment to assert string (#65105) Adds an assertion to an existing test to ensure that the default linked comment is a string. Prevents issues similar to these: https://sentry.sentry.io/issues/?project=-1&query=Error+Communicating+with+GitHub&referrer=issue-list&statsPeriod=7d These issues were occurring because there was a bug introduced in a previous PR which converted the default comment to a tuple, leading to an API error. 
--- tests/sentry/integrations/github/test_issues.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/sentry/integrations/github/test_issues.py b/tests/sentry/integrations/github/test_issues.py index 4e8d1bfdeb2de5..b6d41d1dd59794 100644 --- a/tests/sentry/integrations/github/test_issues.py +++ b/tests/sentry/integrations/github/test_issues.py @@ -429,6 +429,10 @@ def test_linked_issue_comment(self): # assert comment wording for linked feedback is correct assert "Sentry Feedback" in resp[2]["default"] + # ensure linked comment is a string + assert isinstance(resp[1]["default"], str) + assert isinstance(resp[0]["default"], str) + @responses.activate def after_link_issue(self): responses.add( From 75de404b41b2f163ae19921235b54be10cf97e48 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 13 Feb 2024 12:27:35 -0800 Subject: [PATCH 335/357] fix(issues): Trace timeline cleanup, useMemo events (#65100) --- .../issueDetails/traceTimeline/traceLink.tsx | 8 +-- .../traceTimeline/traceTimeline.tsx | 10 ++-- .../traceTimeline/traceTimelineEvents.tsx | 59 ++++++++++--------- .../traceTimeline/useTraceTimelineEvents.tsx | 10 +++- .../issueDetails/traceTimeline/utils.tsx | 6 +- 5 files changed, 51 insertions(+), 42 deletions(-) diff --git a/static/app/views/issueDetails/traceTimeline/traceLink.tsx b/static/app/views/issueDetails/traceTimeline/traceLink.tsx index 985f646ada2340..dffa4a005fb842 100644 --- a/static/app/views/issueDetails/traceTimeline/traceLink.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceLink.tsx @@ -19,7 +19,7 @@ interface TraceLinkProps { export function TraceLink({event}: TraceLinkProps) { const organization = useOrganization(); - const {data} = useTraceTimelineEvents({event}); + const {traceEvents} = useTraceTimelineEvents({event}); const traceTarget = generateTraceTarget(event, organization); if (!event.contexts?.trace?.trace_id) { @@ -49,11 +49,11 @@ export function TraceLink({event}: TraceLinkProps) { > {t('View Full Trace')} - {data.length > 0 && ( + {traceEvents.length > 0 && ( - {data.length >= 100 + {traceEvents.length >= 100 ? t(' (100+ issues)') - : tn(' (%s issue)', ' (%s issues)', data.length)} + : tn(' (%s issue)', ' (%s issues)', traceEvents.length)} )} diff --git a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx index c992edf8616992..355d58a212b3bc 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimeline.tsx @@ -24,14 +24,14 @@ export function TraceTimeline({event}: TraceTimelineProps) { const timelineRef = useRef(null); const {width} = useDimensions({elementRef: timelineRef}); const hasFeature = hasTraceTimelineFeature(organization); - const {isError, isLoading, data} = useTraceTimelineEvents({event}, hasFeature); + const {isError, isLoading, traceEvents} = useTraceTimelineEvents({event}, hasFeature); const hasTraceId = !!event.contexts?.trace?.trace_id; let timelineStatus: string | undefined; if (hasFeature) { if (hasTraceId && !isLoading) { - timelineStatus = data.length > 1 ? 'shown' : 'empty'; + timelineStatus = traceEvents.length > 1 ? 
'shown' : 'empty'; } else if (!hasTraceId) { timelineStatus = 'no_trace_id'; } @@ -42,10 +42,12 @@ export function TraceTimeline({event}: TraceTimelineProps) { return null; } - const noEvents = !isLoading && data.length === 0; + const noEvents = !isLoading && traceEvents.length === 0; // Timelines with only the current event are not useful const onlySelfEvent = - !isLoading && data.length > 0 && data.every(item => item.id === event.id); + !isLoading && + traceEvents.length > 0 && + traceEvents.every(item => item.id === event.id); if (isError || noEvents || onlySelfEvent) { // display empty placeholder to reduce layout shift return
    ; diff --git a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx index 37a91061570881..c8fe149c925f13 100644 --- a/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/traceTimelineEvents.tsx @@ -1,4 +1,4 @@ -import {Fragment} from 'react'; +import {Fragment, useMemo} from 'react'; import styled from '@emotion/styled'; import color from 'color'; @@ -24,7 +24,7 @@ interface TraceTimelineEventsProps { } export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { - const {startTimestamp, endTimestamp, data} = useTraceTimelineEvents({event}); + const {startTimestamp, endTimestamp, traceEvents} = useTraceTimelineEvents({event}); let paddedStartTime = startTimestamp; let paddedEndTime = endTimestamp; // Duration is 0, pad both sides, this is how we end up with 1 dot in the middle @@ -36,12 +36,11 @@ export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { const durationMs = paddedEndTime - paddedStartTime; const totalColumns = Math.floor(width / PARENT_WIDTH); - const eventsByColumn = getEventsByColumn( - durationMs, - data, - totalColumns, - paddedStartTime + const eventsByColumn = useMemo( + () => getEventsByColumn(traceEvents, durationMs, totalColumns, paddedStartTime), + [durationMs, traceEvents, totalColumns, paddedStartTime] ); + const columnSize = width / totalColumns; // If the duration is less than 2 minutes, show seconds @@ -54,7 +53,7 @@ export function TraceTimelineEvents({event, width}: TraceTimelineEventsProps) { {/* Add padding to the total columns, 1 column of padding on each side */} - {Array.from(eventsByColumn.entries()).map(([column, colEvents]) => { + {eventsByColumn.map(([column, colEvents]) => { // Calculate the timestamp range that this column represents const timeRange = getChunkTimeRange( paddedStartTime, @@ -139,35 +138,33 @@ function NodeGroup({ timeRange: [number, number]; }) { const totalSubColumns = Math.floor(columnSize / CHILD_WIDTH); - const durationMs = timeRange[1] - timeRange[0]; - const eventsByColumn = getEventsByColumn( - durationMs, - colEvents, - totalSubColumns, - timeRange[0] - ); - - const columns = Array.from(eventsByColumn.keys()); - const minColumn = Math.min(...columns); - const maxColumn = Math.max(...columns); + const {eventsByColumn, columns} = useMemo(() => { + const durationMs = timeRange[1] - timeRange[0]; + const eventColumns = getEventsByColumn( + colEvents, + durationMs, + totalSubColumns, + timeRange[0] + ); + return { + eventsByColumn: eventColumns, + columns: eventColumns.map(([column]) => column).sort(), + }; + }, [colEvents, totalSubColumns, timeRange]); return ( - {Array.from(eventsByColumn.entries()).map(([column, groupEvents]) => { + {eventsByColumn.map(([column, groupEvents]) => { const isCurrentNode = groupEvents.some(e => e.id === currentEventId); return ( - - {isCurrentNode && ( + + {isCurrentNode ? ( - )} - {!isCurrentNode && + ) : ( groupEvents .slice(0, 5) .map(groupEvent => @@ -176,7 +173,8 @@ function NodeGroup({ ) : ( ) - )} + ) + )} ); })} @@ -193,7 +191,10 @@ function NodeGroup({ > 1 ? `${minColumn} / ${maxColumn}` : columns[0], + gridColumn: + columns.length > 1 + ? 
`${columns.at(0)} / ${columns.at(-1)}` + : columns.at(0)!, width: 8 * columns.length, }} data-test-id={`trace-timeline-tooltip-${currentColumn}`} diff --git a/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx b/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx index e9d6e9efaec347..b2d50505be433c 100644 --- a/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx +++ b/static/app/views/issueDetails/traceTimeline/useTraceTimelineEvents.tsx @@ -36,7 +36,13 @@ interface UseTraceTimelineEventsOptions { export function useTraceTimelineEvents( {event}: UseTraceTimelineEventsOptions, isEnabled: boolean = true -) { +): { + endTimestamp: number; + isError: boolean; + isLoading: boolean; + startTimestamp: number; + traceEvents: TimelineEvent[]; +} { const organization = useOrganization(); const {start, end} = getTraceTimeRangeFromEvent(event); @@ -150,7 +156,7 @@ export function useTraceTimelineEvents( ]); return { - data: eventData.data, + traceEvents: eventData.data, startTimestamp: eventData.startTimestamp, endTimestamp: eventData.endTimestamp, isLoading: isLoadingIssuePlatform || isLoadingDiscover, diff --git a/static/app/views/issueDetails/traceTimeline/utils.tsx b/static/app/views/issueDetails/traceTimeline/utils.tsx index fa66b4e545e375..51ccb3cdb63e18 100644 --- a/static/app/views/issueDetails/traceTimeline/utils.tsx +++ b/static/app/views/issueDetails/traceTimeline/utils.tsx @@ -7,11 +7,11 @@ function getEventTimestamp(start: number, event: TimelineEvent) { } export function getEventsByColumn( - durationMs: number, events: TimelineEvent[], + durationMs: number, totalColumns: number, start: number -) { +): Array<[column: number, events: TimelineEvent[]]> { const eventsByColumn = events.reduce((map, event) => { const columnPositionCalc = Math.floor((getEventTimestamp(start, event) / durationMs) * (totalColumns - 1)) + 1; @@ -27,7 +27,7 @@ export function getEventsByColumn( return map; }, new Map()); - return eventsByColumn; + return Array.from(eventsByColumn.entries()); } export function getChunkTimeRange( From 8274baa16d6c9fff540af06eb8d8335bf1757a8c Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 13 Feb 2024 12:33:47 -0800 Subject: [PATCH 336/357] chore(grouping): Run `_save_aggregate` tests for both versions of `_save_aggregate` (#65041) This modifies the `_save_aggregate` tests to run on both versions of `_save_aggregate`, by parameterizing both the function name and the arguments. It also includes a few cosmetic changes to make the tests easier to understand: - Add labels to existing race-condition parameter values. - Rename value being tested to `group_info` for clarity. - Move all test setup code out of inner `save_event` function, since there's now a lot of it and it's not really part of saving the event. 
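A minimal sketch of the parameterization pattern described above. The stand-in functions here are illustrative, not the real `_save_aggregate` variants; the point is that both the implementation under test and the extra keyword arguments it needs are parameterized, so a single test body exercises both code paths:

    import pytest

    # Illustrative stand-ins: the real tests parameterize _save_aggregate and
    # _save_aggregate_new, which accept different keyword arguments.
    def save_old(event, release=None, received_timestamp=0, **kwargs):
        return {"event": event, "version": "old"}

    def save_new(event, **kwargs):
        return {"event": event, "version": "new"}

    @pytest.mark.parametrize(
        "save_fn, extra_kwargs",
        [
            (save_old, {"release": None, "received_timestamp": 0}),
            (save_new, {}),
        ],
        ids=["old", "new"],
    )
    def test_save(save_fn, extra_kwargs):
        result = save_fn("event", metric_tags={}, **extra_kwargs)
        assert result is not None

The `ids` serve the same purpose as the labels added to the race-condition parameter values below: a failing run names the variant that broke.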
--- .../event_manager/test_save_aggregate.py | 90 ++++++++++++------- 1 file changed, 57 insertions(+), 33 deletions(-) diff --git a/tests/sentry/event_manager/test_save_aggregate.py b/tests/sentry/event_manager/test_save_aggregate.py index 6da926892ef5cd..03cd46f636306d 100644 --- a/tests/sentry/event_manager/test_save_aggregate.py +++ b/tests/sentry/event_manager/test_save_aggregate.py @@ -1,12 +1,13 @@ import contextlib import time +from collections.abc import Callable from threading import Thread from unittest.mock import patch import pytest from django.db import router, transaction -from sentry.event_manager import _save_aggregate +from sentry.event_manager import GroupInfo, _save_aggregate, _save_aggregate_new from sentry.eventstore.models import Event from sentry.grouping.result import CalculatedHashes from sentry.models.grouphash import GroupHash @@ -15,6 +16,11 @@ @django_db_all(transaction=True) +@pytest.mark.parametrize( + "use_save_aggregate_new", + (True, False), + ids=(" use_save_aggregate_new: True ", " use_save_aggregate_new: False "), +) @pytest.mark.parametrize( "is_race_free", [ @@ -31,9 +37,12 @@ # necessary so far. False, ], + ids=(" is_race_free: True ", " is_race_free: False "), ) @region_silo_test -def test_group_creation_race(monkeypatch, default_project, is_race_free): +def test_group_creation_race_new( + monkeypatch, default_project, is_race_free, use_save_aggregate_new +): CONCURRENCY = 2 if not is_race_free: @@ -53,40 +62,55 @@ def atomic(*args, **kwds): return_values = [] + event = Event( + default_project.id, + "11212012123120120415201309082013", + data={"timestamp": time.time()}, + ) + hashes = CalculatedHashes( + hashes=["pound sign", "octothorpe"], + hierarchical_hashes=[], + tree_labels=[], + ) + + # Mypy has a bug and can't handle the combo of a `...` input type and a ternary for the value + # See https://github.com/python/mypy/issues/14661 + save_aggregate_fn: Callable[..., GroupInfo | None] = ( + _save_aggregate_new if use_save_aggregate_new else _save_aggregate # type: ignore + ) + group_kwargs_fn_name = ( + "_get_group_processing_kwargs" if use_save_aggregate_new else "_get_group_creation_kwargs" + ) + + group_processing_kwargs = {"level": 10, "culprit": "", "data": {}} + save_aggregate_kwargs = { + "event": event, + "job": {"event_metadata": {}, "release": "dogpark"}, + "metric_tags": {}, + } + if not use_save_aggregate_new: + save_aggregate_kwargs.update( + { + "release": None, + "received_timestamp": 0, + } + ) + def save_event(): try: - data = {"timestamp": time.time()} - evt = Event( - default_project.id, - "89aeed6a472e4c5fb992d14df4d7e1b6", - data=data, - ) - group_creation_kwargs = {"level": 10, "culprit": ""} - hashes = CalculatedHashes( - hashes=["a" * 32, "b" * 32], - hierarchical_hashes=[], - tree_labels=[], - ) - with patch( "sentry.event_manager.get_hash_values", return_value=(hashes, hashes, hashes), ): with patch( - "sentry.event_manager._get_group_creation_kwargs", - return_value=group_creation_kwargs, + f"sentry.event_manager.{group_kwargs_fn_name}", + return_value=group_processing_kwargs, ): with patch("sentry.event_manager._materialize_metadata_many"): - ret = _save_aggregate( - evt, - job={"event_metadata": {}}, - release=None, - received_timestamp=0, - metric_tags={}, - ) - - assert ret is not None - return_values.append(ret) + group_info = save_aggregate_fn(**save_aggregate_kwargs) + + assert group_info is not None + return_values.append(group_info) finally: transaction.get_connection(router.db_for_write(GroupHash)).close() @@ 
-100,10 +124,10 @@ def save_event(): thread.join() if is_race_free: - # assert one group is new - assert len({rv.group.id for rv in return_values}) == 1 - assert sum(rv.is_new for rv in return_values) == 1 + # assert only one new group was created + assert len({group_info.group.id for group_info in return_values}) == 1 + assert sum(group_info.is_new for group_info in return_values) == 1 else: - # assert many groups are new - assert 1 < len({rv.group.id for rv in return_values}) <= CONCURRENCY - assert 1 < sum(rv.is_new for rv in return_values) <= CONCURRENCY + # assert multiple new groups were created + assert 1 < len({group_info.group.id for group_info in return_values}) <= CONCURRENCY + assert 1 < sum(group_info.is_new for group_info in return_values) <= CONCURRENCY From d2910f180f60db729dcb20e8d5deee31d1ff8a3a Mon Sep 17 00:00:00 2001 From: Jonas Date: Tue, 13 Feb 2024 15:49:14 -0500 Subject: [PATCH 337/357] feat(trace): add resizing for columns (#65034) Simpler than the context approach + we can resize the columns in real time, which provides a faster visual indicator to users of how the view will change. https://github.com/getsentry/sentry/assets/9317857/0a94060d-ff17-4d6e-a092-e3a5cd9a09db --------- Co-authored-by: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com> Co-authored-by: Abdullah Khan --- .../interfaces/spans/scrollbarManager.tsx | 1 - .../performance/waterfall/utils.tsx | 8 +- .../performance/newTraceDetails/guards.tsx | 5 +- .../performance/newTraceDetails/trace.tsx | 693 ++++++++++++++---- .../newTraceDetails/traceTree.spec.tsx | 446 +++++++---- .../performance/newTraceDetails/traceTree.tsx | 256 +++++-- .../virtualizedViewManager.tsx | 228 ++++++ 7 files changed, 1300 insertions(+), 337 deletions(-) create mode 100644 static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx diff --git a/static/app/components/events/interfaces/spans/scrollbarManager.tsx b/static/app/components/events/interfaces/spans/scrollbarManager.tsx index 47c3bf55fa8946..4cf499b6029589 100644 --- a/static/app/components/events/interfaces/spans/scrollbarManager.tsx +++ b/static/app/components/events/interfaces/spans/scrollbarManager.tsx @@ -90,7 +90,6 @@ export class Provider extends Component { componentDidMount() { // React will guarantee that refs are set before componentDidMount() is called; // but only for DOM elements that actually got rendered - this.initializeScrollState(); } diff --git a/static/app/components/performance/waterfall/utils.tsx b/static/app/components/performance/waterfall/utils.tsx index ad5ffa97657b39..f469cf6d776562 100644 --- a/static/app/components/performance/waterfall/utils.tsx +++ b/static/app/components/performance/waterfall/utils.tsx @@ -254,10 +254,10 @@ export const pickBarColor = (input: string | undefined): string => { return barColors[input]; } - const letterIndex1 = getLetterIndex(input.slice(0, 1)); - const letterIndex2 = getLetterIndex(input.slice(1, 2)); - const letterIndex3 = getLetterIndex(input.slice(2, 3)); - const letterIndex4 = getLetterIndex(input.slice(3, 4)); + const letterIndex1 = getLetterIndex(input[0]); + const letterIndex2 = getLetterIndex(input[1]); + const letterIndex3 = getLetterIndex(input[2]); + const letterIndex4 = getLetterIndex(input[3]); return colorsAsArray[ (letterIndex1 + letterIndex2 + letterIndex3 + letterIndex4) % colorsAsArray.length diff --git a/static/app/views/performance/newTraceDetails/guards.tsx b/static/app/views/performance/newTraceDetails/guards.tsx index c65d4c16fcab0b..b589b699ad70b6 100644 ---
a/static/app/views/performance/newTraceDetails/guards.tsx +++ b/static/app/views/performance/newTraceDetails/guards.tsx @@ -18,7 +18,10 @@ export function isMissingInstrumentationNode( export function isSpanNode( node: TraceTreeNode ): node is TraceTreeNode { - return !!(node.value && !('transaction' in node.value) && 'span_id' in node.value); + return ( + !!(node.value && !('transaction' in node.value) && 'span_id' in node.value) && + !('autogrouped_by' in node.value) + ); } export function isTransactionNode( diff --git a/static/app/views/performance/newTraceDetails/trace.tsx b/static/app/views/performance/newTraceDetails/trace.tsx index 5a15c4a654cfb8..27c5bae0d84704 100644 --- a/static/app/views/performance/newTraceDetails/trace.tsx +++ b/static/app/views/performance/newTraceDetails/trace.tsx @@ -1,8 +1,12 @@ import {Fragment, useCallback, useMemo, useRef, useState} from 'react'; import {AutoSizer, List} from 'react-virtualized'; +import {type Theme, useTheme} from '@emotion/react'; import styled from '@emotion/styled'; import ProjectAvatar from 'sentry/components/avatar/projectAvatar'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; +import {pickBarColor} from 'sentry/components/performance/waterfall/utils'; +import Placeholder from 'sentry/components/placeholder'; import {IconChevron} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -18,11 +22,14 @@ import useProjects from 'sentry/utils/useProjects'; import { isAutogroupedNode, isMissingInstrumentationNode, + isParentAutogroupedNode, isSpanNode, isTraceErrorNode, + isTraceNode, isTransactionNode, } from './guards'; import {ParentAutogroupNode, TraceTree, type TraceTreeNode} from './traceTree'; +import {VirtualizedViewManager} from './virtualizedViewManager'; interface TraceProps { trace: TraceSplitResults | null; @@ -30,19 +37,42 @@ interface TraceProps { } export function Trace(props: TraceProps) { + const theme = useTheme(); const api = useApi(); + const {projects} = useProjects(); const organization = useOrganization(); + const virtualizedListRef = useRef(null); + const viewManager = useRef(null); + + const [_rerender, setRender] = useState(0); const traceTree = useMemo(() => { if (!props.trace) { - return TraceTree.Empty(); + return TraceTree.Loading({ + project_slug: projects?.[0]?.slug ?? '', + event_id: props.trace_id, + }); } return TraceTree.FromTrace(props.trace); - }, [props.trace]); + }, [props.trace, props.trace_id, projects]); + + if (!viewManager.current) { + viewManager.current = new VirtualizedViewManager({ + list: {width: 0.5, column_refs: []}, + span_list: {width: 0.5, column_refs: []}, + }); + } + + if ( + traceTree.root.space && + (traceTree.root.space[0] !== viewManager.current.spanSpace[0] || + traceTree.root.space[1] !== viewManager.current.spanSpace[1]) + ) { + viewManager.current.initializeSpanSpace(traceTree.root.space); + } - const [_rerender, setRender] = useState(0); const treeRef = useRef(traceTree); treeRef.current = traceTree; @@ -68,8 +98,6 @@ export function Trace(props: TraceProps) { [] ); - const {projects} = useProjects(); - const projectLookup = useMemo(() => { return projects.reduce>((acc, project) => { acc[project.slug] = project; @@ -78,51 +106,101 @@ export function Trace(props: TraceProps) { }, [projects]); return ( - - - {({width, height}) => ( - ( - - )} - /> - )} - - + + viewManager.current?.onContainerRef(r)} + className={traceTree.type === 'loading' ? 
'Loading' : ''} + style={{ + backgroundColor: '#FFF', + height: '100%', + width: '100%', + position: 'absolute', + }} + > + viewManager.current?.registerDividerRef(r)} /> + + {({width, height}) => ( + { + return traceTree.type === 'loading' ? ( + + ) : ( + + ); + }} + /> + )} + + + ); } +const TraceDivider = styled('div')` + position: absolute; + height: 100%; + background-color: transparent; + top: 0; + z-index: 1; + cursor: col-resize; + + &:before { + content: ''; + position: absolute; + width: 1px; + height: 100%; + background-color: ${p => p.theme.border}; + left: 50%; + } + + &:hover&:before { + background-color: ${p => p.theme.purple300}; + } +`; + function RenderRow(props: { index: number; node: TraceTreeNode; onExpandNode: (node: TraceTreeNode, value: boolean) => void; onFetchChildren: (node: TraceTreeNode, value: boolean) => void; projects: Record; + startIndex: number; style: React.CSSProperties; + theme: Theme; trace_id: string; + viewManager: VirtualizedViewManager; }) { + const virtualizedIndex = props.index - props.startIndex; if (!props.node.value) { return null; } @@ -134,22 +212,50 @@ function RenderRow(props: { style={{ top: props.style.top, height: props.style.height, - paddingLeft: props.node.depth * 23, }} > -
    - - props.onExpandNode(props.node, !props.node.expanded)} +
    props.viewManager.registerColumnRef('list', r, virtualizedIndex)} + style={{ + width: props.viewManager.columns.list.width * 100 + '%', + }} + > +
    - {props.node.groupCount}{' '} - +
    + + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.groupCount}{' '} + +
    + + {t('Autogrouped')} + + {props.node.value.autogrouped_by.op} +
    +
    +
    props.viewManager.registerColumnRef('span_list', r, virtualizedIndex)} + style={{ + width: props.viewManager.columns.span_list.width * 100 + '%', + backgroundColor: + props.index % 2 ? undefined : props.theme.backgroundSecondary, + }} + > +
    - - {t('Autogrouped')} - - {props.node.value.autogrouped_by.op}
    ); } @@ -161,33 +267,63 @@ function RenderRow(props: { style={{ top: props.style.top, height: props.style.height, - paddingLeft: props.node.depth * 23, }} >
    props.viewManager.registerColumnRef('list', r, virtualizedIndex)} + style={{ + width: props.viewManager.columns.list.width * 100 + '%', + }} > - - {props.node.children.length > 0 ? ( - props.onExpandNode(props.node, !props.node.expanded)} +
    +
    - {props.node.children.length}{' '} - - ) : null} + + {props.node.children.length > 0 ? ( + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.children.length}{' '} + + ) : null} +
    + + {props.node.value['transaction.op']} + + {props.node.value.transaction} + {props.node.canFetchData ? ( + + ) : null} +
    +
    +
    props.viewManager.registerColumnRef('span_list', r, virtualizedIndex)} + className="TraceRightColumn" + style={{ + width: props.viewManager.columns.span_list.width * 100 + '%', + backgroundColor: + props.index % 2 ? undefined : props.theme.backgroundSecondary, + }} + > +
    - - {props.node.value['transaction.op']} - - {props.node.value.transaction} - {props.node.canFetchData ? ( - - ) : null}
    ); } @@ -199,34 +335,64 @@ function RenderRow(props: { style={{ top: props.style.top, height: props.style.height, - paddingLeft: props.node.depth * 23, }} >
    props.viewManager.registerColumnRef('list', r, virtualizedIndex)} + style={{ + width: props.viewManager.columns.list.width * 100 + '%', + }} > - - {props.node.children.length > 0 ? ( - props.onExpandNode(props.node, !props.node.expanded)} +
    +
    - {props.node.children.length}{' '} - - ) : null} + + {props.node.children.length > 0 ? ( + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.children.length}{' '} + + ) : null} +
    + {props.node.value.op ?? ''} + + + {props.node.value.description ?? ''} + + {props.node.canFetchData ? ( + + ) : null} +
    +
    +
    props.viewManager.registerColumnRef('span_list', r, virtualizedIndex)} + className="TraceRightColumn" + style={{ + width: props.viewManager.columns.span_list.width * 100 + '%', + backgroundColor: + props.index % 2 ? undefined : props.theme.backgroundSecondary, + }} + > +
    - {props.node.value.op ?? ''} - - - {props.node.value.description ?? ''} - - {props.node.canFetchData ? ( - - ) : null}
    ); } @@ -238,42 +404,98 @@ function RenderRow(props: { style={{ top: props.style.top, height: props.style.height, - paddingLeft: props.node.depth * 23, }} > -
    - +
    props.viewManager.registerColumnRef('list', r, virtualizedIndex)} + style={{ + width: props.viewManager.columns.list.width * 100 + '%', + }} + > +
    +
    + +
    + {t('Missing instrumentation')} +
    +
    +
    props.viewManager.registerColumnRef('span_list', r, virtualizedIndex)} + className="TraceRightColumn" + style={{ + width: props.viewManager.columns.span_list.width * 100 + '%', + backgroundColor: + props.index % 2 ? undefined : props.theme.backgroundSecondary, + }} + > +
    - {t('Missing instrumentation')}
    ); } - if ('orphan_errors' in props.node.value) { + if (isTraceNode(props.node)) { return (
    -
    - - {props.node.children.length > 0 ? ( - props.onExpandNode(props.node, !props.node.expanded)} - > - {props.node.children.length}{' '} - - ) : null} +
    props.viewManager.registerColumnRef('list', r, virtualizedIndex)} + style={{ + width: props.viewManager.columns.list.width * 100 + '%', + }} + > +
    +
    + + {props.node.children.length > 0 ? ( + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.children.length}{' '} + + ) : null} +
    + + {t('Trace')} + + {props.trace_id} +
    +
    +
    props.viewManager.registerColumnRef('span_list', r, virtualizedIndex)} + className="TraceRightColumn" + style={{ + width: props.viewManager.columns.span_list.width * 100 + '%', + backgroundColor: + props.index % 2 ? undefined : props.theme.backgroundSecondary, + }} + > +
    - - {t('Trace')} - - {props.trace_id}
    ); } @@ -284,34 +506,131 @@ function RenderRow(props: { style={{ top: props.style.top, height: props.style.height, - paddingLeft: props.node.depth * 23, }} > -
    - - {props.node.children.length > 0 ? ( - props.onExpandNode(props.node, !props.node.expanded)} - > - {props.node.children.length}{' '} - - ) : null} +
    props.viewManager.registerColumnRef('list', r, virtualizedIndex)} + style={{ + width: + (props.viewManager.columns.list.width / props.viewManager.width) * 100 + '%', + }} + > +
    +
    + + {props.node.children.length > 0 ? ( + props.onExpandNode(props.node, !props.node.expanded)} + > + {props.node.children.length}{' '} + + ) : null} +
    + + {t('Error')} + + {props.node.value.title} +
    +
    +
    props.viewManager.registerColumnRef('span_list', r, virtualizedIndex)} + className="TraceRightColumn" + style={{ + width: props.viewManager.columns.span_list.width * 100 + '%', + backgroundColor: props.index % 2 ? undefined : props.theme.backgroundSecondary, + }} + > + {/* @TODO: figure out what to do with trace errors */} + {/* */}
    - - {t('Error')} - - {props.node.value.title}
    ; } return null; } +function RenderPlaceholderRow(props: { + index: number; + node: TraceTreeNode; + projects: Record; + startIndex: number; + style: React.CSSProperties; + theme: Theme; + viewManager: VirtualizedViewManager; +}) { + const virtualizedIndex = props.index - props.startIndex; + return ( +
    +
    props.viewManager.registerColumnRef('list', r, virtualizedIndex)} + style={{width: props.viewManager.columns.list.width * 100 + '%'}} + > +
    +
    + + {props.node.children.length > 0 ? ( + void 0} + > + {props.node.children.length}{' '} + + ) : null} +
    + {isTraceNode(props.node) ? : null} + {isTraceNode(props.node) ? ( + 'Loading trace...' + ) : ( + + )} +
    +
    +
    props.viewManager.registerColumnRef('span_list', r, virtualizedIndex)} + style={{ + width: props.viewManager.columns.span_list.width * 100 + '%', + }} + > + {isTraceNode(props.node) ? null : ( + + )} +
    +
    + ); +} + function Connectors(props: {node: TraceTreeNode}) { const showVerticalConnector = ((props.node.expanded || props.node.zoomedIn) && props.node.children.length > 0) || - (props.node.value && 'autogrouped_by' in props.node.value); + (props.node.value && isParentAutogroupedNode(props.node)); // If the tail node of the collapsed node has no children, // we don't want to render the vertical connector as no children @@ -349,6 +668,26 @@ function Connectors(props: {node: TraceTreeNode}) { ); } +function SmallLoadingIndicator() { + return ( + + ); +} + +const StyledLoadingIndicator = styled(LoadingIndicator)` + transform: translate(-5px, 0); + + div:first-child { + border-left: 6px solid ${p => p.theme.gray300}; + animation: loading 900ms infinite linear; + } +`; + function ProjectBadge(props: {project: Project}) { return ; } @@ -370,6 +709,32 @@ function ChildrenCountButton(props: { ); } +interface TraceBarProps { + color: string; + node_space: [number, number] | null; + viewManager: VirtualizedViewManager; + virtualizedIndex: number; +} +function TraceBar(props: TraceBarProps) { + if (!props.node_space) { + return null; + } + + return ( +
    + props.viewManager.registerSpanBarRef(r, props.node_space!, props.virtualizedIndex) + } + className="TraceBar" + style={{ + position: 'absolute', + transform: props.viewManager.computeSpanMatrixTransform(props.node_space), + backgroundColor: props.color, + }} + /> + ); +} + /** * This is a wrapper around the Trace component to apply styles * to the trace tree. It exists because we _do not_ want to trigger @@ -377,11 +742,49 @@ function ChildrenCountButton(props: { * the scrolling to flicker. */ const TraceStylingWrapper = styled('div')` + position: relative; + + @keyframes show { + 0% { + opacity: 0; + transform: translate(0, 2px); + } + 100% { + opacity: .7; + transform: translate(0, 0px); + } + }; + + @keyframes showPlaceholder { + 0% { + opacity: 0; + transform: translate(-8px, 0px); + } + 100% { + opacity: .7; + transform: translate(0, 0px); + } + }; + + &.Loading { + .TraceRow { + opacity: 0; + animation: show 0.2s ease-in-out forwards; + } + + .Placeholder { + opacity: 0; + transform: translate(-8px, 0px); + animation: showPlaceholder 0.2s ease-in-out forwards; + } + } + .TraceRow { display: flex; align-items: center; position: absolute; width: 100%; + transition: background-color 0.15s ease-in-out 0s; font-size: ${p => p.theme.fontSizeSmall}; &:hover { @@ -402,6 +805,38 @@ const TraceStylingWrapper = styled('div')` } } + .TraceLeftColumn { + height: 100%; + white-space: nowrap; + display: flex; + align-items: center; + overflow: hidden; + will-change: width; + + .TraceLeftColumnInner { + width: 100%; + height: 100%; + white-space: nowrap; + display: flex; + align-items: center; + } + } + + .TraceRightColumn { + height: 100%; + position: relative; + display: flex; + align-items: center; + will-change: width; + } + + .TraceBar { + height: 64%; + width: 100%; + background-color: black; + transform-origin: left center; + } + .TraceChildrenCount { height: 16px; white-space: nowrap; diff --git a/static/app/views/performance/newTraceDetails/traceTree.spec.tsx b/static/app/views/performance/newTraceDetails/traceTree.spec.tsx index a7d60bb15f1f23..42a968fbaa5867 100644 --- a/static/app/views/performance/newTraceDetails/traceTree.spec.tsx +++ b/static/app/views/performance/newTraceDetails/traceTree.spec.tsx @@ -44,12 +44,13 @@ function makeTransaction(overrides: Partial = {}): TraceFullD } as TraceFullDetailed; } -function makeRawSpan(overrides: Partial = {}): RawSpanType { +function makeSpan(overrides: Partial = {}): RawSpanType { return { op: '', description: '', + span_id: '', start_timestamp: 0, - timestamp: 1, + timestamp: 10, ...overrides, } as RawSpanType; } @@ -120,6 +121,78 @@ function assertParentAutogroupedNode( // } // } +describe('TreeNode', () => { + it('expands transaction nodes by default', () => { + const node = new TraceTreeNode(null, makeTransaction(), { + project_slug: '', + event_id: '', + }); + expect(node.expanded).toBe(true); + }); + it('points parent to node', () => { + const root = new TraceTreeNode(null, makeTransaction(), { + project_slug: '', + event_id: '', + }); + const child = new TraceTreeNode(root, makeTransaction(), { + project_slug: '', + event_id: '', + }); + expect(child.parent).toBe(root); + }); + it('depth', () => { + const root = new TraceTreeNode(null, makeTransaction(), { + project_slug: '', + event_id: '', + }); + const child = new TraceTreeNode(root, makeTransaction(), { + project_slug: '', + event_id: '', + }); + const grandChild = new TraceTreeNode(child, makeTransaction(), { + project_slug: '', + event_id: '', + }); + 
expect(grandChild.depth).toBe(1); + }); + it('getVisibleChildren', () => { + const root = new TraceTreeNode(null, makeTransaction(), { + project_slug: '', + event_id: '', + }); + + const child = new TraceTreeNode(root, makeTransaction(), { + project_slug: '', + event_id: '', + }); + + root.children.push(child); + expect(root.getVisibleChildren()).toHaveLength(1); + expect(root.getVisibleChildren()[0]).toBe(child); + + root.expanded = false; + expect(root.getVisibleChildren()).toHaveLength(0); + }); + + it('getVisibleChildrenCount', () => { + const root = new TraceTreeNode(null, makeTransaction(), { + project_slug: '', + event_id: '', + }); + + const child = new TraceTreeNode(root, makeTransaction(), { + project_slug: '', + event_id: '', + }); + + root.children.push(child); + expect(root.getVisibleChildrenCount()).toBe(1); + + root.expanded = false; + expect(root.getVisibleChildrenCount()).toBe(0); + }); +}); + describe('TraceTree', () => { beforeEach(() => { MockApiClient.clearMockResponses(); @@ -169,10 +242,10 @@ describe('TraceTree', () => { ); const node = TraceTree.FromSpans(root, [ - makeRawSpan({start_timestamp: 0, op: '1', span_id: '1'}), - makeRawSpan({start_timestamp: 1, op: '2', span_id: '2', parent_span_id: '1'}), - makeRawSpan({start_timestamp: 2, op: '3', span_id: '3', parent_span_id: '2'}), - makeRawSpan({start_timestamp: 3, op: '4', span_id: '4', parent_span_id: '1'}), + makeSpan({start_timestamp: 0, op: '1', span_id: '1'}), + makeSpan({start_timestamp: 1, op: '2', span_id: '2', parent_span_id: '1'}), + makeSpan({start_timestamp: 2, op: '3', span_id: '3', parent_span_id: '2'}), + makeSpan({start_timestamp: 3, op: '4', span_id: '4', parent_span_id: '1'}), ]); if (!isSpanNode(node.children[0])) { @@ -203,15 +276,15 @@ describe('TraceTree', () => { const date = new Date().getTime(); const node = TraceTree.FromSpans(root, [ - makeRawSpan({ + makeSpan({ start_timestamp: date, - timestamp: date + 100, + timestamp: date + 1, span_id: '1', op: 'span 1', }), - makeRawSpan({ - start_timestamp: date + 200, - timestamp: date + 400, + makeSpan({ + start_timestamp: date + 2, + timestamp: date + 4, op: 'span 2', span_id: '2', }), @@ -250,15 +323,15 @@ describe('TraceTree', () => { url: '/organizations/org-slug/events/undefined:undefined/', method: 'GET', body: makeEvent({startTimestamp: 0}, [ - makeRawSpan({start_timestamp: 1, op: 'span 1', span_id: '1'}), - makeRawSpan({ + makeSpan({start_timestamp: 1, op: 'span 1', span_id: '1'}), + makeSpan({ start_timestamp: 2, op: 'span 2', span_id: '2', parent_span_id: '1', }), - makeRawSpan({start_timestamp: 3, op: 'span 3', parent_span_id: '2'}), - makeRawSpan({start_timestamp: 4, op: 'span 4', parent_span_id: '1'}), + makeSpan({start_timestamp: 3, op: 'span 3', parent_span_id: '2'}), + makeSpan({start_timestamp: 4, op: 'span 4', parent_span_id: '1'}), ]), }); @@ -445,9 +518,7 @@ describe('TraceTree', () => { MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/project:event_id/', method: 'GET', - body: makeEvent({}, [ - makeRawSpan({start_timestamp: 0, op: 'span', span_id: '1'}), - ]), + body: makeEvent({}, [makeSpan({start_timestamp: 0, op: 'span', span_id: '1'})]), }); expect(tree.list.length).toBe(2); @@ -601,7 +672,7 @@ describe('TraceTree', () => { const request = MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/project:event_id/', method: 'GET', - body: makeEvent({}, [makeRawSpan()]), + body: makeEvent({}, [makeSpan()]), }); const node = tree.list[1]; @@ -632,7 +703,7 @@ describe('TraceTree', () => { 
MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/project:event_id/', method: 'GET', - body: makeEvent({}, [makeRawSpan({span_id: 'span1', description: 'span1'})]), + body: makeEvent({}, [makeSpan({span_id: 'span1', description: 'span1'})]), }); tree.zoomIn(tree.list[1], true, { api: new MockApiClient(), @@ -669,7 +740,7 @@ describe('TraceTree', () => { MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/project:event_id/', method: 'GET', - body: makeEvent({}, [makeRawSpan({span_id: 'span 1', description: 'span1'})]), + body: makeEvent({}, [makeSpan({span_id: 'span 1', description: 'span1'})]), }); // Zoom in tree.zoomIn(tree.list[1], true, { @@ -723,7 +794,7 @@ describe('TraceTree', () => { const request = MockApiClient.addMockResponse({ url: '/organizations/org-slug/events/other_project:event_id/', method: 'GET', - body: makeEvent({}, [makeRawSpan({description: 'span1'})]), + body: makeEvent({}, [makeSpan({description: 'span1'})]), }); tree.expand(tree.list[1], true); tree.zoomIn(tree.list[2], true, { @@ -767,8 +838,8 @@ describe('TraceTree', () => { url: '/organizations/org-slug/events/project:event_id/', method: 'GET', body: makeEvent({}, [ - makeRawSpan({description: 'span1'}), - makeRawSpan({description: 'span2'}), + makeSpan({description: 'span1'}), + makeSpan({description: 'span2'}), ]), }); @@ -799,14 +870,14 @@ describe('TraceTree', () => { describe('autogrouping', () => { it('auto groups sibling spans and preserves tail spans', () => { - const root = new TraceTreeNode(null, makeRawSpan({description: 'span1'}), { + const root = new TraceTreeNode(null, makeSpan({description: 'span1'}), { project_slug: '', event_id: '', }); for (let i = 0; i < 5; i++) { root.children.push( - new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'db'}), { + new TraceTreeNode(root, makeSpan({description: 'span', op: 'db'}), { project_slug: '', event_id: '', }) @@ -814,7 +885,7 @@ describe('TraceTree', () => { } root.children.push( - new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'http'}), { + new TraceTreeNode(root, makeSpan({description: 'span', op: 'http'}), { project_slug: '', event_id: '', }) @@ -828,14 +899,14 @@ describe('TraceTree', () => { }); it('autogroups when number of children is exactly 5', () => { - const root = new TraceTreeNode(null, makeRawSpan({description: 'span1'}), { + const root = new TraceTreeNode(null, makeSpan({description: 'span1'}), { project_slug: '', event_id: '', }); for (let i = 0; i < 5; i++) { root.children.push( - new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'db'}), { + new TraceTreeNode(root, makeSpan({description: 'span', op: 'db'}), { project_slug: '', event_id: '', }) @@ -850,14 +921,14 @@ describe('TraceTree', () => { }); it('autogroups when number of children is > 5', () => { - const root = new TraceTreeNode(null, makeRawSpan({description: 'span1'}), { + const root = new TraceTreeNode(null, makeSpan({description: 'span1'}), { project_slug: '', event_id: '', }); for (let i = 0; i < 7; i++) { root.children.push( - new TraceTreeNode(root, makeRawSpan({description: 'span', op: 'db'}), { + new TraceTreeNode(root, makeSpan({description: 'span', op: 'db'}), { project_slug: '', event_id: '', }) @@ -871,46 +942,46 @@ describe('TraceTree', () => { expect(root.children.length).toBe(1); }); - it('autogroups children case', () => { - // span1 : db - // ---span2 : http - // ------ span3 : http - - // to + it('autogroups direct children case', () => { + // db db db + // http -> parent autogroup (3) 
-> parent autogroup (3) + // http http + // http http + // http - // span1 : db - // ---autogrouped(span2) : http - // ------ span2 : http - // --------- span3 : http - - const root = new TraceTreeNode( + const root: TraceTreeNode = new TraceTreeNode( null, - makeRawSpan({description: 'span1', span_id: '1', op: 'db'}), - { - project_slug: '', - event_id: '', - } + makeSpan({ + description: `span1`, + span_id: `1`, + op: 'db', + }), + {project_slug: '', event_id: ''} ); - const child = new TraceTreeNode( - root, - makeRawSpan({description: 'span2', span_id: '2', op: 'http'}), - { - project_slug: '', - event_id: '', - } - ); - root.children.push(child); + let last: TraceTreeNode = root; - const grandChild = new TraceTreeNode( - child, - makeRawSpan({description: 'span3', span_id: '3', op: 'http'}), - { - project_slug: '', - event_id: '', - } - ); - child.children.push(grandChild); + for (let i = 0; i < 3; i++) { + const node = new TraceTreeNode( + last, + makeSpan({ + description: `span${i}`, + span_id: `${i}`, + op: 'http', + }), + { + project_slug: '', + event_id: '', + } + ); + + last.children.push(node); + last = node; + } + + if (!root) { + throw new Error('root is null'); + } expect(root.children.length).toBe(1); expect(root.children[0].children.length).toBe(1); @@ -919,106 +990,201 @@ describe('TraceTree', () => { expect(root.children.length).toBe(1); - const autoGroupedNode = root.children[0]; - expect(autoGroupedNode.children.length).toBe(0); - - autoGroupedNode.expanded = true; + assertAutogroupedNode(root.children[0]); + expect(root.children[0].children.length).toBe(0); - expect((autoGroupedNode.children[0].value as RawSpanType).description).toBe( - 'span2' + root.children[0].expanded = true; + expect((root.children[0].children[0].value as RawSpanType).description).toBe( + 'span0' ); }); it('autogrouping direct children skips rendering intermediary nodes', () => { + // db db db + // http autogrouped (3) autogrouped (3) + // http -> db -> http + // http http + // db http + // db const root = new TraceTreeNode( null, - makeRawSpan({span_id: 'span1', description: 'span1', op: 'db'}), + makeSpan({span_id: 'span1', description: 'span1', op: 'db'}), { project_slug: '', event_id: '', } ); - const child = new TraceTreeNode( - root, - makeRawSpan({span_id: 'span2', description: 'span2', op: 'http'}), + let last = root; + for (let i = 0; i < 4; i++) { + const node = new TraceTreeNode( + last, + makeSpan({ + span_id: `span`, + description: `span`, + op: i === 3 ? 
'db' : 'http', + }), + { + project_slug: '', + event_id: '', + } + ); + last.children.push(node); + last = node; + } + + TraceTree.AutogroupDirectChildrenSpanNodes(root); + + const autoGroupedNode = root.children[0]; + assertAutogroupedNode(autoGroupedNode); + + expect(autoGroupedNode.children.length).toBe(1); + expect((autoGroupedNode.children[0].value as RawSpanType).op).toBe('db'); + + autoGroupedNode.expanded = true; + expect(autoGroupedNode.children.length).toBe(1); + expect((autoGroupedNode.children[0].value as RawSpanType).op).toBe('http'); + }); + + it('nested direct autogrouping', () => { + // db db db + // http -> parent autogroup (3) -> parent autogroup (3) + // http db http + // http parent autogroup (3) http + // db http + // http db + // http parent autogrouped (3) + // http http + // http + // http + const root = new TraceTreeNode( + null, + makeSpan({span_id: 'span', description: 'span', op: 'db'}), { project_slug: '', event_id: '', } ); - root.children.push(child); - const grandChild = new TraceTreeNode( - child, - makeRawSpan({span_id: 'span3', description: 'span3', op: 'http'}), + let last = root; + + for (let i = 0; i < 3; i++) { + if (i === 1) { + const autogroupBreakingSpan = new TraceTreeNode( + last, + makeSpan({span_id: 'span', description: 'span', op: 'db'}), + { + project_slug: '', + event_id: '', + } + ); + + last.children.push(autogroupBreakingSpan); + last = autogroupBreakingSpan; + } else { + for (let j = 0; j < 3; j++) { + const node = new TraceTreeNode( + last, + makeSpan({span_id: `span${j}`, description: `span${j}`, op: 'http'}), + { + project_slug: '', + event_id: '', + } + ); + last.children.push(node); + last = node; + } + } + } + + TraceTree.AutogroupDirectChildrenSpanNodes(root); + + assertAutogroupedNode(root.children[0]); + assertAutogroupedNode(root.children[0].children[0].children[0]); + }); + + it('sibling autogrouping', () => { + // db db + // http sibling autogrouped (5) + // http + // http -> + // http + // http + const root = new TraceTreeNode( + null, + makeTransaction({start_timestamp: 0, timestamp: 10}), { project_slug: '', event_id: '', } ); - child.children.push(grandChild); - expect(root.children.length).toBe(1); - expect(root.children[0].children.length).toBe(1); - - TraceTree.AutogroupDirectChildrenSpanNodes(root); + for (let i = 0; i < 5; i++) { + root.children.push( + new TraceTreeNode(root, makeSpan({start_timestamp: i, timestamp: i + 1}), { + project_slug: '', + event_id: '', + }) + ); + } + TraceTree.AutogroupSiblingSpanNodes(root); expect(root.children.length).toBe(1); - - const autoGroupedNode = root.children[0]; - expect(autoGroupedNode.children.length).toBe(0); - - autoGroupedNode.expanded = true; - - expect((autoGroupedNode.children[0].value as RawSpanType).description).toBe( - 'span2' - ); + assertAutogroupedNode(root.children[0]); }); - it('renders children of autogrouped sibling nodes', async () => { - const tree = TraceTree.FromTrace( - makeTrace({ - transactions: [ - makeTransaction({ - transaction: '/', - project_slug: 'project', - event_id: 'event_id', - }), - ], - }) + it('multiple sibling autogrouping', () => { + // db db + // http sibling autogrouped (5) + // http db + // http -> sibling autogrouped (5) + // http + // http + // db + // http + // http + // http + // http + // http + const root = new TraceTreeNode( + null, + makeTransaction({start_timestamp: 0, timestamp: 10}), + { + project_slug: '', + event_id: '', + } ); - MockApiClient.addMockResponse({ - url: '/organizations/org-slug/events/project:event_id/', 
- method: 'GET', - body: makeEvent({}, [ - makeRawSpan({description: 'parent span', op: 'http', span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', span_id: '2', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', span_id: '3', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', span_id: '4', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', span_id: '5', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', span_id: '5', parent_span_id: '1'}), - ]), - }); - - expect(tree.list.length).toBe(2); - tree.zoomIn(tree.list[1], true, { - api: new MockApiClient(), - organization: OrganizationFixture(), - }); - - await waitFor(() => { - expect(tree.list.length).toBe(4); - }); + for (let i = 0; i < 10; i++) { + if (i === 5) { + root.children.push( + new TraceTreeNode( + root, + makeSpan({start_timestamp: i, timestamp: i + 1, op: 'db'}), + { + project_slug: '', + event_id: '', + } + ) + ); + } - const autogroupedNode = tree.list[tree.list.length - 1]; + root.children.push( + new TraceTreeNode( + root, + makeSpan({start_timestamp: i, timestamp: i + 1, op: 'http'}), + { + project_slug: '', + event_id: '', + } + ) + ); + } - assertAutogroupedNode(autogroupedNode); - expect(autogroupedNode.value.autogrouped_by).toBeTruthy(); - expect(autogroupedNode.children.length).toBe(5); - tree.expand(autogroupedNode, true); - expect(tree.list.length).toBe(9); + TraceTree.AutogroupSiblingSpanNodes(root); + assertAutogroupedNode(root.children[0]); + expect(root.children).toHaveLength(3); + assertAutogroupedNode(root.children[2]); }); it('renders children of autogrouped direct children nodes', async () => { @@ -1038,18 +1204,18 @@ describe('TraceTree', () => { url: '/organizations/org-slug/events/project:event_id/', method: 'GET', body: makeEvent({}, [ - makeRawSpan({description: 'parent span', op: 'http', span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', span_id: '2', parent_span_id: '1'}), - makeRawSpan({description: 'span', op: 'db', span_id: '3', parent_span_id: '2'}), - makeRawSpan({description: 'span', op: 'db', span_id: '4', parent_span_id: '3'}), - makeRawSpan({description: 'span', op: 'db', span_id: '5', parent_span_id: '4'}), - makeRawSpan({ + makeSpan({description: 'parent span', op: 'http', span_id: '1'}), + makeSpan({description: 'span', op: 'db', span_id: '2', parent_span_id: '1'}), + makeSpan({description: 'span', op: 'db', span_id: '3', parent_span_id: '2'}), + makeSpan({description: 'span', op: 'db', span_id: '4', parent_span_id: '3'}), + makeSpan({description: 'span', op: 'db', span_id: '5', parent_span_id: '4'}), + makeSpan({ description: 'span', op: 'redis', span_id: '6', parent_span_id: '5', }), - makeRawSpan({description: 'span', op: 'https', parent_span_id: '1'}), + makeSpan({description: 'span', op: 'https', parent_span_id: '1'}), ]), }); diff --git a/static/app/views/performance/newTraceDetails/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceTree.tsx index fa477f8545bf7a..78c5c4211a99b6 100644 --- a/static/app/views/performance/newTraceDetails/traceTree.tsx +++ b/static/app/views/performance/newTraceDetails/traceTree.tsx @@ -152,7 +152,7 @@ function maybeInsertMissingInstrumentationSpan( return; } - if (node.value.start_timestamp - lastInsertedSpan.value.timestamp < 100) { + if (node.value.start_timestamp - lastInsertedSpan.value.timestamp < 0.1) { return; } @@ -174,13 +174,23 @@ function maybeInsertMissingInstrumentationSpan( } export class TraceTree { + type: 'loading' | 'trace' = 
'trace'; root: TraceTreeNode = TraceTreeNode.Root(); + private _spanPromises: Map, Promise> = new Map(); private _list: TraceTreeNode[] = []; static Empty() { - return new TraceTree().build(); + const tree = new TraceTree().build(); + tree.type = 'trace'; + return tree; + } + + static Loading(metadata: TraceTree.Metadata): TraceTree { + const tree = makeExampleTrace(metadata); + tree.type = 'loading'; + return tree; } static FromTrace(trace: TraceTree.Trace): TraceTree { @@ -215,10 +225,18 @@ export class TraceTree { }); // Trace is always expanded by default - traceNode.expanded = true; tree.root.children.push(traceNode); + let traceStart = Number.POSITIVE_INFINITY; + let traceEnd = Number.NEGATIVE_INFINITY; + for (const transaction of trace.transactions) { + if (transaction.start_timestamp < traceStart) { + traceStart = transaction.start_timestamp; + } + if (transaction.timestamp > traceEnd) { + traceEnd = transaction.timestamp; + } visit(traceNode, transaction); } @@ -226,6 +244,8 @@ export class TraceTree { visit(traceNode, trace_error); } + traceNode.space = [traceStart, traceEnd - traceStart]; + tree.root.space = [traceStart, traceEnd - traceStart]; return tree.build(); } @@ -266,7 +286,6 @@ export class TraceTree { }); const parentLinkMetadata = childrenLinks.get(span.span_id); - node.expanded = true; node.canFetchData = !!parentLinkMetadata; if (parentLinkMetadata) { @@ -350,8 +369,8 @@ export class TraceTree { event_id: undefined, project_slug: undefined, }, - head as TraceTreeNode, - tail as TraceTreeNode + head, + tail ); if (!node.parent) { @@ -359,9 +378,14 @@ export class TraceTree { } autoGroupedNode.groupCount = groupMatchCount + 1; + autoGroupedNode.space = [ + head.value.start_timestamp, + tail.value.timestamp - head.value.start_timestamp, + ]; for (const c of tail.children) { c.parent = autoGroupedNode; + queue.push(c); } const index = node.parent.children.indexOf(node); @@ -382,24 +406,23 @@ export class TraceTree { continue; } - let startIndex = 0; + let index = 0; let matchCount = 0; - - for (let i = 0; i < node.children.length - 1; i++) { - const current = node.children[i] as TraceTreeNode; - const next = node.children[i + 1] as TraceTreeNode; + while (index < node.children.length) { + const current = node.children[index] as TraceTreeNode; + const next = node.children[index + 1] as TraceTreeNode; if ( + next && next.children.length === 0 && current.children.length === 0 && - // @TODO this should check for typeof op and description - // to be of type string for runtime safety. Afaik it is impossible - // for these to be anything else but a string, but we should still check next.value.op === current.value.op && next.value.description === current.value.description ) { matchCount++; - if (i < node.children.length - 2) { + // If the next node is the last node in the list, we keep iterating + if (index + 1 < node.children.length) { + index++; continue; } } @@ -420,20 +443,21 @@ export class TraceTree { } ); - // Copy the children under the new node. - autoGroupedNode.children = node.children.slice(startIndex, matchCount + 1); autoGroupedNode.groupCount = matchCount + 1; - - // Remove the old children from the parent and insert the new node. 
- node.children.splice(startIndex, matchCount + 1, autoGroupedNode); - - for (let j = 0; j < autoGroupedNode.children.length; j++) { - autoGroupedNode.children[j].parent = autoGroupedNode; + const start = index - matchCount; + for (let j = start; j < index - 1; j++) { + autoGroupedNode.children.push(node.children[j]); + autoGroupedNode.children[autoGroupedNode.children.length - 1].parent = + autoGroupedNode; } - } - startIndex = i; - matchCount = 0; + node.children.splice(start, matchCount + 1, autoGroupedNode); + index = start + 1; + matchCount = 0; + } else { + index++; + matchCount = 0; + } } } } @@ -492,8 +516,12 @@ export class TraceTree { if (node.expanded) { const index = this._list.indexOf(node); this._list.splice(index + 1, node.getVisibleChildrenCount()); + // Flip expanded after collecting visible children + node.expanded = expanded; } else { const index = this._list.indexOf(node); + // Flip expanded so that we can collect visible children + node.expanded = expanded; this._list.splice(index + 1, 0, ...node.getVisibleChildren()); } node.expanded = expanded; @@ -590,38 +618,9 @@ export class TraceTree { * Prints the tree in a human readable format, useful for debugging and testing */ print() { + // root nodes are -1 indexed, so we add 1 to the depth so .repeat doesnt throw const print = this.list - .map(t => { - const padding = ' '.repeat(t.depth); - - if (isAutogroupedNode(t)) { - if (isParentAutogroupedNode(t)) { - return padding + 'parent autogroup'; - } - if (isSiblingAutogroupedNode(t)) { - return padding + 'sibling autogroup'; - } - - return padding + 'autogroup'; - } - if (isSpanNode(t)) { - return padding + t.value?.op ?? 'unknown span op'; - } - if (isTransactionNode(t)) { - return padding + t.value.transaction ?? 'unknown transaction'; - } - if (isMissingInstrumentationNode(t)) { - return padding + 'missing_instrumentation'; - } - if (isRootNode(t)) { - return padding + 'Root'; - } - if (isTraceNode(t)) { - return padding + 'Trace'; - } - - throw new Error('Not implemented'); - }) + .map(t => printNode(t, 0)) .filter(Boolean) .join('\n'); @@ -646,6 +645,8 @@ export class TraceTreeNode { event_id: undefined, }; + space: [number, number] | null = null; + private _depth: number | undefined; private _children: TraceTreeNode[] = []; private _spanChildren: TraceTreeNode< @@ -662,7 +663,11 @@ export class TraceTreeNode { this.value = value; this.metadata = metadata; - if (isTransactionNode(this)) { + if (value && 'timestamp' in value && 'start_timestamp' in value) { + this.space = [value.start_timestamp, value.timestamp - value.start_timestamp]; + } + + if (isTransactionNode(this) || isTraceNode(this) || isSpanNode(this)) { this.expanded = true; } } @@ -798,8 +803,12 @@ export class TraceTreeNode { const stack: TraceTreeNode[] = []; let count = 0; - for (let i = this.children.length - 1; i >= 0; i--) { - if (this.children[i].expanded || isParentAutogroupedNode(this.children[i])) { + if ( + this.expanded || + isParentAutogroupedNode(this) || + isMissingInstrumentationNode(this) + ) { + for (let i = this.children.length - 1; i >= 0; i--) { stack.push(this.children[i]); } } @@ -823,8 +832,12 @@ export class TraceTreeNode { const stack: TraceTreeNode[] = []; const children: TraceTreeNode[] = []; - for (let i = this.children.length - 1; i >= 0; i--) { - if (this.children[i].expanded || isParentAutogroupedNode(this.children[i])) { + if ( + this.expanded || + isParentAutogroupedNode(this) || + isMissingInstrumentationNode(this) + ) { + for (let i = this.children.length - 1; i >= 0; 
i--) { stack.push(this.children[i]); } } @@ -844,6 +857,19 @@ export class TraceTreeNode { return children; } + print() { + // root nodes are -1 indexed, so we add 1 to the depth so .repeat doesnt throw + const offset = this.depth === -1 ? 1 : 0; + const nodes = [this, ...this.getVisibleChildren()]; + const print = nodes + .map(t => printNode(t, offset)) + .filter(Boolean) + .join('\n'); + + // eslint-disable-next-line no-console + console.log(print); + } + static Root() { return new TraceTreeNode(null, null, { event_id: undefined, @@ -889,3 +915,109 @@ export class SiblingAutogroupNode extends TraceTreeNode +): TraceTree.Transaction { + return { + start_timestamp: 0, + timestamp: 0, + errors: [], + performance_issues: [], + parent_span_id: '', + span_id: '', + parent_event_id: '', + project_id: 0, + 'transaction.duration': 0, + 'transaction.op': 'db', + 'transaction.status': 'ok', + generation: 0, + project_slug: '', + event_id: `event_id`, + transaction: `transaction`, + children: [], + ...partial, + }; +} + +export function makeExampleTrace(metadata: TraceTree.Metadata): TraceTree { + const trace: TraceTree.Trace = { + transactions: [], + orphan_errors: [], + }; + + function randomBetween(min: number, max: number) { + return Math.floor(Math.random() * (max - min + 1) + min); + } + + let start = new Date().getTime(); + + for (let i = 0; i < 25; i++) { + const end = start + randomBetween(100, 200); + const nest = i > 0 && Math.random() > 0.5; + + if (nest) { + const parent = trace.transactions[trace.transactions.length - 1]; + parent.children.push( + partialTransaction({ + ...metadata, + generation: 0, + start_timestamp: start, + transaction: `parent transaction ${i}`, + timestamp: end, + }) + ); + parent.timestamp = end; + } else { + trace.transactions.push( + partialTransaction({ + ...metadata, + generation: 0, + start_timestamp: start, + transaction: 'loading...', + ['transaction.op']: 'loading', + timestamp: end, + }) + ); + } + + start = end; + } + + const tree = TraceTree.FromTrace(trace); + + return tree; +} + +function printNode(t: TraceTreeNode, offset: number): string { + // +1 because we may be printing from the root which is -1 indexed + const padding = ' '.repeat(t.depth + offset); + + if (isAutogroupedNode(t)) { + if (isParentAutogroupedNode(t)) { + return padding + `parent autogroup (${t.groupCount})`; + } + if (isSiblingAutogroupedNode(t)) { + return padding + `sibling autogroup (${t.groupCount})`; + } + + return padding + 'autogroup'; + } + if (isSpanNode(t)) { + return padding + t.value?.op ?? 'unknown span op'; + } + if (isTransactionNode(t)) { + return padding + t.value.transaction ?? 'unknown transaction'; + } + if (isMissingInstrumentationNode(t)) { + return padding + 'missing_instrumentation'; + } + if (isRootNode(t)) { + return padding + 'Root'; + } + if (isTraceNode(t)) { + return padding + 'Trace'; + } + + throw new Error('Not implemented'); +} diff --git a/static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx b/static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx new file mode 100644 index 00000000000000..de30cf64a592a7 --- /dev/null +++ b/static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx @@ -0,0 +1,228 @@ +const DIVIDER_WIDTH = 6; + +type ViewColumn = { + column_refs: (HTMLElement | undefined)[]; + width: number; +}; + +type Matrix2D = [number, number, number, number, number, number]; + +/** + * Tracks the state of the virtualized view and manages the resizing of the columns. 
+ * Children components should call `registerColumnRef` and `registerDividerRef` to register + * their respective refs. + */ +export class VirtualizedViewManager { + width: number = 0; + + container: HTMLElement | null = null; + dividerRef: HTMLElement | null = null; + resizeObserver: ResizeObserver | null = null; + + dividerStartVec: [number, number] | null = null; + + spanDrawMatrix: Matrix2D = [1, 0, 0, 1, 0, 0]; + spanScalingFactor: number = 1; + minSpanScalingFactor: number = 0.02; + + spanSpace: [number, number] = [0, 1000]; + spanView: [number, number] = [0, 1000]; + + columns: { + list: ViewColumn; + span_list: ViewColumn; + }; + + span_bars: ({ref: HTMLElement; space: [number, number]} | undefined)[] = []; + + constructor(columns: { + list: ViewColumn; + span_list: ViewColumn; + }) { + this.columns = columns; + + this.onDividerMouseDown = this.onDividerMouseDown.bind(this); + this.onDividerMouseUp = this.onDividerMouseUp.bind(this); + this.onDividerMouseMove = this.onDividerMouseMove.bind(this); + } + + onContainerRef(container: HTMLElement | null) { + if (container) { + this.initialize(container); + } else { + this.teardown(); + } + } + + registerDividerRef(ref: HTMLElement | null) { + if (!ref) { + if (this.dividerRef) { + this.dividerRef.removeEventListener('mousedown', this.onDividerMouseDown); + } + this.dividerRef = null; + return; + } + + this.dividerRef = ref; + this.dividerRef.style.width = `${DIVIDER_WIDTH}px`; + this.dividerRef.style.transform = `translateX(${ + this.width * (this.columns.list.width - (2 * DIVIDER_WIDTH) / this.width) + }px)`; + + ref.addEventListener('mousedown', this.onDividerMouseDown, {passive: true}); + } + + onDividerMouseDown(event: MouseEvent) { + if (!this.container) { + return; + } + + this.dividerStartVec = [event.clientX, event.clientY]; + this.container.style.userSelect = 'none'; + + this.container.addEventListener('mouseup', this.onDividerMouseUp, {passive: true}); + this.container.addEventListener('mousemove', this.onDividerMouseMove, { + passive: true, + }); + } + + onDividerMouseUp(event: MouseEvent) { + if (!this.container || !this.dividerStartVec) { + return; + } + + const distance = event.clientX - this.dividerStartVec[0]; + const distancePercentage = distance / this.width; + + this.columns.list.width = this.columns.list.width + distancePercentage; + this.columns.span_list.width = this.columns.span_list.width - distancePercentage; + + this.container.style.userSelect = 'auto'; + this.dividerStartVec = null; + + this.container.removeEventListener('mouseup', this.onDividerMouseUp); + this.container.removeEventListener('mousemove', this.onDividerMouseMove); + } + + onDividerMouseMove(event: MouseEvent) { + if (!this.dividerStartVec || !this.dividerRef) { + return; + } + + const distance = event.clientX - this.dividerStartVec[0]; + const distancePercentage = distance / this.width; + + this.computeSpanDrawMatrix( + this.width, + this.columns.span_list.width - distancePercentage + ); + + this.dividerRef.style.transform = `translateX(${ + this.width * (this.columns.list.width + distancePercentage) - DIVIDER_WIDTH / 2 + }px)`; + + const listWidth = this.columns.list.width * 100 + distancePercentage * 100 + '%'; + const spanWidth = this.columns.span_list.width * 100 - distancePercentage * 100 + '%'; + + for (let i = 0; i < this.columns.list.column_refs.length; i++) { + const list = this.columns.list.column_refs[i]; + if (list) { + list.style.width = listWidth; + } + const span = this.columns.span_list.column_refs[i]; + if (span) { + 
span.style.width = spanWidth; + } + const span_bar = this.span_bars[i]; + if (span_bar) { + span_bar.ref.style.transform = this.computeSpanMatrixTransform(span_bar.space); + } + } + } + + registerSpanBarRef(ref: HTMLElement | null, space: [number, number], index: number) { + this.span_bars[index] = ref ? {ref, space} : undefined; + } + + registerColumnRef(column: string, ref: HTMLElement | null, index: number) { + if (!this.columns[column]) { + throw new TypeError('Invalid column'); + } + + this.columns[column].column_refs[index] = ref ?? undefined; + } + + initialize(container: HTMLElement) { + this.teardown(); + + this.container = container; + this.resizeObserver = new ResizeObserver(entries => { + const entry = entries[0]; + if (!entry) { + throw new Error('ResizeObserver entry is undefined'); + } + + this.width = entry.contentRect.width; + this.computeSpanDrawMatrix(this.width, this.columns.span_list.width); + + if (this.dividerRef) { + this.dividerRef.style.transform = `translateX(${ + this.width * this.columns.list.width - DIVIDER_WIDTH / 2 + }px)`; + } + }); + + this.resizeObserver.observe(container); + } + + initializeSpanSpace(spanSpace: [number, number], spanView?: [number, number]) { + this.spanSpace = [...spanSpace]; + this.spanView = spanView ?? [...spanSpace]; + + this.computeSpanDrawMatrix(this.width, this.columns.span_list.width); + } + + computeSpanDrawMatrix(width: number, span_column_width: number): Matrix2D { + // https://developer.mozilla.org/en-US/docs/Web/CSS/transform-function/matrix + // biome-ignore format: off + const mat3: Matrix2D = [ + 1, 0, 0, + 1, 0, 0, + ]; + + if (this.spanSpace[1] === 0 || this.spanView[1] === 0) { + return mat3; + } + + const spanColumnWidth = width * span_column_width; + const viewToSpace = this.spanSpace[1] / this.spanView[1]; + const physicalToView = spanColumnWidth / this.spanView[1]; + + mat3[0] = viewToSpace * physicalToView; + + this.spanScalingFactor = viewToSpace; + this.minSpanScalingFactor = window.devicePixelRatio / this.width; + this.spanDrawMatrix = mat3; + return mat3; + } + + computeSpanMatrixTransform(span_space: [number, number]): string { + const scale = Math.max( + this.minSpanScalingFactor, + (span_space[1] / this.spanView[1]) * this.spanScalingFactor + ); + + const x = span_space[0] - this.spanView[0]; + const translateInPixels = x * this.spanDrawMatrix[0]; + + return `matrix(${scale},0,0,1,${translateInPixels},0)`; + } + + draw() {} + + teardown() { + if (this.resizeObserver) { + this.resizeObserver.disconnect(); + } + } +} From 1ce7041288de272e18c9f2444655d8444ce811e4 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 13 Feb 2024 12:51:01 -0800 Subject: [PATCH 338/357] chore(roles): remove effectiveOrgRole (#64735) --- static/app/components/teamRoleSelect.tsx | 20 +-- static/app/utils/orgRole.tsx | 16 --- .../teamSelect/teamSelectForMember.tsx | 28 +---- .../organizationMemberDetail.spec.tsx | 115 ------------------ .../organizationTeams/teamMembers.spec.tsx | 69 +---------- 5 files changed, 10 insertions(+), 238 deletions(-) delete mode 100644 static/app/utils/orgRole.tsx diff --git a/static/app/components/teamRoleSelect.tsx b/static/app/components/teamRoleSelect.tsx index c3329690a4b30d..2ef317fe9dc841 100644 --- a/static/app/components/teamRoleSelect.tsx +++ b/static/app/components/teamRoleSelect.tsx @@ -4,7 +4,6 @@ import type {ControlProps} from 'sentry/components/forms/controls/selectControl' import RoleSelectControl from 'sentry/components/roleSelectControl'; 
import {space} from 'sentry/styles/space'; import type {Organization, Team, TeamMember, TeamRole} from 'sentry/types'; -import {getEffectiveOrgRole} from 'sentry/utils/orgRole'; import { hasOrgRoleOverwrite, RoleOverwriteIcon, @@ -30,30 +29,21 @@ function TeamRoleSelect({ const {orgRoleList, teamRoleList, features} = organization; const hasTeamRoles = features.includes('team-roles'); - // Determine the org-role, including if the current team has an org role - // and adding the user to the current team changes their minimum team-role - const possibleOrgRoles = [member.orgRole]; - if (member.groupOrgRoles && member.groupOrgRoles.length > 0) { - possibleOrgRoles.push(member.groupOrgRoles[0].role.id); - } - if (team.orgRole) { - possibleOrgRoles.push(team.orgRole); - } - const effectiveOrgRole = getEffectiveOrgRole(possibleOrgRoles, orgRoleList); + const memberOrgRole = orgRoleList.find(r => r.id === member.orgRole); // If the member's org-role has elevated permission, their team-role will // inherit scopes from it - if (hasOrgRoleOverwrite({orgRole: effectiveOrgRole?.id, orgRoleList, teamRoleList})) { + if (hasOrgRoleOverwrite({orgRole: memberOrgRole?.id, orgRoleList, teamRoleList})) { const effectiveTeamRole = teamRoleList.find( - r => r.id === effectiveOrgRole?.minimumTeamRole + r => r.id === memberOrgRole?.minimumTeamRole ); return ( - {effectiveTeamRole?.name || effectiveOrgRole?.minimumTeamRole} + {effectiveTeamRole?.name || memberOrgRole?.minimumTeamRole} diff --git a/static/app/utils/orgRole.tsx b/static/app/utils/orgRole.tsx deleted file mode 100644 index f255d7da713ac8..00000000000000 --- a/static/app/utils/orgRole.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import type {OrgRole} from 'sentry/types'; - -export function getEffectiveOrgRole( - memberOrgRoles: string[], - orgRoleList: OrgRole[] -): OrgRole { - const orgRoleMap = orgRoleList.reduce((acc, role, index) => { - acc[role.id] = {index, role}; - return acc; - }, {}); - - // sort by ascending index (high to low priority) - memberOrgRoles.sort((a, b) => orgRoleMap[b].index - orgRoleMap[a].index); - - return orgRoleMap[memberOrgRoles[0]]?.role; -} diff --git a/static/app/views/settings/components/teamSelect/teamSelectForMember.tsx b/static/app/views/settings/components/teamSelect/teamSelectForMember.tsx index 88874ed47b2d09..176167e7fd1b53 100644 --- a/static/app/views/settings/components/teamSelect/teamSelectForMember.tsx +++ b/static/app/views/settings/components/teamSelect/teamSelectForMember.tsx @@ -1,6 +1,5 @@ import {Fragment} from 'react'; import styled from '@emotion/styled'; -import startCase from 'lodash/startCase'; import {Button} from 'sentry/components/button'; import EmptyMessage from 'sentry/components/emptyMessage'; @@ -17,7 +16,6 @@ import {IconSubtract} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Member, Organization, Team} from 'sentry/types'; -import {getEffectiveOrgRole} from 'sentry/utils/orgRole'; import {useTeams} from 'sentry/utils/useTeams'; import {RoleOverwritePanelAlert} from 'sentry/views/settings/organizationTeams/roleOverwriteWarning'; import {getButtonHelpText} from 'sentry/views/settings/organizationTeams/utils'; @@ -62,19 +60,6 @@ function TeamSelect({ const selectedTeamSlugs = new Set(selectedTeamRoles.map(tm => tm.teamSlug)); const selectedTeams = teams.filter(tm => selectedTeamSlugs.has(tm.slug)); - // Determine if adding a team changes the minimum team-role - // Get org-roles from team membership, if any - const groupOrgRoles = 
selectedTeams - .filter(team => team.orgRole) - .map(team => team.orgRole as string); - if (selectedOrgRole) { - groupOrgRoles.push(selectedOrgRole); - } - - // Sort them and to get the highest priority role - // Highest priority role may change minimum team role - const effectiveOrgRole = getEffectiveOrgRole(groupOrgRoles, orgRoleList); - const renderBody = () => { if (selectedTeams.length === 0) { return {t('No Teams assigned')}; @@ -82,9 +67,9 @@ function TeamSelect({ return ( - {effectiveOrgRole && ( + {selectedOrgRole && ( @@ -98,7 +83,6 @@ function TeamSelect({ team={team} member={{ ...member, - groupOrgRoles: [{role: effectiveOrgRole, teamSlug: ''}], orgRole: selectedOrgRole, teamRoles: selectedTeamRoles, }} @@ -156,11 +140,9 @@ function TeamRow({ }) { const hasOrgAdmin = organization.access.includes('org:admin'); const isIdpProvisioned = team.flags['idp:provisioned']; - const isPermissionGroup = !!team.orgRole && !hasOrgAdmin; - const isRemoveDisabled = disabled || isIdpProvisioned || isPermissionGroup; + const isRemoveDisabled = disabled || isIdpProvisioned || !hasOrgAdmin; - const buttonHelpText = getButtonHelpText(isIdpProvisioned, isPermissionGroup); - const orgRoleFromTeam = team.orgRole ? `${startCase(team.orgRole)} Team` : null; + const buttonHelpText = getButtonHelpText(isIdpProvisioned, !hasOrgAdmin); return ( @@ -170,8 +152,6 @@ function TeamRow({
    -
    {orgRoleFromTeam}
    -
    , - { - context: routerContext, - organization: regularOrg, - } - ); - expect(screen.getByText('Manager Team')).toBeInTheDocument(); - expect(screen.getByRole('button', {name: 'Remove'})).toBeDisabled(); - }); - - it('cannot join org role team if missing org:admin', async function () { - const regularOrg = OrganizationFixture({ - teams, - features: ['team-roles'], - access: ['org:write'], - }); - - const {routerContext, routerProps} = initializeOrg({organization: regularOrg}); - render( - , - { - context: routerContext, - organization: regularOrg, - } - ); - - await userEvent.click(screen.getByText('Add Team')); - await userEvent.hover(screen.getByText('#org-role-team')); - expect( - await screen.findByText( - 'Membership to a team with an organization role is managed by org owners.' - ) - ).toBeInTheDocument(); - }); - it('joins a team and assign a team-role', async function () { const {routerContext, routerProps} = initializeOrg({organization}); @@ -875,43 +798,5 @@ describe('OrganizationMemberDetail', function () { selectEvent.openMenu(teamRoleSelect); expect(screen.queryAllByText('...').length).toBe(0); }); - - it('overwrites when member joins a manager team', async () => { - const {routerContext, routerProps} = initializeOrg({}); - render( - , - { - context: routerContext, - organization, - } - ); - - // Role info box is hidden - expect(screen.queryByTestId('alert-role-overwrite')).not.toBeInTheDocument(); - - // Dropdown has correct value set - const teamRow = screen.getByTestId('team-row-for-member'); - const teamRoleSelect = within(teamRow).getByText('Contributor'); - - // Join manager team - await userEvent.click(screen.getByText('Add Team')); - // Click the first item - await userEvent.click(screen.getByText('#manager-team')); - - // Role info box is shown - expect(screen.queryByTestId('alert-role-overwrite')).toBeInTheDocument(); - - // Dropdowns have correct value set - const teamRows = screen.getAllByTestId('team-row-for-member'); - within(teamRows[0]).getByText('Team Admin'); - within(teamRows[1]).getByText('Team Admin'); - - // Dropdown options are not visible - expect(screen.queryAllByText('...').length).toBe(0); - - // Dropdown cannot be opened - selectEvent.openMenu(teamRoleSelect); - expect(screen.queryAllByText('...').length).toBe(0); - }); }); }); diff --git a/static/app/views/settings/organizationTeams/teamMembers.spec.tsx b/static/app/views/settings/organizationTeams/teamMembers.spec.tsx index d3565efa973bf6..f936fb99136c30 100644 --- a/static/app/views/settings/organizationTeams/teamMembers.spec.tsx +++ b/static/app/views/settings/organizationTeams/teamMembers.spec.tsx @@ -23,7 +23,7 @@ describe('TeamMembers', function () { const organization = OrganizationFixture(); const team = TeamFixture(); - const managerTeam = TeamFixture({orgRole: 'manager'}); + const managerTeam = TeamFixture(); const members = MembersFixture(); const member = MemberFixture({ id: '9', @@ -381,31 +381,6 @@ describe('TeamMembers', function () { expect(contributors).toHaveLength(2); }); - it('adding member to manager team makes them team admin', async function () { - MockApiClient.addMockResponse({ - url: `/teams/${organization.slug}/${managerTeam.slug}/members/`, - method: 'GET', - body: [], - }); - const orgWithTeamRoles = OrganizationFixture({features: ['team-roles']}); - render( - - ); - - await userEvent.click( - (await screen.findAllByRole('button', {name: 'Add Member'}))[0] - ); - await userEvent.click(screen.getAllByTestId('letter_avatar-avatar')[0]); - - const admin = 
screen.queryByText('Team Admin');
-    expect(admin).toBeInTheDocument();
-  });
-
   it('cannot add or remove members if team is idp:provisioned', function () {
     const team2 = TeamFixture({
       flags: {
@@ -458,46 +433,4 @@
       expect(screen.findByRole('button', {name: 'Remove'})).toBeDisabled();
     });
   });
-
-  it('cannot add or remove members or leave if team has org role and no access', function () {
-    const team2 = TeamFixture({orgRole: 'manager'});
-
-    const me = MemberFixture({
-      id: '123',
-      email: 'foo@example.com',
-      role: 'member',
-    });
-
-    MockApiClient.clearMockResponses();
-    MockApiClient.addMockResponse({
-      url: `/organizations/${organization.slug}/members/`,
-      method: 'GET',
-      body: [...members, me],
-    });
-    MockApiClient.addMockResponse({
-      url: `/teams/${organization.slug}/${team2.slug}/members/`,
-      method: 'GET',
-      body: members,
-    });
-    MockApiClient.addMockResponse({
-      url: `/teams/${organization.slug}/${team2.slug}/`,
-      method: 'GET',
-      body: team2,
-    });
-
-    render(
-      <TeamMembers
-        {...routerProps}
-        params={{teamId: team2.slug}}
-        organization={organization}
-        team={team2}
-      />
-    );
-
-    waitFor(() => {
-      expect(screen.findByRole('button', {name: 'Add Member'})).toBeDisabled();
-      expect(screen.findByRole('button', {name: 'Remove'})).toBeDisabled();
-      expect(screen.findByRole('button', {name: 'Leave'})).toBeDisabled();
-    });
-  });
 });
From a88c061b1e91c03564a509c39dc08063fd8aa032 Mon Sep 17 00:00:00 2001
From: Josh Ferge
Date: Tue, 13 Feb 2024 12:55:20 -0800
Subject: [PATCH 339/357] ref(typing): type escalating_group_forecast (#65055)

- type the file and remove from exclusion list

---
 pyproject.toml                                 | 1 -
 src/sentry/issues/escalating_group_forecast.py | 6 +++---
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 502d5afde0de95..de343aab114291 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -371,7 +371,6 @@ module = [
     "sentry.integrations.vsts.issues",
     "sentry.integrations.vsts.repository",
     "sentry.integrations.vsts_extension.integration",
-    "sentry.issues.escalating_group_forecast",
     "sentry.issues.occurrence_consumer",
     "sentry.issues.search",
     "sentry.issues.status_change",
diff --git a/src/sentry/issues/escalating_group_forecast.py b/src/sentry/issues/escalating_group_forecast.py
index 3de99a996f3339..993925bac4ebb6 100644
--- a/src/sentry/issues/escalating_group_forecast.py
+++ b/src/sentry/issues/escalating_group_forecast.py
@@ -43,7 +43,7 @@ class EscalatingGroupForecast:
     date_added: datetime
 
     def save(self) -> None:
-        nodestore.set(
+        nodestore.backend.set(
             self.build_storage_identifier(self.project_id, self.group_id),
             self.to_dict(),
             ttl=timedelta(GROUP_FORECAST_TTL),
@@ -69,9 +69,9 @@ def fetch(cls, project_id: int, group_id: int) -> EscalatingGroupForecast | None
         from sentry.issues.forecasts import generate_and_save_missing_forecasts
 
         if not cls._should_fetch_escalating(group_id=group_id):
-            return
+            return None
 
-        results = nodestore.get(cls.build_storage_identifier(project_id, group_id))
+        results = nodestore.backend.get(cls.build_storage_identifier(project_id, group_id))
         if results:
             return EscalatingGroupForecast.from_dict(results)
         generate_and_save_missing_forecasts.delay(group_id=group_id)
From dbfcca9a267aadd855120eafdd7bdce40c55e9ec Mon Sep 17 00:00:00 2001
From: Leander Rodrigues
Date: Tue, 13 Feb 2024 13:11:59 -0800
Subject: [PATCH 340/357] Revert "chore(issues): #64876" (#65109)

This reverts commit ee681aec5d50da852c1995571718191a0cd03fdb.
--- .../api/endpoints/codeowners/__init__.py | 18 +- src/sentry/api/endpoints/codeowners/index.py | 66 ++++--- src/sentry/api/endpoints/project_ownership.py | 52 +++-- .../serializers/models/projectownership.py | 16 +- .../apidocs/examples/project_examples.py | 1 + src/sentry/conf/server.py | 3 + src/sentry/features/__init__.py | 1 + .../slack/message_builder/issues.py | 25 +-- .../notifications/utils/participants.py | 28 ++- .../api/endpoints/test_project_codeowners.py | 185 ++++++++++-------- .../api/endpoints/test_project_ownership.py | 128 ++++++------ .../slack/test_message_builder.py | 3 +- .../notifications/utils/test_participants.py | 6 + 13 files changed, 308 insertions(+), 224 deletions(-) diff --git a/src/sentry/api/endpoints/codeowners/__init__.py b/src/sentry/api/endpoints/codeowners/__init__.py index adbd5ffef1f52d..fa7dac3df8ad7e 100644 --- a/src/sentry/api/endpoints/codeowners/__init__.py +++ b/src/sentry/api/endpoints/codeowners/__init__.py @@ -69,12 +69,20 @@ def validate(self, attrs: Mapping[str, Any]) -> Mapping[str, Any]: ) # Convert IssueOwner syntax into schema syntax + has_targeting_context = features.has( + "organizations:streamline-targeting-context", self.context["project"].organization + ) try: - validated_data = create_schema_from_issue_owners( - issue_owners=issue_owner_rules, - project_id=self.context["project"].id, - add_owner_ids=True, - ) + if has_targeting_context: + validated_data = create_schema_from_issue_owners( + issue_owners=issue_owner_rules, + project_id=self.context["project"].id, + add_owner_ids=True, + ) + else: + validated_data = create_schema_from_issue_owners( + issue_owners=issue_owner_rules, project_id=self.context["project"].id + ) return { **attrs, "schema": validated_data, diff --git a/src/sentry/api/endpoints/codeowners/index.py b/src/sentry/api/endpoints/codeowners/index.py index a280d5e332f765..73960c7ef3bdf5 100644 --- a/src/sentry/api/endpoints/codeowners/index.py +++ b/src/sentry/api/endpoints/codeowners/index.py @@ -3,7 +3,7 @@ from rest_framework.request import Request from rest_framework.response import Response -from sentry import analytics +from sentry import analytics, features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint @@ -26,33 +26,28 @@ class ProjectCodeOwnersEndpoint(ProjectEndpoint, ProjectCodeOwnersMixin): "POST": ApiPublishStatus.PRIVATE, } - def refresh_codeowners_schema(self, codeowner: ProjectCodeOwners, project: Project) -> None: - if ( - not hasattr(codeowner, "schema") - or codeowner.schema is None - or codeowner.schema.get("rules") is None + def add_owner_id_to_schema(self, codeowner: ProjectCodeOwners, project: Project) -> None: + if not hasattr(codeowner, "schema") or ( + codeowner.schema + and codeowner.schema.get("rules") + and "id" not in codeowner.schema["rules"][0]["owners"][0].keys() ): - return - - # Convert raw to issue owners syntax so that the schema can be created - raw = codeowner.raw - associations, _ = validate_codeowners_associations(codeowner.raw, project) - codeowner.raw = convert_codeowners_syntax( - codeowner.raw, - associations, - codeowner.repository_project_path_config, - ) - codeowner.schema = create_schema_from_issue_owners( - codeowner.raw, - project.id, - add_owner_ids=True, - remove_deleted_owners=True, - ) + # Convert raw to issue owners syntax so that the schema can be created + raw = codeowner.raw + associations, _ = validate_codeowners_associations(codeowner.raw, project) 
+ codeowner.raw = convert_codeowners_syntax( + codeowner.raw, + associations, + codeowner.repository_project_path_config, + ) + codeowner.schema = create_schema_from_issue_owners( + codeowner.raw, project.id, add_owner_ids=True, remove_deleted_owners=True + ) - # Convert raw back to codeowner type to be saved - codeowner.raw = raw + # Convert raw back to codeowner type to be saved + codeowner.raw = raw - codeowner.save() + codeowner.save() def get(self, request: Request, project: Project) -> Response: """ @@ -70,11 +65,17 @@ def get(self, request: Request, project: Project) -> Response: expand = request.GET.getlist("expand", []) expand.append("errors") + has_targeting_context = features.has( + "organizations:streamline-targeting-context", project.organization + ) + codeowners = list(ProjectCodeOwners.objects.filter(project=project).order_by("-date_added")) - for codeowner in codeowners: - self.refresh_codeowners_schema(codeowner, project) - expand.append("renameIdentifier") - expand.append("hasTargetingContext") + + if has_targeting_context and codeowners: + for codeowner in codeowners: + self.add_owner_id_to_schema(codeowner, project) + expand.append("renameIdentifier") + expand.append("hasTargetingContext") return Response( serialize( @@ -114,7 +115,12 @@ def post(self, request: Request, project: Project) -> Response: codeowners_id=project_codeowners.id, ) - expand = ["ownershipSyntax", "errors", "hasTargetingContext"] + expand = ["ownershipSyntax", "errors"] + has_targeting_context = features.has( + "organizations:streamline-targeting-context", project.organization + ) + if has_targeting_context: + expand.append("hasTargetingContext") return Response( serialize( diff --git a/src/sentry/api/endpoints/project_ownership.py b/src/sentry/api/endpoints/project_ownership.py index 08f62a677ba160..71affd9fbb0d6b 100644 --- a/src/sentry/api/endpoints/project_ownership.py +++ b/src/sentry/api/endpoints/project_ownership.py @@ -92,9 +92,17 @@ def validate(self, attrs): {"raw": f"Raw needs to be <= {max_length} characters in length"} ) - schema = create_schema_from_issue_owners( - attrs["raw"], self.context["ownership"].project_id, add_owner_ids=True - ) + if features.has( + "organizations:streamline-targeting-context", + self.context["ownership"].project.organization, + ): + schema = create_schema_from_issue_owners( + attrs["raw"], self.context["ownership"].project_id, add_owner_ids=True + ) + else: + schema = create_schema_from_issue_owners( + attrs["raw"], self.context["ownership"].project_id + ) self._validate_no_codeowners(schema["rules"]) @@ -194,18 +202,16 @@ def get_ownership(self, project): last_updated=None, ) - def refresh_ownership_schema(self, ownership: ProjectOwnership, project: Project) -> None: - if ( - not hasattr(ownership, "schema") - or ownership.schema is None - or ownership.schema.get("rules") is None + def add_owner_id_to_schema(self, ownership: ProjectOwnership, project: Project) -> None: + if not hasattr(ownership, "schema") or ( + ownership.schema + and ownership.schema.get("rules") + and "id" not in ownership.schema["rules"][0]["owners"][0].keys() ): - return - - ownership.schema = create_schema_from_issue_owners( - ownership.raw, project.id, add_owner_ids=True, remove_deleted_owners=True - ) - ownership.save() + ownership.schema = create_schema_from_issue_owners( + ownership.raw, project.id, add_owner_ids=True, remove_deleted_owners=True + ) + ownership.save() def rename_schema_identifier_for_parsing(self, ownership: ProjectOwnership) -> None: """ @@ -234,12 +240,17 @@ 
def get(self, request: Request, project) -> Response: Returns details on a project's ownership configuration. """ ownership = self.get_ownership(project) + should_return_schema = features.has( + "organizations:streamline-targeting-context", project.organization + ) - if ownership: - self.refresh_ownership_schema(ownership, project) + if should_return_schema and ownership: + self.add_owner_id_to_schema(ownership, project) self.rename_schema_identifier_for_parsing(ownership) - return Response(serialize(ownership, request.user)) + return Response( + serialize(ownership, request.user, should_return_schema=should_return_schema) + ) @extend_schema( operation_id="Update Ownership Configuration for a Project", @@ -269,6 +280,9 @@ def put(self, request: Request, project) -> Response: if list(request.data) != ["raw"] and not has_project_write: raise PermissionDenied + should_return_schema = features.has( + "organizations:streamline-targeting-context", project.organization + ) serializer = ProjectOwnershipRequestSerializer( data=request.data, partial=True, context={"ownership": self.get_ownership(project)} ) @@ -291,5 +305,7 @@ def put(self, request: Request, project) -> Response: data={**change_data, **project.get_audit_log_data()}, ) ownership_rule_created.send_robust(project=project, sender=self.__class__) - return Response(serialize(ownership, request.user)) + return Response( + serialize(ownership, request.user, should_return_schema=should_return_schema) + ) return Response(serializer.errors, status=400) diff --git a/src/sentry/api/serializers/models/projectownership.py b/src/sentry/api/serializers/models/projectownership.py index ea993e006124d0..78abdcb6add764 100644 --- a/src/sentry/api/serializers/models/projectownership.py +++ b/src/sentry/api/serializers/models/projectownership.py @@ -23,15 +23,15 @@ class ProjectOwnershipResponse(ProjectOwnershipResponseOptional): @register(ProjectOwnership) class ProjectOwnershipSerializer(Serializer): - def serialize(self, obj, attrs, user, **kwargs) -> ProjectOwnershipResponse: + def serialize( + self, obj, attrs, user, should_return_schema=False, **kwargs + ) -> ProjectOwnershipResponse: assignment = ( "Auto Assign to Suspect Commits" if obj.auto_assignment and obj.suspect_committer_auto_assignment - else ( - "Auto Assign to Issue Owner" - if obj.auto_assignment and not obj.suspect_committer_auto_assignment - else "Turn off Auto-Assignment" - ) + else "Auto Assign to Issue Owner" + if obj.auto_assignment and not obj.suspect_committer_auto_assignment + else "Turn off Auto-Assignment" ) project_ownership_data: ProjectOwnershipResponse = { @@ -43,6 +43,8 @@ def serialize(self, obj, attrs, user, **kwargs) -> ProjectOwnershipResponse: "autoAssignment": assignment, "codeownersAutoSync": obj.codeowners_auto_sync, } - project_ownership_data["schema"] = obj.schema + + if should_return_schema: + project_ownership_data["schema"] = obj.schema return project_ownership_data diff --git a/src/sentry/apidocs/examples/project_examples.py b/src/sentry/apidocs/examples/project_examples.py index 749c5b3c026d21..a540a0a7a0b3b7 100644 --- a/src/sentry/apidocs/examples/project_examples.py +++ b/src/sentry/apidocs/examples/project_examples.py @@ -213,6 +213,7 @@ "performance-metrics-backed-transaction-summary", "performance-db-main-thread-detector", "issue-platform", + "streamline-targeting-context", "performance-consecutive-db-issue", "performance-consecutive-http-post-process-group", "performance-n-plus-one-api-calls-detector", diff --git a/src/sentry/conf/server.py 
b/src/sentry/conf/server.py index f0e2321e1bd590..dc83dc62066e53 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1436,6 +1436,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:source-maps-debugger-blue-thunder-edition": "Enable source maps debugger", "organizations:sourcemaps-bundle-flat-file-indexing": "Enable the new flat file indexing system for sourcemaps.", "organizations:sourcemaps-upload-release-as-artifact-bundle": "Upload release bundles as artifact bundles", + "organizations:streamline-targeting-context": "Enable the new suggested assignees feature", "organizations:user-feedback-ui": "Enable User Feedback v2 UI", } @@ -1908,6 +1909,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:starfish-view": False, # Enable starfish dropdown on the webservice view for switching chart visualization "organizations:starfish-wsv-chart-dropdown": False, + # Enable the new suggested assignees feature + "organizations:streamline-targeting-context": False, # Allow organizations to configure all symbol sources. "organizations:symbol-sources": True, # Enable team insights page diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index a3d561cff3b891..4d02301aaaac93 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -271,6 +271,7 @@ default_manager.add("organizations:starfish-test-endpoint", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:starfish-view", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:starfish-wsv-chart-dropdown", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:streamline-targeting-context", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:symbol-sources", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:team-workflow-notifications", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:trace-view-load-more", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index 58f6b8076e1f8a..ba351081e23607 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -306,13 +306,16 @@ def get_suggested_assignees( ): # we don't want every user in the project to be a suggested assignee resolved_owners = ActorTuple.resolve_many(issue_owners) suggested_assignees = RpcActor.many_from_object(resolved_owners) - try: - suspect_commit_users = RpcActor.many_from_object(get_suspect_commit_users(project, event)) - suggested_assignees.extend(suspect_commit_users) - except (Release.DoesNotExist, Commit.DoesNotExist): - logger.info("Skipping suspect committers because release does not exist.") - except Exception: - logger.exception("Could not get suspect committers. Continuing execution.") + if features.has("organizations:streamline-targeting-context", project.organization): + try: + suspect_commit_users = RpcActor.many_from_object( + get_suspect_commit_users(project, event) + ) + suggested_assignees.extend(suspect_commit_users) + except (Release.DoesNotExist, Commit.DoesNotExist): + logger.info("Skipping suspect committers because release does not exist.") + except Exception: + logger.exception("Could not get suspect committers. 
Continuing execution.") if suggested_assignees: suggested_assignees = dedupe_suggested_assignees(suggested_assignees) assignee_texts = [] @@ -472,11 +475,9 @@ def _assign_button(use_block_kit) -> MessageAction: label="Select Assignee...", type="select", selected_options=format_actor_options([assignee]) if assignee else [], - option_groups=( - get_option_groups(group) - if not use_block_kit - else get_option_groups_block_kit(group) - ), + option_groups=get_option_groups(group) + if not use_block_kit + else get_option_groups_block_kit(group), ) return assign_button diff --git a/src/sentry/notifications/utils/participants.py b/src/sentry/notifications/utils/participants.py index 042fb36db96b4f..8e7de3f924482a 100644 --- a/src/sentry/notifications/utils/participants.py +++ b/src/sentry/notifications/utils/participants.py @@ -338,25 +338,23 @@ def determine_eligible_recipients( suggested_assignees.append(assignee_actor) suspect_commit_users = None - - try: - suspect_commit_users = RpcActor.many_from_object( - get_suspect_commit_users(project, event) - ) - suggested_assignees.extend(suspect_commit_users) - except (Release.DoesNotExist, Commit.DoesNotExist): - logger.info("Skipping suspect committers because release does not exist.") - except Exception: - logger.exception("Could not get suspect committers. Continuing execution.") + if features.has("organizations:streamline-targeting-context", project.organization): + try: + suspect_commit_users = RpcActor.many_from_object( + get_suspect_commit_users(project, event) + ) + suggested_assignees.extend(suspect_commit_users) + except (Release.DoesNotExist, Commit.DoesNotExist): + logger.info("Skipping suspect committers because release does not exist.") + except Exception: + logger.exception("Could not get suspect committers. 
Continuing execution.") metrics.incr( "features.owners.send_to", tags={ - "outcome": ( - outcome - if outcome == "match" or fallthrough_choice is None - else fallthrough_choice.value - ), + "outcome": outcome + if outcome == "match" or fallthrough_choice is None + else fallthrough_choice.value, "hasSuspectCommitters": str(bool(suspect_commit_users)), }, ) diff --git a/tests/sentry/api/endpoints/test_project_codeowners.py b/tests/sentry/api/endpoints/test_project_codeowners.py index 5e978d86540ec1..e60afd81be384f 100644 --- a/tests/sentry/api/endpoints/test_project_codeowners.py +++ b/tests/sentry/api/endpoints/test_project_codeowners.py @@ -257,8 +257,8 @@ def test_schema_is_correct(self, get_codeowner_mock_file): { "matcher": {"pattern": "docs/*", "type": "codeowners"}, "owners": [ - {"id": self.user.id, "identifier": self.user.email, "type": "user"}, - {"id": self.team.id, "identifier": self.team.slug, "type": "team"}, + {"identifier": self.user.email, "type": "user"}, + {"identifier": self.team.slug, "type": "team"}, ], } ], @@ -281,8 +281,8 @@ def test_schema_preserves_comments(self, get_codeowner_mock_file): { "matcher": {"pattern": "docs/*", "type": "codeowners"}, "owners": [ - {"id": self.user.id, "identifier": self.user.email, "type": "user"}, - {"id": self.team.id, "identifier": self.team.slug, "type": "team"}, + {"identifier": self.user.email, "type": "user"}, + {"identifier": self.team.slug, "type": "team"}, ], } ], @@ -305,8 +305,8 @@ def test_raw_email_correct_schema(self, get_codeowner_mock_file): { "matcher": {"pattern": "docs/*", "type": "codeowners"}, "owners": [ - {"id": self.user.id, "identifier": self.user.email, "type": "user"}, - {"id": self.team.id, "identifier": self.team.slug, "type": "team"}, + {"identifier": self.user.email, "type": "user"}, + {"identifier": self.team.slug, "type": "team"}, ], } ], @@ -381,9 +381,10 @@ def test_users_without_access(self, get_codeowner_mock_file): "sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_post_with_schema(self, get_codeowner_mock_file): + def test_post_with_streamline_targeting(self, get_codeowner_mock_file): with self.feature({"organizations:integrations-codeowners": True}): - response = self.client.post(self.url, self.data) + with self.feature({"organizations:streamline-targeting-context": True}): + response = self.client.post(self.url, self.data) assert response.status_code == 201 assert response.data["raw"] == "docs/* @NisanthanNanthakumar @getsentry/ecosystem" assert response.data["codeMappingId"] == str(self.code_mapping.id) @@ -405,48 +406,58 @@ def test_post_with_schema(self, get_codeowner_mock_file): return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) def test_get(self, get_codeowner_mock_file): - self.client.post(self.url, self.data) - response = self.client.get(self.url) - - response_data = response.data[0] - assert response.status_code == 200 - assert response_data["raw"] == "docs/* @NisanthanNanthakumar @getsentry/ecosystem" - assert response_data["codeMappingId"] == str(self.code_mapping.id) - assert response_data["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "codeowners", "pattern": "docs/*"}, - "owners": [ + # Test post + get without the streamline-targeting-context flag + with self.feature({"organizations:integrations-codeowners": True}): + self.client.post(self.url, self.data) + response_no_schema = self.client.get(self.url) + assert "schema" not in 
response_no_schema.data[0].keys() + assert "codeOwnersUrl" not in response_no_schema.data[0].keys() + + # Test get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + self.client.get(self.url) + response = self.client.get(self.url) + response_data = response.data[0] + assert response.status_code == 200 + assert ( + response_data["raw"] == "docs/* @NisanthanNanthakumar @getsentry/ecosystem" + ) + assert response_data["codeMappingId"] == str(self.code_mapping.id) + assert response_data["schema"] == { + "$version": 1, + "rules": [ { - "type": "user", - "id": self.user.id, - "name": "admin@sentry.io", - }, - {"type": "team", "id": self.team.id, "name": "tiger-team"}, + "matcher": {"type": "codeowners", "pattern": "docs/*"}, + "owners": [ + { + "type": "user", + "id": self.user.id, + "name": "admin@sentry.io", + }, + {"type": "team", "id": self.team.id, "name": "tiger-team"}, + ], + } ], } - ], - } - assert response_data["codeOwnersUrl"] == "https://github.com/test/CODEOWNERS" - - # Assert that "identifier" is not renamed to "name" in the backend - ownership = ProjectCodeOwners.objects.get(project=self.project) - assert ownership.schema["rules"] == [ - { - "matcher": {"type": "codeowners", "pattern": "docs/*"}, - "owners": [ - {"type": "user", "identifier": "admin@sentry.io", "id": self.user.id}, - {"type": "team", "identifier": "tiger-team", "id": self.team.id}, - ], - } - ] + assert response_data["codeOwnersUrl"] == "https://github.com/test/CODEOWNERS" + + # Assert that "identifier" is not renamed to "name" in the backend + ownership = ProjectCodeOwners.objects.get(project=self.project) + assert ownership.schema["rules"] == [ + { + "matcher": {"type": "codeowners", "pattern": "docs/*"}, + "owners": [ + {"type": "user", "identifier": "admin@sentry.io", "id": self.user.id}, + {"type": "team", "identifier": "tiger-team", "id": self.team.id}, + ], + } + ] @patch( "sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_get_rule_one_deleted_owner(self, get_codeowner_mock_file): + def test_get_rule_one_deleted_owner_with_streamline_targeting(self, get_codeowner_mock_file): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -459,25 +470,29 @@ def test_get_rule_one_deleted_owner(self, get_codeowner_mock_file): ) self.data["raw"] = "docs/* @delete @getsentry/ecosystem" + # Post without the streamline-targeting-context flag with self.feature({"organizations:integrations-codeowners": True}): self.client.post(self.url, self.data) - self.external_delete_user.delete() - response = self.client.get(self.url) - assert response.data[0]["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "codeowners", "pattern": "docs/*"}, - "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], - } - ], - } + + # Test get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + self.external_delete_user.delete() + response = self.client.get(self.url) + assert response.data[0]["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "codeowners", "pattern": "docs/*"}, + "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], + } + ], + } @patch( "sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", 
return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_get_no_rule_deleted_owner(self, get_codeowner_mock_file): + def test_get_no_rule_deleted_owner_with_streamline_targeting(self, get_codeowner_mock_file): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -490,17 +505,23 @@ def test_get_no_rule_deleted_owner(self, get_codeowner_mock_file): ) self.data["raw"] = "docs/* @delete" + # Post without the streamline-targeting-context flag with self.feature({"organizations:integrations-codeowners": True}): self.client.post(self.url, self.data) - self.external_delete_user.delete() - response = self.client.get(self.url) - assert response.data[0]["schema"] == {"$version": 1, "rules": []} + + # Test get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + self.external_delete_user.delete() + response = self.client.get(self.url) + assert response.data[0]["schema"] == {"$version": 1, "rules": []} @patch( "sentry.integrations.mixins.repositories.RepositoryMixin.get_codeowner_file", return_value={"html_url": "https://github.com/test/CODEOWNERS"}, ) - def test_get_multiple_rules_deleted_owners(self, get_codeowner_mock_file): + def test_get_multiple_rules_deleted_owners_with_streamline_targeting( + self, get_codeowner_mock_file + ): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -525,27 +546,31 @@ def test_get_multiple_rules_deleted_owners(self, get_codeowner_mock_file): "raw" ] = "docs/* @delete\n*.py @getsentry/ecosystem @delete\n*.css @delete2\n*.rb @NisanthanNanthakumar" + # Post without the streamline-targeting-context flag with self.feature({"organizations:integrations-codeowners": True}): self.client.post(self.url, self.data) - self.external_delete_user.delete() - self.external_delete_user2.delete() - response = self.client.get(self.url) - assert response.data[0]["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "codeowners", "pattern": "*.py"}, - "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], - }, - { - "matcher": {"type": "codeowners", "pattern": "*.rb"}, - "owners": [ - { - "type": "user", - "name": "admin@sentry.io", - "id": self.user.id, - } - ], - }, - ], - } + + # Test get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + self.external_delete_user.delete() + self.external_delete_user2.delete() + response = self.client.get(self.url) + assert response.data[0]["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "codeowners", "pattern": "*.py"}, + "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], + }, + { + "matcher": {"type": "codeowners", "pattern": "*.rb"}, + "owners": [ + { + "type": "user", + "name": "admin@sentry.io", + "id": self.user.id, + } + ], + }, + ], + } diff --git a/tests/sentry/api/endpoints/test_project_ownership.py b/tests/sentry/api/endpoints/test_project_ownership.py index 1c423afba61092..7e434d46e876ec 100644 --- a/tests/sentry/api/endpoints/test_project_ownership.py +++ b/tests/sentry/api/endpoints/test_project_ownership.py @@ -14,6 +14,7 @@ from sentry.silo import SiloMode from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.datetime import before_now, iso_format +from sentry.testutils.helpers.features import 
with_feature from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba @@ -79,7 +80,6 @@ def test_empty_state(self): "dateCreated": None, "lastUpdated": None, "codeownersAutoSync": True, - "schema": None, } def test_update(self): @@ -91,6 +91,7 @@ def test_update(self): assert resp.data["dateCreated"] is not None assert resp.data["lastUpdated"] is not None assert resp.data["codeownersAutoSync"] is True + assert "schema" not in resp.data.keys() resp = self.client.put(self.path, {"fallthrough": False}) assert resp.status_code == 200 @@ -186,7 +187,8 @@ def test_audit_log_ownership_change(self): assert len(auditlog) == 1 assert "modified" in auditlog[0].data["ownership_rules"] - def test_update_schema(self): + @with_feature("organizations:streamline-targeting-context") + def test_update_with_streamline_targeting(self): resp = self.client.put(self.path, {"raw": "*.js admin@localhost #tiger-team"}) assert resp.data["schema"] == { "$version": 1, @@ -202,35 +204,41 @@ def test_update_schema(self): } def test_get(self): + # Test put + get without the streamline-targeting-context flag self.client.put(self.path, {"raw": "*.js admin@localhost #tiger-team"}) - resp = self.client.get(self.path) - assert "schema" in resp.data.keys() - assert resp.data["schema"] == { - "$version": 1, - "rules": [ + resp_no_schema = self.client.get(self.path) + assert "schema" not in resp_no_schema.data.keys() + + # Test get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + resp = self.client.get(self.path) + assert resp.data["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "path", "pattern": "*.js"}, + "owners": [ + {"type": "user", "id": self.user.id, "name": "admin@localhost"}, + {"type": "team", "id": self.team.id, "name": "tiger-team"}, + ], + } + ], + } + + # Assert that "identifier" is not renamed to "name" in the backend + ownership = ProjectOwnership.objects.get(project=self.project) + assert ownership.schema["rules"] == [ { "matcher": {"type": "path", "pattern": "*.js"}, "owners": [ - {"type": "user", "id": self.user.id, "name": "admin@localhost"}, - {"type": "team", "id": self.team.id, "name": "tiger-team"}, + {"type": "user", "identifier": "admin@localhost", "id": self.user.id}, + {"type": "team", "identifier": "tiger-team", "id": self.team.id}, ], } - ], - } - - # Assert that "identifier" is not renamed to "name" in the backend - ownership = ProjectOwnership.objects.get(project=self.project) - assert ownership.schema["rules"] == [ - { - "matcher": {"type": "path", "pattern": "*.js"}, - "owners": [ - {"type": "user", "identifier": "admin@localhost", "id": self.user.id}, - {"type": "team", "identifier": "tiger-team", "id": self.team.id}, - ], - } - ] + ] - def test_get_empty_schema(self): + @with_feature("organizations:streamline-targeting-context") + def test_get_empty_with_streamline_targeting(self): resp = self.client.get(self.path) assert resp.status_code == 200 assert resp.data == { @@ -244,7 +252,7 @@ def test_get_empty_schema(self): "schema": None, } - def test_get_rule_deleted_owner(self): + def test_get_rule_deleted_owner_with_streamline_targeting(self): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -252,22 +260,26 @@ def test_get_rule_deleted_owner(self): role="member", teams=[self.team], ) + # 
Put without the streamline-targeting-context flag self.client.put(self.path, {"raw": "*.js member_delete@localhost #tiger-team"}) with assume_test_silo_mode(SiloMode.CONTROL): self.member_user_delete.delete() - resp = self.client.get(self.path) - assert resp.data["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"type": "path", "pattern": "*.js"}, - "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], - } - ], - } - def test_get_no_rule_deleted_owner(self): + # Get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + resp = self.client.get(self.path) + assert resp.data["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"type": "path", "pattern": "*.js"}, + "owners": [{"type": "team", "name": "tiger-team", "id": self.team.id}], + } + ], + } + + def test_get_no_rule_deleted_owner_with_streamline_targeting(self): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -275,15 +287,18 @@ def test_get_no_rule_deleted_owner(self): role="member", teams=[self.team], ) + # Put without the streamline-targeting-context flag self.client.put(self.path, {"raw": "*.js member_delete@localhost"}) with assume_test_silo_mode(SiloMode.CONTROL): self.member_user_delete.delete() - resp = self.client.get(self.path) - assert resp.data["schema"] == {"$version": 1, "rules": []} + # Get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + resp = self.client.get(self.path) + assert resp.data["schema"] == {"$version": 1, "rules": []} - def test_get_multiple_rules_deleted_owners(self): + def test_get_multiple_rules_deleted_owners_with_streamline_targeting(self): self.member_user_delete = self.create_user("member_delete@localhost", is_superuser=False) self.create_member( user=self.member_user_delete, @@ -298,6 +313,7 @@ def test_get_multiple_rules_deleted_owners(self): role="member", teams=[self.team], ) + # Put without the streamline-targeting-context flag self.client.put( self.path, { @@ -309,22 +325,24 @@ def test_get_multiple_rules_deleted_owners(self): self.member_user_delete.delete() self.member_user_delete2.delete() - resp = self.client.get(self.path) - assert resp.data["schema"] == { - "$version": 1, - "rules": [ - { - "matcher": {"pattern": "*.py", "type": "path"}, - "owners": [{"id": self.team.id, "name": "tiger-team", "type": "team"}], - }, - { - "matcher": {"pattern": "*.rb", "type": "path"}, - "owners": [ - {"id": self.member_user.id, "name": "member@localhost", "type": "user"} - ], - }, - ], - } + # Get after with the streamline-targeting-context flag + with self.feature({"organizations:streamline-targeting-context": True}): + resp = self.client.get(self.path) + assert resp.data["schema"] == { + "$version": 1, + "rules": [ + { + "matcher": {"pattern": "*.py", "type": "path"}, + "owners": [{"id": self.team.id, "name": "tiger-team", "type": "team"}], + }, + { + "matcher": {"pattern": "*.rb", "type": "path"}, + "owners": [ + {"id": self.member_user.id, "name": "member@localhost", "type": "user"} + ], + }, + ], + } def test_invalid_email(self): resp = self.client.put(self.path, {"raw": "*.js idont@exist.com #tiger-team"}) diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index c596dd8081849d..e42d4419247144 100644 --- 
a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -605,7 +605,6 @@ def test_issue_alert_with_suspect_commits(self, mock_external_url): group=group, event=event, suspect_commit_text=suspect_commit_text, - suggested_assignees=commit_author.email, ) @patch( @@ -677,10 +676,10 @@ def test_issue_alert_with_suspect_commits_unknown_provider(self, mock_external_u group=group, event=event, suspect_commit_text=suspect_commit_text, - suggested_assignees=commit_author.email, ) @with_feature("organizations:slack-block-kit") + @with_feature("organizations:streamline-targeting-context") def test_issue_alert_with_suggested_assignees(self): self.project.flags.has_releases = True self.project.save(update_fields=["flags"]) diff --git a/tests/sentry/notifications/utils/test_participants.py b/tests/sentry/notifications/utils/test_participants.py index ca22217529f48c..180138b88e4f96 100644 --- a/tests/sentry/notifications/utils/test_participants.py +++ b/tests/sentry/notifications/utils/test_participants.py @@ -483,6 +483,7 @@ def test_send_to_current_assignee_and_owners(self): slack=[self.user.id, self.user2.id, member.id], ) + @with_feature("organizations:streamline-targeting-context") def test_send_to_suspect_committers(self): """ Test suspect committer is added as suggested assignee, where "organizations:commit-context" @@ -523,6 +524,7 @@ def test_send_to_suspect_committers(self): slack=[self.user_suspect_committer.id, self.user.id], ) + @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_with_commit_context_feature_flag(self): """ @@ -551,6 +553,7 @@ def test_send_to_suspect_committers_with_commit_context_feature_flag(self): slack=[self.user_suspect_committer.id, self.user.id], ) + @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_no_owners_with_commit_context_feature_flag(self): """ @@ -605,6 +608,7 @@ def test_send_to_suspect_committers_no_owners_with_commit_context_feature_flag(s slack=[self.user_suspect_committer.id], ) + @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_dupe_with_commit_context_feature_flag(self): """ @@ -631,6 +635,7 @@ def test_send_to_suspect_committers_dupe_with_commit_context_feature_flag(self): self.get_send_to_owners(event), email=[self.user.id], slack=[self.user.id] ) + @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_exception_with_commit_context_feature_flag(self): """ @@ -657,6 +662,7 @@ def test_send_to_suspect_committers_exception_with_commit_context_feature_flag(s self.get_send_to_owners(event), email=[self.user.id], slack=[self.user.id] ) + @with_feature("organizations:streamline-targeting-context") @with_feature("organizations:commit-context") def test_send_to_suspect_committers_not_project_member_commit_context_feature_flag(self): """ From c22635d3e0c6e0d322d86dc9df94f3c35c5cfb69 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 13 Feb 2024 13:21:19 -0800 Subject: [PATCH 341/357] ref(superuser): only allow superuser write to add/delete internal app tokens (#64690) --- src/sentry/api/bases/sentryapps.py | 2 +- .../test_sentry_internal_app_token_details.py | 115 +++++++++--------- 
.../test_sentry_internal_app_tokens.py | 115 +++++++++++------- 3 files changed, 134 insertions(+), 98 deletions(-) diff --git a/src/sentry/api/bases/sentryapps.py b/src/sentry/api/bases/sentryapps.py index 86714ea68d994d..04ba65c939a6ab 100644 --- a/src/sentry/api/bases/sentryapps.py +++ b/src/sentry/api/bases/sentryapps.py @@ -429,7 +429,7 @@ def has_object_permission(self, request: Request, view, sentry_app): ) self.determine_access(request, owner_app) - if is_active_superuser(request): + if superuser_has_permission(request): return True return ensure_scoped_permission(request, self.scope_map.get(request.method)) diff --git a/tests/sentry/api/endpoints/test_sentry_internal_app_token_details.py b/tests/sentry/api/endpoints/test_sentry_internal_app_token_details.py index da1f83f408a12f..85a0266ec40213 100644 --- a/tests/sentry/api/endpoints/test_sentry_internal_app_token_details.py +++ b/tests/sentry/api/endpoints/test_sentry_internal_app_token_details.py @@ -1,12 +1,17 @@ -from django.urls import reverse +from django.test import override_settings +from rest_framework import status from sentry.models.apitoken import ApiToken from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import control_silo_test @control_silo_test class SentryInternalAppTokenCreationTest(APITestCase): + endpoint = "sentry-api-0-sentry-internal-app-token-details" + method = "delete" + def setUp(self): self.user = self.create_user(email="boop@example.com") self.org = self.create_organization(owner=self.user, name="My Org") @@ -18,41 +23,36 @@ def setUp(self): self.api_token = ApiToken.objects.get(application=self.internal_sentry_app.application) - self.url = reverse( - "sentry-api-0-sentry-internal-app-token-details", - args=[self.internal_sentry_app.slug, self.api_token.id], - ) + self.superuser = self.create_user(is_superuser=True) def test_delete_token(self): self.login_as(user=self.user) - response = self.client.delete(self.url, format="json") - assert response.status_code == 204 + self.get_success_response( + self.internal_sentry_app.slug, + self.api_token.id, + status_code=status.HTTP_204_NO_CONTENT, + ) assert not ApiToken.objects.filter(pk=self.api_token.id).exists() def test_delete_invalid_token(self): self.login_as(user=self.user) - url = reverse( - "sentry-api-0-sentry-internal-app-token-details", - args=[self.internal_sentry_app.slug, "random"], + self.get_error_response( + self.internal_sentry_app.slug, + "random", + status_code=status.HTTP_404_NOT_FOUND, ) - response = self.client.delete(url, format="json") - assert response.status_code == 404 - def test_delete_token_another_app(self): - another_app = self.create_internal_integration(name="Another app", organization=self.org) api_token = ApiToken.objects.get(application=another_app.application) - url = reverse( - "sentry-api-0-sentry-internal-app-token-details", - args=[self.internal_sentry_app.slug, api_token.id], - ) - self.login_as(user=self.user) - response = self.client.delete(url, format="json") - assert response.status_code == 404 + self.get_error_response( + self.internal_sentry_app.slug, + api_token.id, + status_code=status.HTTP_404_NOT_FOUND, + ) def test_non_internal_app(self): sentry_app = self.create_sentry_app(name="My External App", organization=self.org) @@ -61,56 +61,61 @@ def test_non_internal_app(self): slug=sentry_app.slug, organization=self.org, user=self.user ) - url = reverse( - "sentry-api-0-sentry-internal-app-token-details", - 
args=[install.sentry_app.slug, install.api_token.id], - ) - self.login_as(user=self.user) - response = self.client.delete(url, format="json") - assert response.status_code == 403 + response = self.get_error_response( + install.sentry_app.slug, + install.api_token.id, + status_code=status.HTTP_403_FORBIDDEN, + ) assert response.data == "This route is limited to internal integrations only" def test_sentry_app_not_found(self): - - url = reverse( - "sentry-api-0-sentry-internal-app-token-details", - args=["not_a_slug", self.api_token.id], - ) - self.login_as(user=self.user) - response = self.client.delete(url, format="json") - assert response.status_code == 404 + self.get_error_response( + "not_a_slug", + self.api_token.id, + status_code=status.HTTP_404_NOT_FOUND, + ) def test_cannot_delete_partner_app_token(self): self.login_as(user=self.user) self.internal_sentry_app.update(metadata={"partnership_restricted": True}) - response = self.client.delete(self.url) - assert response.status_code == 403 + self.get_error_response( + self.internal_sentry_app.slug, + self.api_token.id, + status_code=status.HTTP_403_FORBIDDEN, + ) + def test_superuser_can_delete(self): + self.login_as(self.superuser, superuser=True) + self.get_success_response( + self.internal_sentry_app.slug, + self.api_token.id, + status_code=status.HTTP_204_NO_CONTENT, + ) + assert not ApiToken.objects.filter(pk=self.api_token.id).exists() -@control_silo_test -class NewSentryInternalAppTokenCreationTest(APITestCase): - def setUp(self): - self.user = self.create_user(email="boop@example.com") - self.org = self.create_organization(owner=self.user, name="My Org") - self.project = self.create_project(organization=self.org) + @override_settings(SENTRY_SELF_HOSTED=False) + @with_feature("auth:enterprise-superuser-read-write") + def test_superuser_read_write_delete(self): + self.login_as(self.superuser, superuser=True) - self.internal_sentry_app = self.create_internal_integration( - name="My Internal App", organization=self.org + # superuser read cannot delete + self.get_error_response( + self.internal_sentry_app.slug, + self.api_token.id, + status_code=status.HTTP_403_FORBIDDEN, ) + assert ApiToken.objects.filter(pk=self.api_token.id).exists() - self.api_token = ApiToken.objects.get(application=self.internal_sentry_app.application) + # superuser write can delete + self.add_user_permission(self.superuser, "superuser.write") - self.url = reverse( - "sentry-api-0-sentry-internal-app-token-details", - args=[self.internal_sentry_app.slug, self.api_token.id], + self.get_success_response( + self.internal_sentry_app.slug, + self.api_token.id, + status_code=status.HTTP_204_NO_CONTENT, ) - - def test_delete_token(self): - self.login_as(user=self.user) - response = self.client.delete(self.url, format="json") - assert response.status_code == 204 assert not ApiToken.objects.filter(pk=self.api_token.id).exists() diff --git a/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py b/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py index a209e7bfeb8c3c..21dffef984dd2e 100644 --- a/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py +++ b/tests/sentry/api/endpoints/test_sentry_internal_app_tokens.py @@ -1,13 +1,16 @@ -from django.urls import reverse +from django.test import override_settings +from rest_framework import status from sentry.models.apitoken import ApiToken from sentry.models.integrations.sentry_app import MASKED_VALUE from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers.features import 
with_feature from sentry.testutils.silo import control_silo_test -from sentry.utils import json class SentryInternalAppTokenTest(APITestCase): + endpoint = "sentry-api-0-sentry-internal-app-tokens" + def setUp(self): self.user = self.create_user(email="boop@example.com") self.org = self.create_organization(owner=self.user, name="My Org") @@ -16,62 +19,80 @@ def setUp(self): self.internal_sentry_app = self.create_internal_integration( name="My Internal App", organization=self.org ) - - self.url = reverse( - "sentry-api-0-sentry-internal-app-tokens", args=[self.internal_sentry_app.slug] - ) + self.superuser = self.create_user(is_superuser=True) @control_silo_test class PostSentryInternalAppTokenTest(SentryInternalAppTokenTest): + method = "post" + def test_create_token(self): self.login_as(user=self.user) - response = self.client.post(self.url, format="json") - assert response.status_code == 201 + response = self.get_success_response( + self.internal_sentry_app.slug, status_code=status.HTTP_201_CREATED + ) assert ApiToken.objects.get(token=response.data["token"]) def test_non_internal_app(self): sentry_app = self.create_sentry_app(name="My External App", organization=self.org) - url = reverse("sentry-api-0-sentry-internal-app-tokens", args=[sentry_app.slug]) - self.login_as(user=self.user) - response = self.client.post(url, format="json") + response = self.get_error_response(sentry_app.slug, status_code=status.HTTP_403_FORBIDDEN) - assert response.status_code == 403 assert response.data == "This route is limited to internal integrations only" def test_sentry_app_not_found(self): - - url = reverse("sentry-api-0-sentry-internal-app-tokens", args=["not_a_slug"]) - self.login_as(user=self.user) - response = self.client.post(url, format="json") - - assert response.status_code == 404 + self.get_error_response("not_a_slug", status_code=status.HTTP_404_NOT_FOUND) def test_token_limit(self): self.login_as(user=self.user) # we already have one token created so just need to make 19 more first - for i in range(19): - response = self.client.post(self.url, format="json") - assert response.status_code == 201 + for _ in range(19): + self.get_success_response( + self.internal_sentry_app.slug, status_code=status.HTTP_201_CREATED + ) - response = self.client.post(self.url, format="json") - assert response.status_code == 403 + response = self.get_error_response( + self.internal_sentry_app.slug, status_code=status.HTTP_403_FORBIDDEN + ) assert response.data == "Cannot generate more than 20 tokens for a single integration" def test_cannot_create_partner_app_token(self): self.login_as(user=self.user) self.internal_sentry_app.update(metadata={"partnership_restricted": True}) - response = self.client.post(self.url, format="json") - assert response.status_code == 403 + + self.get_error_response( + self.internal_sentry_app.slug, status_code=status.HTTP_403_FORBIDDEN + ) + + def test_superuser_post(self): + self.login_as(self.superuser, superuser=True) + self.get_success_response( + self.internal_sentry_app.slug, status_code=status.HTTP_201_CREATED + ) + + @override_settings(SENTRY_SELF_HOSTED=False) + @with_feature("auth:enterprise-superuser-read-write") + def test_superuser_read_write_post(self): + # only superuser write can hit post + self.login_as(self.superuser, superuser=True) + self.get_error_response( + self.internal_sentry_app.slug, status_code=status.HTTP_403_FORBIDDEN + ) + + self.add_user_permission(self.superuser, "superuser.write") + self.get_success_response( + self.internal_sentry_app.slug, 
status_code=status.HTTP_201_CREATED
+        )


 @control_silo_test
 class GetSentryInternalAppTokenTest(SentryInternalAppTokenTest):
+    method = "get"
+
     def test_get_tokens(self):
         self.login_as(self.user)

@@ -79,23 +100,20 @@ def test_get_tokens(self):

         token = ApiToken.objects.get(application_id=self.internal_sentry_app.application_id)

-        response = self.client.get(self.url, format="json")
-
-        assert response.status_code == 200
-        response_content = json.loads(response.content)
+        response = self.get_success_response(self.internal_sentry_app.slug)

         # should not include tokens from other internal app
-        assert len(response_content) == 1
-
-        assert response_content[0]["id"] == str(token.id)
+        assert len(response.data) == 1
+        assert response.data[0]["id"] == str(token.id)

     def no_access_for_members(self):
         user = self.create_user(email="meep@example.com")
         self.create_member(organization=self.org, user=user)
         self.login_as(user)

-        response = self.client.get(self.url, format="json")
-        assert response.status_code == 403
+        self.get_error_response(
+            self.internal_sentry_app.slug, status_code=status.HTTP_403_FORBIDDEN
+        )

     def test_token_is_masked(self):
         user = self.create_user(email="meep@example.com")
@@ -107,12 +125,10 @@ def test_token_is_masked(self):

         self.login_as(user)

-        url = reverse("sentry-api-0-sentry-internal-app-tokens", args=[sentry_app.slug])
-        response = self.client.get(url, format="json")
-        response_content = json.loads(response.content)
+        response = self.get_success_response(sentry_app.slug)

-        assert response_content[0]["token"] == MASKED_VALUE
-        assert response_content[0]["refreshToken"] == MASKED_VALUE
+        assert response.data[0]["token"] == MASKED_VALUE
+        assert response.data[0]["refreshToken"] == MASKED_VALUE

     def test_deny_token_access(self):
         self.login_as(self.user)
@@ -120,6 +136,21 @@ def test_deny_token_access(self):

         sentry_app = self.create_internal_integration(name="OtherInternal", organization=self.org)

-        url = reverse("sentry-api-0-sentry-internal-app-tokens", args=[sentry_app.slug])
-        response = self.client.get(url, format="json", HTTP_AUTHORIZATION=f"Bearer {token.token}")
-        assert response.status_code == 403, response.content
+        self.get_error_response(
+            sentry_app.slug,
+            status_code=status.HTTP_403_FORBIDDEN,
+            extra_headers={"HTTP_AUTHORIZATION": f"Bearer {token.token}"},
+        )
+
+    def test_superuser_get(self):
+        self.login_as(self.superuser, superuser=True)
+        self.get_success_response(self.internal_sentry_app.slug)
+
+    @override_settings(SENTRY_SELF_HOSTED=False)
+    @with_feature("auth:enterprise-superuser-read-write")
+    def test_superuser_read_write_get(self):
+        self.login_as(self.superuser, superuser=True)
+        self.get_success_response(self.internal_sentry_app.slug)
+
+        self.add_user_permission(self.superuser, "superuser.write")
+        self.get_success_response(self.internal_sentry_app.slug)

From a1ba3d0f24f9975b77c173324d67535e90b1048e Mon Sep 17 00:00:00 2001
From: Yagiz Nizipli
Date: Tue, 13 Feb 2024 16:30:02 -0500
Subject: [PATCH 342/357] perf: use optional chaining more (#65092)

This change is enforced and kept consistent by **[Biome's useOptionalChain rule](https://biomejs.dev/linter/rules/use-optional-chain/)**.

All browsers that we ship to support `optional chaining`. The syntax will eventually get transpiled back to the previous pattern unless we move away from Babel and target ES6+ in our webpack config, but for development it makes the code a lot easier to review and understand.
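As a quick, illustrative sketch of the rewrite this rule performs (the `Resp` type and `errorMessage` helper below are invented for the example; the shape mirrors the `plugins.tsx` hunk in this patch):

```ts
// Invented example type: models a response object that may or may not carry details.
type Resp = {responseJSON?: {detail?: string}} | null;

function errorMessage(resp: Resp): Error {
  // Before this patch: resp && resp.responseJSON && typeof resp.responseJSON.detail === 'string'
  // With optional chaining, each `?.` short-circuits to `undefined` when its
  // receiver is null or undefined, so a single typeof check covers every case.
  const detail = resp?.responseJSON?.detail;
  return typeof detail === 'string'
    ? new Error(detail)
    : new Error('Unable to update plugin');
}
```

One reviewing caveat: `a?.b` only guards against `null` and `undefined`, while `a && a.b` also short-circuits on falsy values like `''`, `0`, and `false`, so the two forms are not interchangeable for every receiver.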
- You can look into caniuse.com to see the supported browsers for optional chaining: https://caniuse.com/?search=optional%20chaining - Relevant [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Optional_chaining) --- biome.json | 3 +- build-utils/sentry-instrumentation.ts | 7 +- static/app/actionCreators/group.tsx | 2 +- static/app/actionCreators/plugins.tsx | 2 +- static/app/components/acl/access.tsx | 2 +- static/app/components/acl/feature.tsx | 4 +- static/app/components/activity/note/input.tsx | 3 +- .../components/avatar/organizationAvatar.tsx | 6 +- static/app/components/avatar/teamAvatar.tsx | 4 +- static/app/components/avatar/userAvatar.tsx | 2 +- static/app/components/avatarUploader.tsx | 2 +- static/app/components/charts/barChartZoom.tsx | 2 +- static/app/components/charts/chartZoom.tsx | 2 +- static/app/components/charts/eventsChart.tsx | 4 +- .../app/components/charts/eventsRequest.tsx | 2 +- .../charts/onDemandMetricRequest.tsx | 6 +- static/app/components/charts/pieChart.tsx | 3 +- static/app/components/commitRow.tsx | 2 +- static/app/components/contextPickerModal.tsx | 2 +- .../components/deprecatedAsyncComponent.tsx | 12 +-- .../app/components/deprecatedDropdownMenu.tsx | 2 +- .../app/components/deprecatedforms/form.tsx | 6 +- .../components/deprecatedforms/formField.tsx | 10 +- .../deprecatedforms/genericField.tsx | 2 +- .../deprecatedforms/selectAsyncField.tsx | 4 +- .../deprecatedforms/selectField.tsx | 4 +- .../discover/quickContextCommitRow.tsx | 2 +- .../components/discover/transactionsList.tsx | 3 +- .../components/discover/transactionsTable.tsx | 5 +- .../eventComparison/eventDisplay.tsx | 3 +- .../exception/banners/stacktraceBanners.tsx | 3 +- .../crashContent/stackTrace/content.tsx | 2 +- .../hierarchicalGroupingContent.tsx | 2 +- .../crashContent/stackTrace/nativeContent.tsx | 2 +- .../interfaces/frame/deprecatedLine.tsx | 4 +- .../events/interfaces/frame/line/index.tsx | 2 +- .../events/interfaces/frame/packageStatus.tsx | 2 +- .../events/interfaces/nativeFrame.tsx | 2 +- .../interfaces/performance/durationChart.tsx | 6 +- .../events/interfaces/performance/utils.tsx | 2 +- .../spans/newTraceDetailsSpanBar.tsx | 2 +- .../events/interfaces/spans/spanBar.tsx | 2 +- .../events/interfaces/spans/utils.tsx | 22 ++--- .../components/events/interfaces/threads.tsx | 2 +- .../app/components/events/meta/metaProxy.tsx | 4 +- .../abstractExternalIssueForm.tsx | 4 +- static/app/components/forms/model.tsx | 4 +- static/app/components/gridEditable/index.tsx | 11 +-- static/app/components/hookOrDefault.tsx | 2 +- static/app/components/idBadge/userBadge.tsx | 2 +- .../inviteMembersModal/renderEmailValue.tsx | 2 +- .../modals/recoveryOptionsModal.tsx | 2 +- .../onboardingWizard/useOnboardingDocs.tsx | 17 ++-- .../flamegraphToolbar/flamegraphSearch.tsx | 2 +- .../profiling/functionsMiniGrid.tsx | 75 +++++++-------- static/app/components/progressRing.tsx | 2 +- static/app/components/quickTrace/index.tsx | 4 +- static/app/components/resultGrid.tsx | 2 +- static/app/components/search/searchResult.tsx | 5 +- .../components/search/sources/apiSource.tsx | 46 ++++----- static/app/components/selectMembers/index.tsx | 2 +- .../app/components/smartSearchBar/index.tsx | 8 +- .../smartSearchBar/searchDropdown.tsx | 55 ++++++----- static/app/components/u2f/u2finterface.tsx | 2 +- .../app/data/forms/projectGeneralSettings.tsx | 2 +- .../forms/projectSecurityAndPrivacyGroups.tsx | 9 +- .../app/plugins/components/issueActions.tsx | 2 +- 
static/app/plugins/components/settings.tsx | 2 +- .../app/plugins/jira/components/settings.tsx | 6 +- static/app/plugins/pluginComponentBase.tsx | 8 +- static/app/stores/groupStore.tsx | 2 +- static/app/utils/dates.tsx | 4 +- static/app/utils/discover/eventView.tsx | 11 +-- static/app/utils/metrics/dashboardImport.tsx | 2 +- static/app/utils/metrics/index.tsx | 2 +- static/app/utils/parseApiError.tsx | 4 +- .../performance/anomalies/anomaliesQuery.tsx | 2 +- .../performance/contexts/onDemandControl.tsx | 2 +- .../quickTrace/quickTraceQuery.tsx | 6 +- .../utils/performance/quickTrace/utils.tsx | 4 +- .../utils/profiling/hooks/useContextMenu.tsx | 2 +- static/app/utils/projects.tsx | 4 +- .../utils/releases/releasesProvider.spec.tsx | 5 +- static/app/utils/useApiRequests.tsx | 8 +- static/app/utils/useMedia.tsx | 4 +- static/app/utils/useTeamsById.tsx | 2 +- static/app/utils/withLatestContext.tsx | 2 +- .../app/views/acceptProjectTransfer/index.tsx | 4 +- static/app/views/alerts/rules/issue/index.tsx | 2 +- .../app/views/alerts/rules/issue/ruleNode.tsx | 2 +- .../alerts/rules/metric/ruleNameOwnerForm.tsx | 2 +- .../metric/triggers/actionsPanel/index.tsx | 4 +- .../alerts/rules/metric/triggers/form.tsx | 6 +- .../alerts/utils/getIncidentDiscoverUrl.tsx | 2 +- .../alerts/utils/getMetricRuleDiscoverUrl.tsx | 2 +- .../views/dashboards/manage/dashboardCard.tsx | 2 +- .../app/views/dashboards/widgetCard/chart.tsx | 4 +- static/app/views/discover/landing.tsx | 2 +- static/app/views/discover/miniGraph.tsx | 2 +- static/app/views/discover/queryList.spec.tsx | 2 +- static/app/views/discover/queryList.tsx | 6 +- static/app/views/discover/querycard.tsx | 2 +- .../app/views/discover/savedQuery/utils.tsx | 9 +- static/app/views/discover/table/index.tsx | 2 +- .../table/quickContext/issueContext.tsx | 3 +- .../table/quickContext/releaseContext.tsx | 3 +- static/app/views/discover/table/tableView.tsx | 2 +- static/app/views/discover/utils.tsx | 2 +- .../integrationOrganizationLink/index.tsx | 2 +- .../views/issueDetails/activitySection.tsx | 2 +- .../app/views/issueDetails/groupDetails.tsx | 2 +- static/app/views/issueDetails/utils.tsx | 2 +- static/app/views/issueList/actions/index.tsx | 2 +- static/app/views/issueList/overview.tsx | 4 +- .../organizationActivity/activityFeedItem.tsx | 2 +- .../components/performanceScoreRing.tsx | 2 +- static/app/views/performance/charts/chart.tsx | 2 +- .../widgets/components/performanceWidget.tsx | 2 +- .../transforms/transformTrendsDiscover.tsx | 6 +- .../traceDetails/newTraceDetailsContent.tsx | 2 +- .../transactionOverview/tagExplorer.tsx | 2 +- .../transactionOverview/trendChart/index.tsx | 6 +- .../transactionTags/tagValueTable.tsx | 2 +- .../transactionTags/tagsHeatMap.tsx | 13 +-- .../trends/changedTransactions.tsx | 6 +- static/app/views/performance/utils.tsx | 2 +- static/app/views/projects/projectContext.tsx | 2 +- static/app/views/releases/detail/utils.tsx | 13 ++- static/app/views/releases/list/index.tsx | 16 +++- .../list/releaseCard/releaseCardCommits.tsx | 2 +- .../thresholdsList/thresholdGroupRows.tsx | 2 +- .../thresholdsList/thresholdGroupTable.tsx | 27 +++--- static/app/views/relocation/uploadBackup.tsx | 2 +- .../detail/breadcrumbs/breadcrumbRow.tsx | 3 +- .../replays/detail/console/consoleLogRow.tsx | 3 +- static/app/views/routeError.tsx | 2 +- .../components/settingsNavigationGroup.tsx | 2 +- .../organizationApiKeysList.tsx | 63 ++++++------ .../views/settings/organizationAuth/index.tsx | 2 +- .../sentryApplicationDetails.tsx | 2 +- 
.../addIntegration.tsx | 2 +- .../installedIntegration.tsx | 2 +- .../sentryAppExternalForm.tsx | 8 +- .../components/membersFilter.tsx | 2 +- .../organizationMembers/inviteRequestRow.tsx | 2 +- .../organizationMemberDetail.tsx | 3 +- .../organizationMemberRow.tsx | 2 +- .../organizationTeams/teamMembers.tsx | 3 +- .../project/navigationConfiguration.tsx | 2 +- .../projectKeys/details/keyRateLimitsForm.tsx | 3 +- .../projectOwnership/addCodeOwnerModal.tsx | 2 +- .../project/projectOwnership/modal.tsx | 2 +- .../project/projectOwnership/ownerInput.tsx | 2 +- .../project/projectOwnership/selectOwners.tsx | 2 +- .../views/settings/projectMetrics/access.tsx | 5 +- .../views/settings/projectPlugins/details.tsx | 11 +-- .../app/views/starfish/components/chart.tsx | 2 +- .../components/stackTraceMiniFrame.tsx | 2 +- .../queries/useProjectSpanMetricsCounts.tsx | 2 +- .../views/starfish/queries/useReleases.tsx | 7 +- .../views/webServiceView/spanGroupBar.tsx | 96 +++++++++---------- .../views/userFeedback/userFeedbackEmpty.tsx | 7 +- 162 files changed, 440 insertions(+), 510 deletions(-) diff --git a/biome.json b/biome.json index 9ce566a9eac1e7..16bad66ed07247 100644 --- a/biome.json +++ b/biome.json @@ -23,7 +23,8 @@ "useIsNan": "error" }, "complexity": { - "useFlatMap": "error" + "useFlatMap": "error", + "useOptionalChain": "error" }, "nursery": { "noDuplicateJsonKeys": "error", diff --git a/build-utils/sentry-instrumentation.ts b/build-utils/sentry-instrumentation.ts index c72afb3ae58d8a..c99b6dcd94d34f 100644 --- a/build-utils/sentry-instrumentation.ts +++ b/build-utils/sentry-instrumentation.ts @@ -1,10 +1,9 @@ /* eslint-env node */ +import type Sentry from '@sentry/node'; +import type {Transaction} from '@sentry/types'; import crypto from 'node:crypto'; import https from 'node:https'; import os from 'node:os'; - -import type Sentry from '@sentry/node'; -import type {Transaction} from '@sentry/types'; import type webpack from 'webpack'; const { @@ -72,7 +71,7 @@ class SentryInstrumentation { sentry.setTag('arch', os.arch()); sentry.setTag( 'cpu', - cpus && cpus.length ? `${cpus[0].model} (cores: ${cpus.length})}` : 'N/A' + cpus?.length ? `${cpus[0].model} (cores: ${cpus.length})}` : 'N/A' ); this.Sentry = sentry; diff --git a/static/app/actionCreators/group.tsx b/static/app/actionCreators/group.tsx index ee9dca999fb28d..662cb2298446a1 100644 --- a/static/app/actionCreators/group.tsx +++ b/static/app/actionCreators/group.tsx @@ -38,7 +38,7 @@ export function assignToUser(params: AssignToUserParams) { const id = uniqueId(); GroupStore.onAssignTo(id, params.id, { - email: (params.member && params.member.email) || '', + email: params.member?.email ?? '', }); const request = api.requestPromise(endpoint, { diff --git a/static/app/actionCreators/plugins.tsx b/static/app/actionCreators/plugins.tsx index 1ac673eebef875..adf23b6ac13432 100644 --- a/static/app/actionCreators/plugins.tsx +++ b/static/app/actionCreators/plugins.tsx @@ -50,7 +50,7 @@ function doUpdate({orgId, projectId, pluginId, update, ...params}: DoUpdateParam }) .catch(resp => { const err = - resp && resp.responseJSON && typeof resp.responseJSON.detail === 'string' + typeof resp?.responseJSON?.detail === 'string' ? 
new Error(resp.responseJSON.detail) : new Error('Unable to update plugin'); PluginsStore.onUpdateError(pluginId, err); diff --git a/static/app/components/acl/access.tsx b/static/app/components/acl/access.tsx index d6c23a6f4f2a45..9e51c3647f669f 100644 --- a/static/app/components/acl/access.tsx +++ b/static/app/components/acl/access.tsx @@ -63,7 +63,7 @@ function Access({ project = project ?? undefined; const hasAccess = hasEveryAccess(access, {organization, team, project}); - const hasSuperuser = !!(user && user.isSuperuser); + const hasSuperuser = Boolean(user?.isSuperuser); const renderProps: ChildRenderProps = { hasAccess, diff --git a/static/app/components/acl/feature.tsx b/static/app/components/acl/feature.tsx index fe33bda64d38ab..5b7d703dafa564 100644 --- a/static/app/components/acl/feature.tsx +++ b/static/app/components/acl/feature.tsx @@ -116,8 +116,8 @@ class Feature extends Component { return { configFeatures: config.features ? Array.from(config.features) : [], - organization: (organization && organization.features) || [], - project: (project && project.features) || [], + organization: organization?.features ?? [], + project: project?.features ?? [], }; } diff --git a/static/app/components/activity/note/input.tsx b/static/app/components/activity/note/input.tsx index 399a7367118582..5e64720754d9ce 100644 --- a/static/app/components/activity/note/input.tsx +++ b/static/app/components/activity/note/input.tsx @@ -154,8 +154,7 @@ function NoteInput({ (errorJSON && (typeof errorJSON.detail === 'string' ? errorJSON.detail - : (errorJSON.detail && errorJSON.detail.message) || - t('Unable to post comment'))) || + : errorJSON.detail?.message || t('Unable to post comment'))) || null; return ( diff --git a/static/app/components/avatar/organizationAvatar.tsx b/static/app/components/avatar/organizationAvatar.tsx index 051a351980cba9..eee0b47d0d4200 100644 --- a/static/app/components/avatar/organizationAvatar.tsx +++ b/static/app/components/avatar/organizationAvatar.tsx @@ -10,14 +10,14 @@ function OrganizationAvatar({organization, ...props}: Props) { if (!organization) { return null; } - const slug = (organization && organization.slug) || ''; + const slug = organization?.slug || ''; const title = explodeSlug(slug); return ( { uploadClick = (ev: React.MouseEvent) => { ev.preventDefault(); - this.file.current && this.file.current.click(); + this.file.current?.click(); }; renderImageCrop() { diff --git a/static/app/components/charts/barChartZoom.tsx b/static/app/components/charts/barChartZoom.tsx index 7be2b66d58a5d1..1bf981335ab49e 100644 --- a/static/app/components/charts/barChartZoom.tsx +++ b/static/app/components/charts/barChartZoom.tsx @@ -87,7 +87,7 @@ class BarChartZoom extends Component { } // This attempts to activate the area zoom toolbox feature - const zoom = chart._componentsViews?.find(c => c._features && c._features.dataZoom); + const zoom = chart._componentsViews?.find(c => c._features?.dataZoom); if (zoom && !zoom._features.dataZoom._isZoomActive) { // Calling dispatchAction will re-trigger handleChartFinished chart.dispatchAction({ diff --git a/static/app/components/charts/chartZoom.tsx b/static/app/components/charts/chartZoom.tsx index a133d1d87b8c97..e3dc99f26a5c37 100644 --- a/static/app/components/charts/chartZoom.tsx +++ b/static/app/components/charts/chartZoom.tsx @@ -245,7 +245,7 @@ class ChartZoom extends Component { } // This attempts to activate the area zoom toolbox feature - const zoom = chart._componentsViews?.find(c => c._features && c._features.dataZoom); + 
const zoom = chart._componentsViews?.find(c => c._features?.dataZoom); if (zoom && !zoom._features.dataZoom._isZoomActive) { // Calling dispatchAction will re-trigger handleChartFinished chart.dispatchAction({ diff --git a/static/app/components/charts/eventsChart.tsx b/static/app/components/charts/eventsChart.tsx index a85066d058236c..b4d6050fb2dfb8 100644 --- a/static/app/components/charts/eventsChart.tsx +++ b/static/app/components/charts/eventsChart.tsx @@ -226,7 +226,7 @@ class Chart extends Component { } // Temporary fix to improve performance on pages with a high number of releases. - const releases = releaseSeries && releaseSeries[0]; + const releases = releaseSeries?.[0]; const hideReleasesByDefault = Array.isArray(releaseSeries) && (releases as any)?.markLine?.data && @@ -267,7 +267,7 @@ class Chart extends Component { ...theme.charts.getColorPalette(timeseriesData.length - 2 - (hasOther ? 1 : 0)), ] : undefined; - if (chartColors && chartColors.length && hasOther) { + if (chartColors?.length && hasOther) { chartColors.push(theme.chartOther); } const chartOptions = { diff --git a/static/app/components/charts/eventsRequest.tsx b/static/app/components/charts/eventsRequest.tsx index 65032c0d771127..7cfe63f360d46f 100644 --- a/static/app/components/charts/eventsRequest.tsx +++ b/static/app/components/charts/eventsRequest.tsx @@ -332,7 +332,7 @@ class EventsRequest extends PureComponent { { diff --git a/static/app/components/commitRow.tsx b/static/app/components/commitRow.tsx index 2b02cf8eaac1d5..6c5679e7b0745b 100644 --- a/static/app/components/commitRow.tsx +++ b/static/app/components/commitRow.tsx @@ -120,7 +120,7 @@ function CommitRow({ - {commit.pullRequest && commit.pullRequest.externalUrl && ( + {commit.pullRequest?.externalUrl && (
    ); diff --git a/static/app/views/relocation/uploadBackup.tsx b/static/app/views/relocation/uploadBackup.tsx index ba1f4d3f7a6b33..edee1800b3a444 100644 --- a/static/app/views/relocation/uploadBackup.tsx +++ b/static/app/views/relocation/uploadBackup.tsx @@ -75,7 +75,7 @@ export function UploadBackup({onComplete}: StepProps) { }; const onFileUploadLinkClick = () => { - inputFileRef.current && inputFileRef.current.click(); + inputFileRef.current?.click(); }; const handleStartRelocation = async () => { diff --git a/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx b/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx index 13f5054ee81b81..2f8946255e3144 100644 --- a/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx +++ b/static/app/views/replays/detail/breadcrumbs/breadcrumbRow.tsx @@ -51,8 +51,7 @@ export default function BreadcrumbRow({ [onDimensionChange, index] ); const handleObjectInspectorExpanded = useCallback( - (path, expandedState, e) => - onInspectorExpanded && onInspectorExpanded(index, path, expandedState, e), + (path, expandedState, e) => onInspectorExpanded?.(index, path, expandedState, e), [index, onInspectorExpanded] ); diff --git a/static/app/views/replays/detail/console/consoleLogRow.tsx b/static/app/views/replays/detail/console/consoleLogRow.tsx index 02f90f80d806df..28302f528eca69 100644 --- a/static/app/views/replays/detail/console/consoleLogRow.tsx +++ b/static/app/views/replays/detail/console/consoleLogRow.tsx @@ -39,8 +39,7 @@ function UnmemoizedConsoleLogRow({ style, }: Props) { const handleDimensionChange = useCallback( - (path, expandedState, e) => - onDimensionChange && onDimensionChange(index, path, expandedState, e), + (path, expandedState, e) => onDimensionChange?.(index, path, expandedState, e), [onDimensionChange, index] ); diff --git a/static/app/views/routeError.tsx b/static/app/views/routeError.tsx index 2a98e2ad338b14..99a89fc84c1ba1 100644 --- a/static/app/views/routeError.tsx +++ b/static/app/views/routeError.tsx @@ -100,7 +100,7 @@ function RouteError({error, disableLogSentry, disableReport, project}: Props) {

    {t("If you're daring, you may want to try the following:")}

    - {window && window.adblockSuspected && ( + {window?.adblockSuspected && ( {t( "We detected something AdBlock-like. Try disabling it, as it's known to cause issues." diff --git a/static/app/views/settings/components/settingsNavigationGroup.tsx b/static/app/views/settings/components/settingsNavigationGroup.tsx index 5d4e52bc3932ae..d817c4819c00c6 100644 --- a/static/app/views/settings/components/settingsNavigationGroup.tsx +++ b/static/app/views/settings/components/settingsNavigationGroup.tsx @@ -28,7 +28,7 @@ function SettingsNavigationGroup(props: NavigationGroupProps) { if (recordAnalytics && to !== window.location.pathname && organization) { trackAnalytics('sidebar.item_clicked', { organization, - project_id: project && project.id, + project_id: project?.id, sidebar_item_id: id, dest: path, }); diff --git a/static/app/views/settings/organizationApiKeys/organizationApiKeysList.tsx b/static/app/views/settings/organizationApiKeys/organizationApiKeysList.tsx index fc6cfd82d7e7f9..f4adb362e1ff62 100644 --- a/static/app/views/settings/organizationApiKeys/organizationApiKeysList.tsx +++ b/static/app/views/settings/organizationApiKeys/organizationApiKeysList.tsx @@ -47,7 +47,7 @@ function OrganizationApiKeysList({ onAddApiKey, onRemove, }: Props) { - const hasKeys = keys && keys.length; + const hasKeys = Boolean(keys?.length); const action = (
    ); diff --git a/static/app/views/settings/organizationAuth/index.tsx b/static/app/views/settings/organizationAuth/index.tsx index 188df305ea0687..415042ff384500 100644 --- a/static/app/views/settings/organizationAuth/index.tsx +++ b/static/app/views/settings/organizationAuth/index.tsx @@ -78,7 +78,7 @@ class OrganizationAuth extends DeprecatedAsyncView { data: {provider, init: true}, success: data => { // Redirect to auth provider URL - if (data && data.auth_url) { + if (data?.auth_url) { window.location.href = data.auth_url; } }, diff --git a/static/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx b/static/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx index 7471cad23a8107..344e9cdef261ca 100644 --- a/static/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx +++ b/static/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx @@ -241,7 +241,7 @@ class SentryApplicationDetails extends DeprecatedAsyncView { get showAuthInfo() { const {app} = this.state; - return !(app && app.clientSecret && app.clientSecret[0] === '*'); + return !(app?.clientSecret && app.clientSecret[0] === '*'); } onAddToken = async (evt: React.MouseEvent): Promise => { diff --git a/static/app/views/settings/organizationIntegrations/addIntegration.tsx b/static/app/views/settings/organizationIntegrations/addIntegration.tsx index 6df4fcf4d2df10..c65344e521efee 100644 --- a/static/app/views/settings/organizationIntegrations/addIntegration.tsx +++ b/static/app/views/settings/organizationIntegrations/addIntegration.tsx @@ -94,7 +94,7 @@ export default class AddIntegration extends Component { const opts = `scrollbars=yes,width=${width},height=${height},top=${top},left=${left}`; this.dialog = window.open(installUrl, name, opts); - this.dialog && this.dialog.focus(); + this.dialog?.focus(); }; didReceiveMessage = (message: MessageEvent) => { diff --git a/static/app/views/settings/organizationIntegrations/installedIntegration.tsx b/static/app/views/settings/organizationIntegrations/installedIntegration.tsx index 51648388f50fea..038703d382da8e 100644 --- a/static/app/views/settings/organizationIntegrations/installedIntegration.tsx +++ b/static/app/views/settings/organizationIntegrations/installedIntegration.tsx @@ -39,7 +39,7 @@ export default class InstalledIntegration extends Component { }; getRemovalBodyAndText(aspects: Integration['provider']['aspects']) { - if (aspects && aspects.removal_dialog) { + if (aspects?.removal_dialog) { return { body: aspects.removal_dialog.body, actionText: aspects.removal_dialog.actionText, diff --git a/static/app/views/settings/organizationIntegrations/sentryAppExternalForm.tsx b/static/app/views/settings/organizationIntegrations/sentryAppExternalForm.tsx index 92ad64dafae6e5..331dedb3a70e2d 100644 --- a/static/app/views/settings/organizationIntegrations/sentryAppExternalForm.tsx +++ b/static/app/views/settings/organizationIntegrations/sentryAppExternalForm.tsx @@ -111,7 +111,7 @@ export class SentryAppExternalForm extends Component { // For alert-rule-actions, the forms are entirely custom, extra fields are // passed in on submission, not as part of the form. See handleAlertRuleSubmit(). 
if (element === 'alert-rule-action') { - const defaultResetValues = (this.props.resetValues || {}).settings || []; + const defaultResetValues = this.props.resetValues?.settings || []; const initialData = defaultResetValues.reduce((acc, curr) => { acc[curr.name] = curr.value; return acc; @@ -150,7 +150,7 @@ export class SentryAppExternalForm extends Component { }; getDefaultOptions = (field: FieldFromSchema) => { - const savedOption = ((this.props.resetValues || {}).settings || []).find( + const savedOption = (this.props.resetValues?.settings || []).find( value => value.name === field.name ); const currentOptions = (field.choices || []).map(([value, label]) => ({ @@ -181,9 +181,7 @@ export class SentryAppExternalForm extends Component { defaultValue = getFieldDefault(field); } - const reset = ((resetValues || {}).settings || []).find( - value => value.name === field.name - ); + const reset = resetValues?.settings?.find(value => value.name === field.name); if (reset) { defaultValue = reset.value; diff --git a/static/app/views/settings/organizationMembers/components/membersFilter.tsx b/static/app/views/settings/organizationMembers/components/membersFilter.tsx index a4a0c41509b20b..8f5e54948d3a77 100644 --- a/static/app/views/settings/organizationMembers/components/membersFilter.tsx +++ b/static/app/views/settings/organizationMembers/components/membersFilter.tsx @@ -23,7 +23,7 @@ const getBooleanValue = (list: string[]) => { return 'all'; } - return list && list.map(v => v.toLowerCase()).includes('true') ? 'true' : 'false'; + return list?.map(v => v.toLowerCase()).includes('true') ? 'true' : 'false'; }; const booleanOptions = [ diff --git a/static/app/views/settings/organizationMembers/inviteRequestRow.tsx b/static/app/views/settings/organizationMembers/inviteRequestRow.tsx index a5462ca4b4f057..c1730128f39e2b 100644 --- a/static/app/views/settings/organizationMembers/inviteRequestRow.tsx +++ b/static/app/views/settings/organizationMembers/inviteRequestRow.tsx @@ -35,7 +35,7 @@ function InviteRequestRow({ allRoles, }: Props) { const role = allRoles.find(r => r.id === inviteRequest.role); - const roleDisallowed = !(role && role.allowed); + const roleDisallowed = !role?.allowed; const {access} = organization; const canApprove = access.includes('member:admin'); diff --git a/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx b/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx index 4af2e70355fb9e..d7f9d274136e2d 100644 --- a/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx +++ b/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx @@ -120,8 +120,7 @@ class OrganizationMemberDetail extends DeprecatedAsyncView { }); addSuccessMessage(t('Saved')); } catch (resp) { - const errorMessage = - (resp && resp.responseJSON && resp.responseJSON.detail) || t('Could not save...'); + const errorMessage = resp?.responseJSON?.detail || t('Could not save...'); this.setState({busy: false}); addErrorMessage(errorMessage); } diff --git a/static/app/views/settings/organizationMembers/organizationMemberRow.tsx b/static/app/views/settings/organizationMembers/organizationMemberRow.tsx index e8cd90c0909bfb..05b372ccfaf1e6 100644 --- a/static/app/views/settings/organizationMembers/organizationMemberRow.tsx +++ b/static/app/views/settings/organizationMembers/organizationMemberRow.tsx @@ -116,7 +116,7 @@ export default class OrganizationMemberRow extends PureComponent { canRemoveMembers && !isCurrentUser && !isIdpProvisioned && 
!isPartnershipUser; // member has a `user` property if they are registered with sentry // i.e. has accepted an invite to join org - const has2fa = user && user.has2fa; + const has2fa = user?.has2fa; const detailsUrl = `/settings/${organization.slug}/members/${id}/`; const isInviteSuccessful = status === 'success'; const isInviting = status === 'loading'; diff --git a/static/app/views/settings/organizationTeams/teamMembers.tsx b/static/app/views/settings/organizationTeams/teamMembers.tsx index 865c68f23b0042..fc922b1b7550a7 100644 --- a/static/app/views/settings/organizationTeams/teamMembers.tsx +++ b/static/app/views/settings/organizationTeams/teamMembers.tsx @@ -147,8 +147,7 @@ class TeamMembers extends DeprecatedAsyncView { }, error: resp => { const errorMessage = - (resp && resp.responseJSON && resp.responseJSON.detail) || - t('Unable to add team member.'); + resp?.responseJSON?.detail || t('Unable to add team member.'); addErrorMessage(errorMessage); }, } diff --git a/static/app/views/settings/project/navigationConfiguration.tsx b/static/app/views/settings/project/navigationConfiguration.tsx index d005a5bf5efdf9..ea3658211ac508 100644 --- a/static/app/views/settings/project/navigationConfiguration.tsx +++ b/static/app/views/settings/project/navigationConfiguration.tsx @@ -15,7 +15,7 @@ export default function getConfiguration({ organization, debugFilesNeedsReview, }: ConfigParams): NavigationSection[] { - const plugins = ((project && project.plugins) || []).filter(plugin => plugin.enabled); + const plugins = (project?.plugins || []).filter(plugin => plugin.enabled); return [ { name: t('Project'), diff --git a/static/app/views/settings/project/projectKeys/details/keyRateLimitsForm.tsx b/static/app/views/settings/project/projectKeys/details/keyRateLimitsForm.tsx index a1b8e49bf55d34..099ff66cd598b9 100644 --- a/static/app/views/settings/project/projectKeys/details/keyRateLimitsForm.tsx +++ b/static/app/views/settings/project/projectKeys/details/keyRateLimitsForm.tsx @@ -142,8 +142,7 @@ function KeyRateLimitsForm({data, disabled, organization, params}: Props) { validate={({form}) => { // TODO(TS): is validate actually doing anything because it's an unexpected prop const isValid = - form && - form.rateLimit && + form?.rateLimit && typeof form.rateLimit.count !== 'undefined' && typeof form.rateLimit.window !== 'undefined'; diff --git a/static/app/views/settings/project/projectOwnership/addCodeOwnerModal.tsx b/static/app/views/settings/project/projectOwnership/addCodeOwnerModal.tsx index c1324a1c2e9a3b..2b00a4e117921f 100644 --- a/static/app/views/settings/project/projectOwnership/addCodeOwnerModal.tsx +++ b/static/app/views/settings/project/projectOwnership/addCodeOwnerModal.tsx @@ -131,7 +131,7 @@ class AddCodeOwnerModal extends DeprecatedAsyncComponent { }; handleAddedFile(data: CodeOwner) { - this.props.onSave && this.props.onSave(data); + this.props.onSave?.(data); this.props.closeModal(); } diff --git a/static/app/views/settings/project/projectOwnership/modal.tsx b/static/app/views/settings/project/projectOwnership/modal.tsx index 5cd775621a2e1e..19855ac871d40b 100644 --- a/static/app/views/settings/project/projectOwnership/modal.tsx +++ b/static/app/views/settings/project/projectOwnership/modal.tsx @@ -48,7 +48,7 @@ function getFrameSuggestions(eventData?: Event) { } // Only display in-app frames - frames = frames.filter(frame => frame && frame.inApp).reverse(); + frames = frames.filter(frame => frame?.inApp).reverse(); return uniq(frames.map(frame => frame.filename || frame.absPath 
|| '')); } diff --git a/static/app/views/settings/project/projectOwnership/ownerInput.tsx b/static/app/views/settings/project/projectOwnership/ownerInput.tsx index 54185d4d43a693..43be17710928dd 100644 --- a/static/app/views/settings/project/projectOwnership/ownerInput.tsx +++ b/static/app/views/settings/project/projectOwnership/ownerInput.tsx @@ -89,7 +89,7 @@ class OwnerInput extends Component { hasChanges: false, text, }, - () => onSave && onSave(text) + () => onSave?.(text) ); trackIntegrationAnalytics('project_ownership.saved', { page, diff --git a/static/app/views/settings/project/projectOwnership/selectOwners.tsx b/static/app/views/settings/project/projectOwnership/selectOwners.tsx index 7b2d8dd5cb3cb1..efb525a8f7ead7 100644 --- a/static/app/views/settings/project/projectOwnership/selectOwners.tsx +++ b/static/app/views/settings/project/projectOwnership/selectOwners.tsx @@ -40,7 +40,7 @@ function ValueComponent({data, removeProps}: MultiValueProps) { } const getSearchKeyForUser = (user: User) => - `${user.email && user.email.toLowerCase()} ${user.name && user.name.toLowerCase()}`; + `${user.email?.toLowerCase()} ${user.name?.toLowerCase()}`; type Props = { api: Client; diff --git a/static/app/views/settings/projectMetrics/access.tsx b/static/app/views/settings/projectMetrics/access.tsx index 52967a053dfc4e..6cdcf17391b279 100644 --- a/static/app/views/settings/projectMetrics/access.tsx +++ b/static/app/views/settings/projectMetrics/access.tsx @@ -1,9 +1,8 @@ +import {hasEveryAccess} from 'sentry/components/acl/access'; import type {Project, Scope, Team} from 'sentry/types'; import useOrganization from 'sentry/utils/useOrganization'; import {useUser} from 'sentry/utils/useUser'; -import {hasEveryAccess} from '../../../components/acl/access'; - type Props = { /** * List of required access levels @@ -35,7 +34,7 @@ export function useAccess({access = [], team, project}: Props) { project = project ?? undefined; const hasAccess = hasEveryAccess(access, {organization, team, project}); - const hasSuperuser = !!(user && user.isSuperuser); + const hasSuperuser = Boolean(user?.isSuperuser); return { hasAccess, diff --git a/static/app/views/settings/projectPlugins/details.tsx b/static/app/views/settings/projectPlugins/details.tsx index bda79238b3cfad..43d037989ad598 100644 --- a/static/app/views/settings/projectPlugins/details.tsx +++ b/static/app/views/settings/projectPlugins/details.tsx @@ -65,7 +65,7 @@ class ProjectPluginDetails extends DeprecatedAsyncView { getTitle() { const {plugin} = this.state; - if (plugin && plugin.name) { + if (plugin?.name) { return plugin.name; } return 'Sentry'; @@ -145,12 +145,11 @@ class ProjectPluginDetails extends DeprecatedAsyncView { const {pluginDetails} = this.state; const {plugins} = this.props; - const plugin = - plugins && - plugins.plugins && - plugins.plugins.find(({slug}) => slug === this.props.params.pluginId); + const plugin = plugins?.plugins?.find( + ({slug}) => slug === this.props.params.pluginId + ); - return plugin ? plugin.enabled : pluginDetails && pluginDetails.enabled; + return plugin ? plugin.enabled : pluginDetails?.enabled; } renderActions() { diff --git a/static/app/views/starfish/components/chart.tsx b/static/app/views/starfish/components/chart.tsx index 9d47c7389ad16c..0fccb0f10d1dd1 100644 --- a/static/app/views/starfish/components/chart.tsx +++ b/static/app/views/starfish/components/chart.tsx @@ -326,7 +326,7 @@ function Chart({ return tooltipFormatter( value, aggregateOutputFormat ?? - aggregateOutputType(data && data.length ? 
data[0].seriesName : seriesName) + aggregateOutputType(data?.length ? data[0].seriesName : seriesName) ); }, nameFormatter(value: string) { diff --git a/static/app/views/starfish/components/stackTraceMiniFrame.tsx b/static/app/views/starfish/components/stackTraceMiniFrame.tsx index 77e67f712c79c9..8ddf1dad9b0ca3 100644 --- a/static/app/views/starfish/components/stackTraceMiniFrame.tsx +++ b/static/app/views/starfish/components/stackTraceMiniFrame.tsx @@ -120,7 +120,7 @@ function SourceCodeIntegrationLink({ projectSlug: project.slug, }); - if (match && match.config && match.sourceUrl && frame.lineNo && !isLoading) { + if (match?.config && match.sourceUrl && frame.lineNo && !isLoading) { return ( parseInt(id, 10)), + projects: projectId?.map(id => parseInt(id, 10)), dataset: DiscoverDatasets.SPANS_METRICS, version: 2, }); diff --git a/static/app/views/starfish/queries/useReleases.tsx b/static/app/views/starfish/queries/useReleases.tsx index 8a989205f460f1..04298ac5cc5128 100644 --- a/static/app/views/starfish/queries/useReleases.tsx +++ b/static/app/views/starfish/queries/useReleases.tsx @@ -36,10 +36,7 @@ export function useReleases(searchTerm?: string) { {staleTime: Infinity, enabled: isReady} ); - const chunks = - releaseResults.data && releaseResults.data.length - ? chunk(releaseResults.data, 10) - : []; + const chunks = releaseResults.data?.length ? chunk(releaseResults.data, 10) : []; const releaseMetrics = useQueries({ queries: chunks.map(releases => { @@ -91,7 +88,7 @@ export function useReleases(searchTerm?: string) { version: string; count?: number; }[] = - releaseResults.data && releaseResults.data.length && metricsFetched + releaseResults.data?.length && metricsFetched ? releaseResults.data.flatMap(release => { const releaseVersion = release.version; const dateCreated = release.dateCreated; diff --git a/static/app/views/starfish/views/webServiceView/spanGroupBar.tsx b/static/app/views/starfish/views/webServiceView/spanGroupBar.tsx index 8f32f05b8f82c6..01dbcbfe6fe99a 100644 --- a/static/app/views/starfish/views/webServiceView/spanGroupBar.tsx +++ b/static/app/views/starfish/views/webServiceView/spanGroupBar.tsx @@ -67,62 +67,54 @@ export function SpanGroupBar(props: SpanGroupBarProps) { {t('Time Spent Breakdown')} - {segments && - segments.map((value, index) => { - const percent = getPercent(value, total); - const spanModule = value['span.module']; - const to = - spanModule === 'db' - ? `/${routingContext.baseURL}/performance/database/` - : ''; - function handleModulePin() { - if (spanModule === pinnedModule) { - setPinnedModule(null); - onHover(null); - } else { - setPinnedModule(spanModule); - onHover(spanModule); - } + {segments?.map((value, index) => { + const percent = getPercent(value, total); + const spanModule = value['span.module']; + const to = + spanModule === 'db' ? `/${routingContext.baseURL}/performance/database/` : ''; + function handleModulePin() { + if (spanModule === pinnedModule) { + setPinnedModule(null); + onHover(null); + } else { + setPinnedModule(spanModule); + onHover(spanModule); } - const tooltipHttp = ( -
    pinnedModule === null && debouncedHover(spanModule)} + onMouseLeave={() => pinnedModule === null && debouncedHover(null)} + onClick={handleModulePin} + > +
    + {tct('Time spent on [spanModule] across all endpoints', {spanModule})} +
    + {percent}% +
    + ); + return ( + + pinnedModule === null && debouncedHover(spanModule)} onMouseLeave={() => pinnedModule === null && debouncedHover(null)} - onClick={handleModulePin} - > -
    - {tct('Time spent on [spanModule] across all endpoints', {spanModule})} -
    - {percent}% -
    - ); - return ( - - pinnedModule === null && debouncedHover(spanModule)} - onMouseLeave={() => pinnedModule === null && debouncedHover(null)} - > - - {spanModule === pinnedModule && ( - - )} - {spanModule} {percent}% - - - - ); - })} + + {spanModule === pinnedModule && ( + + )} + {spanModule} {percent}% + + + + ); + })} ); diff --git a/static/app/views/userFeedback/userFeedbackEmpty.tsx b/static/app/views/userFeedback/userFeedbackEmpty.tsx index 840c72812a840c..4b49526085a8a0 100644 --- a/static/app/views/userFeedback/userFeedbackEmpty.tsx +++ b/static/app/views/userFeedback/userFeedbackEmpty.tsx @@ -22,10 +22,9 @@ export function UserFeedbackEmpty({projectIds}: Props) { const loadingProjects = !initiallyLoaded; const organization = useOrganization(); - const selectedProjects = - projectIds && projectIds.length - ? projects.filter(({id}) => projectIds.includes(id)) - : projects; + const selectedProjects = projectIds?.length + ? projects.filter(({id}) => projectIds.includes(id)) + : projects; const hasAnyFeedback = selectedProjects.some(({hasUserReports}) => hasUserReports); From 3e4bf991a2eb2490cc81baeaf29d15ddb82a1692 Mon Sep 17 00:00:00 2001 From: David Wang Date: Tue, 13 Feb 2024 14:02:21 -0800 Subject: [PATCH 343/357] feat(crons): Add sort selector (#64805) Add sort selector component to be used on listing page, will follow up with bulk edit modal sort image --- .../components/overviewTimeline/index.tsx | 10 ++- .../overviewTimeline/sortSelector.tsx | 78 +++++++++++++++++++ static/app/views/monitors/utils.tsx | 15 +++- 3 files changed, 98 insertions(+), 5 deletions(-) create mode 100644 static/app/views/monitors/components/overviewTimeline/sortSelector.tsx diff --git a/static/app/views/monitors/components/overviewTimeline/index.tsx b/static/app/views/monitors/components/overviewTimeline/index.tsx index 70a56debd9487d..06fccbe4743a16 100644 --- a/static/app/views/monitors/components/overviewTimeline/index.tsx +++ b/static/app/views/monitors/components/overviewTimeline/index.tsx @@ -18,6 +18,7 @@ import { GridLineOverlay, GridLineTimeLabels, } from 'sentry/views/monitors/components/overviewTimeline/gridLines'; +import {SortSelector} from 'sentry/views/monitors/components/overviewTimeline/sortSelector'; import {makeMonitorListQueryKey} from 'sentry/views/monitors/utils'; import type {Monitor} from '../../types'; @@ -145,9 +146,10 @@ export function OverviewTimeline({monitorList}: Props) { return ( - + - + + ) => void; + onChangeSort?: (sort: SelectOption) => void; + order?: MonitorSortOrder; + sort?: MonitorSortOption; +} + +export function SortSelector({onChangeOrder, onChangeSort, order, sort}: Props) { + const {replace, location} = useRouter(); + + const selectedSort = sort ?? location.query?.sort ?? MonitorSortOption.STATUS; + const selectedOrder = order ?? location.query?.asc ?? MonitorSortOrder.DESCENDING; + + const defaultOnChange = (newSort: MonitorSortOption, newOrder: MonitorSortOrder) => { + replace({...location, query: {...location.query, asc: newOrder, sort: newSort}}); + }; + const handleChangeSort = + onChangeSort ?? (newSort => defaultOnChange(newSort.value, selectedOrder)); + const handleChangeOrder = + onChangeOrder ?? 
(newOrder => defaultOnChange(selectedSort, newOrder.value));
+
+  return (
+    (
+
+      )}
+    >
+
+
+
+  );
+}
diff --git a/static/app/views/monitors/utils.tsx b/static/app/views/monitors/utils.tsx
index 2ec6148540a383..6b0794c7b05951 100644
--- a/static/app/views/monitors/utils.tsx
+++ b/static/app/views/monitors/utils.tsx
@@ -11,11 +11,22 @@ export function makeMonitorListQueryKey(
   organization: Organization,
   params: Record<string, any>
 ) {
-  const {query, project, environment, cursor} = params;
+  const {query, project, environment, cursor, sort, asc} = params;

   return [
     `/organizations/${organization.slug}/monitors/`,
-    {query: {cursor, query, project, environment, includeNew: true, per_page: 20}},
+    {
+      query: {
+        cursor,
+        query,
+        project,
+        environment,
+        includeNew: true,
+        per_page: 20,
+        sort,
+        asc,
+      },
+    },
   ] as const;
 }

From 4969d8929ffd92d5d85da30e93a5e2e6918ad3a6 Mon Sep 17 00:00:00 2001
From: David Wang
Date: Tue, 13 Feb 2024 14:59:58 -0800
Subject: [PATCH 344/357] fix(crons): Don't show expected date for first check-in (#64641)

Render an empty cell instead of an expected date when a check-in (such as
a monitor's first check-in) has no expected time.

---
 .../monitors/components/monitorCheckIns.tsx   | 30 +++++++++++--------
 1 file changed, 17 insertions(+), 13 deletions(-)

diff --git a/static/app/views/monitors/components/monitorCheckIns.tsx b/static/app/views/monitors/components/monitorCheckIns.tsx
index ca75b24758c3ed..431191fe8cd913 100644
--- a/static/app/views/monitors/components/monitorCheckIns.tsx
+++ b/static/app/views/monitors/components/monitorCheckIns.tsx
@@ -184,19 +184,23 @@ function MonitorCheckIns({monitor, monitorEnvs, orgSlug}: Props) {
         )}
         {!hasMultiEnv ? null : 
    {checkIn.environment}
    }
-          <Tooltip
-            title={
-              <DateTime date={checkIn.expectedTime} seconds />
-            }
-          >
-            <Timestamp date={checkIn.expectedTime} />
-          </Tooltip>
+          {checkIn.expectedTime ? (
+            <Tooltip
+              title={
+                <DateTime date={checkIn.expectedTime} seconds />
+              }
+            >
+              <Timestamp date={checkIn.expectedTime} />
+            </Tooltip>
+          ) : (
+            emptyCell
+          )}
))}

From 4ac7d8db40b1eb092ee2a1346a083387a3ba1265 Mon Sep 17 00:00:00 2001
From: David Wang
Date: Tue, 13 Feb 2024 15:01:45 -0800
Subject: [PATCH 345/357] feat(crons): Add exact duration as a tooltip in monitor details (#65126)

From https://github.com/getsentry/team-crons/issues/126

---
 static/app/views/monitors/components/monitorCheckIns.tsx | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/static/app/views/monitors/components/monitorCheckIns.tsx b/static/app/views/monitors/components/monitorCheckIns.tsx
index 431191fe8cd913..b98ed447359fdf 100644
--- a/static/app/views/monitors/components/monitorCheckIns.tsx
+++ b/static/app/views/monitors/components/monitorCheckIns.tsx
@@ -136,7 +136,11 @@ function MonitorCheckIns({monitor, monitorEnvs, orgSlug}: Props) {
           emptyCell
         )}
         {defined(checkIn.duration) ? (
-
+
    + }> + + +
    ) : ( emptyCell )} From 311224521e783a1dd332f124039313e71929e99e Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 13 Feb 2024 15:05:01 -0800 Subject: [PATCH 346/357] chore(roles): remove groupOrgRoles from FE (#65114) --- fixtures/js-stubs/member.tsx | 1 - fixtures/js-stubs/members.tsx | 3 --- static/app/types/organization.tsx | 2 -- .../organizationMembers/organizationMemberDetail.tsx | 5 +---- .../organizationMembers/organizationMemberRow.spec.tsx | 1 - .../organizationMembers/organizationMembersList.spec.tsx | 9 +-------- 6 files changed, 2 insertions(+), 19 deletions(-) diff --git a/fixtures/js-stubs/member.tsx b/fixtures/js-stubs/member.tsx index 9ff9c5dfb585b7..a0f68e632ee8fa 100644 --- a/fixtures/js-stubs/member.tsx +++ b/fixtures/js-stubs/member.tsx @@ -8,7 +8,6 @@ export function MemberFixture(params: Partial = {}): Member { email: 'sentry1@test.com', name: 'Sentry 1 Name', orgRole: 'member', - groupOrgRoles: [], teamRoles: [], role: 'member', roleName: 'Member', diff --git a/fixtures/js-stubs/members.tsx b/fixtures/js-stubs/members.tsx index ec6e402ae6f1b8..ce3312ee1335a9 100644 --- a/fixtures/js-stubs/members.tsx +++ b/fixtures/js-stubs/members.tsx @@ -11,7 +11,6 @@ export function MembersFixture(params: Member[] = []): Member[] { name: 'Sentry 2 Name', email: 'sentry2@test.com', orgRole: 'member', - groupOrgRoles: [], teamRoles: [], dateCreated: '', role: 'member', @@ -42,7 +41,6 @@ export function MembersFixture(params: Member[] = []): Member[] { name: 'Sentry 3 Name', email: 'sentry3@test.com', orgRole: 'owner', - groupOrgRoles: [], teamRoles: [], role: 'owner', dateCreated: '', @@ -73,7 +71,6 @@ export function MembersFixture(params: Member[] = []): Member[] { name: 'Sentry 4 Name', email: 'sentry4@test.com', orgRole: 'owner', - groupOrgRoles: [], teamRoles: [], dateCreated: '', role: 'owner', diff --git a/static/app/types/organization.tsx b/static/app/types/organization.tsx index a27769dbd3c002..4a9e7c9f839178 100644 --- a/static/app/types/organization.tsx +++ b/static/app/types/organization.tsx @@ -173,8 +173,6 @@ export interface Member { * User may be null when the member represents an invited member */ user: User | null; - // TODO: Move to global store - groupOrgRoles?: {role: OrgRole; teamSlug: string}[]; } /** diff --git a/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx b/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx index d7f9d274136e2d..e597c67184cb27 100644 --- a/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx +++ b/static/app/views/settings/organizationMembers/organizationMemberDetail.tsx @@ -54,7 +54,6 @@ interface Props extends RouteComponentProps { } interface State extends AsyncViewState { - groupOrgRoles: Member['groupOrgRoles']; // Form state member: Member | null; orgRole: Member['orgRole']; // Form state teamRoles: Member['teamRoles']; // Form state @@ -74,7 +73,6 @@ class OrganizationMemberDetail extends DeprecatedAsyncView { getDefaultState(): State { return { ...super.getDefaultState(), - groupOrgRoles: [], member: null, orgRole: '', teamRoles: [], @@ -90,11 +88,10 @@ class OrganizationMemberDetail extends DeprecatedAsyncView { onRequestSuccess({data, stateKey}: {data: Member; stateKey: string}) { if (stateKey === 'member') { - const {orgRole, teamRoles, groupOrgRoles} = data; + const {orgRole, teamRoles} = data; this.setState({ orgRole, teamRoles, - groupOrgRoles, }); } } diff --git 
a/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx b/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx
index d3ecf25c507634..93b786c0a22e85 100644
--- a/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx
+++ b/static/app/views/settings/organizationMembers/organizationMemberRow.spec.tsx
@@ -27,7 +27,6 @@ describe('OrganizationMemberRow', function () {
       has2fa: false,
       name: 'sentry@test.com',
     }),
-    groupOrgRoles: [],
   });

   const currentUser = UserFixture({
diff --git a/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx b/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx
index 4979d0babf0564..dfa259470d3a85 100644
--- a/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx
+++ b/static/app/views/settings/organizationMembers/organizationMembersList.spec.tsx
@@ -21,7 +21,6 @@ import {
 import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
 import ConfigStore from 'sentry/stores/configStore';
 import OrganizationsStore from 'sentry/stores/organizationsStore';
-import {OrgRoleFixture} from 'sentry/types/role';
 import {trackAnalytics} from 'sentry/utils/analytics';
 import OrganizationMembersList from 'sentry/views/settings/organizationMembers/organizationMembersList';

@@ -54,7 +53,7 @@ const roles = [

 describe('OrganizationMembersList', function () {
   const members = MembersFixture();
-  const ownerTeam = TeamFixture({slug: 'owner-team', orgRole: 'owner'});
+  const ownerTeam = TeamFixture({slug: 'owner-team'});
   const member = MemberFixture({
     id: '5',
     email: 'member@sentry.io',
@@ -73,12 +72,6 @@ describe('OrganizationMembersList', function () {
       'partnership:restricted': false,
       'sso:invalid': false,
     },
-    groupOrgRoles: [
-      {
-        teamSlug: ownerTeam.slug,
-        role: OrgRoleFixture({id: 'owner'}),
-      },
-    ],
   });

   const currentUser = members[1];

From 7c9a0ac75883b9c677b5dfa73dac1a59e264a04f Mon Sep 17 00:00:00 2001
From: Yash Kamothi
Date: Tue, 13 Feb 2024 15:13:37 -0800
Subject: [PATCH 347/357] feat(integration): Give each action a UUID (#64922)

Give each Rule Action a UUID so it can be uniquely identified in future
work. Today we only store information about what an action should do, not
anything that distinguishes one action from another across the platform.
For example, a single Rule could have 3 actions, each describing a Slack
notification to a different channel. To tell those apart you would have to
compare the channel ids, and it gets even harder when you need to compare
against other rule action types (Discord, email, etc). A stable UUID per
action gets us closer to knowing which action produced which side effect.

> Why is this in the serializer? We use the validated data to create and
> update actions, which are subsets of a Rule. Putting it here ensures that
> every place that consumes validated data gets a UUID. We could optionally
> move this to the "save" method on the Rule model, but that did not feel
> like the Rule model's responsibility.
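The invariant, roughly: every non-empty action dict leaves validation carrying a stable `uuid`. A minimal TypeScript transcription of the idea (illustrative only — the real change is the Python `ensure_action_uuid` in the serializer diff below; `RuleAction` and `ensureActionUuid` are invented names):

```ts
import {randomUUID} from 'node:crypto';

// Invented shape: an action is an arbitrary config dict that may or may not
// already carry a uuid.
type RuleAction = {uuid?: unknown} & Record<string, unknown>;

const UUID_PATTERN =
  /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

function ensureActionUuid(action: RuleAction): void {
  // Leave empty actions untouched, mirroring the serializer behavior.
  if (Object.keys(action).length === 0) {
    return;
  }
  // Keep an existing, valid uuid stable across updates...
  if (typeof action.uuid === 'string' && UUID_PATTERN.test(action.uuid)) {
    return;
  }
  // ...otherwise mint a fresh one so the action becomes uniquely addressable.
  action.uuid = randomUUID();
}
```

Running this at validation time means both the create and update paths hand every action a durable identity, without the Rule model having to know about it.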
Resolves: https://github.com/getsentry/sentry/issues/64916 --- src/sentry/api/endpoints/project_rules.py | 273 ++++++++++++++---- .../api/serializers/rest_framework/rule.py | 31 ++ src/sentry/testutils/cases.py | 1 + .../api/endpoints/test_project_rules.py | 29 +- .../serializers/rest_framework/test_rule.py | 90 ++++++ .../test_organization_monitor_details.py | 7 +- 6 files changed, 366 insertions(+), 65 deletions(-) create mode 100644 tests/sentry/api/serializers/rest_framework/test_rule.py diff --git a/src/sentry/api/endpoints/project_rules.py b/src/sentry/api/endpoints/project_rules.py index 927454ded816de..5bd401840e7756 100644 --- a/src/sentry/api/endpoints/project_rules.py +++ b/src/sentry/api/endpoints/project_rules.py @@ -1,3 +1,7 @@ +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + from django.conf import settings from django.db.models.signals import pre_save from django.dispatch import receiver @@ -44,66 +48,219 @@ def pre_save_rule(instance, sender, *args, **kwargs): clean_rule_data(instance.data.get("actions", [])) +@dataclass +class MatcherResult: + has_key: bool = False + key_matches: bool = False + + +class DuplicateRuleEvaluator: + ACTIONS_KEY = "actions" + ENVIRONMENT_KEY = "environment" + SPECIAL_FIELDS = [ACTIONS_KEY, ENVIRONMENT_KEY] + + EXCLUDED_FIELDS = ["name", "user_id"] + + def __init__( + self, + project_id: int, + rule_data: dict[Any, Any] | None = None, + rule_id: int | None = None, + rule: Rule | None = None, + ) -> None: + """ + rule.data will supersede rule_data if passed in + """ + self._project_id: int = project_id + self._rule_data: dict[Any, Any] = rule.data if rule else rule_data + self._rule_id: int | None = rule_id + self._rule: Rule | None = rule + + self._keys_to_check: set[str] = self._get_keys_to_check() + + self._matcher_funcs_by_key: dict[str, Callable[[Rule, str], MatcherResult]] = { + self.ENVIRONMENT_KEY: self._environment_matcher, + self.ACTIONS_KEY: self._actions_matcher, + } + + def _get_keys_to_check(self) -> set[str]: + """ + Returns a set of keys that should be checked against all existing rules. + Some keys are ignored as they are not part of the logic. + Some keys are required to check, and are added on top. + """ + keys_to_check: set[str] = { + key for key in list(self._rule_data.keys()) if key not in self.EXCLUDED_FIELDS + } + keys_to_check.update(self.SPECIAL_FIELDS) + + return keys_to_check + + def _get_func_to_call(self, key_to_check: str) -> Callable: + return self._matcher_funcs_by_key.get(key_to_check, self._default_matcher) + + def _default_matcher(self, existing_rule: Rule, key_to_check: str) -> MatcherResult: + """ + Default function that checks if the key exists in both rules for comparison, and compares the values. + """ + match_results = MatcherResult() + + existing_rule_key_data = existing_rule.data.get(key_to_check) + current_rule_key_data = self._rule_data.get(key_to_check) + if existing_rule_key_data and current_rule_key_data: + match_results.has_key = True + + if match_results.has_key: + match_results.key_matches = existing_rule_key_data == current_rule_key_data + return match_results + + def _environment_matcher(self, existing_rule: Rule, key_to_check: str) -> MatcherResult: + """ + Special function that checks if the environments are the same. + """ + + # Do the default check to see if both rules have the same environment key, and if they do, use the result. 
+ if ( + base_result := self._default_matcher(existing_rule, key_to_check) + ) and base_result.has_key: + return base_result + + # Otherwise, we need to do the special checking for keys + match_results = MatcherResult() + if self._rule: + if existing_rule.environment_id and self._rule.environment_id: + # If the existing rule and our rule both have environment ids, check if it's the same + match_results.has_key = True + match_results.key_matches = ( + existing_rule.environment_id == self._rule.environment_id + ) + elif ( + existing_rule.environment_id + and not self._rule.environment_id + or not existing_rule.environment_id + and self._rule.environment_id + ): + # Otherwise, if one of the rules has an environment key, but the other does not, the key was checked, + # but it is obviously not the same anymore + match_results.has_key = True + else: + current_rule_key_data = self._rule_data.get(key_to_check) + if existing_rule.environment_id and current_rule_key_data: + match_results.has_key = True + match_results.key_matches = existing_rule.environment_id == current_rule_key_data + elif ( + existing_rule.environment_id + and not current_rule_key_data + or not existing_rule.environment_id + and current_rule_key_data + ): + match_results.has_key = True + + return match_results + + def _actions_matcher(self, existing_rule: Rule, key_to_check: str) -> MatcherResult: + """ + Special function that checks if the actions are the same against a rule. + """ + match_results = MatcherResult() + + existing_actions = existing_rule.data.get(key_to_check) + current_actions = self._rule_data.get(key_to_check) + if not existing_actions and not current_actions: + return match_results + + # At this point, either both have the key, or one of the rules has the key, so this has to be true + match_results.has_key = True + # Only compare if both have the key + if existing_actions and current_actions: + match_results.key_matches = self._compare_lists_of_dicts( + keys_to_ignore=["uuid"], list1=existing_actions, list2=current_actions + ) + + return match_results + + @classmethod + def _compare_lists_of_dicts( + cls, + keys_to_ignore: list[str], + list1: list[dict[Any, Any]] | None = None, + list2: list[dict[Any, Any]] | None = None, + ) -> bool: + if list1 is None or list2 is None: + return False + + if len(list1) != len(list2): + return False + + for i, left in enumerate(list1): + right = list2[i] + raw_left = {k: v for k, v in left.items() if k not in keys_to_ignore} + raw_right = {k: v for k, v in right.items() if k not in keys_to_ignore} + + # TODO (Yash): This code commented below is the corrected logic which accounts for bad key values. + # clean_left = cls._get_clean_actions_dict(raw_left) + # clean_right = cls._get_clean_actions_dict(raw_right) + # if clean_left != clean_right: + # return False + """ + This is a bug in the current logic. + When comparing DB values to serialized values, the values that are `None` are not properly converted to + empty strings. + This means we end up incorrectly evaluating the actions aren't the same, when they actually are. 
+ """ + if raw_left != raw_right: + return False + + return True + + @classmethod + def _get_clean_actions_dict(cls, actions_dict: dict[Any, Any]) -> dict[Any, Any]: + """ + Returns a dictionary where None is substituted with empty string to help compare DB values vs serialized values + """ + cleaned_dict = {} + for k, v in actions_dict.items(): + cleaned_dict[k] = "" if v is None else v + + return cleaned_dict + + def find_duplicate(self) -> Rule | None: + """ + Determines whether specified rule already exists, and if it does, returns it. + """ + existing_rules = Rule.objects.exclude(id=self._rule_id).filter( + project__id=self._project_id, status=ObjectStatus.ACTIVE + ) + for existing_rule in existing_rules: + keys_checked = 0 + keys_matched = 0 + for key_to_check in self._keys_to_check: + func = self._get_func_to_call(key_to_check=key_to_check) + results: MatcherResult = func( + existing_rule=existing_rule, key_to_check=key_to_check + ) + if results.has_key: + keys_checked += 1 + if results.key_matches: + keys_matched += 1 + + if keys_checked > 0 and keys_checked == keys_matched: + return existing_rule + + return None + + def find_duplicate_rule(project, rule_data=None, rule_id=None, rule=None): - if rule: - rule_data = rule.data - - matchers = {key for key in list(rule_data.keys()) if key not in ("name", "user_id")} - extra_fields = ["actions", "environment"] - matchers.update(extra_fields) - existing_rules = Rule.objects.exclude(id=rule_id).filter( - project=project, status=ObjectStatus.ACTIVE + """ + TODO(Yash): Refactor to remove this function, but for now keep it as a catch all for all existing flows. + """ + evaluator = DuplicateRuleEvaluator( + project_id=project.id, + rule_data=rule_data, + rule_id=rule_id, + rule=rule, ) - - for existing_rule in existing_rules: - keys = 0 - matches = 0 - for matcher in matchers: - if existing_rule.data.get(matcher) and rule_data.get(matcher): - keys += 1 - - if existing_rule.data[matcher] == rule_data[matcher]: - matches += 1 - - elif matcher in extra_fields: - if matcher == "environment": - if rule: - # we have to compare env data differently if coming from db rather than app - if existing_rule.environment_id and rule.environment_id: - keys += 1 - if existing_rule.environment_id == rule.environment_id: - matches += 1 - elif ( - existing_rule.environment_id - and not rule.environment_id - or not existing_rule.environment_id - and rule.environment_id - ): - keys += 1 - - else: - if existing_rule.environment_id and rule_data.get(matcher): - keys += 1 - if existing_rule.environment_id == rule_data.get(matcher): - matches += 1 - elif ( - existing_rule.environment_id - and not rule_data.get(matcher) - or not existing_rule.environment_id - and rule_data.get(matcher) - ): - keys += 1 - elif not existing_rule.data.get(matcher) and not rule_data.get(matcher): - # neither rule has the matcher - continue - - else: - # one rule has the matcher and the other one doesn't - keys += 1 - - if keys == matches: - return existing_rule - return None + return evaluator.find_duplicate() class ProjectRulesPostSerializer(serializers.Serializer): diff --git a/src/sentry/api/serializers/rest_framework/rule.py b/src/sentry/api/serializers/rest_framework/rule.py index ad8d9cb8a23f3d..66f3f804f45b94 100644 --- a/src/sentry/api/serializers/rest_framework/rule.py +++ b/src/sentry/api/serializers/rest_framework/rule.py @@ -1,3 +1,6 @@ +from typing import Any +from uuid import UUID, uuid4 + from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import 
extend_schema_field from rest_framework import serializers @@ -184,6 +187,32 @@ def save(self, rule): return rule +ACTION_UUID_KEY = "uuid" + + +def ensure_action_uuid(action: dict[Any, Any]) -> None: + """ + Ensure that each action is uniquely identifiable. + The function will check that a UUID key and value exists in the action. + If the key does not exist, or it's not a valid UUID, it will create a new one and assign it to the action. + + Does not add an uuid to the action if it is empty. + """ + if not action: + return + + if ACTION_UUID_KEY in action: + existing_uuid = action[ACTION_UUID_KEY] + try: + UUID(existing_uuid) + except (ValueError, TypeError): + pass + else: + return + + action[ACTION_UUID_KEY] = str(uuid4()) + + def validate_actions(attrs): # XXX(meredith): For rules that have the Slack integration as an action # we need to check if the channel_id needs to be looked up via an async task. @@ -191,6 +220,8 @@ def validate_actions(attrs): # project_rule(_details) endpoints by setting it on attrs actions = attrs.get("actions", tuple()) for action in actions: + ensure_action_uuid(action) + if action.get("name"): del action["name"] # XXX(colleen): For ticket rules we need to ensure the user has diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 43dd4a3894eaf6..098aecf7f9cb13 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -3054,6 +3054,7 @@ def _create_alert_rule(self, monitor): "id": "sentry.mail.actions.NotifyEmailAction", "targetIdentifier": self.user.id, "targetType": "Member", + "uuid": str(uuid4()), }, ] rule = Creator( diff --git a/tests/sentry/api/endpoints/test_project_rules.py b/tests/sentry/api/endpoints/test_project_rules.py index 3a3012574fc00f..e0074a39c3394e 100644 --- a/tests/sentry/api/endpoints/test_project_rules.py +++ b/tests/sentry/api/endpoints/test_project_rules.py @@ -4,6 +4,7 @@ from copy import deepcopy from typing import Any from unittest.mock import patch +from uuid import uuid4 import responses from django.test import override_settings @@ -44,7 +45,9 @@ def setUp(self): self.first_seen_condition = [ {"id": "sentry.rules.conditions.first_seen_event.FirstSeenEventCondition"} ] - self.notify_event_action = [{"id": "sentry.rules.actions.notify_event.NotifyEventAction"}] + self.notify_event_action = [ + {"id": "sentry.rules.actions.notify_event.NotifyEventAction", "uuid": str(uuid4())} + ] self.notify_issue_owners_action = [ { "targetType": "IssueOwners", @@ -52,6 +55,7 @@ def setUp(self): "id": "sentry.mail.actions.NotifyEmailAction", "targetIdentifier": "", "name": "Send a notification to IssueOwners and if none can be found then send a notification to ActiveMembers", + "uuid": str(uuid4()), } ] @@ -167,6 +171,7 @@ def test_with_name(self): { "id": "sentry.rules.actions.notify_event.NotifyEventAction", "name": "Send a notification to IssueOwners and if none can be found then send a notification to ActiveMembers", + "uuid": str(uuid4()), } ] @@ -288,10 +293,12 @@ def test_duplicate_rule_environment(self): def test_pre_save(self): """Test that a rule with name data in the conditions and actions is saved without it""" + action_uuid = str(uuid4()) actions = [ { "id": "sentry.rules.actions.notify_event.NotifyEventAction", "name": "Send a notification to IssueOwners and if none can be found then send a notification to ActiveMembers", + "uuid": action_uuid, } ] response = self.get_success_response( @@ -308,7 +315,8 @@ def test_pre_save(self): ) rule = Rule.objects.get(id=response.data.get("id")) 
assert rule.data["actions"][0] == { - "id": "sentry.rules.actions.notify_event.NotifyEventAction" + "id": "sentry.rules.actions.notify_event.NotifyEventAction", + "uuid": action_uuid, } assert rule.data["conditions"][0] == { "id": "sentry.rules.conditions.first_seen_event.FirstSeenEventCondition" @@ -401,7 +409,9 @@ def test_exceed_limit_fast_conditions(self): @override_settings(MAX_SLOW_CONDITION_ISSUE_ALERTS=1) @override_settings(MAX_MORE_SLOW_CONDITION_ISSUE_ALERTS=2) def test_exceed_limit_slow_conditions(self): - actions = [{"id": "sentry.rules.actions.notify_event.NotifyEventAction"}] + actions = [ + {"id": "sentry.rules.actions.notify_event.NotifyEventAction", "uuid": str(uuid4())} + ] conditions = [ { "id": "sentry.rules.conditions.event_frequency.EventFrequencyPercentCondition", @@ -437,6 +447,7 @@ def test_exceed_limit_slow_conditions(self): "fallthroughType": "ActiveMembers", "id": "sentry.mail.actions.NotifyEmailAction", "targetIdentifier": self.team.id, + "uuid": str(uuid4()), } ) with self.feature("organizations:more-slow-alerts"): @@ -552,7 +563,7 @@ def test_with_filters(self): {"id": "sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter", "value": 10} ] actions: list[dict[str, Any]] = [ - {"id": "sentry.rules.actions.notify_event.NotifyEventAction"} + {"id": "sentry.rules.actions.notify_event.NotifyEventAction", "uuid": str(uuid4())} ] self.run_test( actions=actions, @@ -566,7 +577,7 @@ def test_with_no_filter_match(self): {"id": "sentry.rules.conditions.first_seen_event.FirstSeenEventCondition"} ] actions: list[dict[str, Any]] = [ - {"id": "sentry.rules.actions.notify_event.NotifyEventAction"} + {"id": "sentry.rules.actions.notify_event.NotifyEventAction", "uuid": str(uuid4())} ] self.run_test( @@ -642,6 +653,7 @@ def test_kicks_off_slack_async_job( "channel": "#team-team-team", "channel_id": "", "tags": "", + "uuid": str(uuid4()), } ] payload: dict[str, Any] = { @@ -687,7 +699,9 @@ def test_comparison_condition(self): "interval": "1h", "value": 50, } - actions = [{"id": "sentry.rules.actions.notify_event.NotifyEventAction"}] + actions = [ + {"id": "sentry.rules.actions.notify_event.NotifyEventAction", "uuid": str(uuid4())} + ] self.run_test( actions=actions, conditions=[condition], @@ -708,6 +722,7 @@ def test_comparison_condition(self): "fallthroughType": "ActiveMembers", "id": "sentry.mail.actions.NotifyEmailAction", "targetIdentifier": self.team.id, + "uuid": str(uuid4()), } ) self.run_test(actions=actions, conditions=[condition]) @@ -720,6 +735,7 @@ def test_comparison_condition(self): "fallthroughType": "ActiveMembers", "id": "sentry.mail.actions.NotifyEmailAction", "targetIdentifier": self.user.id, + "uuid": str(uuid4()), } ) self.run_test(actions=actions, conditions=[condition]) @@ -812,6 +828,7 @@ def test_create_sentry_app_action_success(self): "settings": self.sentry_app_settings_payload, "sentryAppInstallationUuid": self.sentry_app_installation.uuid, "hasSchemaFormConfig": True, + "uuid": str(uuid4()), }, ] payload = { diff --git a/tests/sentry/api/serializers/rest_framework/test_rule.py b/tests/sentry/api/serializers/rest_framework/test_rule.py new file mode 100644 index 00000000000000..545183bc187737 --- /dev/null +++ b/tests/sentry/api/serializers/rest_framework/test_rule.py @@ -0,0 +1,90 @@ +import uuid +from typing import Any + +from sentry.api.serializers.rest_framework.rule import ( + ACTION_UUID_KEY, + ensure_action_uuid, + validate_actions, +) + + +class TestEnsureActionUuid: + def test_overrides_bad_key(self) -> None: + original_bad_key = 
"BAD_KEY-12312312" + action = { + "uuid": original_bad_key, + } + + ensure_action_uuid(action) + assert ACTION_UUID_KEY in action + assert action[ACTION_UUID_KEY] != original_bad_key + assert uuid.UUID(action[ACTION_UUID_KEY]) + + def test_when_key_is_empty(self) -> None: + action = {"uuid": ""} + + ensure_action_uuid(action) + assert ACTION_UUID_KEY in action + assert action[ACTION_UUID_KEY] != "" + assert uuid.UUID(action[ACTION_UUID_KEY]) + + def test_when_key_is_none(self) -> None: + action = {"uuid": None} + + ensure_action_uuid(action) + assert ACTION_UUID_KEY in action + assert action[ACTION_UUID_KEY] is not None + assert uuid.UUID(action[ACTION_UUID_KEY]) + + def test_respects_good_key(self) -> None: + original_good_key = str(uuid.uuid4()) + action = { + "uuid": original_good_key, + } + + ensure_action_uuid(action) + assert ACTION_UUID_KEY in action + assert action[ACTION_UUID_KEY] == original_good_key + assert uuid.UUID(action[ACTION_UUID_KEY]) + + def test_adds_uuid_key_when_not_found(self) -> None: + action: dict[Any, Any] = { + "some_other_key": "foo", + } + + ensure_action_uuid(action) + + assert ACTION_UUID_KEY in action + assert action[ACTION_UUID_KEY] is not None + assert uuid.UUID(action[ACTION_UUID_KEY]) + + def test_ignores_empty_dicts(self) -> None: + action: dict[Any, Any] = {} + ensure_action_uuid(action) + assert ACTION_UUID_KEY not in action + + +class TestValidateActions: + def test_updates_actions(self) -> None: + bad_action = { + "id": "whatever", + } + good_action_uuid = str(uuid.uuid4()) + good_action = { + "id": "whatever", + "uuid": good_action_uuid, + } + + actions = [good_action, bad_action] + attributes = { + "actions": actions, + } + + validated_data = validate_actions(attributes) + validated_actions = validated_data["actions"] + assert len(validated_actions) == len(actions) + + assert validated_actions[0][ACTION_UUID_KEY] == good_action_uuid + + assert ACTION_UUID_KEY in validated_actions[1] + assert uuid.UUID(validated_actions[1][ACTION_UUID_KEY]) diff --git a/tests/sentry/monitors/endpoints/test_organization_monitor_details.py b/tests/sentry/monitors/endpoints/test_organization_monitor_details.py index 30292e88a937f1..6393483b4734cd 100644 --- a/tests/sentry/monitors/endpoints/test_organization_monitor_details.py +++ b/tests/sentry/monitors/endpoints/test_organization_monitor_details.py @@ -328,7 +328,12 @@ def test_existing_alert_rule(self): monitor_rule = monitor.get_alert_rule() assert monitor_rule.id == rule.id assert monitor_rule.label == "Monitor Alert: new-name" - assert monitor_rule.data["actions"] == [ + + monitor_rule_actions = monitor_rule.data["actions"].copy() + assert len(monitor_rule_actions) == 1 + monitor_rule_actions[0].pop("uuid") + + assert monitor_rule_actions == [ { "id": "sentry.mail.actions.NotifyEmailAction", "targetIdentifier": new_user.id, From 0d24de00eb5eaa44d9d362230d1fd11aa502471b Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 13 Feb 2024 15:15:49 -0800 Subject: [PATCH 348/357] ref(tsc): Convert GroupingConfigSelect to FC + useApiQuery (#65124) --- .../groupingInfo/groupingConfigSelect.tsx | 93 +++++++++---------- .../events/groupingInfo/index.spec.tsx | 61 +++++++++++- .../components/events/groupingInfo/index.tsx | 3 - 3 files changed, 102 insertions(+), 55 deletions(-) diff --git a/static/app/components/events/groupingInfo/groupingConfigSelect.tsx b/static/app/components/events/groupingInfo/groupingConfigSelect.tsx index 4bd45b54f36434..e9385051144ca2 100644 --- 
a/static/app/components/events/groupingInfo/groupingConfigSelect.tsx +++ b/static/app/components/events/groupingInfo/groupingConfigSelect.tsx @@ -1,69 +1,68 @@ +import {useMemo} from 'react'; import styled from '@emotion/styled'; -import DeprecatedAsyncComponent from 'sentry/components/deprecatedAsyncComponent'; import DropdownAutoComplete from 'sentry/components/dropdownAutoComplete'; import DropdownButton from 'sentry/components/dropdownButton'; +import LoadingError from 'sentry/components/loadingError'; import {Tooltip} from 'sentry/components/tooltip'; import {t} from 'sentry/locale'; import type {EventGroupingConfig} from 'sentry/types'; +import {useApiQuery} from 'sentry/utils/queryClient'; +import useOrganization from 'sentry/utils/useOrganization'; import {GroupingConfigItem} from '.'; -type Props = DeprecatedAsyncComponent['props'] & { +type GroupingConfigSelectProps = { configId: string; eventConfigId: string; onSelect: (selection: any) => void; - organizationSlug: string; }; -type State = DeprecatedAsyncComponent['state'] & { - configs: EventGroupingConfig[]; -}; - -class GroupingConfigSelect extends DeprecatedAsyncComponent { - getDefaultState() { - return { - ...super.getDefaultState(), - configs: [], - }; - } +export function GroupingConfigSelect({ + configId, + eventConfigId, + onSelect, +}: GroupingConfigSelectProps) { + const organization = useOrganization(); + const { + data: configs, + isLoading, + isError, + } = useApiQuery( + [`/organizations/${organization.slug}/grouping-configs/`], + {staleTime: 0, retry: false} + ); - getEndpoints(): ReturnType { - const {organizationSlug} = this.props; - return [['configs', `/organizations/${organizationSlug}/grouping-configs/`]]; - } + const options = useMemo( + () => + (configs ?? []).map(({id, hidden}) => ({ + value: id, + label: ( + + {id} + + ), + })), + [configs, eventConfigId] + ); - renderLoading() { - return this.renderBody(); + if (isError) { + return ; } - renderBody() { - const {configId, eventConfigId, onSelect} = this.props; - const {configs} = this.state; - - const options = configs.map(({id, hidden}) => ({ - value: id, - label: ( - - {id} - - ), - })); - - return ( - - {({isOpen}) => ( - - - - {configId} - - - - )} - - ); - } + return ( + + {({isOpen}) => ( + + + + {configId} + + + + )} + + ); } const StyledDropdownButton = styled(DropdownButton)` diff --git a/static/app/components/events/groupingInfo/index.spec.tsx b/static/app/components/events/groupingInfo/index.spec.tsx index 20c177b9f1771e..2575c97790183b 100644 --- a/static/app/components/events/groupingInfo/index.spec.tsx +++ b/static/app/components/events/groupingInfo/index.spec.tsx @@ -8,29 +8,38 @@ import {EventGroupVariantType, IssueCategory} from 'sentry/types'; describe('EventGroupingInfo', function () { const group = GroupFixture(); - const event = EventFixture(); + const event = EventFixture({ + groupingConfig: { + id: 'default:XXXX', + }, + }); const defaultProps = { event, projectSlug: 'project-slug', - showGroupingConfig: true, + showGroupingConfig: false, group, }; - it('fetches and renders grouping info for errors', async function () { - MockApiClient.addMockResponse({ + let groupingInfoRequest = jest.fn(); + + beforeEach(() => { + MockApiClient.clearMockResponses(); + groupingInfoRequest = MockApiClient.addMockResponse({ url: `/projects/org-slug/project-slug/events/${event.id}/grouping-info/`, body: { app: { description: 'variant description', hash: '123', - hasMismatch: false, + hashMismatch: false, key: 'key', type: 
EventGroupVariantType.CHECKSUM, }, }, }); + }); + it('fetches and renders grouping info for errors', async function () { render(); await screen.findByText('variant description'); @@ -56,5 +65,47 @@ describe('EventGroupingInfo', function () { expect(screen.queryByText('123')).not.toBeInTheDocument(); await userEvent.click(screen.getByRole('button', {name: 'Show Details'})); expect(screen.getByText('123')).toBeInTheDocument(); + + // Should not make grouping-info request + expect(groupingInfoRequest).not.toHaveBeenCalled(); + }); + + it('can switch grouping configs', async function () { + MockApiClient.addMockResponse({ + url: `/organizations/org-slug/grouping-configs/`, + body: [ + {id: 'default:XXXX', hidden: false}, + {id: 'new:XXXX', hidden: false}, + ], + }); + + render(); + + await userEvent.click(screen.getByRole('button', {name: 'Show Details'})); + + // Should show first hash + await screen.findByText('123'); + + expect(screen.getByText('default:XXXX')).toBeInTheDocument(); + + MockApiClient.addMockResponse({ + url: `/projects/org-slug/project-slug/events/${event.id}/grouping-info/`, + query: {config: 'new:XXXX'}, + body: { + app: { + description: 'variant description', + hash: '789', + hashMismatch: false, + key: 'key', + type: EventGroupVariantType.CHECKSUM, + }, + }, + }); + + await userEvent.click(screen.getAllByRole('button', {name: 'default:XXXX'})[0]); + await userEvent.click(screen.getByRole('option', {name: 'new:XXXX'})); + + // Should show new hash + await screen.findByText('789'); }); }); diff --git a/static/app/components/events/groupingInfo/index.tsx b/static/app/components/events/groupingInfo/index.tsx index 6700b382a39a32..6cb24b85e82c31 100644 --- a/static/app/components/events/groupingInfo/index.tsx +++ b/static/app/components/events/groupingInfo/index.tsx @@ -76,8 +76,6 @@ function GroupConfigSelect({ event: Event; setConfigOverride: (value: string) => void; }) { - const organization = useOrganization(); - if (!event.groupingConfig) { return null; } @@ -86,7 +84,6 @@ function GroupConfigSelect({ return ( setConfigOverride(selection.value)} From 956db53336cc15a019ea84aad6948a4d390673ce Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 13 Feb 2024 23:31:24 +0000 Subject: [PATCH 349/357] Revert "ref(backpressure): Track reasons for unhealthy consumers and services (#64998)" This reverts commit 9551b59401287cbbc7407071c3bf9824a59e3f3f. 
Co-authored-by: anonrig <1935246+anonrig@users.noreply.github.com> --- src/sentry/processing/backpressure/health.py | 42 +++++------------- src/sentry/processing/backpressure/memory.py | 14 +++--- src/sentry/processing/backpressure/monitor.py | 29 ++++++------ .../processing/backpressure/test_checking.py | 20 +++------ .../backpressure/test_monitoring.py | 44 ++++++++----------- 5 files changed, 54 insertions(+), 95 deletions(-) diff --git a/src/sentry/processing/backpressure/health.py b/src/sentry/processing/backpressure/health.py index abc3dd98981ed6..d8dd387b8b26f7 100644 --- a/src/sentry/processing/backpressure/health.py +++ b/src/sentry/processing/backpressure/health.py @@ -1,19 +1,15 @@ import logging from collections.abc import Mapping -from typing import Any import sentry_sdk from django.conf import settings from sentry import options -from sentry.processing.backpressure.memory import ServiceMemory from sentry.processing.backpressure.topology import CONSUMERS from sentry.utils import metrics, redis logger = logging.getLogger(__name__) -UnhealthyReasons = Exception | list[ServiceMemory] - def _prefix_key(key_name: str) -> str: return f"bp1:{key_name}" @@ -93,48 +89,30 @@ def is_consumer_healthy(consumer_name: str = "default") -> bool: return False -def record_consumer_health(unhealthy_services: Mapping[str, UnhealthyReasons]) -> None: +def record_consumer_health(service_health: Mapping[str, bool]) -> None: with service_monitoring_cluster.pipeline() as pipeline: key_ttl = options.get("backpressure.status_ttl") - for name, unhealthy_reasons in unhealthy_services.items(): - pipeline.set(_service_key(name), "false" if unhealthy_reasons else "true", ex=key_ttl) - - extra: dict[str, Any] = {} - if unhealthy_reasons: - if isinstance(unhealthy_reasons, Exception): - extra = {"exception": unhealthy_reasons} - else: - for memory in unhealthy_reasons: - extra[memory.name] = { - "used": memory.used, - "available": memory.available, - "percentage": memory.percentage, - } + for name, is_healthy in service_health.items(): + pipeline.set(_service_key(name), "true" if is_healthy else "false", ex=key_ttl) + if not is_healthy: metrics.incr("backpressure.monitor.service.unhealthy", tags={"service": name}) with sentry_sdk.push_scope(): sentry_sdk.set_tag("service", name) - logger.error("Service `%s` marked as unhealthy", name, extra=extra) + logger.error("Service `%s` marked as unhealthy", name) for name, dependencies in CONSUMERS.items(): - unhealthy_dependencies = [] + is_healthy = True for dependency in dependencies: - if unhealthy_services[dependency]: - unhealthy_dependencies.append(dependency) + is_healthy = is_healthy and service_health[dependency] - pipeline.set( - _consumer_key(name), "false" if unhealthy_dependencies else "true", ex=key_ttl - ) + pipeline.set(_consumer_key(name), "true" if is_healthy else "false", ex=key_ttl) - if unhealthy_dependencies: + if not is_healthy: metrics.incr("backpressure.monitor.consumer.unhealthy", tags={"consumer": name}) with sentry_sdk.push_scope(): sentry_sdk.set_tag("consumer", name) - logger.error( - "Consumer `%s` marked as unhealthy", - name, - extra={"unhealthy_dependencies": unhealthy_dependencies}, - ) + logger.error("Consumer `%s` marked as unhealthy", name) pipeline.execute() diff --git a/src/sentry/processing/backpressure/memory.py b/src/sentry/processing/backpressure/memory.py index 859fc7e3f77fca..26bc8077042be9 100644 --- a/src/sentry/processing/backpressure/memory.py +++ b/src/sentry/processing/backpressure/memory.py @@ -9,13 +9,11 @@ 
@dataclass class ServiceMemory: - name: str used: int available: int percentage: float - def __init__(self, name: str, used: int, available: int): - self.name = name + def __init__(self, used: int, available: int): self.used = used self.available = available self.percentage = used / available @@ -33,7 +31,7 @@ def query_rabbitmq_memory_usage(host: str) -> ServiceMemory: response = requests.get(url, timeout=3) response.raise_for_status() json = response.json() - return ServiceMemory(host, json[0]["mem_used"], json[0]["mem_limit"]) + return ServiceMemory(json[0]["mem_used"], json[0]["mem_limit"]) # Based on configuration, this could be: @@ -43,13 +41,13 @@ def query_rabbitmq_memory_usage(host: str) -> ServiceMemory: Cluster = Union[RedisCluster, rb.Cluster] -def get_memory_usage(node_id: str, info: Mapping[str, Any]) -> ServiceMemory: +def get_memory_usage(info: Mapping[str, Any]) -> ServiceMemory: # or alternatively: `used_memory_rss`? memory_used = info.get("used_memory", 0) # `maxmemory` might be 0 in development memory_available = info.get("maxmemory", 0) or info["total_system_memory"] - return ServiceMemory(node_id, memory_used, memory_available) + return ServiceMemory(memory_used, memory_available) def iter_cluster_memory_usage(cluster: Cluster) -> Generator[ServiceMemory, None, None]: @@ -65,5 +63,5 @@ def iter_cluster_memory_usage(cluster: Cluster) -> Generator[ServiceMemory, None promise = client.info() cluster_info = promise.value - for node_id, info in cluster_info.items(): - yield get_memory_usage(node_id, info) + for info in cluster_info.values(): + yield get_memory_usage(info) diff --git a/src/sentry/processing/backpressure/monitor.py b/src/sentry/processing/backpressure/monitor.py index 68b2d125d8206d..0164ed1748c08f 100644 --- a/src/sentry/processing/backpressure/monitor.py +++ b/src/sentry/processing/backpressure/monitor.py @@ -1,6 +1,6 @@ import logging import time -from collections.abc import Generator, Mapping, MutableMapping +from collections.abc import Generator, Mapping from dataclasses import dataclass from typing import Union @@ -8,7 +8,7 @@ from django.conf import settings from sentry import options -from sentry.processing.backpressure.health import UnhealthyReasons, record_consumer_health +from sentry.processing.backpressure.health import record_consumer_health # from sentry import options from sentry.processing.backpressure.memory import ( @@ -77,21 +77,19 @@ def assert_all_services_defined(services: dict[str, Service]) -> None: ) -def check_service_health(services: Mapping[str, Service]) -> MutableMapping[str, UnhealthyReasons]: - unhealthy_services: MutableMapping[str, UnhealthyReasons] = {} +def check_service_health(services: Mapping[str, Service]) -> Mapping[str, bool]: + service_health = {} for name, service in services.items(): high_watermark = options.get(f"backpressure.high_watermarks.{name}") - reasons = [] + is_healthy = True logger.info("Checking service `%s` (configured high watermark: %s):", name, high_watermark) try: for memory in check_service_memory(service): - if memory.percentage >= high_watermark: - reasons.append(memory) + is_healthy = is_healthy and memory.percentage < high_watermark logger.info( - " name: %s, used: %s, available: %s, percentage: %s", - memory.name, + " used: %s, available: %s, percentage: %s", memory.used, memory.available, memory.percentage, @@ -100,13 +98,12 @@ def check_service_health(services: Mapping[str, Service]) -> MutableMapping[str, with sentry_sdk.push_scope() as scope: scope.set_tag("service", name) 
sentry_sdk.capture_exception(e) - unhealthy_services[name] = e - else: - unhealthy_services[name] = reasons + is_healthy = False - logger.info(" => healthy: %s", not unhealthy_services[name]) + service_health[name] = is_healthy + logger.info(" => healthy: %s", is_healthy) - return unhealthy_services + return service_health def start_service_monitoring() -> None: @@ -120,11 +117,11 @@ def start_service_monitoring() -> None: with sentry_sdk.start_transaction(name="backpressure.monitoring", sampled=True): # first, check each base service and record its health - unhealthy_services = check_service_health(services) + service_health = check_service_health(services) # then, check the derived services and record their health try: - record_consumer_health(unhealthy_services) + record_consumer_health(service_health) except Exception as e: sentry_sdk.capture_exception(e) diff --git a/tests/sentry/processing/backpressure/test_checking.py b/tests/sentry/processing/backpressure/test_checking.py index 0aead28697a86a..290299ee389388 100644 --- a/tests/sentry/processing/backpressure/test_checking.py +++ b/tests/sentry/processing/backpressure/test_checking.py @@ -22,15 +22,7 @@ } ) def test_backpressure_unhealthy(): - record_consumer_health( - { - "celery": Exception("Couldn't check celery"), - "attachments-store": [], - "processing-store": [], - "processing-locks": [], - "post-process-locks": [], - } - ) + record_consumer_health({"celery": False}) with raises(MessageRejected): process_one_message() @@ -60,11 +52,11 @@ def test_bad_config(): def test_backpressure_healthy(process_profile_task): record_consumer_health( { - "celery": [], - "attachments-store": [], - "processing-store": [], - "processing-locks": [], - "post-process-locks": [], + "celery": True, + "attachments-store": True, + "processing-store": True, + "processing-locks": True, + "post-process-locks": True, } ) process_one_message() diff --git a/tests/sentry/processing/backpressure/test_monitoring.py b/tests/sentry/processing/backpressure/test_monitoring.py index a7e2dfc703562e..77cc9a68a6148e 100644 --- a/tests/sentry/processing/backpressure/test_monitoring.py +++ b/tests/sentry/processing/backpressure/test_monitoring.py @@ -1,13 +1,7 @@ -from collections.abc import MutableMapping - import pytest from django.test.utils import override_settings -from sentry.processing.backpressure.health import ( - UnhealthyReasons, - is_consumer_healthy, - record_consumer_health, -) +from sentry.processing.backpressure.health import is_consumer_healthy, record_consumer_health from sentry.processing.backpressure.monitor import ( Redis, assert_all_services_defined, @@ -46,8 +40,8 @@ def test_check_redis_health() -> None: "backpressure.high_watermarks.redis": 1.0, } ): - unhealthy_services = check_service_health(services) - assert not unhealthy_services["redis"] + service_health = check_service_health(services) + assert service_health["redis"] is True with override_options( { @@ -55,8 +49,8 @@ def test_check_redis_health() -> None: "backpressure.high_watermarks.redis": 0.0, } ): - unhealthy_services = check_service_health(services) - assert unhealthy_services["redis"] + service_health = check_service_health(services) + assert service_health["redis"] is False @override_options( @@ -67,27 +61,27 @@ def test_check_redis_health() -> None: } ) def test_record_consumer_health() -> None: - unhealthy_services: MutableMapping[str, UnhealthyReasons] = { - "celery": [], - "attachments-store": [], - "processing-store": [], - "processing-locks": [], - "post-process-locks": [], 
+    service_health = {
+        "celery": True,
+        "attachments-store": True,
+        "processing-store": True,
+        "processing-locks": True,
+        "post-process-locks": True,
     }
-    record_consumer_health(unhealthy_services)
+    record_consumer_health(service_health)
     assert is_consumer_healthy() is True

-    unhealthy_services["celery"] = Exception("Couldn't check celery")
-    record_consumer_health(unhealthy_services)
+    service_health["celery"] = False
+    record_consumer_health(service_health)
     assert is_consumer_healthy() is False

     with pytest.raises(KeyError):
         record_consumer_health(
             {
-                "sellerie": [],  # oops
-                "attachments-store": [],
-                "processing-store": [],
-                "processing-locks": [],
-                "post-process-locks": [],
+                "sellerie": True,  # oops
+                "attachments-store": True,
+                "processing-store": True,
+                "processing-locks": True,
+                "post-process-locks": True,
             }
         )

From 65e8ab1aa52477401addab5d7b86ccf0dbe98a52 Mon Sep 17 00:00:00 2001
From: Nar Saynorath
Date: Tue, 13 Feb 2024 19:20:09 -0500
Subject: [PATCH 350/357] fix(app-start): Pass along device class consistently
 to event samples (#65106)

We exposed the `device.class:Unknown` filter for indexed spans and span
metrics; this change passes the filter along in a compatible way.

---
 .../views/appStartup/screenSummary/eventSamples.tsx | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

diff --git a/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx b/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx
index 22cc64f7e2b35a..9a5e0328b3b571 100644
--- a/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx
+++ b/static/app/views/starfish/views/appStartup/screenSummary/eventSamples.tsx
@@ -60,20 +60,13 @@ export function EventSamples({
     'OR',
     'span.description:"Warm Start"',
     ')',
+    ...(deviceClass ? [`${SpanMetricsField.DEVICE_CLASS}:${deviceClass}`] : []),
     // TODO: Add this back in once we have the ability to filter by start type
     // `${SpanMetricsField.APP_START_TYPE}:${
     //   startType || `[${COLD_START_TYPE},${WARM_START_TYPE}]`
     // }`,
   ]);

-  if (deviceClass) {
-    if (deviceClass === 'Unknown') {
-      searchQuery.addFilterValue('!has', 'device.class');
-    } else {
-      searchQuery.addFilterValue('device.class', deviceClass);
-    }
-  }
-
   const sort = fromSorts(decodeScalar(location.query[sortKey]))[0] ?? DEFAULT_SORT;

   const columnNameMap = {

From 5372ccea948eaa14c17dcd3d399cc8b5e9bb886c Mon Sep 17 00:00:00 2001
From: Nar Saynorath
Date: Tue, 13 Feb 2024 19:20:27 -0500
Subject: [PATCH 351/357] fix(app-start): Pass along device class to span
 samples (#65110)

Pass along device class to the span detail drawer. This wasn't passed
before, so the metrics were for all device classes, which did not match
the table.
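For illustration, the filter plumbing these two patches rely on boils down
to the following sketch (`buildDeviceClassFilter` is a hypothetical helper,
not code from this PR; the real components use `SpanMetricsField.DEVICE_CLASS`
as shown in the diffs):

```typescript
// Sketch only: derive the device-class filter once from the location query
// and forward it, so event samples, span samples, and the table all scope
// their metrics the same way.
const DEVICE_CLASS = 'device.class';

function buildDeviceClassFilter(query: Record<string, string | undefined>): string[] {
  const deviceClass = query[DEVICE_CLASS];
  // No more special-casing of 'Unknown' into `!has:device.class`: the
  // backend now accepts `device.class:Unknown` directly, which is the
  // "compatible way" referred to above.
  return deviceClass ? [`${DEVICE_CLASS}:${deviceClass}`] : [];
}

// buildDeviceClassFilter({'device.class': 'Unknown'}) => ['device.class:Unknown']
```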
---
 .../views/starfish/views/appStartup/screenSummary/index.tsx | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx
index 5375d21aa527c2..9de21f713014dc 100644
--- a/static/app/views/starfish/views/appStartup/screenSummary/index.tsx
+++ b/static/app/views/starfish/views/appStartup/screenSummary/index.tsx
@@ -31,6 +31,7 @@ import {ScreenLoadSpanSamples} from 'sentry/views/starfish/views/screens/screenL
 import AppStartWidgets from './widgets';

 type Query = {
+  'device.class': string;
   primaryRelease: string;
   project: string;
   secondaryRelease: string;
@@ -54,6 +55,7 @@ function ScreenSummary() {
     spanDescription,
     spanOp,
     spanAppStartType,
+    'device.class': deviceClass,
   } = location.query;

   const startupModule: LocationDescriptor = {
@@ -199,6 +201,9 @@
 [hunk body and the following patch's From headers lost in extraction]
Date: Wed, 14 Feb 2024 05:59:32 +0100
Subject: [PATCH 352/357] fix(vue): Correct typo in browserTracingIntegration
 call (#64982)

This commit fixes a typo in the call to Sentry's browserTracingIntegration
function. Previously, the call had a typo, which led to syntax errors.

There was a typo "Intergation" vs "integration" on the Vue3 snippet for
"Configure Vue SDK".

Co-authored-by: ArthurKnaus
---
 static/app/gettingStartedDocs/javascript/vue.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx
index b6801a88b1ee6e..934f05d22f9531 100644
--- a/static/app/gettingStartedDocs/javascript/vue.tsx
+++ b/static/app/gettingStartedDocs/javascript/vue.tsx
@@ -47,7 +47,7 @@ const getSentryInitLayout = (params: Params, siblingOption: string): string => {
     integrations: [${
       params.isPerformanceSelected
         ? `
-          Sentry.browserTracingIntergation(),`
+          Sentry.browserTracingIntegration(),`
         : ''
     }${
       params.isReplaySelected

From a20b6c9cfb6a8f8f0ecd83e66104e2fb5322371f Mon Sep 17 00:00:00 2001
From: ArthurKnaus
Date: Wed, 14 Feb 2024 09:08:56 +0100
Subject: [PATCH 353/357] feat(ddm): Switch to query endpoint (ddm part)
 (#65140)

Switch from the `metrics/data` to the `metrics/query` endpoint in
DDM-related code. In this PR we simply map the new response to the old
structure. There will be a follow-up that removes the mapping and properly
handles the new response shape.
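As a rough sketch of that interim mapping (types trimmed and names
simplified; the full version is `mapToOldResponseShape` in the diff below):

```typescript
// Sketch of the temporary mapping: /metrics/query returns groups whose
// `series` is a bare array of values, while existing consumers of the old
// /metrics/data shape expect `series` keyed by the requested field.
interface QueryResponseGroup {
  by: Record<string, string>;
  series: Array<number | null>;
}

function mapGroupsToOldShape(groups: QueryResponseGroup[], field: string) {
  return groups.map(group => ({
    ...group,
    // Old shape: one entry per requested field,
    // e.g. {'avg(transaction.duration)': [1, 2, null]}
    series: {[field]: group.series},
  }));
}
```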
- relates to https://github.com/getsentry/sentry/issues/64770 --- static/app/utils/metrics/index.tsx | 28 +---- static/app/utils/metrics/useMetricsData.tsx | 113 ++++++++++++++++-- .../widgetCard/metricWidgetQueries.tsx | 8 -- static/app/views/ddm/chart.tsx | 1 + 4 files changed, 107 insertions(+), 43 deletions(-) diff --git a/static/app/utils/metrics/index.tsx b/static/app/utils/metrics/index.tsx index 136bda4a4b42ae..485a4afd1a68ad 100644 --- a/static/app/utils/metrics/index.tsx +++ b/static/app/utils/metrics/index.tsx @@ -20,7 +20,7 @@ import { parseStatsPeriod, } from 'sentry/components/organizations/pageFilters/parse'; import {t} from 'sentry/locale'; -import type {MetricsApiResponse, Organization, PageFilters} from 'sentry/types'; +import type {Organization, PageFilters} from 'sentry/types'; import type { MetricMeta, MetricsApiRequestMetric, @@ -327,32 +327,6 @@ export function getFieldFromMetricsQuery(metricsQuery: MetricsQuery) { return formatMRIField(MRIToField(metricsQuery.mri, metricsQuery.op!)); } -// TODO(ddm): remove this and all of its usages once backend sends mri fields -export function mapToMRIFields( - data: MetricsApiResponse | undefined, - fields: string[] -): void { - if (!data) { - return; - } - - data.groups.forEach(group => { - group.series = swapObjectKeys(group.series, fields); - group.totals = swapObjectKeys(group.totals, fields); - }); -} - -function swapObjectKeys(obj: Record | undefined, newKeys: string[]) { - if (!obj) { - return {}; - } - - return Object.keys(obj).reduce((acc, key, index) => { - acc[newKeys[index]] = obj[key]; - return acc; - }, {}); -} - export function stringifyMetricWidget(metricWidget: MetricsQuerySubject): string { const {mri, op, query, groupBy} = metricWidget; diff --git a/static/app/utils/metrics/useMetricsData.tsx b/static/app/utils/metrics/useMetricsData.tsx index c267b53c282753..f650c832475d78 100644 --- a/static/app/utils/metrics/useMetricsData.tsx +++ b/static/app/utils/metrics/useMetricsData.tsx @@ -1,13 +1,78 @@ -import {useCallback, useEffect, useState} from 'react'; +import {useCallback, useEffect, useMemo, useState} from 'react'; -import type {DateString, MetricsApiResponse} from 'sentry/types'; -import {getMetricsApiRequestQuery, mapToMRIFields} from 'sentry/utils/metrics'; +import type {DateString, MetricsApiResponse, PageFilters} from 'sentry/types'; +import {getDateTimeParams, getDDMInterval} from 'sentry/utils/metrics'; +import {getUseCaseFromMRI, parseField} from 'sentry/utils/metrics/mri'; import type {MetricsQuery} from 'sentry/utils/metrics/types'; import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; import type {MetricsApiRequestQueryOptions} from '../../types/metrics'; +function createMqlQuery({ + field, + query, + groupBy = [], +}: {field: string; query: string; groupBy?: string[]}) { + let mql = field; + if (query) { + mql = `${mql}{${query}}`; + } + if (groupBy.length) { + mql = `${mql} by (${groupBy.join(',')})`; + } + return mql; +} + +export function getMetricsApiRequestQuery( + { + field, + query, + groupBy, + orderBy, + }: {field: string; query: string; groupBy?: string[]; orderBy?: 'asc' | 'desc'}, + {projects, environments, datetime}: PageFilters, + {intervalLadder, ...overrides}: Partial = {} +) { + const {mri: mri} = parseField(field) ?? {}; + const useCase = getUseCaseFromMRI(mri) ?? 
'custom'; + const interval = getDDMInterval(datetime, useCase, intervalLadder); + + return { + query: { + ...getDateTimeParams(datetime), + project: projects, + environment: environments, + interval, + ...overrides, + }, + body: { + queries: [ + { + name: 'query_1', + mql: createMqlQuery({field, query, groupBy}), + }, + ], + formulas: [{mql: '$query_1', limit: overrides.limit, order: orderBy ?? 'desc'}], + }, + }; +} + +interface NewMetricsApiResponse { + data: { + by: Record; + series: Array; + totals: Record; + }[][]; + end: string; + intervals: string[]; + meta: [ + {name: string; type: string}, + {group_bys: string[]; limit: number | null; order: string | null}, + ][]; + start: string; +} + export function useMetricsData( {mri, op, datetime, projects, environments, query, groupBy}: MetricsQuery, overrides: Partial = {} @@ -16,7 +81,7 @@ export function useMetricsData( const field = op ? `${op}(${mri})` : mri; - const queryToSend = getMetricsApiRequestQuery( + const {query: queryToSend, body} = getMetricsApiRequestQuery( { field, query: query ?? '', @@ -26,8 +91,11 @@ export function useMetricsData( {...overrides} ); - const metricsApiRepsonse = useApiQuery( - [`/organizations/${organization.slug}/metrics/data/`, {query: queryToSend}], + const metricsApiResponse = useApiQuery( + [ + `/organizations/${organization.slug}/metrics/query/`, + {query: queryToSend, data: body, method: 'POST'}, + ], { retry: 0, staleTime: 0, @@ -36,9 +104,38 @@ export function useMetricsData( refetchInterval: false, } ); - mapToMRIFields(metricsApiRepsonse.data, [field]); - return metricsApiRepsonse; + const dataInOldShape = useMemo( + () => mapToOldResponseShape(metricsApiResponse.data, field), + [field, metricsApiResponse.data] + ); + + return { + ...metricsApiResponse, + data: dataInOldShape, + }; +} + +function mapToOldResponseShape( + responseData: NewMetricsApiResponse | undefined, + field: string +): MetricsApiResponse | undefined { + return ( + responseData && + ({ + groups: responseData.data[0].map(group => ({ + ...group, + series: { + [field]: group.series, + }, + })), + intervals: responseData.intervals, + meta: [], + query: '', + start: responseData.start, + end: responseData.end, + } satisfies MetricsApiResponse) + ); } // Wraps useMetricsData and provides two additional features: diff --git a/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx b/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx index 4aa26a632ed074..0401b1a3c1f0ed 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx @@ -8,7 +8,6 @@ import type {MetricsApiResponse, Organization, PageFilters} from 'sentry/types'; import type {Series} from 'sentry/types/echarts'; import type {TableDataWithTitle} from 'sentry/utils/discover/discoverQuery'; import {TOP_N} from 'sentry/utils/discover/types'; -import {mapToMRIFields} from 'sentry/utils/metrics'; import {MetricsConfig} from '../datasetConfig/metrics'; import type {DashboardFilters, Widget} from '../types'; @@ -113,11 +112,6 @@ class MetricWidgetQueries extends Component { ); }; - afterFetchData = (data: MetricsApiResponse) => { - const fields = this.props.widget.queries[0].aggregates; - mapToMRIFields(data, fields); - }; - render() { const { api, @@ -144,8 +138,6 @@ class MetricWidgetQueries extends Component { onDataFetched={onDataFetched} loading={undefined} customDidUpdateComparator={this.customDidUpdateComparator} - afterFetchTableData={this.afterFetchData} - 
afterFetchSeriesData={this.afterFetchData} > {({errorMessage, ...rest}) => children({ diff --git a/static/app/views/ddm/chart.tsx b/static/app/views/ddm/chart.tsx index 103c06bcc8b99e..0428907b2a7ec9 100644 --- a/static/app/views/ddm/chart.tsx +++ b/static/app/views/ddm/chart.tsx @@ -99,6 +99,7 @@ export const MetricChart = forwardRef( ...s, silent: true, data: s.data.slice(0, -fogOfWarBuckets), + connectNulls: true, }, displayType === MetricDisplayType.BAR ? createFogOfWarBarSeries(s, fogOfWarBuckets) From bae91e582249b0a5401808957be10e63f9c80cab Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 14 Feb 2024 09:37:56 +0100 Subject: [PATCH 354/357] ref(ddm): Query endpoint cleanup (#65141) Remove mapping to old response shape. Fix type / function naming. Fix wrong key usage in metrics setting. - relates to https://github.com/getsentry/sentry/issues/64770 - relates to #65140 --- static/app/types/metrics.tsx | 15 +++ static/app/utils/metrics/index.tsx | 6 +- .../app/utils/metrics/useMetricsData.spec.tsx | 37 ++++++++ static/app/utils/metrics/useMetricsData.tsx | 93 +++++-------------- .../dashboards/datasetConfig/metrics.tsx | 24 ++++- .../widgetCard/metricWidgetCard/index.tsx | 11 ++- static/app/views/ddm/createAlertModal.tsx | 14 ++- static/app/views/ddm/widget.tsx | 35 +++---- .../projectMetrics/projectMetricsDetails.tsx | 10 +- 9 files changed, 136 insertions(+), 109 deletions(-) create mode 100644 static/app/utils/metrics/useMetricsData.spec.tsx diff --git a/static/app/types/metrics.tsx b/static/app/types/metrics.tsx index 136003efca53b7..1283e4199069e7 100644 --- a/static/app/types/metrics.tsx +++ b/static/app/types/metrics.tsx @@ -58,6 +58,21 @@ export type MetricsApiResponse = { start: string; }; +export interface MetricsQueryApiResponse { + data: { + by: Record; + series: Array; + totals: number; + }[][]; + end: string; + intervals: string[]; + meta: [ + {name: string; type: string}, + {group_bys: string[]; limit: number | null; order: string | null}, + ][]; + start: string; +} + export type MetricsGroup = { by: Record; series: Record>; diff --git a/static/app/utils/metrics/index.tsx b/static/app/utils/metrics/index.tsx index 485a4afd1a68ad..e1fd9cc7951afa 100644 --- a/static/app/utils/metrics/index.tsx +++ b/static/app/utils/metrics/index.tsx @@ -27,7 +27,6 @@ import type { MetricsApiRequestQuery, MetricsApiRequestQueryOptions, MetricsDataIntervalLadder, - MetricsGroup, MetricsOperation, MRI, UseCase, @@ -263,10 +262,9 @@ export function useClearQuery() { }, [routerRef]); } -export function getMetricsSeriesName(group: MetricsGroup) { - const groupByEntries = Object.entries(group.by ?? {}); +export function getMetricsSeriesName(field: string, groupBy?: Record) { + const groupByEntries = Object.entries(groupBy ?? {}); if (!groupByEntries.length) { - const field = Object.keys(group.series)?.[0]; const {mri} = parseField(field) ?? 
{mri: field}; const name = formatMRI(mri as MRI); diff --git a/static/app/utils/metrics/useMetricsData.spec.tsx b/static/app/utils/metrics/useMetricsData.spec.tsx new file mode 100644 index 00000000000000..6c9420372e29ec --- /dev/null +++ b/static/app/utils/metrics/useMetricsData.spec.tsx @@ -0,0 +1,37 @@ +import {createMqlQuery} from 'sentry/utils/metrics/useMetricsData'; + +describe('createMqlQuery', () => { + it('should create a basic mql query', () => { + const field = 'avg(transaction.duration)'; + + const result = createMqlQuery({field}); + expect(result).toEqual(`avg(transaction.duration)`); + }); + + it('should create a mql query with a query', () => { + const field = 'avg(transaction.duration)'; + const query = 'event.type:error'; + + const result = createMqlQuery({field, query}); + expect(result).toEqual(`avg(transaction.duration){event.type:error}`); + }); + + it('should create a mql query with a groupBy', () => { + const field = 'avg(transaction.duration)'; + const groupBy = ['environment']; + + const result = createMqlQuery({field, groupBy}); + expect(result).toEqual(`avg(transaction.duration) by (environment)`); + }); + + it('should create a mql query with a query and groupBy', () => { + const field = 'avg(transaction.duration)'; + const query = 'event.type:error'; + const groupBy = ['environment', 'project']; + + const result = createMqlQuery({field, query, groupBy}); + expect(result).toEqual( + `avg(transaction.duration){event.type:error} by (environment,project)` + ); + }); +}); diff --git a/static/app/utils/metrics/useMetricsData.tsx b/static/app/utils/metrics/useMetricsData.tsx index f650c832475d78..001807983a0646 100644 --- a/static/app/utils/metrics/useMetricsData.tsx +++ b/static/app/utils/metrics/useMetricsData.tsx @@ -1,19 +1,22 @@ -import {useCallback, useEffect, useMemo, useState} from 'react'; +import {useCallback, useEffect, useState} from 'react'; -import type {DateString, MetricsApiResponse, PageFilters} from 'sentry/types'; +import type {DateString, PageFilters} from 'sentry/types'; import {getDateTimeParams, getDDMInterval} from 'sentry/utils/metrics'; import {getUseCaseFromMRI, parseField} from 'sentry/utils/metrics/mri'; import type {MetricsQuery} from 'sentry/utils/metrics/types'; import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; -import type {MetricsApiRequestQueryOptions} from '../../types/metrics'; +import type { + MetricsApiRequestQueryOptions, + MetricsQueryApiResponse, +} from '../../types/metrics'; -function createMqlQuery({ +export function createMqlQuery({ field, query, groupBy = [], -}: {field: string; query: string; groupBy?: string[]}) { +}: {field: string; groupBy?: string[]; query?: string}) { let mql = field; if (query) { mql = `${mql}{${query}}`; @@ -24,7 +27,7 @@ function createMqlQuery({ return mql; } -export function getMetricsApiRequestQuery( +function getMetricsQueryApiRequestPayload( { field, query, @@ -58,22 +61,7 @@ export function getMetricsApiRequestQuery( }; } -interface NewMetricsApiResponse { - data: { - by: Record; - series: Array; - totals: Record; - }[][]; - end: string; - intervals: string[]; - meta: [ - {name: string; type: string}, - {group_bys: string[]; limit: number | null; order: string | null}, - ][]; - start: string; -} - -export function useMetricsData( +export function useMetricsQuery( {mri, op, datetime, projects, environments, query, groupBy}: MetricsQuery, overrides: Partial = {} ) { @@ -81,7 +69,7 @@ export function useMetricsData( const field = 
op ? `${op}(${mri})` : mri; - const {query: queryToSend, body} = getMetricsApiRequestQuery( + const {query: queryToSend, body} = getMetricsQueryApiRequestPayload( { field, query: query ?? '', @@ -91,7 +79,7 @@ export function useMetricsData( {...overrides} ); - const metricsApiResponse = useApiQuery( + return useApiQuery( [ `/organizations/${organization.slug}/metrics/query/`, {query: queryToSend, data: body, method: 'POST'}, @@ -104,54 +92,22 @@ export function useMetricsData( refetchInterval: false, } ); - - const dataInOldShape = useMemo( - () => mapToOldResponseShape(metricsApiResponse.data, field), - [field, metricsApiResponse.data] - ); - - return { - ...metricsApiResponse, - data: dataInOldShape, - }; -} - -function mapToOldResponseShape( - responseData: NewMetricsApiResponse | undefined, - field: string -): MetricsApiResponse | undefined { - return ( - responseData && - ({ - groups: responseData.data[0].map(group => ({ - ...group, - series: { - [field]: group.series, - }, - })), - intervals: responseData.intervals, - meta: [], - query: '', - start: responseData.start, - end: responseData.end, - } satisfies MetricsApiResponse) - ); } // Wraps useMetricsData and provides two additional features: // 1. return data is undefined only during the initial load // 2. provides a callback to trim the data to a specific time range when chart zoom is used -export function useMetricsDataZoom( +export function useMetricsQueryZoom( metricsQuery: MetricsQuery, overrides: Partial = {} ) { - const [metricsData, setMetricsData] = useState(); + const [metricsData, setMetricsData] = useState(); const { data: rawData, isLoading, isError, error, - } = useMetricsData(metricsQuery, overrides); + } = useMetricsQuery(metricsQuery, overrides); useEffect(() => { if (rawData) { @@ -161,10 +117,10 @@ export function useMetricsDataZoom( const trimData = useCallback( ( - currentData: MetricsApiResponse | undefined, + currentData: MetricsQueryApiResponse | undefined, start, end - ): MetricsApiResponse | undefined => { + ): MetricsQueryApiResponse | undefined => { if (!currentData) { return currentData; } @@ -180,15 +136,12 @@ export function useMetricsDataZoom( return { ...currentData, intervals: currentData.intervals.slice(startIndex, endIndex), - groups: currentData.groups.map(group => ({ - ...group, - series: Object.fromEntries( - Object.entries(group.series).map(([seriesName, series]) => [ - seriesName, - series.slice(startIndex, endIndex), - ]) - ), - })), + data: currentData.data.map(group => + group.map(entry => ({ + ...entry, + series: entry.series.slice(startIndex, endIndex), + })) + ), }; }, [] diff --git a/static/app/views/dashboards/datasetConfig/metrics.tsx b/static/app/views/dashboards/datasetConfig/metrics.tsx index 6301845a25ae62..165d7e2405dad7 100644 --- a/static/app/views/dashboards/datasetConfig/metrics.tsx +++ b/static/app/views/dashboards/datasetConfig/metrics.tsx @@ -4,6 +4,8 @@ import type {Client, ResponseMeta} from 'sentry/api'; import {t} from 'sentry/locale'; import type { MetricsApiResponse, + MetricsGroup, + MRI, Organization, PageFilters, TagCollection, @@ -13,13 +15,10 @@ import type {CustomMeasurementCollection} from 'sentry/utils/customMeasurements/ import type {TableData} from 'sentry/utils/discover/discoverQuery'; import type {EventData} from 'sentry/utils/discover/eventView'; import {NumberContainer} from 'sentry/utils/discover/styles'; -import { - getMetricsApiRequestQuery, - getMetricsSeriesName, - groupByOp, -} from 'sentry/utils/metrics'; +import {getMetricsApiRequestQuery, 
groupByOp} from 'sentry/utils/metrics'; import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters'; import { + formatMRI, formatMRIField, getMRI, getUseCaseFromMRI, @@ -353,6 +352,21 @@ export function transformMetricsResponseToSeries( ]; } + function getMetricsSeriesName(group: MetricsGroup) { + const groupByEntries = Object.entries(group.by ?? {}); + if (!groupByEntries.length) { + const field = Object.keys(group.series)?.[0]; + const {mri} = parseField(field) ?? {mri: field}; + const name = formatMRI(mri as MRI); + + return name ?? '(none)'; + } + + return groupByEntries + .map(([_key, value]) => `${String(value).length ? value : t('(none)')}`) + .join(', '); + } + data.groups.forEach(group => { Object.keys(group.series).forEach(field => { results.push({ diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx index dd803960bd5b95..da7724be2cb3fc 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx @@ -18,7 +18,7 @@ import { MetricDisplayType, type MetricWidgetQueryParams, } from 'sentry/utils/metrics/types'; -import {useMetricsDataZoom} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQueryZoom} from 'sentry/utils/metrics/useMetricsData'; import {WidgetCardPanel, WidgetTitleRow} from 'sentry/views/dashboards/widgetCard'; import type {AugmentedEChartDataZoomHandler} from 'sentry/views/dashboards/widgetCard/chart'; import {DashboardsMEPContext} from 'sentry/views/dashboards/widgetCard/dashboardsMEPContext'; @@ -34,7 +34,7 @@ import { convertToDashboardWidget, toMetricDisplayType, } from '../../../../utils/metrics/dashboard'; -import {parseField} from '../../../../utils/metrics/mri'; +import {MRIToField, parseField} from '../../../../utils/metrics/mri'; import {DASHBOARD_CHART_GROUP} from '../../dashboard'; import type {DashboardFilters, Widget} from '../../types'; import {useMetricsDashboardContext} from '../metricsContext'; @@ -222,7 +222,7 @@ export function MetricWidgetChartContainer({ isLoading, isError, error, - } = useMetricsDataZoom( + } = useMetricsQueryZoom( { mri, op, @@ -242,9 +242,10 @@ export function MetricWidgetChartContainer({ ? 
getChartTimeseries(timeseriesData, { getChartPalette: createChartPalette, mri, + field: MRIToField(mri, op || ''), }) : []; - }, [timeseriesData, mri]); + }, [timeseriesData, mri, op]); if (isError) { const errorMessage = @@ -259,7 +260,7 @@ export function MetricWidgetChartContainer({ ); } - if (timeseriesData?.groups.length === 0) { + if (timeseriesData?.data.length === 0) { return ( } diff --git a/static/app/views/ddm/createAlertModal.tsx b/static/app/views/ddm/createAlertModal.tsx index e76693ae36a877..b89929ec432099 100644 --- a/static/app/views/ddm/createAlertModal.tsx +++ b/static/app/views/ddm/createAlertModal.tsx @@ -24,9 +24,14 @@ import { getFieldFromMetricsQuery as getAlertAggregate, } from 'sentry/utils/metrics'; import {formatMetricUsingFixedUnit} from 'sentry/utils/metrics/formatters'; -import {formatMRIField, getUseCaseFromMRI, parseMRI} from 'sentry/utils/metrics/mri'; +import { + formatMRIField, + getUseCaseFromMRI, + MRIToField, + parseMRI, +} from 'sentry/utils/metrics/mri'; import type {MetricsQuery} from 'sentry/utils/metrics/types'; -import {useMetricsData} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsData'; import useOrganization from 'sentry/utils/useOrganization'; import useProjects from 'sentry/utils/useProjects'; import useRouter from 'sentry/utils/useRouter'; @@ -134,7 +139,7 @@ export function CreateAlertModal({Header, Body, Footer, metricsQuery}: Props) { const aggregate = useMemo(() => getAlertAggregate(metricsQuery), [metricsQuery]); - const {data, isLoading, refetch, isError} = useMetricsData( + const {data, isLoading, refetch, isError} = useMetricsQuery( { mri: metricsQuery.mri, op: metricsQuery.op, @@ -154,10 +159,11 @@ export function CreateAlertModal({Header, Body, Footer, metricsQuery}: Props) { getChartTimeseries(data, { mri: metricsQuery.mri, focusedSeries: undefined, + field: MRIToField(metricsQuery.mri, metricsQuery.op!), // We are limited to one series in this chart, so we can just use the first color getChartPalette: createChartPalette, }), - [data, metricsQuery.mri] + [data, metricsQuery.mri, metricsQuery.op] ); const projectOptions = useMemo(() => { diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 679be166ac4a30..6be62d8c45ac02 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -15,7 +15,7 @@ import {Tooltip} from 'sentry/components/tooltip'; import {IconSearch} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {MetricsApiResponse, MRI, PageFilters} from 'sentry/types'; +import type {MetricsQueryApiResponse, MRI, PageFilters} from 'sentry/types'; import type {ReactEchartsRef} from 'sentry/types/echarts'; import { getDefaultMetricDisplayType, @@ -23,7 +23,7 @@ import { stringifyMetricWidget, } from 'sentry/utils/metrics'; import {metricDisplayTypeOptions} from 'sentry/utils/metrics/constants'; -import {parseMRI} from 'sentry/utils/metrics/mri'; +import {MRIToField, parseMRI} from 'sentry/utils/metrics/mri'; import type { FocusedMetricsSeries, MetricCorrelation, @@ -32,7 +32,7 @@ import type { import {MetricDisplayType} from 'sentry/utils/metrics/types'; import {useIncrementQueryMetric} from 'sentry/utils/metrics/useIncrementQueryMetric'; import {useMetricSamples} from 'sentry/utils/metrics/useMetricsCorrelations'; -import {useMetricsDataZoom} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQueryZoom} from 
'sentry/utils/metrics/useMetricsData'; import {MetricChart} from 'sentry/views/ddm/chart'; import type {FocusAreaProps} from 'sentry/views/ddm/context'; import {createChartPalette} from 'sentry/views/ddm/metricsChartPalette'; @@ -245,7 +245,7 @@ const MetricWidgetBody = memo( isLoading, isError, error, - } = useMetricsDataZoom( + } = useMetricsQueryZoom( { mri, op, @@ -274,13 +274,14 @@ const MetricWidgetBody = memo( const chartSeries = useMemo(() => { return timeseriesData ? getChartTimeseries(timeseriesData, { + field: MRIToField(mri, op || ''), getChartPalette, mri, focusedSeries: focusedSeries && new Set(focusedSeries?.map(s => s.seriesName)), }) : []; - }, [timeseriesData, getChartPalette, mri, focusedSeries]); + }, [timeseriesData, op, mri, getChartPalette, focusedSeries]); const toggleSeriesVisibility = useCallback( (series: FocusedMetricsSeries) => { @@ -354,7 +355,7 @@ const MetricWidgetBody = memo( ); } - if (timeseriesData.groups.length === 0) { + if (timeseriesData.data.length === 0) { return ( Record; mri: MRI; focusedSeries?: Set; @@ -410,15 +413,15 @@ export function getChartTimeseries( const parsed = parseMRI(mri); const unit = parsed?.unit ?? ''; - const series = data.groups.map(g => { - return { - values: Object.values(g.series)[0], - name: getMetricsSeriesName(g), - groupBy: g.by, - transaction: g.by.transaction, - release: g.by.release, - }; - }); + const series = data.data.flatMap(group => + group.map(entry => ({ + values: entry.series, + name: getMetricsSeriesName(field, entry.by), + groupBy: entry.by, + transaction: entry.by.transaction, + release: entry.by.release, + })) + ); const chartPalette = getChartPalette(series.map(s => s.name)); diff --git a/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx b/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx index ff0dc008a56984..a6f616afd66d44 100644 --- a/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx +++ b/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx @@ -27,7 +27,7 @@ import {getReadableMetricType} from 'sentry/utils/metrics/formatters'; import {formatMRI, formatMRIField, MRIToField, parseMRI} from 'sentry/utils/metrics/mri'; import {MetricDisplayType} from 'sentry/utils/metrics/types'; import {useBlockMetric} from 'sentry/utils/metrics/useBlockMetric'; -import {useMetricsData} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsData'; import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags'; import routeTitleGen from 'sentry/utils/routeTitle'; import {CodeLocations} from 'sentry/views/ddm/codeLocations'; @@ -75,7 +75,7 @@ function ProjectMetricsDetails({project, params, organization}: Props) { const {type, name, unit} = parseMRI(mri) ?? {}; const operation = getSettingsOperationForType(type ?? 'c'); - const {data: metricsData, isLoading} = useMetricsData( + const {data: metricsData, isLoading} = useMetricsQuery( { datetime: { period: '30d', @@ -98,7 +98,7 @@ function ProjectMetricsDetails({project, params, organization}: Props) { data: metricsData?.intervals.map((interval, index) => ({ name: interval, - value: metricsData.groups[0]?.series[field][index] ?? 0, + value: metricsData.data[0]?.[0]?.series[index] ?? 0, })) ?? [], }, ]; @@ -228,8 +228,8 @@ function ProjectMetricsDetails({project, params, organization}: Props) { const isBlockedTag = blockingStatus?.blockedTags?.includes(key) ?? false; return ( -
        [tag-row JSX hunk lost in extraction: the removed and added lines each render the {key} cell of the tags table]
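For orientation before the next patch: the hunks above switch from reading
`MetricsApiResponse.groups` to walking the query endpoint's nested payload via
`data.data`, `entry.by`, `entry.series`, `entry.totals` and `data.intervals`. Below is a
minimal TypeScript sketch of that shape as inferred from those property accesses (an
illustrative assumption, not the endpoint's authoritative contract;
`MetricsQueryApiResponseSketch` and `toSeriesPoints` are hypothetical names):

    // Shape inferred from the accesses in the hunks above (assumed, not authoritative).
    type MetricsQueryApiResponseSketch = {
      // One ISO timestamp per bucket; series values align to it positionally.
      intervals: string[];
      // Outer array: one entry per query in the plan; inner: one entry per group.
      data: Array<
        Array<{
          by: Record<string, string>; // group-by tag values, e.g. {platform: 'ios'}
          series: Array<number | null>; // one data point per interval
          totals: number; // aggregate over the whole date range
        }>
      >;
    };

    // Reading it the way the series transforms above do:
    function toSeriesPoints(response: MetricsQueryApiResponseSketch) {
      return response.data.flatMap(group =>
        group.map(entry => ({
          name: Object.values(entry.by).join(', '),
          data: response.intervals.map((interval, i) => ({
            name: interval,
            value: entry.series[i] ?? 0,
          })),
        }))
      );
    }
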
    + Date: Wed, 14 Feb 2024 10:36:16 +0100 Subject: [PATCH 355/357] feat(ddm): Switch to query endpoint (dashboards) (#65143) Switch metrics tables in dashboards to query endpoint. Remove unused helpers. Add tests. - closes https://github.com/getsentry/sentry/issues/64770 --- static/app/types/metrics.tsx | 4 - static/app/utils/metrics/index.spec.tsx | 198 +----------------- static/app/utils/metrics/index.tsx | 35 ---- .../app/utils/metrics/useMetricsData.spec.tsx | 196 ++++++++++++++++- static/app/utils/metrics/useMetricsData.tsx | 30 ++- .../dashboards/datasetConfig/metrics.tsx | 92 ++++---- .../widgetCard/metricWidgetQueries.tsx | 11 +- 7 files changed, 271 insertions(+), 295 deletions(-) diff --git a/static/app/types/metrics.tsx b/static/app/types/metrics.tsx index 1283e4199069e7..bf1440ed1d70bf 100644 --- a/static/app/types/metrics.tsx +++ b/static/app/types/metrics.tsx @@ -45,10 +45,6 @@ export interface MetricsApiRequestQuery extends MetricsApiRequestMetric { export type MetricsDataIntervalLadder = 'ddm' | 'bar' | 'dashboard'; -export interface MetricsApiRequestQueryOptions extends Partial { - intervalLadder?: MetricsDataIntervalLadder; -} - export type MetricsApiResponse = { end: string; groups: MetricsGroup[]; diff --git a/static/app/utils/metrics/index.spec.tsx b/static/app/utils/metrics/index.spec.tsx index d20998d127573b..82b96933c4fc94 100644 --- a/static/app/utils/metrics/index.spec.tsx +++ b/static/app/utils/metrics/index.spec.tsx @@ -1,207 +1,11 @@ -import type { - MetricsApiRequestQueryOptions, - MetricsOperation, - PageFilters, -} from 'sentry/types'; +import type {MetricsOperation} from 'sentry/types'; import { getAbsoluteDateTimeRange, getDateTimeParams, getDDMInterval, - getMetricsApiRequestQuery, stringifyMetricWidget, } from 'sentry/utils/metrics'; -describe('getMetricsApiRequestQuery', () => { - it('should return the correct query object with default values', () => { - const metric = {field: 'sessions', query: 'error', groupBy: ['project']}; - const filters = { - projects: [1], - environments: ['production'], - datetime: {start: '2023-01-01', end: '2023-01-31', period: null, utc: true}, - }; - - const result = getMetricsApiRequestQuery(metric, filters); - - expect(result).toEqual({ - start: '2023-01-01T00:00:00.000Z', - end: '2023-01-31T00:00:00.000Z', - query: 'error', - project: [1], - environment: ['production'], - field: 'sessions', - useCase: 'custom', - interval: '2h', - groupBy: ['project'], - orderBy: '-sessions', - useNewMetricsLayer: true, - }); - }); - - it('should return the correct query object with default values (period)', () => { - const metric = {field: 'sessions', query: 'error', groupBy: ['project']}; - const filters = { - projects: [1], - environments: ['production'], - datetime: {period: '7d', utc: true} as PageFilters['datetime'], - }; - - const result = getMetricsApiRequestQuery(metric, filters); - - expect(result).toEqual({ - statsPeriod: '7d', - query: 'error', - project: [1], - environment: ['production'], - field: 'sessions', - useCase: 'custom', - interval: '30m', - groupBy: ['project'], - orderBy: '-sessions', - useNewMetricsLayer: true, - }); - }); - - it('should return the correct query object with overridden values', () => { - const metric = {field: 'sessions', query: 'error', groupBy: ['project']}; - const filters = { - projects: [1], - environments: ['production'], - datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, - }; - - const result = getMetricsApiRequestQuery(metric, filters, {groupBy: 
['environment']}); - - expect(result).toEqual({ - start: '2023-01-01T00:00:00.000Z', - end: '2023-01-02T00:00:00.000Z', - query: 'error', - project: [1], - environment: ['production'], - field: 'sessions', - useCase: 'custom', - interval: '5m', - groupBy: ['environment'], - orderBy: '-sessions', - useNewMetricsLayer: true, - }); - }); - - it('should not add a default orderBy if one is already present', () => { - const metric = { - field: 'sessions', - query: 'error', - groupBy: ['project'], - orderBy: 'foo', - }; - const filters = { - projects: [1], - environments: ['production'], - datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, - }; - - const result = getMetricsApiRequestQuery(metric, filters); - - expect(result).toEqual({ - start: '2023-01-01T00:00:00.000Z', - end: '2023-01-02T00:00:00.000Z', - query: 'error', - project: [1], - environment: ['production'], - field: 'sessions', - useCase: 'custom', - interval: '5m', - groupBy: ['project'], - orderBy: 'foo', - useNewMetricsLayer: true, - }); - }); - - it('should not add a default orderBy if there are no groups', () => { - const metric = { - field: 'sessions', - query: 'error', - groupBy: [], - }; - const filters = { - projects: [1], - environments: ['production'], - datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, - }; - - const result = getMetricsApiRequestQuery(metric, filters); - - expect(result).toEqual({ - start: '2023-01-01T00:00:00.000Z', - end: '2023-01-02T00:00:00.000Z', - query: 'error', - project: [1], - environment: ['production'], - field: 'sessions', - useCase: 'custom', - interval: '5m', - groupBy: [], - useNewMetricsLayer: true, - }); - }); - - it('should not add a default orderBy if there is no field', () => { - const metric = { - field: '', - query: 'error', - groupBy: [], - }; - const filters = { - projects: [1], - environments: ['production'], - datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, - }; - - const result = getMetricsApiRequestQuery(metric, filters); - - expect(result).toEqual({ - start: '2023-01-01T00:00:00.000Z', - end: '2023-01-02T00:00:00.000Z', - query: 'error', - project: [1], - environment: ['production'], - field: '', - useCase: 'custom', - interval: '5m', - groupBy: [], - useNewMetricsLayer: true, - }); - }); - - it('should not add all overrides into the request', () => { - const metric = { - field: '', - query: 'error', - groupBy: [], - }; - const filters = { - projects: [1], - environments: ['production'], - datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, - }; - const overrides: MetricsApiRequestQueryOptions = {intervalLadder: 'ddm'}; - - const result = getMetricsApiRequestQuery(metric, filters, overrides); - - expect(result).toEqual({ - start: '2023-01-01T00:00:00.000Z', - end: '2023-01-02T00:00:00.000Z', - query: 'error', - project: [1], - environment: ['production'], - field: '', - useCase: 'custom', - interval: '5m', - groupBy: [], - useNewMetricsLayer: true, - }); - }); -}); - describe('getDDMInterval', () => { it('should return the correct interval for non-"1m" intervals', () => { const dateTimeObj = {start: '2023-01-01', end: '2023-01-31'}; diff --git a/static/app/utils/metrics/index.tsx b/static/app/utils/metrics/index.tsx index e1fd9cc7951afa..a23eaf8cc9d1d5 100644 --- a/static/app/utils/metrics/index.tsx +++ b/static/app/utils/metrics/index.tsx @@ -23,9 +23,6 @@ import {t} from 'sentry/locale'; import type {Organization, PageFilters} from 'sentry/types'; import type { 
MetricMeta, - MetricsApiRequestMetric, - MetricsApiRequestQuery, - MetricsApiRequestQueryOptions, MetricsDataIntervalLadder, MetricsOperation, MRI, @@ -38,7 +35,6 @@ import {getMeasurements} from 'sentry/utils/measurements/measurements'; import { formatMRI, formatMRIField, - getUseCaseFromMRI, MRIToField, parseField, parseMRI, @@ -105,37 +101,6 @@ export function getDdmUrl( return `/organizations/${orgSlug}/ddm/?${qs.stringify(urlParams)}`; } -export function getMetricsApiRequestQuery( - {field, query, groupBy, orderBy}: MetricsApiRequestMetric, - {projects, environments, datetime}: PageFilters, - {intervalLadder, ...overrides}: Partial = {} -): MetricsApiRequestQuery { - const {mri: mri} = parseField(field) ?? {}; - const useCase = getUseCaseFromMRI(mri) ?? 'custom'; - const interval = getDDMInterval(datetime, useCase, intervalLadder); - - const hasGroupBy = groupBy && groupBy.length > 0; - - const queryToSend = { - ...getDateTimeParams(datetime), - query: sanitizeQuery(query), - project: projects, - environment: environments, - field, - useCase, - interval, - groupBy, - orderBy: hasGroupBy && !orderBy && field ? `-${field}` : orderBy, - useNewMetricsLayer: true, - }; - - return {...queryToSend, ...overrides}; -} - -function sanitizeQuery(query?: string) { - return query?.trim(); -} - const intervalLadders: Record = { ddm: new GranularityLadder([ [SIXTY_DAYS, '1d'], diff --git a/static/app/utils/metrics/useMetricsData.spec.tsx b/static/app/utils/metrics/useMetricsData.spec.tsx index 6c9420372e29ec..af72f28340f32e 100644 --- a/static/app/utils/metrics/useMetricsData.spec.tsx +++ b/static/app/utils/metrics/useMetricsData.spec.tsx @@ -1,4 +1,8 @@ -import {createMqlQuery} from 'sentry/utils/metrics/useMetricsData'; +import type {PageFilters} from 'sentry/types'; +import { + createMqlQuery, + getMetricsQueryApiRequestPayload, +} from 'sentry/utils/metrics/useMetricsData'; describe('createMqlQuery', () => { it('should create a basic mql query', () => { @@ -35,3 +39,193 @@ describe('createMqlQuery', () => { ); }); }); + +describe('getMetricsQueryApiRequestPayload', () => { + it('should return the correct query object with default values', () => { + const metric = {field: 'sessions', query: 'error', groupBy: ['project']}; + const filters = { + projects: [1], + environments: ['production'], + datetime: {start: '2023-01-01', end: '2023-01-31', period: null, utc: true}, + }; + + const result = getMetricsQueryApiRequestPayload(metric, filters); + + expect(result.query).toEqual({ + start: '2023-01-01T00:00:00.000Z', + end: '2023-01-31T00:00:00.000Z', + project: [1], + environment: ['production'], + interval: '2h', + }); + + expect(result.body).toEqual({ + queries: [ + { + name: 'query_1', + mql: 'sessions{error} by (project)', + }, + ], + formulas: [{mql: '$query_1', limit: undefined, order: 'desc'}], + }); + }); + + it('should return the correct query object with default values (period)', () => { + const metric = {field: 'sessions', query: 'error', groupBy: ['project']}; + const filters = { + projects: [1], + environments: ['production'], + datetime: {period: '7d', utc: true} as PageFilters['datetime'], + }; + + const result = getMetricsQueryApiRequestPayload(metric, filters); + + expect(result.query).toEqual({ + statsPeriod: '7d', + project: [1], + environment: ['production'], + interval: '30m', + }); + + expect(result.body).toEqual({ + queries: [ + { + name: 'query_1', + mql: 'sessions{error} by (project)', + }, + ], + formulas: [{mql: '$query_1', limit: undefined, order: 'desc'}], + }); + }); + + 
it('should return the correct query object with overridden values', () => { + const metric = {field: 'sessions', query: 'error', groupBy: ['project']}; + const filters = { + projects: [1], + environments: ['production'], + datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, + }; + + const result = getMetricsQueryApiRequestPayload(metric, filters, {interval: '123m'}); + + expect(result.query).toEqual({ + start: '2023-01-01T00:00:00.000Z', + end: '2023-01-02T00:00:00.000Z', + project: [1], + environment: ['production'], + interval: '123m', + }); + + expect(result.body).toEqual({ + queries: [ + { + name: 'query_1', + mql: 'sessions{error} by (project)', + }, + ], + formulas: [{mql: '$query_1', limit: undefined, order: 'desc'}], + }); + }); + + it('should not add a default orderBy if one is already present', () => { + const metric = { + field: 'sessions', + query: 'error', + groupBy: ['project'], + orderBy: 'asc' as const, + }; + const filters = { + projects: [1], + environments: ['production'], + datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, + }; + + const result = getMetricsQueryApiRequestPayload(metric, filters); + + expect(result.query).toEqual({ + start: '2023-01-01T00:00:00.000Z', + end: '2023-01-02T00:00:00.000Z', + project: [1], + environment: ['production'], + interval: '5m', + }); + + expect(result.body).toEqual({ + queries: [ + { + name: 'query_1', + mql: 'sessions{error} by (project)', + }, + ], + formulas: [{mql: '$query_1', limit: undefined, order: 'asc'}], + }); + }); + + it('should not add a default orderBy if there are no groups', () => { + const metric = { + field: 'sessions', + query: 'error', + groupBy: [], + }; + const filters = { + projects: [1], + environments: ['production'], + datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, + }; + + const result = getMetricsQueryApiRequestPayload(metric, filters); + + expect(result.query).toEqual({ + start: '2023-01-01T00:00:00.000Z', + end: '2023-01-02T00:00:00.000Z', + project: [1], + environment: ['production'], + interval: '5m', + }); + + expect(result.body).toEqual({ + queries: [ + { + name: 'query_1', + mql: 'sessions{error}', + }, + ], + formulas: [{mql: '$query_1', limit: undefined, order: undefined}], + }); + }); + + it('should not add intervalLadder override into the request', () => { + const metric = { + field: 'test', + query: 'error', + groupBy: [], + }; + const filters = { + projects: [1], + environments: ['production'], + datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true}, + }; + + const result = getMetricsQueryApiRequestPayload(metric, filters, { + intervalLadder: 'ddm', + }); + + expect(result.query).toEqual({ + start: '2023-01-01T00:00:00.000Z', + end: '2023-01-02T00:00:00.000Z', + project: [1], + environment: ['production'], + interval: '5m', + }); + + expect(result.body).toEqual({ + queries: [ + { + name: 'query_1', + mql: 'test{error}', + }, + ], + formulas: [{mql: '$query_1', limit: undefined, order: undefined}], + }); + }); +}); diff --git a/static/app/utils/metrics/useMetricsData.tsx b/static/app/utils/metrics/useMetricsData.tsx index 001807983a0646..c62e1894c1ea4a 100644 --- a/static/app/utils/metrics/useMetricsData.tsx +++ b/static/app/utils/metrics/useMetricsData.tsx @@ -8,7 +8,7 @@ import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; import type { - MetricsApiRequestQueryOptions, + MetricsDataIntervalLadder, MetricsQueryApiResponse, } from 
'../../types/metrics'; @@ -27,19 +27,30 @@ export function createMqlQuery({ return mql; } -function getMetricsQueryApiRequestPayload( +export function getMetricsQueryApiRequestPayload( { field, query, groupBy, orderBy, - }: {field: string; query: string; groupBy?: string[]; orderBy?: 'asc' | 'desc'}, + limit, + }: { + field: string; + groupBy?: string[]; + limit?: number; + orderBy?: 'asc' | 'desc'; + query?: string; + }, {projects, environments, datetime}: PageFilters, - {intervalLadder, ...overrides}: Partial = {} + { + intervalLadder, + interval: intervalParam, + }: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {} ) { const {mri: mri} = parseField(field) ?? {}; const useCase = getUseCaseFromMRI(mri) ?? 'custom'; - const interval = getDDMInterval(datetime, useCase, intervalLadder); + const interval = intervalParam ?? getDDMInterval(datetime, useCase, intervalLadder); + const hasGoupBy = groupBy && groupBy.length > 0; return { query: { @@ -47,7 +58,6 @@ function getMetricsQueryApiRequestPayload( project: projects, environment: environments, interval, - ...overrides, }, body: { queries: [ @@ -56,14 +66,16 @@ function getMetricsQueryApiRequestPayload( mql: createMqlQuery({field, query, groupBy}), }, ], - formulas: [{mql: '$query_1', limit: overrides.limit, order: orderBy ?? 'desc'}], + formulas: [ + {mql: '$query_1', limit: limit, order: hasGoupBy ? orderBy ?? 'desc' : undefined}, + ], }, }; } export function useMetricsQuery( {mri, op, datetime, projects, environments, query, groupBy}: MetricsQuery, - overrides: Partial = {} + overrides: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {} ) { const organization = useOrganization(); @@ -99,7 +111,7 @@ export function useMetricsQuery( // 2. provides a callback to trim the data to a specific time range when chart zoom is used export function useMetricsQueryZoom( metricsQuery: MetricsQuery, - overrides: Partial = {} + overrides: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {} ) { const [metricsData, setMetricsData] = useState(); const { diff --git a/static/app/views/dashboards/datasetConfig/metrics.tsx b/static/app/views/dashboards/datasetConfig/metrics.tsx index 165d7e2405dad7..53e74ff761c5c3 100644 --- a/static/app/views/dashboards/datasetConfig/metrics.tsx +++ b/static/app/views/dashboards/datasetConfig/metrics.tsx @@ -3,9 +3,7 @@ import omit from 'lodash/omit'; import type {Client, ResponseMeta} from 'sentry/api'; import {t} from 'sentry/locale'; import type { - MetricsApiResponse, - MetricsGroup, - MRI, + MetricsQueryApiResponse, Organization, PageFilters, TagCollection, @@ -15,10 +13,9 @@ import type {CustomMeasurementCollection} from 'sentry/utils/customMeasurements/ import type {TableData} from 'sentry/utils/discover/discoverQuery'; import type {EventData} from 'sentry/utils/discover/eventView'; import {NumberContainer} from 'sentry/utils/discover/styles'; -import {getMetricsApiRequestQuery, groupByOp} from 'sentry/utils/metrics'; +import {getMetricsSeriesName, groupByOp} from 'sentry/utils/metrics'; import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters'; import { - formatMRI, formatMRIField, getMRI, getUseCaseFromMRI, @@ -26,6 +23,7 @@ import { parseField, parseMRI, } from 'sentry/utils/metrics/mri'; +import {getMetricsQueryApiRequestPayload} from 'sentry/utils/metrics/useMetricsData'; import type {OnDemandControlContext} from 'sentry/utils/performance/contexts/onDemandControl'; import {MetricSearchBar} from 
'sentry/views/dashboards/widgetBuilder/buildSteps/filterResultsStep/metricSearchBar'; import type {FieldValueOption} from 'sentry/views/discover/table/queryField'; @@ -47,7 +45,10 @@ const DEFAULT_WIDGET_QUERY: WidgetQuery = { orderby: '', }; -export const MetricsConfig: DatasetConfig = { +export const MetricsConfig: DatasetConfig< + MetricsQueryApiResponse, + MetricsQueryApiResponse +> = { defaultWidgetQuery: DEFAULT_WIDGET_QUERY, enableEquations: false, getTableRequest: ( @@ -288,16 +289,24 @@ function handleMetricTableOrderByReset(widgetQuery: WidgetQuery, newFields: stri return handleOrderByReset(widgetQuery, newFields); } -export function transformMetricsResponseToTable(data: MetricsApiResponse): TableData { - const rows = data.groups.map((group, index) => { - const groupColumn = mapMetricGroupsToFields(group.by); - const value = mapMetricGroupsToFields(group.totals); - return { - id: String(index), - ...groupColumn, - ...value, - }; - }); +export function transformMetricsResponseToTable( + data: MetricsQueryApiResponse, + widgetQuery: WidgetQuery +): TableData { + const field = widgetQuery.aggregates[0]; + const rows = data.data.flatMap((group, index) => + group.map(entry => { + const groupColumn = mapMetricGroupsToFields(entry.by); + const value = { + [field]: entry.totals, + }; + return { + id: String(index), + ...groupColumn, + ...value, + }; + }) + ); const singleRow = rows[0]; const meta = { @@ -330,17 +339,19 @@ function changeObjectValuesToTypes( } export function transformMetricsResponseToSeries( - data: MetricsApiResponse, + data: MetricsQueryApiResponse, widgetQuery: WidgetQuery ) { if (data === null) { return []; } + const field = widgetQuery.aggregates[0]; + const results: Series[] = []; const queryAlias = widgetQuery.name; - if (!data.groups.length) { + if (!data.data.length) { return [ { seriesName: `(${t('no results')})`, @@ -352,32 +363,17 @@ export function transformMetricsResponseToSeries( ]; } - function getMetricsSeriesName(group: MetricsGroup) { - const groupByEntries = Object.entries(group.by ?? {}); - if (!groupByEntries.length) { - const field = Object.keys(group.series)?.[0]; - const {mri} = parseField(field) ?? {mri: field}; - const name = formatMRI(mri as MRI); - - return name ?? '(none)'; - } - - return groupByEntries - .map(([_key, value]) => `${String(value).length ? value : t('(none)')}`) - .join(', '); - } - - data.groups.forEach(group => { - Object.keys(group.series).forEach(field => { + data.data.forEach(group => + group.forEach(entry => { results.push({ - seriesName: queryAlias || getMetricsSeriesName(group), + seriesName: getMetricsSeriesName(queryAlias || field, entry.by), data: data.intervals.map((interval, index) => ({ name: interval, - value: group.series[field][index] ?? 0, + value: entry.series[field][index] ?? 
0, })), }); - }); - }); + }) + ); return results; } @@ -389,7 +385,7 @@ function getMetricRequest( pageFilters: PageFilters, limit?: number, displayType?: DisplayType -): Promise<[MetricsApiResponse, string | undefined, ResponseMeta | undefined]> { +): Promise<[MetricsQueryApiResponse, string | undefined, ResponseMeta | undefined]> { if (!query.aggregates[0]) { // No aggregate selected, return empty response return Promise.resolve([ @@ -405,24 +401,30 @@ function getMetricRequest( ] as any); } - const requestData = getMetricsApiRequestQuery( + const payload = getMetricsQueryApiRequestPayload( { field: query.aggregates[0], query: query.conditions || undefined, groupBy: query.columns || undefined, - orderBy: query.orderby || undefined, + orderBy: query.orderby + ? query.orderby.indexOf('-') === 0 + ? 'desc' + : 'asc' + : undefined, + limit: limit || undefined, }, pageFilters, { - limit: limit || undefined, intervalLadder: displayType === DisplayType.BAR ? 'bar' : 'dashboard', } ); - const pathname = `/organizations/${organization.slug}/metrics/data/`; + const pathname = `/organizations/${organization.slug}/metrics/query/`; return api.requestPromise(pathname, { + method: 'POST', + query: payload.query, + data: payload.body, includeAllArgs: true, - query: requestData, }); } diff --git a/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx b/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx index 0401b1a3c1f0ed..4561ddc444b518 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetQueries.tsx @@ -4,7 +4,7 @@ import omit from 'lodash/omit'; import type {Client} from 'sentry/api'; import {isSelectionEqual} from 'sentry/components/organizations/pageFilters/utils'; -import type {MetricsApiResponse, Organization, PageFilters} from 'sentry/types'; +import type {MetricsQueryApiResponse, Organization, PageFilters} from 'sentry/types'; import type {Series} from 'sentry/types/echarts'; import type {TableDataWithTitle} from 'sentry/utils/discover/discoverQuery'; import {TOP_N} from 'sentry/utils/discover/types'; @@ -63,8 +63,11 @@ class MetricWidgetQueries extends Component { } customDidUpdateComparator = ( - prevProps: GenericWidgetQueriesProps, - nextProps: GenericWidgetQueriesProps + prevProps: GenericWidgetQueriesProps< + MetricsQueryApiResponse, + MetricsQueryApiResponse + >, + nextProps: GenericWidgetQueriesProps ) => { const {loading, limit, widget, cursor, organization, selection, dashboardFilters} = nextProps; @@ -126,7 +129,7 @@ class MetricWidgetQueries extends Component { const config = MetricsConfig; return ( - + config={config} api={api} organization={organization} From 61ffcc40c9709750552807d8605899ef2a6aa4bd Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Wed, 14 Feb 2024 13:16:49 +0100 Subject: [PATCH 356/357] feat(ddm): Implement bulk querying engine for metrics queries (#65066) --- .../api/endpoints/organization_metrics.py | 2 +- .../sentry_metrics/querying/data_v2/api.py | 5 +- .../querying/data_v2/execution.py | 572 ++++++++++-------- .../querying/data_v2/transformation.py | 10 +- src/sentry/snuba/metrics_layer/query.py | 3 + .../querying/data_v2/test_api.py | 9 +- 6 files changed, 339 insertions(+), 262 deletions(-) diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py index 3e0c77af8ab937..ae5d28c6fe522a 100644 --- a/src/sentry/api/endpoints/organization_metrics.py +++ b/src/sentry/api/endpoints/organization_metrics.py 
@@ -378,6 +378,7 @@ def _metrics_queries_plan_from_request(self, request: Request) -> MetricsQueries """ Extracts the metrics queries plan from the request payload. """ + # TODO: maybe we could use a serializer to read the body of the request. metrics_queries_plan = MetricsQueriesPlan() queries = request.data.get("queries") or [] @@ -406,7 +407,6 @@ def post(self, request: Request, organization) -> Response: end=end, interval=interval, organization=organization, - # TODO: figure out how to make these methods work with HTTP body. projects=self.get_projects(request, organization), environments=self.get_environments(request, organization), referrer=Referrer.API_DDM_METRICS_QUERY.value, diff --git a/src/sentry/sentry_metrics/querying/data_v2/api.py b/src/sentry/sentry_metrics/querying/data_v2/api.py index ea878571a9dabd..1c7b74568533d9 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/api.py +++ b/src/sentry/sentry_metrics/querying/data_v2/api.py @@ -1,5 +1,6 @@ -from collections.abc import Sequence +from collections.abc import Mapping, Sequence from datetime import datetime +from typing import Any from snuba_sdk import MetricsQuery, MetricsScope, Rollup @@ -22,7 +23,7 @@ def run_metrics_queries_plan( projects: Sequence[Project], environments: Sequence[Environment], referrer: str, -): +) -> Mapping[str, Any]: # For now, if the query plan is empty, we return an empty dictionary. In the future, we might want to default # to a better data type. if metrics_queries_plan.is_empty(): diff --git a/src/sentry/sentry_metrics/querying/data_v2/execution.py b/src/sentry/sentry_metrics/querying/data_v2/execution.py index 8b309b6c5e6a22..4a7e89a74a7a74 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/execution.py +++ b/src/sentry/sentry_metrics/querying/data_v2/execution.py @@ -1,7 +1,8 @@ from collections.abc import Mapping, Sequence from dataclasses import dataclass, replace from datetime import datetime -from typing import Any +from enum import Enum +from typing import Any, Union, cast import sentry_sdk from snuba_sdk import Column, Direction, MetricsQuery, MetricsScope, Request @@ -11,10 +12,7 @@ from sentry.models.project import Project from sentry.sentry_metrics.querying.common import DEFAULT_QUERY_INTERVALS, SNUBA_QUERY_LIMIT from sentry.sentry_metrics.querying.data_v2.plan import QueryOrder -from sentry.sentry_metrics.querying.errors import ( - InvalidMetricsQueryError, - MetricsQueryExecutionError, -) +from sentry.sentry_metrics.querying.errors import MetricsQueryExecutionError from sentry.sentry_metrics.querying.types import GroupKey, GroupsCollection from sentry.sentry_metrics.querying.visitors import ( QueriedMetricsVisitor, @@ -24,7 +22,7 @@ from sentry.sentry_metrics.visibility import get_metrics_blocking_state from sentry.snuba.dataset import Dataset from sentry.snuba.metrics import to_intervals -from sentry.snuba.metrics_layer.query import run_query +from sentry.snuba.metrics_layer.query import bulk_run_query from sentry.utils import metrics from sentry.utils.snuba import SnubaError @@ -39,7 +37,6 @@ def _extract_groups_from_seq(seq: Sequence[Mapping[str, Any]]) -> GroupsCollecti for data in seq: inner_group = [] for key, value in data.items(): - # TODO: check if time can be used as a tag key. 
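+ # "aggregate_value" and "time" are result columns of each row rather than
+ # group-by tags, so they are excluded from the extracted group key.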
if key not in ["aggregate_value", "time"]: inner_group.append((key, value)) @@ -99,160 +96,223 @@ def _build_aligned_seq( return aligned_seq -@dataclass(frozen=True) -class ExecutableQuery: - with_series: bool - with_totals: bool +def _push_down_group_filters( + metrics_query: MetricsQuery, groups_collection: GroupsCollection | None +) -> MetricsQuery: + """ + Returns a new `MetricsQuery` with a series of filters that ensure that the new query will have the same + groups returned. Keep in mind that there is no guarantee that all the groups will be returned, since data might + change in the meanwhile, so the guarantee of this method is that the returned groups will all be belonging to + `groups_collection`. + + The need for this filter arises because when executing multiple queries, we want to have the same groups + returned, in order to make results consistent. Note that in case queries have different groups, some results + might be missing, since the reference query dictates which values are returned during the alignment process. + """ + if not groups_collection: + return metrics_query - identifier: str - metrics_query: MetricsQuery - order: QueryOrder | None - limit: int | None + # We perform a transformation in the form [(key_1 = value_1 AND key_2 = value_2) OR (key_3 = value_3)]. + groups_filters = [] + for groups in groups_collection: + inner_snuba_filters = [] + for filter_key, filter_value in groups: + inner_snuba_filters.append(Condition(Column(filter_key), Op.EQ, filter_value)) - def is_empty(self) -> bool: - return not self.metrics_query.scope.org_ids or not self.metrics_query.scope.project_ids + # In case we have more than one filter, we have to group them into an `AND`. + if len(inner_snuba_filters) > 1: + groups_filters.append(BooleanCondition(BooleanOp.AND, inner_snuba_filters)) + else: + groups_filters.append(inner_snuba_filters[0]) - def replace_date_range(self, start: datetime, end: datetime) -> "ExecutableQuery": - return replace( - self, - metrics_query=self.metrics_query.set_start(start).set_end(end), - ) + # In case we have more than one filter, we have to group them into an `OR`. + if len(groups_filters) > 1: + groups_filters = [BooleanCondition(BooleanOp.OR, groups_filters)] - def replace_limit(self, limit: int = SNUBA_QUERY_LIMIT) -> "ExecutableQuery": - return replace( - self, - metrics_query=self.metrics_query.set_limit(limit), - ) + merged_query = TimeseriesConditionInjectionVisitor(groups_filters).visit(metrics_query.query) + return metrics_query.set_query(merged_query) - def replace_interval(self, new_interval: int) -> "ExecutableQuery": - return replace( - self, - metrics_query=self.metrics_query.set_rollup( - replace(self.metrics_query.rollup, interval=new_interval) - ), - ) - def replace_order_by(self, direction: Direction) -> "ExecutableQuery": - return replace( - self, - metrics_query=self.metrics_query.set_rollup( - replace(self.metrics_query.rollup, interval=None, totals=True, orderby=direction) - ), - ) +class ScheduledQueryType(Enum): + SERIES = 0 + TOTALS = 1 - def to_totals_query(self) -> "ExecutableQuery": - return replace( - self, - metrics_query=self.metrics_query.set_rollup( - # If an order_by is used, we must run a totals query. 
- replace(self.metrics_query.rollup, interval=None, totals=True) - ), - ) - def add_group_filters( +@dataclass(frozen=True) +class ScheduledQuery: + type: ScheduledQueryType + metrics_query: MetricsQuery + next: Union["ScheduledQuery", None] = None + order: QueryOrder | None = None + limit: int | None = None + + def initialize( self, - groups_collection: GroupsCollection | None, - ) -> "ExecutableQuery": - """ - Returns a new `ExecutableQuery` with a series of filters that ensure that the new query will have the same - groups returned. Keep in mind that there is no guarantee that all the groups will be returned, since data might - change in the meanwhile, so the guarantee of this method is that the returned groups will all be belonging to - `groups_collection`. - - The need for this filter arises because when executing multiple queries, we want to have the same groups - returned, in order to make results consistent. Note that in case queries have different groups, some results - might be missing, since the reference query dictates which values are returned during the alignment process. - """ - if not groups_collection: - return self + organization: Organization, + projects: Sequence[Project], + blocked_metrics_for_projects: Mapping[str, set[int]], + ) -> "ScheduledQuery": + updated_metrics_query = self.metrics_query - # We perform a transformation in the form [(key_1 = value_1 AND key_2 = value_2) OR (key_3 = value_3)]. - groups_filters = [] - for groups in groups_collection: - inner_snuba_filters = [] - for filter_key, filter_value in groups: - inner_snuba_filters.append(Condition(Column(filter_key), Op.EQ, filter_value)) + # We filter out all the projects for which the queried metrics are blocked. + updated_metrics_query = self._filter_blocked_projects( + updated_metrics_query, organization, projects, blocked_metrics_for_projects + ) + # We align the date range of the query, considering the supplied interval. + updated_metrics_query = self._align_date_range(updated_metrics_query) + + # We perform type-specific initializations, since based on the type we want to run + # a different query. + if self.type == ScheduledQueryType.SERIES: + updated_metrics_query = self._initialize_series(updated_metrics_query) + elif self.type == ScheduledQueryType.TOTALS: + updated_metrics_query = self._initialize_totals(updated_metrics_query) + + # We recursively apply the initialization transformations downstream. + updated_next = None + if self.next is not None: + updated_next = self.next.initialize( + organization, projects, blocked_metrics_for_projects + ) - # In case we have more than one filter, we have to group them into an `AND`. - if len(inner_snuba_filters) > 1: - groups_filters.append(BooleanCondition(BooleanOp.AND, inner_snuba_filters)) - else: - groups_filters.append(inner_snuba_filters[0]) + return replace(self, metrics_query=updated_metrics_query, next=updated_next) - # In case we have more than one filter, we have to group them into an `OR`. - if len(groups_filters) > 1: - groups_filters = [BooleanCondition(BooleanOp.OR, groups_filters)] + def _initialize_series(self, metrics_query: MetricsQuery) -> MetricsQuery: + updated_metrics_query = metrics_query - merged_query = TimeseriesConditionInjectionVisitor(groups_filters).visit( - self.metrics_query.query - ) - return replace( - self, - metrics_query=self.metrics_query.set_query(merged_query), + # A series query runs always up to the maximum query limit. 
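+ # (SNUBA_QUERY_LIMIT rows); the groups that actually matter are narrowed later by
+ # pushing the totals query's returned groups down into the series query as filters.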
+ updated_metrics_query = updated_metrics_query.set_limit(SNUBA_QUERY_LIMIT) + + return updated_metrics_query + + def _initialize_totals(self, metrics_query: MetricsQuery) -> MetricsQuery: + updated_metrics_query = metrics_query + + # A totals query doesn't have an interval. + updated_metrics_query = updated_metrics_query.set_rollup( + replace(updated_metrics_query.rollup, interval=None, totals=True) ) - def filter_blocked_projects( - self, + if self.order: + updated_metrics_query = updated_metrics_query.set_rollup( + replace(updated_metrics_query.rollup, orderby=self.order.to_snuba_order()) + ) + + if self.limit: + updated_metrics_query = updated_metrics_query.set_limit(self.limit) + else: + updated_metrics_query = updated_metrics_query.set_limit(SNUBA_QUERY_LIMIT) + + return updated_metrics_query + + def is_empty(self) -> bool: + return not self.metrics_query.scope.org_ids or not self.metrics_query.scope.project_ids + + @classmethod + def _filter_blocked_projects( + cls, + metrics_query: MetricsQuery, organization: Organization, - projects: set[Project], + projects: Sequence[Project], blocked_metrics_for_projects: Mapping[str, set[int]], - ) -> "ExecutableQuery": - """ - Returns a new `ExecutableQuery` with the projects for which all the queries are not blocked. In case no projects - exist, the query will be returned with empty projects, signaling the executor to not run the query. - """ + ) -> MetricsQuery: intersected_projects: set[int] = {project.id for project in projects} - for queried_metric in QueriedMetricsVisitor().visit(self.metrics_query.query): + for queried_metric in QueriedMetricsVisitor().visit(metrics_query.query): blocked_for_projects = blocked_metrics_for_projects.get(queried_metric) if blocked_for_projects: metrics.incr(key="ddm.metrics_api.blocked_metric_queried", amount=1) intersected_projects -= blocked_for_projects - return replace( - self, - metrics_query=self.metrics_query.set_scope( - MetricsScope( - org_ids=[organization.id], - project_ids=list(intersected_projects), - ) - ), + return metrics_query.set_scope( + MetricsScope( + org_ids=[organization.id], + project_ids=list(intersected_projects), + ) ) + @classmethod + def _align_date_range(cls, metrics_query: MetricsQuery) -> MetricsQuery: + # We use as a reference the interval supplied via the initial version of the query. 
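+ # to_intervals aligns (start, end) to whole multiples of this interval, so the
+ # chained totals and series queries cover the same time window.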
+ interval = metrics_query.rollup.interval + if interval: + modified_start, modified_end, _ = to_intervals( + metrics_query.start, + metrics_query.end, + interval, + ) + if modified_start and modified_end: + return metrics_query.set_start(modified_start).set_end(modified_end) + + return metrics_query + @dataclass(frozen=True) class QueryResult: - series_executable_query: ExecutableQuery | None - totals_executable_query: ExecutableQuery | None + series_executable_query: MetricsQuery | None + totals_executable_query: MetricsQuery | None result: Mapping[str, Any] def __post_init__(self): assert self.series_executable_query or self.totals_executable_query @classmethod - def empty_from(cls, executable_query: ExecutableQuery) -> "QueryResult": + def empty_from(cls, scheduled_query: ScheduledQuery) -> "QueryResult": + series_metrics_query = None + totals_metrics_query = None + + if scheduled_query.next is not None: + totals_metrics_query = scheduled_query.metrics_query + series_metrics_query = scheduled_query.next.metrics_query + else: + if scheduled_query.type == ScheduledQueryType.SERIES: + series_metrics_query = scheduled_query.metrics_query + elif scheduled_query.type == ScheduledQueryType.TOTALS: + totals_metrics_query = scheduled_query.metrics_query + return QueryResult( - series_executable_query=executable_query, - totals_executable_query=executable_query, + series_executable_query=series_metrics_query, + totals_executable_query=totals_metrics_query, result={ "series": {"data": {}, "meta": {}}, "totals": {"data": {}, "meta": {}}, # We want to honor the date ranges of the supplied query. - "modified_start": executable_query.metrics_query.start, - "modified_end": executable_query.metrics_query.end, + "modified_start": scheduled_query.metrics_query.start, + "modified_end": scheduled_query.metrics_query.end, }, ) - @property - def query_name(self) -> str: - if self.series_executable_query: - return self.series_executable_query.identifier + @classmethod + def from_query_type( + cls, query_type: ScheduledQueryType, query: MetricsQuery, query_result: Mapping[str, Any] + ) -> "QueryResult": + extended_result = { + "modified_start": query_result["modified_start"], + "modified_end": query_result["modified_end"], + } + + if query_type == ScheduledQueryType.SERIES: + extended_result["series"] = query_result + return QueryResult( + series_executable_query=query, + totals_executable_query=None, + result=extended_result, + ) + elif query_type == ScheduledQueryType.TOTALS: + extended_result["totals"] = query_result + return QueryResult( + series_executable_query=None, + totals_executable_query=query, + result=extended_result, + ) - if self.totals_executable_query: - return self.totals_executable_query.identifier + raise MetricsQueryExecutionError(f"Can't build query result from query type {query_type}") - raise InvalidMetricsQueryError( - "Unable to determine the query name for a result with no queries" + def merge(self, other: "QueryResult") -> "QueryResult": + return QueryResult( + series_executable_query=self.series_executable_query or other.series_executable_query, + totals_executable_query=self.totals_executable_query or other.totals_executable_query, + result={**self.result, **other.result}, ) @property @@ -270,7 +330,7 @@ def interval(self) -> int: "You have to run a timeseries query in order to use the interval" ) - return self.series_executable_query.metrics_query.rollup.interval + return self.series_executable_query.rollup.interval @property def series(self) -> Sequence[Mapping[str, Any]]: @@ 
-300,34 +360,25 @@ def group_bys(self) -> list[str]: # # Sorting of the groups is done to maintain consistency across function calls. if self.series_executable_query: - return sorted( - UsedGroupBysVisitor().visit(self.series_executable_query.metrics_query.query) - ) + return sorted(UsedGroupBysVisitor().visit(self.series_executable_query.query)) if self.totals_executable_query: - return sorted( - UsedGroupBysVisitor().visit(self.totals_executable_query.metrics_query.query) - ) + return sorted(UsedGroupBysVisitor().visit(self.totals_executable_query.query)) return [] @property - def order(self) -> str | None: - if self.series_executable_query and self.series_executable_query.order is not None: - return self.series_executable_query.order.value - - if self.totals_executable_query and self.totals_executable_query.order is not None: - return self.totals_executable_query.order.value + def order(self) -> Direction | None: + if self.totals_executable_query: + return self.totals_executable_query.rollup.orderby return None @property def limit(self) -> int | None: - if self.series_executable_query: - return self.series_executable_query.limit - + # The totals limit is the only one that controls the number of groups that are returned. if self.totals_executable_query: - return self.totals_executable_query.limit + return self.totals_executable_query.limit.limit return None @@ -373,6 +424,19 @@ def align_series_to_totals(self) -> "QueryResult": return self +@dataclass(frozen=True) +class PartialQueryResult: + scheduled_query: ScheduledQuery + executed_result: Mapping[str, Any] + + def to_query_result(self) -> QueryResult: + return QueryResult.from_query_type( + query_type=self.scheduled_query.type, + query=self.scheduled_query.metrics_query, + query_result=self.executed_result, + ) + + class QueryExecutor: def __init__(self, organization: Organization, projects: Sequence[Project], referrer: str): self._organization = organization @@ -383,9 +447,12 @@ def __init__(self, organization: Organization, projects: Sequence[Project], refe # to avoid an infinite recursion. self._interval_choices = sorted(DEFAULT_QUERY_INTERVALS) # List of queries scheduled for execution. - self._scheduled_queries: list[ExecutableQuery] = [] + self._scheduled_queries: list[ScheduledQuery] = [] # Tracks the number of queries that have been executed (for measuring purposes). self._number_of_executed_queries = 0 + # Tracks the pending query results that have been run by the executor. The list will contain both the final + # `QueryResult` objects and the partial `PartialQueryResult` objects that still have to be executed. + self._pending_query_results: list[QueryResult | PartialQueryResult] = [] # We load the blocked metrics for the supplied projects. self._blocked_metrics_for_projects = self._load_blocked_metrics_for_projects() @@ -419,135 +486,126 @@ def _build_request(self, query: MetricsQuery) -> Request: tenant_ids={"referrer": self._referrer, "organization_id": self._organization.id}, ) - def _execute(self, executable_query: ExecutableQuery) -> QueryResult: - """ - Executes a query as series and/or totals and returns the result. - """ - try: - # We merge the query with the blocked projects, in order to obtain a new query with only the projects that - # all have the queried metrics unblocked. 
- executable_query = executable_query.filter_blocked_projects( - organization=self._organization, - projects=set(self._projects), - blocked_metrics_for_projects=self._blocked_metrics_for_projects, + def _build_request_for_partial(self, partial_query_result: PartialQueryResult) -> Request: + if partial_query_result.scheduled_query.type != ScheduledQueryType.TOTALS: + raise MetricsQueryExecutionError( + "A partial query result must have an initial query of type totals" ) - # We try to determine the interval of the query, which will be used to define clear time bounds for both - # queries. This is done here since the metrics layer doesn't adjust the time for totals queries. - interval = executable_query.metrics_query.rollup.interval - if interval: - modified_start, modified_end, _ = to_intervals( - executable_query.metrics_query.start, - executable_query.metrics_query.end, - interval, - ) - if modified_start and modified_end: - executable_query = executable_query.replace_date_range( - modified_start, modified_end - ) - - # If, after merging the query with the blocked projects, the query becomes empty, we will return an empty - # result. - if executable_query.is_empty(): - return QueryResult.empty_from(executable_query) - - totals_executable_query = executable_query - totals_result = None - if executable_query.with_totals: - # If there is an order by, we apply it only on the totals query. We can't order a series query, for this - # reason we have to perform ordering here. - if executable_query.order: - totals_executable_query = totals_executable_query.replace_order_by( - executable_query.order.to_snuba_order() - ) - - # Only in totals, if there is a limit passed by the user, we will honor that and apply it. - if executable_query.limit: - totals_executable_query = totals_executable_query.replace_limit( - executable_query.limit - ) - - self._number_of_executed_queries += 1 - totals_result = run_query( - request=self._build_request( - totals_executable_query.to_totals_query().metrics_query - ) - ) - - series_executable_query = executable_query - series_result = None - if executable_query.with_series: - # For series queries, we always want to use the default Snuba limit. - series_executable_query = series_executable_query.replace_limit(SNUBA_QUERY_LIMIT) - - # In order to have at least the same groups, we need to pass down the groups obtained in the - # previous totals query to the series query. - if totals_result: - series_executable_query = series_executable_query.add_group_filters( - _extract_groups_from_seq(totals_result["data"]) - ) + next_scheduled_query = partial_query_result.scheduled_query.next + if next_scheduled_query is None: + raise MetricsQueryExecutionError( + "A partial query result must have a next query to be executed" + ) - self._number_of_executed_queries += 1 - series_result = run_query( - request=self._build_request(series_executable_query.metrics_query) - ) + # We compute the groups that were returned by the query that was executed. We then inject those groups in each + # `Timeseries` of the next query to execute. We do this in order to have at least the same groups returned by + # the next query. + # + # Note that the mutation we do is not reflected in the queries that are returned as part of the + # `QueryResult`(s) but since we do not need this data we can leave it out. 
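+ # The injected filter has the form [(k1 = v1 AND k2 = v2) OR (k3 = v3)], one branch
+ # per group returned by the already-executed totals query.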
+ next_metrics_query = _push_down_group_filters( + next_scheduled_query.metrics_query, + _extract_groups_from_seq(partial_query_result.executed_result["data"]), + ) - result = {} - if series_result and totals_result: - result = { - "series": series_result, - "totals": totals_result, - "modified_start": series_result["modified_start"], - "modified_end": series_result["modified_end"], - } - elif series_result: - result = { - "series": series_result, - "modified_start": series_result["modified_start"], - "modified_end": series_result["modified_end"], - } - elif totals_result: - result = { - "totals": totals_result, - "modified_start": totals_result["modified_start"], - "modified_end": totals_result["modified_end"], - } + return self._build_request(next_metrics_query) - return QueryResult( - series_executable_query=series_executable_query, - totals_executable_query=totals_executable_query, - result=result, - ) + def _bulk_run_query(self, requests: list[Request]) -> list[Mapping[str, Any]]: + try: + return bulk_run_query(requests) except SnubaError as e: sentry_sdk.capture_exception(e) raise MetricsQueryExecutionError("An error occurred while executing the query") - def _serial_execute(self) -> Sequence[QueryResult]: - """ - Executes serially all the queries that are supplied to the QueryExecutor. + def _bulk_execute(self) -> Sequence[QueryResult]: + # We build all the requests that can be scheduled together in the first step. + bulk_requests = [] + # We collect all the indexes of the queries which are empty and should not be executed. + empty_queries_indexes = [] + for query_index, scheduled_query in enumerate(self._scheduled_queries): + if scheduled_query.is_empty(): + empty_queries_indexes.append(query_index) + else: + bulk_requests.append(self._build_request(scheduled_query.metrics_query)) + + # We run the requests in bulk and obtain a list of pending query results, which can include both + # `QueryResult`(s) that are done and `PartialQueryResult`(s) which require a second pass. + query_results = self._bulk_run_query(bulk_requests) + # We inject into the results all the empty values belonging to the empty queries. This insertion assumes that + # we do the filling of `query_results` in order, otherwise it won't work. + for empty_query_index in empty_queries_indexes: + query_results.insert(empty_query_index, {}) + + for query_index, query_result in enumerate(query_results): + scheduled_query = self._scheduled_queries[query_index] + if scheduled_query.is_empty(): + self._pending_query_results.append( + QueryResult.empty_from(scheduled_query=scheduled_query) + ) + elif scheduled_query.next is not None: + self._pending_query_results.append( + PartialQueryResult( + scheduled_query=scheduled_query, + executed_result=query_result, + ) + ) + else: + self._pending_query_results.append( + QueryResult.from_query_type( + query_type=scheduled_query.type, + query=scheduled_query.metrics_query, + query_result=query_result, + ) + ) - The execution will try to satisfy the query by dynamically changing its interval, in the case in which the - Snuba limit is reached. - """ - results = [] - for query in self._scheduled_queries: - with metrics.timer(key="ddm.metrics_api.metrics_query.execution_time"): - query_result = self._execute(executable_query=query) - results.append(query_result.align_series_to_totals()) + # We build all the requests for the `PendingQueryResult`(s) which will again be executed in parallel. 
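+ # (each such pending entry is a PartialQueryResult: an executed totals result plus
+ # the series query that still has to run).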
+ bulk_requests = [] + mappings = [] + for query_index, pending_query_result in enumerate(self._pending_query_results): + if isinstance(pending_query_result, PartialQueryResult): + bulk_requests.append(self._build_request_for_partial(pending_query_result)) + mappings.append(query_index) + + # We run the requests in bulk to obtain a list of `QueryResult`(s). In order to do so, the `QueryResult` objects + # from the first and second query are merged. + query_results = self._bulk_run_query(bulk_requests) + for query_index, query_result in zip(mappings, query_results): + partial_query_result = self._pending_query_results[query_index] + if isinstance(partial_query_result, PartialQueryResult): + next_scheduled_query = partial_query_result.scheduled_query.next + # If, for some reason, there is a `None` next at this point, we will just dump the partial query as a + # `QueryResult`. + if next_scheduled_query is None: + self._pending_query_results[ + query_index + ] = partial_query_result.to_query_result() + continue + + # If there is a next query, we will merge the first and second queries into a single `QueryResult`. + first_query_result = partial_query_result.to_query_result() + second_query_result = QueryResult.from_query_type( + query_type=next_scheduled_query.type, + query=next_scheduled_query.metrics_query, + query_result=query_result, + ) + self._pending_query_results[query_index] = first_query_result.merge( + second_query_result + ) - return results + # For now, we naively cast to a list of `QueryResult` since we assume that the chaining is used with at most + # a depth of 2 (e.g., query_1 -> query_2), so by this point we should NOT have anymore `PartialQueryResult`(s) + # left. + return cast(Sequence[QueryResult], self._pending_query_results) - def execute(self, batch: bool = False) -> Sequence[QueryResult]: + def execute(self) -> Sequence[QueryResult]: """ Executes the scheduled queries serially. """ - # TODO: implement batch execution when there will be the support for it. - results = self._serial_execute() - metrics.distribution( - key="ddm.metrics_api.queries_executed", value=self._number_of_executed_queries - ) + if not self._scheduled_queries: + return [] - return results + return self._bulk_execute() def schedule( self, @@ -560,13 +618,23 @@ def schedule( Note that this method won't execute the query, since it's lazy in nature. """ - executable_query = ExecutableQuery( - with_series=True, - with_totals=True, - # We identify the query with its index. - identifier=str(len(self._scheduled_queries)), + # For now, we are always building a (totals -> series) query, but the execution engine is fully capable of + # supporting either a single totals or series query. + executable_query = ScheduledQuery( + type=ScheduledQueryType.TOTALS, metrics_query=query, + next=ScheduledQuery( + type=ScheduledQueryType.SERIES, + metrics_query=query, + order=order, + limit=limit, + ), order=order, limit=limit, ) + # We initialize the query by performing type-aware mutations that prepare the query to be executed correctly + # (e.g., adding `totals` to a totals query...). 
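+ # initialize() also filters out projects whose queried metrics are blocked and
+ # aligns the date range for every query in the chain.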
+ executable_query = executable_query.initialize( + self._organization, self._projects, self._blocked_metrics_for_projects + ) self._scheduled_queries.append(executable_query) diff --git a/src/sentry/sentry_metrics/querying/data_v2/transformation.py b/src/sentry/sentry_metrics/querying/data_v2/transformation.py index b683519751b03e..edbf60dd2f7c4b 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/transformation.py +++ b/src/sentry/sentry_metrics/querying/data_v2/transformation.py @@ -168,7 +168,11 @@ def _add_to_query_groups( # We add additional metadata from the query themselves to make the API more transparent. query_meta.append( - QueryMeta(group_bys=group_bys, order=query_result.order, limit=query_result.limit) + QueryMeta( + group_bys=group_bys, + order=query_result.order.value if query_result.order else None, + limit=query_result.limit, + ) ) queries_groups.append(query_groups) @@ -180,6 +184,10 @@ def transform(self) -> Mapping[str, Any]: """ Transforms the query results into the Sentry's API format. """ + # If we have not run any queries, we won't return anything back. + if not self._query_results: + return {} + # We first build intermediate results that we can work efficiently with. queries_groups, queries_meta = self._build_intermediate_results() diff --git a/src/sentry/snuba/metrics_layer/query.py b/src/sentry/snuba/metrics_layer/query.py index cc0a0ddde70b0d..292bad7e722e8a 100644 --- a/src/sentry/snuba/metrics_layer/query.py +++ b/src/sentry/snuba/metrics_layer/query.py @@ -83,6 +83,9 @@ def bulk_run_query(requests: list[Request]) -> list[Mapping[str, Any]]: This function is used to execute multiple metrics queries in a single request. """ + if not requests: + return [] + queries = [] for request in requests: request, start, end = _setup_metrics_query(request) diff --git a/tests/sentry/sentry_metrics/querying/data_v2/test_api.py b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py index 61724b01d244fa..9efc9008bce161 100644 --- a/tests/sentry/sentry_metrics/querying/data_v2/test_api.py +++ b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py @@ -263,7 +263,7 @@ def test_query_with_group_by(self) -> None: first_meta = sorted(meta[0], key=lambda value: value.get("name", "")) assert first_meta[0] == { "group_bys": ["platform", "transaction"], - "limit": None, + "limit": 10000, "order": None, } @@ -583,9 +583,9 @@ def test_query_with_multiple_aggregations_and_single_group_by_and_order_by_with_ meta = results["meta"] assert len(meta) == 2 first_meta = sorted(meta[0], key=lambda value: value.get("name", "")) - assert first_meta[0] == {"group_bys": ["platform"], "limit": 2, "order": "asc"} + assert first_meta[0] == {"group_bys": ["platform"], "limit": 2, "order": "ASC"} second_meta = sorted(meta[1], key=lambda value: value.get("name", "")) - assert second_meta[0] == {"group_bys": ["platform"], "limit": 2, "order": "desc"} + assert second_meta[0] == {"group_bys": ["platform"], "limit": 2, "order": "DESC"} def test_query_with_custom_set(self): mri = "s:custom/User.Click.2@none" @@ -765,9 +765,6 @@ def test_query_with_invalid_syntax( referrer="metrics.data.api", ) - # Different namespaces - # Different types - # Different group bys (at the formula level and also at the timeseries level) def test_query_with_different_namespaces(self): query_1 = self.mql( "min", From b103829d55de7a74492d0e81a0330e1fab78e50d Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 14 Feb 2024 13:31:33 +0100 Subject: [PATCH 357/357] ref(ddm): Prepare metrics query helpers for multi queries 
(#65146) - relates to https://github.com/getsentry/sentry/issues/64773 --- static/app/utils/metrics/useMetricsData.tsx | 176 ------------------ ...Data.spec.tsx => useMetricsQuery.spec.tsx} | 16 +- static/app/utils/metrics/useMetricsQuery.tsx | 116 ++++++++++++ .../dashboards/datasetConfig/metrics.tsx | 26 +-- .../widgetCard/metricWidgetCard/index.tsx | 16 +- static/app/views/ddm/createAlertModal.tsx | 7 +- static/app/views/ddm/widget.tsx | 9 +- .../projectMetrics/projectMetricsDetails.tsx | 5 +- 8 files changed, 157 insertions(+), 214 deletions(-) delete mode 100644 static/app/utils/metrics/useMetricsData.tsx rename static/app/utils/metrics/{useMetricsData.spec.tsx => useMetricsQuery.spec.tsx} (92%) create mode 100644 static/app/utils/metrics/useMetricsQuery.tsx diff --git a/static/app/utils/metrics/useMetricsData.tsx b/static/app/utils/metrics/useMetricsData.tsx deleted file mode 100644 index c62e1894c1ea4a..00000000000000 --- a/static/app/utils/metrics/useMetricsData.tsx +++ /dev/null @@ -1,176 +0,0 @@ -import {useCallback, useEffect, useState} from 'react'; - -import type {DateString, PageFilters} from 'sentry/types'; -import {getDateTimeParams, getDDMInterval} from 'sentry/utils/metrics'; -import {getUseCaseFromMRI, parseField} from 'sentry/utils/metrics/mri'; -import type {MetricsQuery} from 'sentry/utils/metrics/types'; -import {useApiQuery} from 'sentry/utils/queryClient'; -import useOrganization from 'sentry/utils/useOrganization'; - -import type { - MetricsDataIntervalLadder, - MetricsQueryApiResponse, -} from '../../types/metrics'; - -export function createMqlQuery({ - field, - query, - groupBy = [], -}: {field: string; groupBy?: string[]; query?: string}) { - let mql = field; - if (query) { - mql = `${mql}{${query}}`; - } - if (groupBy.length) { - mql = `${mql} by (${groupBy.join(',')})`; - } - return mql; -} - -export function getMetricsQueryApiRequestPayload( - { - field, - query, - groupBy, - orderBy, - limit, - }: { - field: string; - groupBy?: string[]; - limit?: number; - orderBy?: 'asc' | 'desc'; - query?: string; - }, - {projects, environments, datetime}: PageFilters, - { - intervalLadder, - interval: intervalParam, - }: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {} -) { - const {mri: mri} = parseField(field) ?? {}; - const useCase = getUseCaseFromMRI(mri) ?? 'custom'; - const interval = intervalParam ?? getDDMInterval(datetime, useCase, intervalLadder); - const hasGoupBy = groupBy && groupBy.length > 0; - - return { - query: { - ...getDateTimeParams(datetime), - project: projects, - environment: environments, - interval, - }, - body: { - queries: [ - { - name: 'query_1', - mql: createMqlQuery({field, query, groupBy}), - }, - ], - formulas: [ - {mql: '$query_1', limit: limit, order: hasGoupBy ? orderBy ?? 'desc' : undefined}, - ], - }, - }; -} - -export function useMetricsQuery( - {mri, op, datetime, projects, environments, query, groupBy}: MetricsQuery, - overrides: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {} -) { - const organization = useOrganization(); - - const field = op ? `${op}(${mri})` : mri; - - const {query: queryToSend, body} = getMetricsQueryApiRequestPayload( - { - field, - query: query ?? 
'',
-      groupBy,
-    },
-    {datetime, projects, environments},
-    {...overrides}
-  );
-
-  return useApiQuery<MetricsQueryApiResponse>(
-    [
-      `/organizations/${organization.slug}/metrics/query/`,
-      {query: queryToSend, data: body, method: 'POST'},
-    ],
-    {
-      retry: 0,
-      staleTime: 0,
-      refetchOnReconnect: true,
-      refetchOnWindowFocus: true,
-      refetchInterval: false,
-    }
-  );
-}
-
-// Wraps useMetricsData and provides two additional features:
-// 1. return data is undefined only during the initial load
-// 2. provides a callback to trim the data to a specific time range when chart zoom is used
-export function useMetricsQueryZoom(
-  metricsQuery: MetricsQuery,
-  overrides: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {}
-) {
-  const [metricsData, setMetricsData] = useState<MetricsQueryApiResponse>();
-  const {
-    data: rawData,
-    isLoading,
-    isError,
-    error,
-  } = useMetricsQuery(metricsQuery, overrides);
-
-  useEffect(() => {
-    if (rawData) {
-      setMetricsData(rawData);
-    }
-  }, [rawData]);
-
-  const trimData = useCallback(
-    (
-      currentData: MetricsQueryApiResponse | undefined,
-      start,
-      end
-    ): MetricsQueryApiResponse | undefined => {
-      if (!currentData) {
-        return currentData;
-      }
-      // find the index of the first interval that is greater than the start time
-      const startIndex =
-        currentData.intervals.findIndex(interval => interval >= start) - 1;
-      const endIndex = currentData.intervals.findIndex(interval => interval >= end);
-
-      if (startIndex === -1 || endIndex === -1) {
-        return currentData;
-      }
-
-      return {
-        ...currentData,
-        intervals: currentData.intervals.slice(startIndex, endIndex),
-        data: currentData.data.map(group =>
-          group.map(entry => ({
-            ...entry,
-            series: entry.series.slice(startIndex, endIndex),
-          }))
-        ),
-      };
-    },
-    []
-  );
-
-  const handleZoom = useCallback(
-    (start: DateString, end: DateString) => {
-      setMetricsData(currentData => trimData(currentData, start, end));
-    },
-    [trimData]
-  );
-
-  return {
-    data: metricsData,
-    isLoading,
-    isError,
-    error,
-    onZoom: handleZoom,
-  };
-}
diff --git a/static/app/utils/metrics/useMetricsData.spec.tsx b/static/app/utils/metrics/useMetricsQuery.spec.tsx
similarity index 92%
rename from static/app/utils/metrics/useMetricsData.spec.tsx
rename to static/app/utils/metrics/useMetricsQuery.spec.tsx
index af72f28340f32e..bd8d7ac1789f52 100644
--- a/static/app/utils/metrics/useMetricsData.spec.tsx
+++ b/static/app/utils/metrics/useMetricsQuery.spec.tsx
@@ -2,7 +2,7 @@ import type {PageFilters} from 'sentry/types';
 import {
   createMqlQuery,
   getMetricsQueryApiRequestPayload,
-} from 'sentry/utils/metrics/useMetricsData';
+} from 'sentry/utils/metrics/useMetricsQuery';
 
 describe('createMqlQuery', () => {
   it('should create a basic mql query', () => {
@@ -49,7 +49,7 @@ describe('getMetricsQueryApiRequestPayload', () => {
       datetime: {start: '2023-01-01', end: '2023-01-31', period: null, utc: true},
     };
 
-    const result = getMetricsQueryApiRequestPayload(metric, filters);
+    const result = getMetricsQueryApiRequestPayload([metric], filters);
 
     expect(result.query).toEqual({
       start: '2023-01-01T00:00:00.000Z',
@@ -78,7 +78,7 @@ describe('getMetricsQueryApiRequestPayload', () => {
       datetime: {period: '7d', utc: true} as PageFilters['datetime'],
     };
 
-    const result = getMetricsQueryApiRequestPayload(metric, filters);
+    const result = getMetricsQueryApiRequestPayload([metric], filters);
 
     expect(result.query).toEqual({
       statsPeriod: '7d',
@@ -106,7 +106,9 @@ describe('getMetricsQueryApiRequestPayload', () => {
       datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true},
};
 
-    const result = getMetricsQueryApiRequestPayload(metric, filters, {interval: '123m'});
+    const result = getMetricsQueryApiRequestPayload([metric], filters, {
+      interval: '123m',
+    });
 
     expect(result.query).toEqual({
       start: '2023-01-01T00:00:00.000Z',
@@ -140,7 +142,7 @@ describe('getMetricsQueryApiRequestPayload', () => {
       datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true},
     };
 
-    const result = getMetricsQueryApiRequestPayload(metric, filters);
+    const result = getMetricsQueryApiRequestPayload([metric], filters);
 
     expect(result.query).toEqual({
       start: '2023-01-01T00:00:00.000Z',
@@ -173,7 +175,7 @@ describe('getMetricsQueryApiRequestPayload', () => {
       datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true},
     };
 
-    const result = getMetricsQueryApiRequestPayload(metric, filters);
+    const result = getMetricsQueryApiRequestPayload([metric], filters);
 
     expect(result.query).toEqual({
       start: '2023-01-01T00:00:00.000Z',
@@ -206,7 +208,7 @@ describe('getMetricsQueryApiRequestPayload', () => {
       datetime: {start: '2023-01-01', end: '2023-01-02', period: null, utc: true},
     };
 
-    const result = getMetricsQueryApiRequestPayload(metric, filters, {
+    const result = getMetricsQueryApiRequestPayload([metric], filters, {
       intervalLadder: 'ddm',
     });
 
diff --git a/static/app/utils/metrics/useMetricsQuery.tsx b/static/app/utils/metrics/useMetricsQuery.tsx
new file mode 100644
index 00000000000000..715888483d85c5
--- /dev/null
+++ b/static/app/utils/metrics/useMetricsQuery.tsx
@@ -0,0 +1,116 @@
+import {useMemo} from 'react';
+
+import type {PageFilters} from 'sentry/types';
+import {getDateTimeParams, getDDMInterval} from 'sentry/utils/metrics';
+import {getUseCaseFromMRI, MRIToField, parseField} from 'sentry/utils/metrics/mri';
+import {useApiQuery} from 'sentry/utils/queryClient';
+import useOrganization from 'sentry/utils/useOrganization';
+
+import type {
+  MetricsDataIntervalLadder,
+  MetricsQueryApiResponse,
+  MRI,
+} from '../../types/metrics';
+
+export function createMqlQuery({
+  field,
+  query,
+  groupBy = [],
+}: {field: string; groupBy?: string[]; query?: string}) {
+  let mql = field;
+  if (query) {
+    mql = `${mql}{${query}}`;
+  }
+  if (groupBy.length) {
+    mql = `${mql} by (${groupBy.join(',')})`;
+  }
+  return mql;
+}
+
+interface MetricsQueryApiRequestQuery {
+  field: string;
+  groupBy?: string[];
+  limit?: number;
+  name?: string;
+  orderBy?: 'asc' | 'desc';
+  query?: string;
+}
+
+export function getMetricsQueryApiRequestPayload(
+  queries: MetricsQueryApiRequestQuery[],
+  {projects, environments, datetime}: PageFilters,
+  {
+    intervalLadder,
+    interval: intervalParam,
+  }: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {}
+) {
+  // We take the first query's useCase to determine the interval
+  // If no useCase is found, we default to custom
+  // The backend will error if the interval is not valid for any of the useCases
+  const {mri} = parseField(queries[0]?.field) ?? {};
+  const useCase = getUseCaseFromMRI(mri) ?? 'custom';
+  const interval = intervalParam ?? getDDMInterval(datetime, useCase, intervalLadder);
+
+  const requestQueries: {mql: string; name: string}[] = [];
+  const requestFormulas: {mql: string; limit?: number; order?: 'asc' | 'desc'}[] = [];
+
+  queries.forEach((query, index) => {
+    const {field, groupBy, limit, orderBy, query: queryParam, name: nameParam} = query;
+    const name = nameParam || `query_${index + 1}`;
+    const hasGroupBy = groupBy && groupBy.length > 0;
+    requestQueries.push({name, mql: createMqlQuery({field, query: queryParam, groupBy})});
+    requestFormulas.push({
+      mql: `$${name}`,
+      limit,
+      order: hasGroupBy ? orderBy ?? 'desc' : undefined,
+    });
+  });
+
+  return {
+    query: {
+      ...getDateTimeParams(datetime),
+      project: projects,
+      environment: environments,
+      interval,
+    },
+    body: {
+      queries: requestQueries,
+      formulas: requestFormulas,
+    },
+  };
+}
+
+export function useMetricsQuery(
+  queries: (Omit<MetricsQueryApiRequestQuery, 'field'> & {mri: MRI; op?: string})[],
+  {projects, environments, datetime}: PageFilters,
+  overrides: {interval?: string; intervalLadder?: MetricsDataIntervalLadder} = {}
+) {
+  const organization = useOrganization();
+
+  const queryIsComplete = queries.every(({op}) => op);
+
+  const {query: queryToSend, body} = useMemo(
+    () =>
+      getMetricsQueryApiRequestPayload(
+        queries.map(query => ({...query, field: MRIToField(query.mri, query.op!)})),
+        {datetime, projects, environments},
+        {...overrides}
+      ),
+    [queries, datetime, projects, environments, overrides]
+  );
+
+  return useApiQuery<MetricsQueryApiResponse>(
+    [
+      `/organizations/${organization.slug}/metrics/query/`,
+      {query: queryToSend, data: body, method: 'POST'},
+    ],
+    {
+      retry: 0,
+      staleTime: 0,
+      refetchOnReconnect: true,
+      refetchOnWindowFocus: true,
+      refetchInterval: false,
+      enabled: queryIsComplete,
+    }
+  );
+}
diff --git a/static/app/views/dashboards/datasetConfig/metrics.tsx b/static/app/views/dashboards/datasetConfig/metrics.tsx
index 53e74ff761c5c3..423f6b285cbb9d 100644
--- a/static/app/views/dashboards/datasetConfig/metrics.tsx
+++ b/static/app/views/dashboards/datasetConfig/metrics.tsx
@@ -23,7 +23,7 @@ import {
   parseField,
   parseMRI,
 } from 'sentry/utils/metrics/mri';
-import {getMetricsQueryApiRequestPayload} from 'sentry/utils/metrics/useMetricsData';
+import {getMetricsQueryApiRequestPayload} from 'sentry/utils/metrics/useMetricsQuery';
 import type {OnDemandControlContext} from 'sentry/utils/performance/contexts/onDemandControl';
 import {MetricSearchBar} from 'sentry/views/dashboards/widgetBuilder/buildSteps/filterResultsStep/metricSearchBar';
 import type {FieldValueOption} from 'sentry/views/discover/table/queryField';
@@ -402,17 +402,19 @@ function getMetricRequest(
   }
 
   const payload = getMetricsQueryApiRequestPayload(
-    {
-      field: query.aggregates[0],
-      query: query.conditions || undefined,
-      groupBy: query.columns || undefined,
-      orderBy: query.orderby
-        ? query.orderby.indexOf('-') === 0
-          ? 'desc'
-          : 'asc'
-        : undefined,
-      limit: limit || undefined,
-    },
+    [
+      {
+        field: query.aggregates[0],
+        query: query.conditions || undefined,
+        groupBy: query.columns || undefined,
+        orderBy: query.orderby
+          ? query.orderby.indexOf('-') === 0
+            ? 'desc'
+            : 'asc'
+          : undefined,
+        limit: limit || undefined,
+      },
+    ],
     pageFilters,
     {
       intervalLadder: displayType === DisplayType.BAR ?
'bar' : 'dashboard', diff --git a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx index da7724be2cb3fc..c4a2bfe3835ce7 100644 --- a/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx +++ b/static/app/views/dashboards/widgetCard/metricWidgetCard/index.tsx @@ -18,7 +18,7 @@ import { MetricDisplayType, type MetricWidgetQueryParams, } from 'sentry/utils/metrics/types'; -import {useMetricsQueryZoom} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsQuery'; import {WidgetCardPanel, WidgetTitleRow} from 'sentry/views/dashboards/widgetCard'; import type {AugmentedEChartDataZoomHandler} from 'sentry/views/dashboards/widgetCard/chart'; import {DashboardsMEPContext} from 'sentry/views/dashboards/widgetCard/dashboardsMEPContext'; @@ -222,12 +222,16 @@ export function MetricWidgetChartContainer({ isLoading, isError, error, - } = useMetricsQueryZoom( + } = useMetricsQuery( + [ + { + mri, + op, + query: extendQuery(metricWidgetQueryParams.query, dashboardFilters), + groupBy, + }, + ], { - mri, - op, - query: extendQuery(metricWidgetQueryParams.query, dashboardFilters), - groupBy, projects, environments, datetime, diff --git a/static/app/views/ddm/createAlertModal.tsx b/static/app/views/ddm/createAlertModal.tsx index b89929ec432099..08a2a9b5a6f048 100644 --- a/static/app/views/ddm/createAlertModal.tsx +++ b/static/app/views/ddm/createAlertModal.tsx @@ -1,5 +1,6 @@ import {Fragment, useCallback, useMemo, useState} from 'react'; import styled from '@emotion/styled'; +import pick from 'lodash/pick'; import * as qs from 'query-string'; import type {ModalRenderProps} from 'sentry/actionCreators/modal'; @@ -31,7 +32,7 @@ import { parseMRI, } from 'sentry/utils/metrics/mri'; import type {MetricsQuery} from 'sentry/utils/metrics/types'; -import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsQuery'; import useOrganization from 'sentry/utils/useOrganization'; import useProjects from 'sentry/utils/useProjects'; import useRouter from 'sentry/utils/useRouter'; @@ -140,13 +141,11 @@ export function CreateAlertModal({Header, Body, Footer, metricsQuery}: Props) { const aggregate = useMemo(() => getAlertAggregate(metricsQuery), [metricsQuery]); const {data, isLoading, refetch, isError} = useMetricsQuery( + [pick(metricsQuery, 'op', 'mri', 'query')], { - mri: metricsQuery.mri, - op: metricsQuery.op, projects: formState.project ? [parseInt(formState.project, 10)] : [], environments: formState.environment ? 
[formState.environment] : [], datetime: {period: alertPeriod} as PageFilters['datetime'], - query: metricsQuery.query, }, { interval: alertInterval, diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 6be62d8c45ac02..56668cb9c1a06e 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -32,7 +32,7 @@ import type { import {MetricDisplayType} from 'sentry/utils/metrics/types'; import {useIncrementQueryMetric} from 'sentry/utils/metrics/useIncrementQueryMetric'; import {useMetricSamples} from 'sentry/utils/metrics/useMetricsCorrelations'; -import {useMetricsQueryZoom} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsQuery'; import {MetricChart} from 'sentry/views/ddm/chart'; import type {FocusAreaProps} from 'sentry/views/ddm/context'; import {createChartPalette} from 'sentry/views/ddm/metricsChartPalette'; @@ -245,12 +245,9 @@ const MetricWidgetBody = memo( isLoading, isError, error, - } = useMetricsQueryZoom( + } = useMetricsQuery( + [{mri, op, query, groupBy}], { - mri, - op, - query, - groupBy, projects, environments, datetime, diff --git a/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx b/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx index a6f616afd66d44..a3edc88c3ca74f 100644 --- a/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx +++ b/static/app/views/settings/projectMetrics/projectMetricsDetails.tsx @@ -27,7 +27,7 @@ import {getReadableMetricType} from 'sentry/utils/metrics/formatters'; import {formatMRI, formatMRIField, MRIToField, parseMRI} from 'sentry/utils/metrics/mri'; import {MetricDisplayType} from 'sentry/utils/metrics/types'; import {useBlockMetric} from 'sentry/utils/metrics/useBlockMetric'; -import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsData'; +import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsQuery'; import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags'; import routeTitleGen from 'sentry/utils/routeTitle'; import {CodeLocations} from 'sentry/views/ddm/codeLocations'; @@ -76,6 +76,7 @@ function ProjectMetricsDetails({project, params, organization}: Props) { const {type, name, unit} = parseMRI(mri) ?? {}; const operation = getSettingsOperationForType(type ?? 'c'); const {data: metricsData, isLoading} = useMetricsQuery( + [{mri, op: operation}], { datetime: { period: '30d', @@ -84,9 +85,7 @@ function ProjectMetricsDetails({project, params, organization}: Props) { utc: false, }, environments: [], - mri, projects: projectIds, - op: operation, }, {interval: '1d'} );
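
To make the multi-query payload concrete, here is a minimal sketch (not part of the patches above) of what the new `getMetricsQueryApiRequestPayload` produces for two queries. The function, its argument shape, and the `query_${index + 1}` naming come from `useMetricsQuery.tsx`; the MRIs and page-filter values are hypothetical.

import type {PageFilters} from 'sentry/types';
import {getMetricsQueryApiRequestPayload} from 'sentry/utils/metrics/useMetricsQuery';

// Hypothetical page filters; any valid PageFilters value works here.
const pageFilters: PageFilters = {
  projects: [1],
  environments: ['prod'],
  datetime: {start: '2024-01-01', end: '2024-01-02', period: null, utc: true},
};

const {query, body} = getMetricsQueryApiRequestPayload(
  [
    // A grouped query: its formula carries the order ('asc') and limit.
    {
      field: 'avg(d:transactions/duration@millisecond)',
      groupBy: ['platform'],
      orderBy: 'asc',
      limit: 5,
    },
    // An ungrouped query with an explicit name: no order is attached.
    {field: 'max(d:transactions/duration@millisecond)', name: 'max_duration'},
  ],
  pageFilters
);

// body.queries  -> [{name: 'query_1', mql: 'avg(d:transactions/duration@millisecond) by (platform)'},
//                   {name: 'max_duration', mql: 'max(d:transactions/duration@millisecond)'}]
// body.formulas -> [{mql: '$query_1', limit: 5, order: 'asc'},
//                   {mql: '$max_duration', limit: undefined, order: undefined}]
// query         -> start/end (or statsPeriod), project, environment, plus the
//                  interval derived from the first query's useCase.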
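Likewise, a hypothetical component-level usage of the reworked `useMetricsQuery` hook; `usePageFilters` is Sentry's existing page-filter hook, while the component name, MRI, and operations are made up for illustration. Because the hook passes `enabled: queryIsComplete` to `useApiQuery`, the request only fires once every entry has an `op`.

import {useMetricsQuery} from 'sentry/utils/metrics/useMetricsQuery';
import usePageFilters from 'sentry/utils/usePageFilters';

function DurationOverview() {
  // Current page filters (projects, environments, datetime) from the store.
  const {selection} = usePageFilters();

  // Two queries issued as a single /metrics/query/ request; MRIToField turns
  // each (op, mri) pair back into the `field` the payload helper expects.
  const {data, isLoading, isError} = useMetricsQuery(
    [
      {mri: 'd:transactions/duration@millisecond', op: 'avg', groupBy: ['platform']},
      {mri: 'd:transactions/duration@millisecond', op: 'p95'},
    ],
    selection
  );

  if (isLoading || isError || !data) {
    return null;
  }

  // `data.data` holds one list of groups per query; `data.intervals` is shared.
  return <span>{data.data.length} queries loaded</span>;
}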