Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,95 +1,132 @@
from collections import defaultdict
from collections.abc import Mapping, Sequence
from typing import Any, DefaultDict
from datetime import datetime
from typing import Any, ClassVar, DefaultDict

from django.contrib.auth.models import AnonymousUser
from django.db.models import Subquery

from sentry.api.serializers import Serializer, serialize
from sentry.incidents.endpoints.serializers.incident import (
DetailedIncidentSerializerResponse,
IncidentSerializer,
IncidentSerializerResponse,
)
from sentry.incidents.models.incident import Incident
from sentry.incidents.endpoints.serializers.utils import get_fake_id_from_object_id
from sentry.incidents.models.incident import IncidentStatus, IncidentStatusMethod, IncidentType
from sentry.models.groupopenperiod import GroupOpenPeriod
from sentry.models.groupopenperiodactivity import GroupOpenPeriodActivity
from sentry.snuba.entity_subscription import apply_dataset_query_conditions
from sentry.snuba.models import QuerySubscription, SnubaQuery
from sentry.types.group import PriorityLevel
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
from sentry.workflow_engine.endpoints.serializers.group_open_period_serializer import (
GroupOpenPeriodActivitySerializer,
)
from sentry.workflow_engine.models import (
Action,
DataCondition,
DataConditionGroupAction,
AlertRuleDetector,
DataSourceDetector,
Detector,
DetectorWorkflow,
DetectorGroup,
IncidentGroupOpenPeriod,
WorkflowDataConditionGroup,
)
from sentry.workflow_engine.models.workflow_action_group_status import WorkflowActionGroupStatus


class WorkflowEngineIncidentSerializer(Serializer):
def __init__(self, expand=None):
    # Normalize the requested expansions up front; None (or any falsy
    # value) means "no expansions requested".
    self.expand = [] if not expand else expand

# Maps a group PriorityLevel value to the legacy IncidentStatus value it
# presents as. Priorities without an entry (e.g. LOW) raise KeyError when
# looked up in get_incident_status.
priority_to_incident_status: ClassVar[dict[int, int]] = {
    PriorityLevel.HIGH.value: IncidentStatus.CRITICAL.value,
    PriorityLevel.MEDIUM.value: IncidentStatus.WARNING.value,
}

def get_incident_status(self, priority: int | None, date_ended: datetime | None) -> int:
    """
    Translate a group's priority and open-period end time into a legacy
    IncidentStatus value.

    :param priority: the group's PriorityLevel value; must not be None.
    :param date_ended: when the open period closed, or None if still open.
    :returns: an ``IncidentStatus`` value.
    :raises ValueError: if ``priority`` is None.
    :raises KeyError: if ``priority`` has no mapping (e.g. LOW) and the
        period is still open.
    """
    if priority is None:
        raise ValueError("Priority is required to get an incident status")

    # A closed open period always reads as CLOSED, regardless of priority.
    if date_ended:
        return IncidentStatus.CLOSED.value

    return self.priority_to_incident_status[priority]

def get_attrs(
    self,
    item_list: Sequence[GroupOpenPeriod],
    user: User | RpcUser | AnonymousUser,
    **kwargs: Any,
) -> defaultdict[GroupOpenPeriod, dict[str, Any]]:
    """
    Build the per-open-period attributes used by serialize(): the owning
    project's slug, the period's detector serialized to look like a legacy
    alert rule, and — when "activities" is in self.expand — the period's
    activity log.
    """
    # Imported locally to avoid a circular import with the detector serializer.
    from sentry.incidents.endpoints.serializers.workflow_engine_detector import (
        WorkflowEngineDetectorSerializer,
    )

    # No default_factory, so this behaves like a plain dict; kept for the
    # declared defaultdict return type.
    results: DefaultDict[GroupOpenPeriod, dict[str, Any]] = defaultdict()
    open_periods_to_detectors = self.get_open_periods_to_detectors(item_list)
    # We serialize detectors to look like alert rules; key by the string id
    # the detector serializer emits.
    alert_rules = {
        alert_rule["id"]: alert_rule
        for alert_rule in serialize(
            list(open_periods_to_detectors.values()),
            user,
            WorkflowEngineDetectorSerializer(expand=self.expand),
        )
    }
    alert_rule_detectors = AlertRuleDetector.objects.filter(
        detector__in=list(open_periods_to_detectors.values())
    ).values_list("alert_rule_id", "detector_id")

    detector_ids_to_alert_rule_ids = {
        detector_id: alert_rule_id for alert_rule_id, detector_id in alert_rule_detectors
    }

    for open_period in item_list:
        results[open_period] = {"projects": [open_period.project.slug]}

        detector = open_periods_to_detectors.get(open_period)
        if detector is None:
            # The period's group has no detector linkage; there is no alert
            # rule to attach, so degrade gracefully instead of raising.
            results[open_period]["alert_rule"] = None
            continue

        # Detectors migrated from legacy alert rules keep the real alert rule
        # id; workflow-engine-native detectors get a deterministic fake id.
        if detector.id in detector_ids_to_alert_rule_ids:
            alert_rule_id = detector_ids_to_alert_rule_ids[detector.id]
        else:
            alert_rule_id = get_fake_id_from_object_id(detector.id)

        results[open_period]["alert_rule"] = alert_rules.get(str(alert_rule_id))

    if "activities" in self.expand:
        # Cap the query to keep the payload bounded for long-lived periods.
        gopas = list(
            GroupOpenPeriodActivity.objects.filter(group_open_period__in=item_list)[:1000]
        )
        open_period_activities = defaultdict(list)
        # XXX: the incident endpoint is undocumented, so we aren't on the hook
        # for supporting any specific payloads. Since this isn't used on the
        # Sentry side for notification charts, we just reuse the
        # GroupOpenPeriodActivity serializer.
        for gopa, serialized_activity in zip(
            gopas,
            serialize(gopas, user=user, serializer=GroupOpenPeriodActivitySerializer()),
        ):
            open_period_activities[gopa.group_open_period_id].append(serialized_activity)
        for open_period in item_list:
            results[open_period]["activities"] = open_period_activities[open_period.id]

    return results

def get_open_periods_to_detectors(
    self, open_periods: Sequence[GroupOpenPeriod]
) -> dict[GroupOpenPeriod, Detector]:
    """
    Map each open period to its detector by way of the period's group:
    open period -> group -> detector, via DetectorGroup.

    Open periods whose group has no DetectorGroup row are omitted from the
    result rather than raising, so callers must tolerate missing keys.
    """
    groups = [op.group for op in open_periods]

    # Group -> all of its open periods, so one DetectorGroup lookup can fan
    # out to every period of that group.
    group_to_open_periods: defaultdict[Any, list[GroupOpenPeriod]] = defaultdict(list)
    for op in open_periods:
        group_to_open_periods[op.group].append(op)

    detector_groups = DetectorGroup.objects.filter(group__in=groups).select_related(
        "group", "detector"
    )
    groups_to_detectors = {dg.group: dg.detector for dg in detector_groups}

    open_periods_to_detectors: dict[GroupOpenPeriod, Detector] = {}
    for group, ops in group_to_open_periods.items():
        detector = groups_to_detectors.get(group)
        if detector is None:
            # A group may legitimately lack a DetectorGroup record; skip it
            # instead of raising KeyError for the whole batch.
            continue
        for op in ops:
            open_periods_to_detectors[op] = detector

    return open_periods_to_detectors

def serialize(
    self,
    obj: GroupOpenPeriod,
    attrs: Mapping[str, Any],
    user: User | RpcUser | AnonymousUser,
    **kwargs: Any,
) -> IncidentSerializerResponse:
    """
    Temporary serializer to take a GroupOpenPeriod and serialize it for the
    old incident endpoint.
    """
    try:
        # Prefer the legacy incident linkage when it exists so ids and
        # identifiers stay stable for migrated data.
        igop = IncidentGroupOpenPeriod.objects.get(group_open_period=obj)
        incident_id = igop.incident_id
        incident_identifier = igop.incident_identifier
    except IncidentGroupOpenPeriod.DoesNotExist:
        # No legacy incident exists: derive a deterministic fake id from the
        # open period itself and reuse it as the identifier.
        incident_id = get_fake_id_from_object_id(obj.id)
        incident_identifier = incident_id

    # Truncate to the minute to match the legacy incident payload's precision.
    date_closed = obj.date_ended.replace(second=0, microsecond=0) if obj.date_ended else None
    return {
        "id": str(incident_id),
        "identifier": str(incident_identifier),
        "organizationId": str(obj.project.organization.id),
        "projects": attrs["projects"],
        "alertRule": attrs["alert_rule"],
        "activities": attrs["activities"] if "activities" in self.expand else None,
        "status": self.get_incident_status(obj.group.priority, obj.date_ended),
        "statusMethod": (
            IncidentStatusMethod.RULE_TRIGGERED.value
            if not date_closed
            else IncidentStatusMethod.RULE_UPDATED.value
        ),
        "type": IncidentType.ALERT_TRIGGERED.value,
        "title": obj.group.title,
        # NOTE(review): technically this should be date_started minus one
        # snuba time window (same for dateClosed), but pulling snuba into
        # this serializer was judged unnecessarily complicated — confirm.
        "dateStarted": obj.date_started,
        "dateDetected": obj.date_started,
        "dateCreated": obj.date_added,
        "dateClosed": date_closed,
    }


class WorkflowEngineDetailedIncidentSerializer(WorkflowEngineIncidentSerializer):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ def setUp(self) -> None:
self.alert_rule
)

self.create_detector_group(detector=self.detector, group=self.group)
self.expected_critical_action = [
{
"id": str(self.critical_trigger_action.id),
Expand Down
103 changes: 89 additions & 14 deletions tests/sentry/incidents/serializers/test_workflow_engine_incident.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,17 @@
from sentry.api.serializers import serialize
from sentry.incidents.endpoints.serializers.alert_rule import DetailedAlertRuleSerializer
from sentry.incidents.endpoints.serializers.utils import get_fake_id_from_object_id
from sentry.incidents.endpoints.serializers.workflow_engine_incident import (
WorkflowEngineDetailedIncidentSerializer,
WorkflowEngineIncidentSerializer,
)
from sentry.incidents.models.incident import Incident
from sentry.incidents.models.incident import IncidentStatus, IncidentStatusMethod, IncidentType
from sentry.models.groupopenperiodactivity import GroupOpenPeriodActivity, OpenPeriodActivityType
from sentry.types.group import PriorityLevel
from sentry.workflow_engine.models import (
ActionAlertRuleTriggerAction,
AlertRuleDetector,
DataConditionAlertRuleTrigger,
)
from tests.sentry.incidents.serializers.test_workflow_engine_base import (
TestWorkflowEngineSerializer,
)
def setUp(self) -> None:
    """Build the expected incident payload for a group open period."""
    super().setUp()
    self.add_warning_trigger()
    self.add_incident_data()
    self.incident_identifier = str(self.incident_group_open_period.incident_identifier)
    # Expected payload mirrors WorkflowEngineIncidentSerializer.serialize:
    # values come from the open period / group rather than a legacy Incident.
    self.incident_expected = {
        "id": str(self.incident_group_open_period.incident_id),
        "identifier": self.incident_identifier,
        "organizationId": str(self.group_open_period.project.organization_id),
        "projects": [self.project.slug],
        # The detector serialized to look like an alert rule.
        "alertRule": self.expected,
        "activities": None,
        "status": IncidentStatus.CRITICAL.value,
        "statusMethod": IncidentStatusMethod.RULE_TRIGGERED.value,
        "type": IncidentType.ALERT_TRIGGERED.value,
        "title": self.group.title,
        "dateStarted": self.group_open_period.date_started,
        "dateDetected": self.group_open_period.date_started,
        "dateCreated": self.group_open_period.date_added,
        "dateClosed": None,
    }

def test_simple(self) -> None:
Expand All @@ -47,3 +52,73 @@ def test_detailed(self) -> None:
)
self.incident_expected["discoverQuery"] = "(event.type:error) AND (level:error)"
assert serialized_incident == self.incident_expected

def test_no_incident(self) -> None:
    """
    Assert that nothing breaks if the legacy models do not exist.
    """
    # Remove every legacy linkage so the serializer must fall back to
    # deterministic fake ids derived from the workflow-engine objects.
    self.incident_group_open_period.delete()
    ard = AlertRuleDetector.objects.filter(detector_id=self.detector.id)
    dcart = DataConditionAlertRuleTrigger.objects.filter(
        data_condition_id=self.critical_detector_trigger.id
    )
    aarta = ActionAlertRuleTriggerAction.objects.filter(action_id=self.critical_action.id)

    ard.delete()
    dcart.delete()
    aarta.delete()

    serialized_incident = serialize(
        self.group_open_period, self.user, WorkflowEngineIncidentSerializer()
    )
    fake_alert_rule_id = get_fake_id_from_object_id(self.detector.id)
    fake_incident_id = get_fake_id_from_object_id(self.group_open_period.id)
    # Rewrite the expected alert rule payload to use the fake ids.
    self.expected.update({"id": str(fake_alert_rule_id)})
    self.expected["triggers"][0].update(
        {
            "id": str(get_fake_id_from_object_id(self.critical_detector_trigger.id)),
            "alertRuleId": str(fake_alert_rule_id),
        }
    )
    self.expected["triggers"][1].update(
        {
            "alertRuleId": str(fake_alert_rule_id),
        }
    )
    self.expected["triggers"][0]["actions"][0].update(
        {
            "id": str(get_fake_id_from_object_id(self.critical_action.id)),
            "alertRuleTriggerId": str(
                get_fake_id_from_object_id(self.critical_detector_trigger.id)
            ),
        }
    )

    # With no IncidentGroupOpenPeriod, the identifier falls back to the id.
    self.incident_expected.update(
        {
            "id": str(fake_incident_id),
            "identifier": str(fake_incident_id),
        }
    )
    assert serialized_incident == self.incident_expected

def test_with_activities(self) -> None:
    """Expanding "activities" includes serialized open-period activities."""
    gopa = GroupOpenPeriodActivity.objects.create(
        date_added=self.group_open_period.date_added,
        group_open_period=self.group_open_period,
        type=OpenPeriodActivityType.OPENED,
        value=self.group.priority,
    )
    serialized_incident = serialize(
        self.group_open_period,
        self.user,
        WorkflowEngineIncidentSerializer(expand=["activities"]),
    )
    assert len(serialized_incident["activities"]) == 1
    serialized_activity = serialized_incident["activities"][0]
    # The activity payload comes from GroupOpenPeriodActivitySerializer.
    assert serialized_activity == {
        "id": str(gopa.id),
        "type": OpenPeriodActivityType.OPENED.to_str(),
        "value": PriorityLevel(self.group.priority).to_str(),
        "dateCreated": gopa.date_added,
    }
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ def create_models(self):

self.group.priority = PriorityLevel.HIGH.value
self.group.save()
self.create_detector_group(detector=self.detector, group=self.group)
self.open_period, _ = GroupOpenPeriod.objects.get_or_create(
group=self.group,
project=self.project,
Expand Down
Loading