Skip to content

Commit 40ccecb

Browse files
committed
feat(group-by-alerts): add group by alerts metric issue functionality
1 parent cebc964 commit 40ccecb

File tree

4 files changed

+176
-12
lines changed

4 files changed

+176
-12
lines changed

src/sentry/incidents/grouptype.py

Lines changed: 41 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,12 @@
2727
from sentry.workflow_engine.models.data_condition import Condition, DataCondition
2828
from sentry.workflow_engine.models.data_source import DataPacket
2929
from sentry.workflow_engine.processors.data_condition_group import ProcessedDataConditionGroup
30-
from sentry.workflow_engine.types import DetectorException, DetectorPriorityLevel, DetectorSettings
30+
from sentry.workflow_engine.types import (
31+
DetectorException,
32+
DetectorGroupKey,
33+
DetectorPriorityLevel,
34+
DetectorSettings,
35+
)
3136

3237
logger = logging.getLogger(__name__)
3338

@@ -189,6 +194,7 @@ def create_occurrence(
189194
evaluation_result: ProcessedDataConditionGroup,
190195
data_packet: DataPacket[MetricUpdate],
191196
priority: DetectorPriorityLevel,
197+
group_key: DetectorGroupKey | None = None,
192198
) -> tuple[DetectorOccurrence, EventData]:
193199
try:
194200
detector_trigger = DataCondition.objects.get(
@@ -223,7 +229,7 @@ def create_occurrence(
223229
return (
224230
DetectorOccurrence(
225231
issue_title=self.detector.name,
226-
subtitle=self.construct_title(snuba_query, detector_trigger, priority),
232+
subtitle=self.construct_title(snuba_query, detector_trigger, priority, group_key),
227233
evidence_data={
228234
**self.build_detector_evidence_data(evaluation_result, data_packet, priority),
229235
},
@@ -243,15 +249,40 @@ def extract_dedupe_value(self, data_packet: DataPacket[MetricUpdate]) -> int:
243249
def extract_value(self, data_packet: DataPacket[MetricUpdate]) -> MetricResult:
    """Extract the evaluated value(s) from a subscription data packet.

    Returns a mapping of group key -> value when the packet carries
    grouped (group-by) results, otherwise the single scalar value, or the
    raw payload when no scalar "value" is present.
    """
    # this is a bit of a hack - anomaly detection data packets send extra data we need to pass along
    values = data_packet.packet.values
    # Check if this is grouped data returned in our data packet.
    if "groups" in values:
        # Fan the grouped results out as dict[DetectorGroupKey, int]; each
        # group key is a deterministic fingerprint of the group-by columns.
        return {
            self._create_group_key(group_data.get("group_keys", {})): group_data.get("value", 0)
            for group_data in values["groups"]
        }
    # Ungrouped packet: prefer the scalar "value"; fall back to the raw
    # payload so packets with extra fields are passed along unchanged.
    value = values.get("value")
    return value if value is not None else values
269+
270+
def _create_group_key(self, group_keys: dict[str, str]) -> str:
271+
"""Create a deterministic group key from group keys"""
272+
if not group_keys:
273+
return None
274+
275+
# Sort keys for deterministic fingerprint.
276+
sorted_items = sorted(group_keys.items())
277+
key_string = ",".join(f"{k}={v}" for k, v in sorted_items)
278+
return key_string
249279

250280
def construct_title(
251281
self,
252282
snuba_query: SnubaQuery,
253283
detector_trigger: DataCondition,
254284
priority: DetectorPriorityLevel,
285+
group_key: DetectorGroupKey | None = None,
255286
) -> str:
256287
comparison_delta = self.detector.config.get("comparison_delta")
257288
detection_type = self.detector.config.get("detection_type")
@@ -308,8 +339,12 @@ def construct_title(
308339
else:
309340
comparison = detector_trigger.comparison
310341

311-
template = "{label}: {metric} in the last {time_window} {higher_or_lower} {comparison}"
342+
template = (
343+
"{group_key} {label}: {metric} in the last {time_window} {higher_or_lower} {comparison}"
344+
)
345+
312346
return template.format(
347+
group_key=group_key,
313348
label=label.capitalize(),
314349
metric=aggregate,
315350
higher_or_lower=higher_or_lower,

src/sentry/workflow_engine/handlers/detector/base.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,7 @@ def create_occurrence(
115115
evaluation_result: ProcessedDataConditionGroup,
116116
data_packet: DataPacket[DataPacketType],
117117
priority: DetectorPriorityLevel,
118+
group_key: DetectorGroupKey | None = None,
118119
) -> tuple[DetectorOccurrence, EventData]:
119120
"""
120121
This method provides the value that was evaluated against, the data packet that was

src/sentry/workflow_engine/handlers/detector/stateful.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -522,7 +522,7 @@ def _build_detector_evaluation_result(
522522
else:
523523
# Call the `create_occurrence` method to create the detector occurrence.
524524
detector_occurrence, event_data = self.create_occurrence(
525-
condition_results, data_packet, new_priority
525+
condition_results, data_packet, new_priority, group_key
526526
)
527527
detector_result = self._create_decorated_issue_occurrence(
528528
data_packet,

tests/sentry/incidents/utils/test_metric_issue_base.py

Lines changed: 133 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,26 @@
1-
from datetime import UTC, datetime, timedelta
1+
from datetime import UTC, datetime, timedelta, timezone
22

3-
from sentry.incidents.grouptype import MetricIssue
3+
from sentry.incidents.grouptype import MetricIssue, MetricIssueDetectorHandler
44
from sentry.incidents.utils.constants import INCIDENTS_SNUBA_SUBSCRIPTION_TYPE
55
from sentry.incidents.utils.types import (
66
DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
77
ProcessedSubscriptionUpdate,
8+
QuerySubscriptionUpdate,
89
)
910
from sentry.issues.issue_occurrence import IssueOccurrence
1011
from sentry.issues.status_change_message import StatusChangeMessage
1112
from sentry.snuba.dataset import Dataset
1213
from sentry.snuba.models import SnubaQuery, SnubaQueryEventType
1314
from sentry.snuba.subscriptions import create_snuba_query, create_snuba_subscription
1415
from sentry.testutils.cases import TestCase
15-
from sentry.workflow_engine.models import DataPacket
16-
from sentry.workflow_engine.models.data_condition import Condition
16+
from sentry.testutils.helpers.datetime import freeze_time
17+
from sentry.workflow_engine.models import Condition, DataCondition, DataPacket
1718
from sentry.workflow_engine.processors.data_packet import process_data_packet
18-
from sentry.workflow_engine.types import DetectorEvaluationResult, DetectorPriorityLevel
19+
from sentry.workflow_engine.types import (
20+
DetectorEvaluationResult,
21+
DetectorGroupKey,
22+
DetectorPriorityLevel,
23+
)
1924

2025

2126
class BaseMetricIssueTest(TestCase):
@@ -97,3 +102,126 @@ def process_packet_and_return_result(
97102
return None
98103
evaluation_result: DetectorEvaluationResult = results[0][1][self.detector_group_key]
99104
return evaluation_result.result
105+
106+
107+
@freeze_time()
class TestEvaluateMetricDetectorGroupBy(BaseMetricIssueTest):
    """Tests for metric-issue detector evaluation of group-by (grouped)
    subscription updates: each group in the packet should produce its own
    evaluation result keyed by the deterministic group key."""

    def setUp(self):
        # Build a metric-issue detector with HIGH (>5) and MEDIUM (>3)
        # triggers, plus a Snuba subscription grouped by "level".
        super().setUp()
        self.detector = self.create_detector(
            project=self.project,
            workflow_condition_group=self.create_data_condition_group(),
            type=MetricIssue.slug,
            created_by_id=self.user.id,
        )
        self.critical_detector_trigger = self.create_data_condition(
            type=Condition.GREATER,
            comparison=5,
            condition_result=DetectorPriorityLevel.HIGH,
            condition_group=self.detector.workflow_condition_group,
        )
        self.warning_detector_trigger = self.create_data_condition(
            comparison=3,
            type=Condition.GREATER,
            condition_result=DetectorPriorityLevel.MEDIUM,
            condition_group=self.detector.workflow_condition_group,
        )
        with self.tasks():
            self.snuba_query = create_snuba_query(
                query_type=SnubaQuery.Type.ERROR,
                dataset=Dataset.Events,
                query="hello",
                aggregate="count()",
                time_window=timedelta(minutes=1),
                resolution=timedelta(minutes=1),
                environment=self.environment,
                event_types=[SnubaQueryEventType.EventType.ERROR],
                group_by=["level"],
            )
            self.query_subscription = create_snuba_subscription(
                project=self.detector.project,
                subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
                snuba_query=self.snuba_query,
            )
        self.alert_rule = self.create_alert_rule()
        self.create_alert_rule_detector(alert_rule_id=self.alert_rule.id, detector=self.detector)

        self.handler = MetricIssueDetectorHandler(self.detector)

    def test_metric_issue_occurrence(self):
        # Two groups, both above their respective trigger thresholds, so
        # both should surface as occurrences keyed by "level=<value>".
        self.detector_group_keys = ["level=error", "level=warning"]
        self.detector_group_values = {
            "groups": [
                {
                    "group_keys": {"level": "error"},
                    "value": self.critical_detector_trigger.comparison + 1,
                },
                {
                    "group_keys": {"level": "warning"},
                    "value": self.warning_detector_trigger.comparison + 2,
                },
            ],
        }
        self.threshold_values = ["Critical", "Warning"]
        self.comparison_values = [5, 3]

        # NOTE(review): datetime.now(timezone.utc) here while the file also
        # imports UTC — presumably equivalent; confirm and unify the import.
        packet = ProcessedSubscriptionUpdate(
            entity="entity",
            subscription_id=str(self.query_subscription.id),
            values=self.detector_group_values,
            timestamp=datetime.now(timezone.utc),
        )
        data_packet = DataPacket[QuerySubscriptionUpdate](
            source_id=str(self.query_subscription.id), packet=packet
        )

        result: dict[DetectorGroupKey, DetectorEvaluationResult] = self.handler.evaluate(
            data_packet
        )
        # The four lists are index-aligned per group (key, raw group data,
        # expected severity label, trigger comparison value).
        for detector_group_key, detector_group_value, threshold_value, comparison_value in zip(
            self.detector_group_keys,
            self.detector_group_values["groups"],
            self.threshold_values,
            self.comparison_values,
        ):
            evaluation_result: DetectorEvaluationResult = result[detector_group_key]
            assert isinstance(evaluation_result.result, IssueOccurrence)
            occurrence: IssueOccurrence = evaluation_result.result
            assert occurrence is not None
            assert occurrence.issue_title == self.detector.name
            # The subtitle is prefixed with the group key by construct_title.
            assert (
                occurrence.subtitle
                == f"{detector_group_key} {threshold_value}: Number of events in the last minute above {comparison_value}"
            )
            assert occurrence.level == "error"

    def generate_evidence_data(
        self,
        value: int,
        detector_trigger: DataCondition,
        extra_trigger: DataCondition | None = None,
    ):
        """Build the expected evidence_data payload for an occurrence with
        the given trigger(s).

        NOTE(review): the `value` parameter is never used in the body
        (evidence "value" comes from detector_trigger.condition_result) —
        confirm whether it should be, or drop it.
        """
        evidence_data = {
            "detector_id": self.detector.id,
            "value": detector_trigger.condition_result,
            "alert_id": self.alert_rule.id,
            "data_packet_source_id": str(self.query_subscription.id),
            "conditions": [
                {
                    "id": detector_trigger.id,
                    "type": detector_trigger.type,
                    "comparison": detector_trigger.comparison,
                    "condition_result": detector_trigger.condition_result.value,
                },
            ],
        }
        if extra_trigger:
            evidence_data["conditions"].append(
                {
                    "id": extra_trigger.id,
                    "type": extra_trigger.type,
                    "comparison": extra_trigger.comparison,
                    "condition_result": extra_trigger.condition_result.value,
                }
            )
        return evidence_data

0 commit comments

Comments
 (0)