|
1 |
| -from datetime import UTC, datetime, timedelta |
| 1 | +from datetime import UTC, datetime, timedelta, timezone |
2 | 2 |
|
3 |
| -from sentry.incidents.grouptype import MetricIssue |
| 3 | +from sentry.incidents.grouptype import MetricIssue, MetricIssueDetectorHandler |
4 | 4 | from sentry.incidents.utils.constants import INCIDENTS_SNUBA_SUBSCRIPTION_TYPE
|
5 | 5 | from sentry.incidents.utils.types import (
|
6 | 6 | DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION,
|
7 | 7 | ProcessedSubscriptionUpdate,
|
| 8 | + QuerySubscriptionUpdate, |
8 | 9 | )
|
9 | 10 | from sentry.issues.issue_occurrence import IssueOccurrence
|
10 | 11 | from sentry.issues.status_change_message import StatusChangeMessage
|
11 | 12 | from sentry.snuba.dataset import Dataset
|
12 | 13 | from sentry.snuba.models import SnubaQuery, SnubaQueryEventType
|
13 | 14 | from sentry.snuba.subscriptions import create_snuba_query, create_snuba_subscription
|
14 | 15 | from sentry.testutils.cases import TestCase
|
15 |
| -from sentry.workflow_engine.models import DataPacket |
16 |
| -from sentry.workflow_engine.models.data_condition import Condition |
| 16 | +from sentry.testutils.helpers.datetime import freeze_time |
| 17 | +from sentry.workflow_engine.models import Condition, DataCondition, DataPacket |
17 | 18 | from sentry.workflow_engine.processors.data_packet import process_data_packet
|
18 |
| -from sentry.workflow_engine.types import DetectorEvaluationResult, DetectorPriorityLevel |
| 19 | +from sentry.workflow_engine.types import ( |
| 20 | + DetectorEvaluationResult, |
| 21 | + DetectorGroupKey, |
| 22 | + DetectorPriorityLevel, |
| 23 | +) |
19 | 24 |
|
20 | 25 |
|
21 | 26 | class BaseMetricIssueTest(TestCase):
|
@@ -97,3 +102,126 @@ def process_packet_and_return_result(
|
97 | 102 | return None
|
98 | 103 | evaluation_result: DetectorEvaluationResult = results[0][1][self.detector_group_key]
|
99 | 104 | return evaluation_result.result
|
| 105 | + |
| 106 | + |
@freeze_time()
class TestEvaluateMetricDetectorGroupBy(BaseMetricIssueTest):
    """Tests for evaluating a metric detector against group-by subscription updates.

    Builds a detector with a critical (HIGH, > 5) and a warning (MEDIUM, > 3)
    trigger, backed by a Snuba error-count subscription grouped by ``level``,
    then verifies that ``MetricIssueDetectorHandler.evaluate`` produces one
    ``IssueOccurrence`` per group key.
    """

    def setUp(self) -> None:
        super().setUp()
        self.detector = self.create_detector(
            project=self.project,
            workflow_condition_group=self.create_data_condition_group(),
            type=MetricIssue.slug,
            created_by_id=self.user.id,
        )
        # Critical fires above 5; warning fires above 3 — both on the same group.
        self.critical_detector_trigger = self.create_data_condition(
            type=Condition.GREATER,
            comparison=5,
            condition_result=DetectorPriorityLevel.HIGH,
            condition_group=self.detector.workflow_condition_group,
        )
        self.warning_detector_trigger = self.create_data_condition(
            type=Condition.GREATER,
            comparison=3,
            condition_result=DetectorPriorityLevel.MEDIUM,
            condition_group=self.detector.workflow_condition_group,
        )
        # Subscription creation enqueues tasks; run them eagerly so the
        # subscription exists when the handler resolves it.
        with self.tasks():
            self.snuba_query = create_snuba_query(
                query_type=SnubaQuery.Type.ERROR,
                dataset=Dataset.Events,
                query="hello",
                aggregate="count()",
                time_window=timedelta(minutes=1),
                resolution=timedelta(minutes=1),
                environment=self.environment,
                event_types=[SnubaQueryEventType.EventType.ERROR],
                group_by=["level"],
            )
            self.query_subscription = create_snuba_subscription(
                project=self.detector.project,
                subscription_type=INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
                snuba_query=self.snuba_query,
            )
        self.alert_rule = self.create_alert_rule()
        self.create_alert_rule_detector(alert_rule_id=self.alert_rule.id, detector=self.detector)

        self.handler = MetricIssueDetectorHandler(self.detector)

    def test_metric_issue_occurrence(self) -> None:
        """Each group in the update yields an occurrence keyed by its group key,
        with a subtitle reflecting the trigger level that fired."""
        self.detector_group_keys = ["level=error", "level=warning"]
        self.detector_group_values = {
            "groups": [
                {
                    # 6: exceeds the critical threshold (> 5).
                    "group_keys": {"level": "error"},
                    "value": self.critical_detector_trigger.comparison + 1,
                },
                {
                    # 5: exceeds only the warning threshold (> 3), not critical (> 5).
                    "group_keys": {"level": "warning"},
                    "value": self.warning_detector_trigger.comparison + 2,
                },
            ],
        }
        self.threshold_values = ["Critical", "Warning"]
        self.comparison_values = [5, 3]

        packet = ProcessedSubscriptionUpdate(
            entity="entity",
            subscription_id=str(self.query_subscription.id),
            values=self.detector_group_values,
            # Use the file's UTC import rather than timezone.utc for consistency.
            timestamp=datetime.now(UTC),
        )
        data_packet = DataPacket[QuerySubscriptionUpdate](
            source_id=str(self.query_subscription.id), packet=packet
        )

        result: dict[DetectorGroupKey, DetectorEvaluationResult] = self.handler.evaluate(
            data_packet
        )
        # strict=True: fail loudly if the parallel fixture lists ever drift in length
        # instead of silently truncating the assertions.
        for detector_group_key, _group_value, threshold_value, comparison_value in zip(
            self.detector_group_keys,
            self.detector_group_values["groups"],
            self.threshold_values,
            self.comparison_values,
            strict=True,
        ):
            evaluation_result: DetectorEvaluationResult = result[detector_group_key]
            assert isinstance(evaluation_result.result, IssueOccurrence)
            occurrence: IssueOccurrence = evaluation_result.result
            assert occurrence.issue_title == self.detector.name
            assert (
                occurrence.subtitle
                == f"{detector_group_key} {threshold_value}: Number of events in the last minute above {comparison_value}"
            )
            assert occurrence.level == "error"

    def generate_evidence_data(
        self,
        value: int,
        detector_trigger: DataCondition,
        extra_trigger: DataCondition | None = None,
    ) -> dict:
        """Build the expected evidence_data payload for an occurrence.

        ``detector_trigger`` is the condition that fired; ``extra_trigger``
        optionally appends a second serialized condition.

        NOTE(review): the ``value`` parameter is currently unused — the payload's
        "value" is populated from ``detector_trigger.condition_result``. Confirm
        whether ``value`` was meant to be used here.
        """

        def _condition_payload(trigger: DataCondition) -> dict:
            # One serialized condition entry, matching the handler's output shape.
            return {
                "id": trigger.id,
                "type": trigger.type,
                "comparison": trigger.comparison,
                "condition_result": trigger.condition_result.value,
            }

        triggers = [detector_trigger]
        if extra_trigger:
            triggers.append(extra_trigger)
        return {
            "detector_id": self.detector.id,
            "value": detector_trigger.condition_result,
            "alert_id": self.alert_rule.id,
            "data_packet_source_id": str(self.query_subscription.id),
            "conditions": [_condition_payload(t) for t in triggers],
        }
0 commit comments