1 change: 1 addition & 0 deletions src/sentry/workflow_engine/handlers/detector/base.py
@@ -34,6 +34,7 @@ class EvidenceData(Generic[DataPacketEvaluationType]):
detector_id: int
data_packet_source_id: int
conditions: list[dict[str, Any]]
data_source_definition: dict[str, Any] | None
Member Author:
Also wanted to call out that I'm adding a field to this dataclass, and I know Python will raise an exception on instantiation if it isn't provided. Is it safe to do this? I want to make sure this won't start failing for old event data when this is merged.

Contributor:
🤔 It should be okay, but you might want to set a default value of None to be safe.

AFAIK we only apply this dataclass when writing an issue occurrence; when fetching for the API we just pass the stored data through the serializer.

Member Author:
Okay, added a default value, though I agree it would probably be okay without one.
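
A minimal, simplified sketch (not the real EvidenceData, which has more fields) of why the default matters, assuming old occurrence payloads are rehydrated by unpacking their stored dict into the dataclass constructor:

```python
import dataclasses
from typing import Any


@dataclasses.dataclass(frozen=True, kw_only=True)
class EvidenceData:
    # Simplified stand-in for the real dataclass.
    detector_id: int
    data_packet_source_id: int
    conditions: list[dict[str, Any]]
    # Without "= None", EvidenceData(**old_payload) raises TypeError for
    # payloads persisted before this field existed.
    data_source_definition: dict[str, Any] | None = None


# Payload shaped like data written before the new field was introduced.
old_payload = {
    "detector_id": 1,
    "data_packet_source_id": 2,
    "conditions": [],
}
evidence = EvidenceData(**old_payload)
assert evidence.data_source_definition is None
```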



@dataclasses.dataclass(frozen=True, kw_only=True)
31 changes: 29 additions & 2 deletions src/sentry/workflow_engine/handlers/detector/stateful.py
@@ -9,6 +9,7 @@
from django.db.models import Q
from sentry_redis_tools.retrying_cluster import RetryingRedisCluster

from sentry.api.serializers import serialize
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.issues.status_change_message import StatusChangeMessage
from sentry.models.group import GroupStatus
@@ -21,7 +22,7 @@
EventData,
GroupedDetectorEvaluationResult,
)
from sentry.workflow_engine.models import DataPacket, Detector, DetectorState
from sentry.workflow_engine.models import DataPacket, DataSource, Detector, DetectorState
from sentry.workflow_engine.processors.data_condition_group import (
ProcessedDataConditionGroup,
process_data_condition_group,
@@ -353,6 +354,29 @@ def build_detector_evidence_data(
"""
return {}

def _build_data_source_definition(
self, data_packet: DataPacket[DataPacketType]
) -> dict[str, Any] | None:
try:
data_source = DataSource.objects.filter(
detectors=self.detector, source_id=data_packet.source_id
).first()
Contributor:
We should probably allow this to have multiple data sources rather than limiting it to the first one. Could we just serialize the list of data sources, even though there will generally only be one? It might also help with debugging to see whether multiple sources are connected.

Member Author:
Okay, makes sense. I thought that because this passes a source_id only one data source should match, but if that's not guaranteed, the list makes sense. Updated (a rough sketch of the list-based variant follows this function).

if not data_source:
logger.warning(
"Matching data source not found for detector while generating occurrence evidence data",
extra={
"detector_id": self.detector.id,
"data_packet_source_id": data_packet.source_id,
},
)
return None
return serialize(data_source)
except Exception:
logger.exception(
"Failed to serialize data source definition when building workflow engine evidence data"
)
return None
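
For reference, a rough sketch of the list-based variant discussed in the thread above; it reuses this module's imports and assumes `serialize` accepts a list of `DataSource` rows and returns one dict per row (the test expectations below still assert the single-dict shape):

```python
def _build_data_source_definition(
    self, data_packet: DataPacket[DataPacketType]
) -> list[dict[str, Any]] | None:
    try:
        # Serialize every matching data source instead of only the first,
        # so multiple attached sources remain visible for debugging.
        data_sources = list(
            DataSource.objects.filter(
                detectors=self.detector, source_id=data_packet.source_id
            )
        )
        if not data_sources:
            logger.warning(
                "Matching data source not found for detector while generating occurrence evidence data",
                extra={
                    "detector_id": self.detector.id,
                    "data_packet_source_id": data_packet.source_id,
                },
            )
            return None
        return serialize(data_sources)
    except Exception:
        logger.exception(
            "Failed to serialize data source definition when building workflow engine evidence data"
        )
        return None
```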

def _build_workflow_engine_evidence_data(
self,
evaluation_result: ProcessedDataConditionGroup,
@@ -363,15 +387,18 @@ def _build_workflow_engine_evidence_data(
Build the workflow engine specific evidence data.
This is data that is common to all detectors.
"""
return {
base: dict[str, Any] = {
"detector_id": self.detector.id,
"value": evaluation_value,
"data_packet_source_id": str(data_packet.source_id),
"conditions": [
result.condition.get_snapshot() for result in evaluation_result.condition_results
],
"data_source_definition": self._build_data_source_definition(data_packet),
}

return base

def evaluate_impl(
self, data_packet: DataPacket[DataPacketType]
) -> GroupedDetectorEvaluationResult:
22 changes: 22 additions & 0 deletions tests/sentry/incidents/test_metric_issue_detector_handler.py
@@ -24,6 +24,7 @@ def generate_evidence_data(
detector_trigger: DataCondition,
extra_trigger: DataCondition | None = None,
):
self.query_subscription.refresh_from_db()

conditions = [
{
@@ -50,6 +51,27 @@
"alert_id": self.alert_rule.id,
"data_packet_source_id": str(self.query_subscription.id),
"conditions": conditions,
"data_source_definition": {
"id": str(self.data_source.id),
"organizationId": str(self.organization.id),
"type": self.data_source.type,
"sourceId": str(self.query_subscription.id),
"queryObj": {
"id": str(self.query_subscription.id),
"status": self.query_subscription.status,
"subscription": self.query_subscription.subscription_id,
"snubaQuery": {
"id": str(self.snuba_query.id),
"dataset": self.snuba_query.dataset,
"query": self.snuba_query.query,
"aggregate": self.snuba_query.aggregate,
"timeWindow": self.snuba_query.time_window,
"environment": self.environment.name,
"eventTypes": ["error"],
"extrapolationMode": "unknown",
},
},
},
}

return evidence_data
@@ -101,6 +101,7 @@ def create_models(self):
"condition_result": DetectorPriorityLevel.OK.value,
},
],
data_source_definition=None,
alert_id=self.alert_rule.id,
)

@@ -126,6 +127,7 @@
"condition_result": DetectorPriorityLevel.HIGH.value,
},
],
data_source_definition=None,
alert_id=self.alert_rule.id,
)
self.group, self.event, self.group_event = self.create_group_event(