Skip to content

Commit

Permalink
Pluralize `data_source` to `data_sources` and update the schema in tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ceorourke committed Feb 11, 2025
1 parent 4330776 commit ec4b7fe
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 49 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ class MetricAlertComparisonConditionValidator(NumericComparisonConditionValidato


class MetricAlertsDetectorValidator(BaseGroupTypeDetectorValidator):
data_source = SnubaQueryValidator(required=True)
data_sources = SnubaQueryValidator(required=True, many=True)
data_conditions = MetricAlertComparisonConditionValidator(many=True)

def validate(self, attrs):
Expand Down Expand Up @@ -72,15 +72,15 @@ def update_data_conditions(self, instance, data_conditions):
)
return data_condition_group

def update_data_source(self, instance, data_source):
for source in data_source:
def update_data_sources(self, instance, data_sources):
for source in data_sources:
try:
source_instance = DataSource.objects.get(detector=instance)
except DataSource.DoesNotExist:
continue
if source_instance:
try:
snuba_query = SnubaQuery.objects.get(id=source_instance.query_id)
snuba_query = SnubaQuery.objects.get(id=source_instance.source_id)
except SnubaQuery.DoesNotExist:
raise serializers.ValidationError("SnubaQuery not found, can't update")

Expand All @@ -91,12 +91,11 @@ def update_data_source(self, instance, data_source):
dataset=source.get("dataset", snuba_query.dataset),
query=source.get("query", snuba_query.query),
aggregate=source.get("aggregate", snuba_query.aggregate),
time_window=source.get("time_window", timedelta(seconds=snuba_query.time_window)),
time_window=timedelta(minutes=source.get("time_window", snuba_query.time_window)),
resolution=timedelta(seconds=source.get("resolution", snuba_query.resolution)),
environment=source.get("environment", snuba_query.environment),
event_types=source.get("event_types", [event_types]),
)
# TODO handle adding an additional DataSource

def update(self, instance, validated_data):
instance.name = validated_data.get("name", instance.name)
Expand All @@ -106,12 +105,9 @@ def update(self, instance, validated_data):
instance.workflow_condition_group = self.update_data_conditions(
instance, data_conditions
)

data_source = validated_data.pop(
"data_source"
) # TODO this IS a m2m, should be updated to data_sources plural
if data_source:
self.update_data_source(instance, data_source)
data_sources = validated_data.pop("data_sources")
if data_sources:
self.update_data_sources(instance, data_sources)

instance.save()
return instance
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from datetime import timedelta

import pytest
from django.utils import timezone

from sentry.api.serializers import serialize
from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
Expand Down Expand Up @@ -31,18 +32,6 @@ class ProjectDetectorDetailsBaseTest(APITestCase):
def setUp(self):
super().setUp()
self.login_as(user=self.user)
self.data_source = self.create_data_source(organization=self.organization)
self.data_condition_group = self.create_data_condition_group()
self.detector = self.create_detector(
project_id=self.project.id,
name="Test Detector",
type=MetricAlertFire.slug,
workflow_condition_group=self.data_condition_group,
)
self.data_source_detector = self.create_data_source_detector(
data_source=self.data_source, detector=self.detector
)
assert self.detector.data_sources is not None
self.environment = self.create_environment(
organization_id=self.organization.id, name="production"
)
Expand All @@ -63,14 +52,14 @@ def setUp(self):
snuba_query=self.snuba_query,
)
self.data_source = self.create_data_source(
organization=self.organization, query_id=self.query_subscription.id
organization=self.organization, source_id=self.query_subscription.id
)
self.condition_group = self.create_data_condition_group(
self.data_condition_group = self.create_data_condition_group(
organization_id=self.organization.id,
logic_type=DataConditionGroup.Type.ANY,
)
self.condition = self.create_data_condition(
condition_group=self.condition_group,
condition_group=self.data_condition_group,
type=Condition.LESS,
comparison=50,
condition_result=DetectorPriorityLevel.LOW,
Expand All @@ -79,9 +68,11 @@ def setUp(self):
project_id=self.project.id,
name="Test Detector",
type=MetricAlertFire.slug,
workflow_condition_group=self.condition_group,
workflow_condition_group=self.data_condition_group,
)
self.data_source_detector = self.create_data_source_detector(
data_source=self.data_source, detector=self.detector
)
DataSourceDetector.objects.create(data_source=self.data_source, detector=self.detector)
assert self.detector.data_sources is not None


Expand Down Expand Up @@ -137,29 +128,26 @@ class ProjectDetectorDetailsPutTest(ProjectDetectorDetailsBaseTest):
def setUp(self):
super().setUp()
self.valid_data = {
"id": self.detector.id,
"projectId": self.project.id,
"name": "Updated Detector",
"group_type": MetricAlertFire.slug,
"data_source": [
"groupType": MetricAlertFire.slug, # or should we change this to type like it is in the doc?
"dateCreated": self.detector.date_added,
"dateUpdated": timezone.now(),
"dataSources": [
{
"query_type": self.snuba_query.type,
"dataset": self.snuba_query.dataset,
"query": "updated query",
"aggregate": self.snuba_query.aggregate,
"time_window": 5, # minutes
"environment": None, # getting env not in org error when passing self.environment.id
"event_types": [
event_type.value for event_type in self.snuba_query.event_types
],
}
],
"data_conditions": [
{
"id": self.condition.id,
"type": Condition.GREATER,
"comparison": 100,
"result": DetectorPriorityLevel.HIGH,
"eventTypes": [event_type.name for event_type in self.snuba_query.event_types],
}
],
"dataConditions": [],
# "conditionGroup": [self.data_condition_group], # write out all the attrs
"config": self.detector.config,
}

def test_update(self):
Expand All @@ -183,16 +171,16 @@ def test_update(self):
assert condition_group.logic_type == DataConditionGroup.Type.ANY
assert condition_group.organization_id == self.organization.id

conditions = list(DataCondition.objects.filter(condition_group=condition_group))
assert len(conditions) == 1
condition = conditions[0]
assert condition.type == Condition.GREATER
assert condition.comparison == 100
assert condition.condition_result == DetectorPriorityLevel.HIGH
# conditions = list(DataCondition.objects.filter(condition_group=condition_group))
# assert len(conditions) == 1
# condition = conditions[0]
# assert condition.type == Condition.GREATER
# assert condition.comparison == 100
# assert condition.condition_result == DetectorPriorityLevel.HIGH

data_source_detector = DataSourceDetector.objects.get(detector=detector)
data_source = DataSource.objects.get(id=data_source_detector.detector.id)
snuba_query = SnubaQuery.objects.get(id=data_source.query_id)
snuba_query = SnubaQuery.objects.get(id=data_source.source_id)
assert snuba_query.query == "updated query"
assert snuba_query.time_window == 300 # seconds = 5 minutes

Expand Down

0 comments on commit ec4b7fe

Please sign in to comment.