Skip to content

Commit dc87bfc

Browse files
authored
feat(preprod): Hook size analysis detector to diff (#108209)
Replace direct Kafka occurrence production with the workflow engine's DataPacket pipeline. `_maybe_emit_issues()` no longer applies a hardcoded 100 KiB threshold and produces occurrences to Kafka directly; it now looks up the project's enabled size-analysis Detectors, wraps the size deltas in a `SizeAnalysisDataPacket`, and evaluates them via `process_detectors()`. PRs: - #108208 Add size_analysis detector - #108209 Hook size analysis detector to diff (this PR) - #108210 Add new issue type to frontend - #108211 Add size monitor UI [Design doc](https://www.notion.so/sentry/Size-Monitors-3068b10e4b5d805ca57de084d1b4eba6)
1 parent 411b4c5 commit dc87bfc

File tree

2 files changed

+151
-1193
lines changed

2 files changed

+151
-1193
lines changed

src/sentry/preprod/size_analysis/tasks.py

Lines changed: 41 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -8,14 +8,18 @@
88
from django.utils import timezone
99

1010
from sentry import features
11-
from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka
1211
from sentry.models.files.file import File
1312
from sentry.preprod.models import (
1413
PreprodArtifact,
1514
PreprodArtifactSizeComparison,
1615
PreprodArtifactSizeMetrics,
1716
)
1817
from sentry.preprod.size_analysis.compare import compare_size_analysis
18+
from sentry.preprod.size_analysis.grouptype import (
19+
PreprodSizeAnalysisGroupType,
20+
SizeAnalysisDataPacket,
21+
SizeAnalysisValue,
22+
)
1923
from sentry.preprod.size_analysis.models import ComparisonResults, SizeAnalysisResults
2024
from sentry.preprod.size_analysis.utils import build_size_metrics_map, can_compare_size_metrics
2125
from sentry.preprod.vcs.status_checks.size.tasks import create_preprod_status_check_task
@@ -24,8 +28,8 @@
2428
from sentry.taskworker.namespaces import preprod_tasks
2529
from sentry.utils import metrics
2630
from sentry.utils.json import dumps_htmlsafe
27-
28-
from .issues import diff_to_occurrence
31+
from sentry.workflow_engine.models import DataPacket, Detector
32+
from sentry.workflow_engine.processors.detector import process_detectors
2933

3034
logger = logging.getLogger(__name__)
3135

@@ -551,37 +555,45 @@ def _maybe_emit_issues(
551555
)
552556
return
553557

554-
# TODO(EME-80): Make threshold configurable:
555-
arbitrary_threshold = 100 * 1024
556-
diff = comparison_results.size_metric_diff_item
557-
download_delta = diff.head_download_size - diff.base_download_size
558-
install_delta = diff.head_install_size - diff.base_install_size
559-
560-
issue_count = 0
561-
562-
if download_delta >= arbitrary_threshold:
563-
occurrence, event_data = diff_to_occurrence("download", diff, head_metric, base_metric)
564-
produce_occurrence_to_kafka(
565-
payload_type=PayloadType.OCCURRENCE,
566-
occurrence=occurrence,
567-
event_data=event_data,
558+
detectors = list(
559+
Detector.objects.filter(
560+
project_id=project_id,
561+
type=PreprodSizeAnalysisGroupType.slug,
562+
enabled=True,
568563
)
569-
issue_count += 1
570-
571-
if install_delta >= arbitrary_threshold:
572-
occurrence, event_data = diff_to_occurrence("install", diff, head_metric, base_metric)
573-
produce_occurrence_to_kafka(
574-
payload_type=PayloadType.OCCURRENCE,
575-
occurrence=occurrence,
576-
event_data=event_data,
564+
)
565+
if not detectors:
566+
logger.info(
567+
"preprod.size_analysis.no_detectors",
568+
extra={"project_id": project_id},
577569
)
578-
issue_count += 1
570+
return
579571

572+
diff = comparison_results.size_metric_diff_item
573+
size_data: SizeAnalysisValue = {
574+
"head_install_size_bytes": diff.head_install_size,
575+
"head_download_size_bytes": diff.head_download_size,
576+
"base_install_size_bytes": diff.base_install_size,
577+
"base_download_size_bytes": diff.base_download_size,
578+
}
579+
580+
data_packet: SizeAnalysisDataPacket = DataPacket(
581+
source_id=f"preprod-size-analysis:{project_id}",
582+
packet=size_data,
583+
)
584+
585+
logger.info(
586+
"preprod.size_analysis.process_detectors.starting",
587+
extra={
588+
"project_id": project_id,
589+
"detector_count": len(detectors),
590+
},
591+
)
592+
results = process_detectors(data_packet, detectors)
580593
logger.info(
581-
"preprod.size_analysis.compare.issues",
594+
"preprod.size_analysis.process_detectors.completed",
582595
extra={
583596
"project_id": project_id,
584-
"organization_id": organization_id,
585-
"issue_count": issue_count,
597+
"detector_count": len(results),
586598
},
587599
)

0 commit comments

Comments
 (0)