|
1 | 1 | import logging
|
2 | 2 | import re
|
3 | 3 | from concurrent.futures import ThreadPoolExecutor
|
| 4 | +from datetime import timedelta |
4 | 5 |
|
5 | 6 | import sentry_sdk
|
6 | 7 | from django.conf import settings
|
| 8 | +from django.utils import timezone |
7 | 9 | from rest_framework.exceptions import ParseError, ValidationError
|
8 | 10 | from rest_framework.request import Request
|
9 | 11 | from rest_framework.response import Response
|
@@ -94,6 +96,18 @@ def get(self, request: Request, organization) -> Response:
|
94 | 96 | except NoProjects:
|
95 | 97 | return Response([])
|
96 | 98 |
|
| 99 | + modified_params = params.copy() |
| 100 | + delta = modified_params["end"] - modified_params["start"] |
| 101 | + duration = delta.total_seconds() |
| 102 | + if duration >= 1209600 and duration <= 1209600 * 2: |
| 103 | + new_start = modified_params["end"] - timedelta(days=30) |
| 104 | + min_start = timezone.now() - timedelta(days=90) |
| 105 | + modified_params["start"] = new_start if min_start < new_start else min_start |
| 106 | + sentry_sdk.set_tag("performance.trendsv2.extra_data_fetched", True) |
| 107 | + sentry_sdk.set_tag( |
| 108 | + "performance.trendsv2.optimized_start_out_of_bounds", new_start > min_start |
| 109 | + ) |
| 110 | + |
97 | 111 | trend_type = request.GET.get("trendType", REGRESSION)
|
98 | 112 | if trend_type not in TREND_TYPES:
|
99 | 113 | raise ParseError(detail=f"{trend_type} is not a supported trend type")
|
@@ -234,6 +248,17 @@ def get_trends_data(stats_data, request):
|
234 | 248 | # list of requests to send to microservice async
|
235 | 249 | trends_requests = []
|
236 | 250 |
|
| 251 | + # format start and end |
| 252 | + for data in list(stats_data.items()): |
| 253 | + data_start = data[1].pop("start", "") |
| 254 | + data_end = data[1].pop("end", "") |
| 255 | + # data start and end that analysis is run on |
| 256 | + data[1]["data_start"] = data_start |
| 257 | + data[1]["data_end"] = data_end |
| 258 | + # user requested start and end |
| 259 | + data[1]["request_start"] = params["start"].timestamp() |
| 260 | + data[1]["request_end"] = data_end |
| 261 | + |
237 | 262 | # split the txns data into multiple dictionaries
|
238 | 263 | split_transactions_data = [
|
239 | 264 | dict(list(stats_data.items())[i : i + EVENTS_PER_QUERY])
|
@@ -278,6 +303,7 @@ def get_stats_data_for_trending_events(results):
|
278 | 303 | if request.GET.get("withTimeseries", False):
|
279 | 304 | trending_transaction_names_stats = stats_data
|
280 | 305 | else:
|
| 306 | + # TODO: remove the extra fetched data when the stats period is between 14d and 30d |
281 | 307 | for t in results["data"]:
|
282 | 308 | transaction_name = t["transaction"]
|
283 | 309 | project = t["project"]
|
@@ -316,7 +342,7 @@ def get_stats_data_for_trending_events(results):
|
316 | 342 | get_event_stats_metrics,
|
317 | 343 | top_events=EVENTS_PER_QUERY,
|
318 | 344 | query_column=trend_function,
|
319 |
| - params=params, |
| 345 | + params=modified_params, |
320 | 346 | query=query,
|
321 | 347 | )
|
322 | 348 |
|
|
0 commit comments