Skip to content

Commit 7713678

Browse files
authored
feat(trends): Fetch extra data (#51153)
When the stats period is greater than 14d and less than 30d, fetch all 30 days of data to account for seasonality. I still need to clean up the timeseries when extra data is fetched, since it will always cover 30 days for now.
1 parent 96db093 commit 7713678

File tree

1 file changed

+27
-1
lines changed

1 file changed

+27
-1
lines changed

src/sentry/api/endpoints/organization_events_trendsv2.py

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
11
import logging
22
import re
33
from concurrent.futures import ThreadPoolExecutor
4+
from datetime import timedelta
45

56
import sentry_sdk
67
from django.conf import settings
8+
from django.utils import timezone
79
from rest_framework.exceptions import ParseError, ValidationError
810
from rest_framework.request import Request
911
from rest_framework.response import Response
@@ -94,6 +96,18 @@ def get(self, request: Request, organization) -> Response:
9496
except NoProjects:
9597
return Response([])
9698

99+
modified_params = params.copy()
100+
delta = modified_params["end"] - modified_params["start"]
101+
duration = delta.total_seconds()
102+
if duration >= 1209600 and duration <= 1209600 * 2:
103+
new_start = modified_params["end"] - timedelta(days=30)
104+
min_start = timezone.now() - timedelta(days=90)
105+
modified_params["start"] = new_start if min_start < new_start else min_start
106+
sentry_sdk.set_tag("performance.trendsv2.extra_data_fetched", True)
107+
sentry_sdk.set_tag(
108+
"performance.trendsv2.optimized_start_out_of_bounds", new_start > min_start
109+
)
110+
97111
trend_type = request.GET.get("trendType", REGRESSION)
98112
if trend_type not in TREND_TYPES:
99113
raise ParseError(detail=f"{trend_type} is not a supported trend type")
@@ -234,6 +248,17 @@ def get_trends_data(stats_data, request):
234248
# list of requests to send to microservice async
235249
trends_requests = []
236250

251+
# format start and end
252+
for data in list(stats_data.items()):
253+
data_start = data[1].pop("start", "")
254+
data_end = data[1].pop("end", "")
255+
# data start and end that analysis is ran on
256+
data[1]["data_start"] = data_start
257+
data[1]["data_end"] = data_end
258+
# user requested start and end
259+
data[1]["request_start"] = params["start"].timestamp()
260+
data[1]["request_end"] = data_end
261+
237262
# split the txns data into multiple dictionaries
238263
split_transactions_data = [
239264
dict(list(stats_data.items())[i : i + EVENTS_PER_QUERY])
@@ -278,6 +303,7 @@ def get_stats_data_for_trending_events(results):
278303
if request.GET.get("withTimeseries", False):
279304
trending_transaction_names_stats = stats_data
280305
else:
306+
# TODO remove extra data when stats period is from 14d to 30d
281307
for t in results["data"]:
282308
transaction_name = t["transaction"]
283309
project = t["project"]
@@ -316,7 +342,7 @@ def get_stats_data_for_trending_events(results):
316342
get_event_stats_metrics,
317343
top_events=EVENTS_PER_QUERY,
318344
query_column=trend_function,
319-
params=params,
345+
params=modified_params,
320346
query=query,
321347
)
322348

0 commit comments

Comments
 (0)