Skip to content
This repository was archived by the owner on May 5, 2025. It is now read-only.

Commit f21ee54

Browse files
committed
create utils for accessing testrun timescale models
1 parent 35bab8a commit f21ee54

File tree

2 files changed

+576
-0
lines changed

2 files changed

+576
-0
lines changed

services/ta_timeseries.py

Lines changed: 189 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,189 @@
1+
from __future__ import annotations
2+
3+
from datetime import datetime, timedelta
4+
from enum import Enum
5+
from typing import TypedDict
6+
7+
import test_results_parser
8+
from django.db import connections
9+
from django.db.models import Q
10+
from shared.django_apps.test_analytics.models import Flake
11+
from shared.django_apps.timeseries.models import (
12+
Testrun,
13+
TestrunBranchSummary,
14+
TestrunSummary,
15+
)
16+
17+
from services.test_results import FlakeInfo
18+
from ta_storage.utils import calc_flags_hash, calc_test_id
19+
20+
21+
class Interval(Enum):
    # Look-back window sizes, in days, used when querying the continuous
    # aggregate summary tables (see get_testrun_summary /
    # get_testrun_branch_summary). The member value is fed directly into
    # timedelta(days=...).
    ONE_DAY = 1
    SEVEN_DAYS = 7
    THIRTY_DAYS = 30
25+
26+
27+
def get_flaky_tests_set(repo_id: int) -> set[bytes]:
    """Return the distinct test_ids of a repo's currently-active flakes.

    A flake counts as active while its ``end_date`` is still unset.
    """
    active_flakes = Flake.objects.filter(repoid=repo_id, end_date__isnull=True)
    test_ids = active_flakes.values_list("test_id", flat=True).distinct()
    return set(test_ids)
33+
34+
35+
def get_flaky_tests_dict(repo_id: int) -> dict[bytes, FlakeInfo]:
    """Map each active flake's test_id to its FlakeInfo (fail count, total count)."""
    result: dict[bytes, FlakeInfo] = {}
    for flake in Flake.objects.filter(repoid=repo_id, end_date__isnull=True):
        result[flake.test_id] = FlakeInfo(flake.fail_count, flake.count)
    return result
40+
41+
42+
def insert_testrun(
    timestamp: datetime,
    repo_id: int | None,
    commit_sha: str | None,
    branch: str | None,
    upload_id: int | None,
    flags: list[str] | None,
    parsing_info: test_results_parser.ParsingInfo,
    flaky_test_ids: set[bytes] | None = None,
):
    """Bulk-insert one Testrun row per parsed testrun in *parsing_info*.

    Failures of tests whose id appears in *flaky_test_ids* are stored with
    outcome ``"flaky_failure"`` instead of ``"failure"`` so downstream
    consumers can tell them apart from genuine failures.

    Args:
        timestamp: timestamp applied to every inserted row.
        repo_id / commit_sha / branch / upload_id: identity of the upload;
            copied verbatim onto each row.
        flags: upload flags; also hashed into ``flags_hash``.
        parsing_info: parsed test results (``framework`` + ``testruns``).
        flaky_test_ids: ids of currently-flaky tests (see get_flaky_tests_set).
    """
    # flags_hash depends only on the upload's flags, not on the individual
    # testrun, so compute it once instead of once per loop iteration.
    # (Falsy flags — None or [] — intentionally yield a NULL hash.)
    flags_hash = calc_flags_hash(flags) if flags else None

    testruns_to_create = []
    for testrun in parsing_info["testruns"]:
        test_id = calc_test_id(
            testrun["name"], testrun["classname"], testrun["testsuite"]
        )
        outcome = testrun["outcome"]

        # Re-label failures of known-flaky tests.
        if outcome == "failure" and flaky_test_ids and test_id in flaky_test_ids:
            outcome = "flaky_failure"

        testruns_to_create.append(
            Testrun(
                timestamp=timestamp,
                test_id=test_id,
                flags_hash=flags_hash,
                name=testrun["name"],
                classname=testrun["classname"],
                testsuite=testrun["testsuite"],
                computed_name=testrun["computed_name"],
                outcome=outcome,
                duration_seconds=testrun["duration"],
                failure_message=testrun["failure_message"],
                framework=parsing_info["framework"],
                filename=testrun["filename"],
                repo_id=repo_id,
                commit_sha=commit_sha,
                branch=branch,
                flags=flags,
                upload_id=upload_id,
            )
        )
    # Single round-trip insert for the whole upload.
    Testrun.objects.bulk_create(testruns_to_create)
85+
86+
87+
class TestInstance(TypedDict):
    # Row shape returned by get_pr_comment_failures: the latest observed
    # state of one (test_id, flags_hash) group for a commit.
    test_id: bytes  # hash identifying the test (see calc_test_id)
    flags_hash: bytes | None  # hash of upload flags; None when no flags
    computed_name: str  # most recent computed display name
    failure_message: str  # most recent failure message
    upload_id: int  # upload that produced the most recent run
    duration_seconds: float | None  # most recent duration, if recorded
94+
95+
96+
def get_pr_comment_failures(repo_id: int, commit_sha: str) -> list[TestInstance]:
    """Return the latest failing state of each failed test for a commit.

    Groups testruns by (test_id, flags_hash) and uses the TimescaleDB
    ``LAST(value, timestamp)`` aggregate to pick the most recent values per
    group, restricted to outcomes 'failure' / 'flaky_failure'.
    """
    with connections["timeseries"].cursor() as cursor:
        cursor.execute(
            """
            SELECT
                test_id,
                flags_hash,
                LAST(computed_name, timestamp) as computed_name,
                LAST(failure_message, timestamp) as failure_message,
                LAST(upload_id, timestamp) as upload_id,
                LAST(duration_seconds, timestamp) as duration_seconds
            FROM timeseries_testrun
            WHERE repo_id = %s AND commit_sha = %s AND outcome IN ('failure', 'flaky_failure')
            GROUP BY test_id, flags_hash
            """,
            [repo_id, commit_sha],
        )
        return [
            {
                "test_id": bytes(test_id),
                # flags_hash is NULL for runs uploaded without flags (see
                # insert_testrun); bytes(None) would raise TypeError, so
                # pass None through — TestInstance allows bytes | None.
                "flags_hash": bytes(flags_hash) if flags_hash is not None else None,
                "computed_name": computed_name,
                "failure_message": failure_message,
                "upload_id": upload_id,
                "duration_seconds": duration_seconds,
            }
            for test_id, flags_hash, computed_name, failure_message, upload_id, duration_seconds in cursor.fetchall()
        ]
124+
125+
126+
def get_pr_comment_agg(repo_id: int, commit_sha: str) -> dict[str, int]:
    """Count tests per final outcome for a commit.

    The inner query collapses each (test_id, flags_hash) group to its most
    recent outcome via ``LAST(outcome, timestamp)``; the outer query then
    tallies how many groups ended in each outcome.
    """
    sql = """
            SELECT outcome, count(*) FROM (
                SELECT
                    test_id,
                    flags_hash,
                    LAST(outcome, timestamp) as outcome
                FROM timeseries_testrun
                WHERE repo_id = %s AND commit_sha = %s
                GROUP BY test_id, flags_hash
            ) AS t
            GROUP BY outcome
            """
    with connections["timeseries"].cursor() as cursor:
        cursor.execute(sql, [repo_id, commit_sha])
        rows = cursor.fetchall()
    # Each row is a (outcome, count) pair.
    return dict(rows)
144+
145+
146+
def get_testruns_for_flake_detection(
    upload_id: int,
    flaky_test_ids: set[bytes],
) -> list[Testrun]:
    """Fetch an upload's testruns that matter for flake detection.

    Relevant runs are: any failure, any already-flagged flaky failure, and
    passes of tests currently considered flaky (a pass may close a flake).
    """
    relevant_outcome = (
        Q(outcome="failure")
        | Q(outcome="flaky_failure")
        | (Q(outcome="pass") & Q(test_id__in=flaky_test_ids))
    )
    queryset = Testrun.objects.filter(Q(upload_id=upload_id) & relevant_outcome)
    return list(queryset)
160+
161+
162+
def update_testrun_to_flaky(
    timestamp: datetime, test_id: bytes, flags_hash: bytes | None
):
    """Re-label a specific testrun row's outcome as 'flaky_failure'.

    The row is identified by (timestamp, test_id, flags_hash).
    """
    with connections["timeseries"].cursor() as cursor:
        if flags_hash is None:
            # `flags_hash = %s` with a None parameter renders `= NULL`,
            # which never matches in SQL (NULL comparisons are unknown) —
            # rows stored without flags need an explicit IS NULL predicate.
            cursor.execute(
                "UPDATE timeseries_testrun SET outcome = %s WHERE timestamp = %s AND test_id = %s AND flags_hash IS NULL",
                ["flaky_failure", timestamp, test_id],
            )
        else:
            cursor.execute(
                "UPDATE timeseries_testrun SET outcome = %s WHERE timestamp = %s AND test_id = %s AND flags_hash = %s",
                ["flaky_failure", timestamp, test_id, flags_hash],
            )
170+
171+
172+
def get_testrun_summary(
    repo_id: int, interval: Interval, branch: str | None = None
) -> list[TestrunSummary]:
    """Return a repo's testrun summaries newer than the interval cutoff.

    NOTE(review): ``branch`` is accepted but never used in the filter —
    confirm whether it should filter results (cf. get_testrun_branch_summary)
    or be dropped from the signature.
    NOTE(review): datetime.now() is naive — confirm timestamp_bin's timezone
    handling matches.
    """
    cutoff = datetime.now() - timedelta(days=interval.value)
    queryset = TestrunSummary.objects.filter(
        repo_id=repo_id, timestamp_bin__gte=cutoff
    )
    return list(queryset)
179+
180+
181+
def get_testrun_branch_summary(
    repo_id: int, branch: str, interval: Interval
) -> list[TestrunBranchSummary]:
    """Return a repo+branch's testrun summaries newer than the interval cutoff."""
    cutoff = datetime.now() - timedelta(days=interval.value)
    queryset = TestrunBranchSummary.objects.filter(
        repo_id=repo_id, branch=branch, timestamp_bin__gte=cutoff
    )
    return list(queryset)

0 commit comments

Comments
 (0)