5
5
from fractions import Fraction
6
6
from itertools import filterfalse , zip_longest
7
7
from types import GeneratorType
8
- from typing import Any , cast
8
+ from typing import Any , Generator , cast
9
9
10
10
import orjson
11
11
import sentry_sdk
@@ -590,7 +590,6 @@ def parse_chunks(chunks: str) -> tuple[list[str], ReportHeader]:
590
590
591
591
592
592
class Report (object ):
593
- file_class = ReportFile
594
593
_files : dict [str , ReportFileSummary ]
595
594
_header : ReportHeader
596
595
@@ -609,22 +608,57 @@ def __init__(
609
608
self .sessions = get_sessions (sessions ) if sessions else {}
610
609
611
610
# ["<json>", ...]
611
+ self ._chunks : list [str | ReportFile ]
612
612
self ._chunks , self ._header = (
613
613
parse_chunks (chunks )
614
614
if chunks and isinstance (chunks , str )
615
615
else (chunks or [], ReportHeader ())
616
616
)
617
617
618
618
# <ReportTotals>
619
+ self ._totals : ReportTotals | None = None
619
620
if isinstance (totals , ReportTotals ):
620
621
self ._totals = totals
621
622
elif totals :
622
623
self ._totals = ReportTotals (* migrate_totals (totals ))
623
- else :
624
- self ._totals = None
625
624
626
625
self .diff_totals = diff_totals
627
626
627
    def _invalidate_caches(self):
        # Drop the memoized aggregate totals; the next access of the `totals`
        # property will recompute them via `_process_totals()`. Called after
        # any mutation that changes per-file coverage (label/session deletion,
        # session renumbering).
        self._totals = None
629
+
630
+ @property
631
+ def totals (self ):
632
+ if not self ._totals :
633
+ self ._totals = self ._process_totals ()
634
+ return self ._totals
635
+
636
+ def _process_totals (self ):
637
+ """Runs through the file network to aggregate totals
638
+ returns <ReportTotals>
639
+ """
640
+
641
+ def _iter_totals ():
642
+ for filename , data in self ._files .items ():
643
+ if data .file_totals is None :
644
+ yield self .get (filename ).totals
645
+ else :
646
+ yield data .file_totals
647
+
648
+ totals = agg_totals (_iter_totals ())
649
+ totals .sessions = len (self .sessions )
650
+ return totals
651
+
652
+ def _iter_parsed_files (self ) -> Generator [ReportFile , None , None ]:
653
+ for name , summary in self ._files .items ():
654
+ idx = summary .file_index
655
+ file = self ._chunks [idx ]
656
+ if not isinstance (file , ReportFile ):
657
+ file = self ._chunks [idx ] = ReportFile (
658
+ name = name , totals = summary .file_totals , lines = file
659
+ )
660
+ yield file
661
+
628
662
    @property
    def header(self) -> ReportHeader:
        # Read-only accessor for the report header parsed out of the chunks
        # payload (or the default ReportHeader when none was supplied).
        return self._header
@@ -787,7 +821,7 @@ def get(self, filename, _else=None, bind=False):
787
821
lines = None
788
822
if isinstance (lines , ReportFile ):
789
823
return lines
790
- report_file = self . file_class (
824
+ report_file = ReportFile (
791
825
name = filename ,
792
826
totals = _file .file_totals ,
793
827
lines = lines ,
@@ -857,29 +891,6 @@ def get_file_totals(self, path: str) -> ReportTotals | None:
857
891
else :
858
892
return ReportTotals (* totals )
859
893
860
- @property
861
- def totals (self ):
862
- if not self ._totals :
863
- # reprocess totals
864
- self ._totals = self ._process_totals ()
865
- return self ._totals
866
-
867
- def _process_totals (self ):
868
- """Runs through the file network to aggregate totals
869
- returns <ReportTotals>
870
- """
871
-
872
- def _iter_totals ():
873
- for filename , data in self ._files .items ():
874
- if data .file_totals is None :
875
- yield self .get (filename ).totals
876
- else :
877
- yield data .file_totals
878
-
879
- totals = agg_totals (_iter_totals ())
880
- totals .sessions = len (self .sessions )
881
- return totals
882
-
883
894
def next_session_number (self ):
884
895
start_number = len (self .sessions )
885
896
while start_number in self .sessions or str (start_number ) in self .sessions :
@@ -912,7 +923,7 @@ def __iter__(self):
912
923
if isinstance (report , ReportFile ):
913
924
yield report
914
925
else :
915
- yield self . file_class (
926
+ yield ReportFile (
916
927
name = filename ,
917
928
totals = _file .file_totals ,
918
929
lines = report ,
@@ -1230,6 +1241,82 @@ def _passes_integrity_analysis(self):
1230
1241
return False
1231
1242
return True
1232
1243
1244
+ def delete_labels (
1245
+ self , sessionids : list [int ] | set [int ], labels_to_delete : list [int ] | set [int ]
1246
+ ):
1247
+ files_to_delete = []
1248
+ for file in self ._iter_parsed_files ():
1249
+ file .delete_labels (sessionids , labels_to_delete )
1250
+ if file :
1251
+ self ._files [file .name ] = dataclasses .replace (
1252
+ self ._files [file .name ],
1253
+ file_totals = file .totals ,
1254
+ )
1255
+ else :
1256
+ files_to_delete .append (file .name )
1257
+ for file in files_to_delete :
1258
+ del self [file ]
1259
+
1260
+ self ._invalidate_caches ()
1261
+ return sessionids
1262
+
1263
+ def delete_multiple_sessions (self , session_ids_to_delete : list [int ] | set [int ]):
1264
+ session_ids_to_delete = set (session_ids_to_delete )
1265
+ for sessionid in session_ids_to_delete :
1266
+ self .sessions .pop (sessionid )
1267
+
1268
+ files_to_delete = []
1269
+ for file in self ._iter_parsed_files ():
1270
+ file .delete_multiple_sessions (session_ids_to_delete )
1271
+ if file :
1272
+ self ._files [file .name ] = dataclasses .replace (
1273
+ self ._files [file .name ],
1274
+ file_totals = file .totals ,
1275
+ )
1276
+ else :
1277
+ files_to_delete .append (file .name )
1278
+ for file in files_to_delete :
1279
+ del self [file ]
1280
+
1281
+ self ._invalidate_caches ()
1282
+
1283
    @sentry_sdk.trace
    def change_sessionid(self, old_id: int, new_id: int):
        """
        This changes the session with `old_id` to have `new_id` instead.
        It patches up all the references to that session across all files and line records.

        In particular, it changes the id in all the `LineSession`s and `CoverageDatapoint`s,
        and does the equivalent of `calculate_present_sessions`.
        """
        # Re-key the session map; raises KeyError if `old_id` is absent.
        session = self.sessions[new_id] = self.sessions.pop(old_id)
        session.id = new_id

        for file in self._iter_parsed_files():
            # Session ids seen in this file, used to rebuild its
            # present-sessions cache after the rewrite.
            all_sessions = set()

            for idx, _line in enumerate(file._lines):
                if not _line:
                    # Skip empty / missing line records.
                    continue

                # this turns the line into an actual `ReportLine`
                line = file._lines[idx] = file._line(_line)

                # NOTE(review): `session` here shadows the outer session
                # variable; harmless since the outer one is not used again.
                for session in line.sessions:
                    if session.id == old_id:
                        session.id = new_id
                    all_sessions.add(session.id)

                if line.datapoints:
                    for point in line.datapoints:
                        if point.sessionid == old_id:
                            point.sessionid = new_id

            file._invalidate_caches()
            # NOTE(review): inside `class Report`, `file.__present_sessions`
            # name-mangles to `file._Report__present_sessions`. If ReportFile
            # stores its cache as `self.__present_sessions` (mangled to
            # `_ReportFile__present_sessions`), this assignment never reaches
            # it — confirm against ReportFile's implementation.
            file.__present_sessions = all_sessions

        self._invalidate_caches()
1319
+
1233
1320
1234
1321
def _ignore_to_func (ignore ):
1235
1322
"""Returns a function to determine whether a a line should be saved to the ReportFile
0 commit comments