diff --git a/dropmate_py/log_utils.py b/dropmate_py/log_utils.py
index 284bb02..5f08c85 100644
--- a/dropmate_py/log_utils.py
+++ b/dropmate_py/log_utils.py
@@ -13,7 +13,7 @@
 )
 
 
-def _keyer(short_record: str) -> tuple[str, str]:  # pragma: no cover
+def _keyer(short_record: str) -> tuple[str, int]:  # pragma: no cover
     """
     Sorting key based on consolidated drop record.
 
@@ -21,7 +21,7 @@ def _keyer(short_record: str) -> tuple[str, str]:  # pragma: no cover
     index.
     """
     split_record = short_record.split(",")
-    return (split_record[0], split_record[1])
+    return (split_record[0], int(split_record[1]))
 
 
 def consolidate_drop_records(
@@ -29,6 +29,7 @@ def consolidate_drop_records(
     log_pattern: str,
     out_filepath: Path,
     keep_headers: abc.Sequence[str] = CONSOLIDATED_HEADERS,
+    write_file: bool = True,
 ) -> list[str]:
     """
     Merge a directory of Dropmate drop record outputs into a deduplicated, simplified drop record.
@@ -61,8 +62,9 @@
 
     consolidated_records.sort(key=_keyer)
 
-    with out_filepath.open("w") as f:
-        f.write(f"{','.join(keep_headers)}\n")
-        f.writelines(f"{record}\n" for record in consolidated_records)
+    if write_file:
+        with out_filepath.open("w") as f:
+            f.write(f"{','.join(keep_headers)}\n")
+            f.writelines(f"{record}\n" for record in consolidated_records)
 
     return consolidated_records
diff --git a/tests/test_consolidate.py b/tests/test_consolidate.py
index 8e7782a..5fd0116 100644
--- a/tests/test_consolidate.py
+++ b/tests/test_consolidate.py
@@ -45,3 +45,43 @@ def test_consolidate_drop_records(tmp_path: Path) -> None:
     out_data = out_log.read_text()
 
     assert out_data == TRUTH_CONSOLIDATED
+
+
+SAMPLE_LOG_TEN_RECORDS = dedent(
+    """\
+    serial_number,uid,battery,device_health,firmware_version,log_timestamp,log_altitude,total_flights,flights_over_18kft,recorded_flights,flight_index,start_time_utc,end_time_utc,start_barometric_altitude_msl_ft,end_barometric_altitude_msl_ft,dropmate_internal_time_utc,last_scanned_time_utc,scan_device_type,scan_device_os,dropmate_app_version
+    0,ABC123,Good,good,5.0,true,true,12,0,12,1,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,2,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,3,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,4,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,5,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,6,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,7,2023-04-20T13:00:00Z,2023-04-20T14:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,8,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,9,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    0,ABC123,Good,good,5.0,true,true,12,0,12,10,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,1000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
+    """
+)
+
+TRUTH_CONSOLIDATED_TEN_RECORDS = [
+    "ABC123,1,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,10000,0",
+    "ABC123,2,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0",
+    "ABC123,3,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0",
+    "ABC123,4,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0",
+    "ABC123,5,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0",
+    "ABC123,6,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0",
+    "ABC123,7,2023-04-20T13:00:00Z,2023-04-20T14:00:00Z,10000,0",
+    "ABC123,8,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0",
+    "ABC123,9,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0",
+    "ABC123,10,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,1000,0",
+]
+
+
+def test_consolidated_record_sorting(tmp_path: Path) -> None:
+    sample_log = tmp_path / "dropmate_records_ten_records.csv"
+    sample_log.write_text(SAMPLE_LOG_TEN_RECORDS)
+
+    consolidated = consolidate_drop_records(
+        tmp_path, log_pattern="dropmate_records_*", out_filepath=Path(), write_file=False
+    )
+    assert consolidated == TRUTH_CONSOLIDATED_TEN_RECORDS
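
A quick standalone sketch (not part of the patch) of the bug the int(...) cast in _keyer fixes: Python compares strings lexicographically, so a flight index of "10" sorts before "2" unless it is converted to an integer first. The records sample below is hypothetical.

    # Two consolidated records for the same Dropmate UID, flight indices 10 and 2.
    records = [
        "ABC123,10,2023-04-20T13:00:00Z",
        "ABC123,2,2023-04-20T16:00:00Z",
    ]

    # Old key: (uid, index-as-string); "10" < "2" lexicographically, so flight 10 sorts first.
    by_string = sorted(records, key=lambda r: (r.split(",")[0], r.split(",")[1]))
    assert by_string[0].startswith("ABC123,10")

    # New key: (uid, index-as-int); 2 < 10 numerically, so flight 2 correctly sorts first.
    by_int = sorted(records, key=lambda r: (r.split(",")[0], int(r.split(",")[1])))
    assert by_int[0].startswith("ABC123,2,")

This is also why the new test uses ten records: with only single-digit flight indices, the string and integer keys happen to agree, and the regression would go unnoticed.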