Fix consolidation sorting for devices with >9 logs
sco1 committed Sep 20, 2023
1 parent aed7e6b commit 3654c97
Showing 2 changed files with 47 additions and 5 deletions.
12 changes: 7 additions & 5 deletions dropmate_py/log_utils.py
@@ -13,22 +13,23 @@
 )
 
 
-def _keyer(short_record: str) -> tuple[str, str]:  # pragma: no cover
+def _keyer(short_record: str) -> tuple[str, int]:  # pragma: no cover
     """
     Sorting key based on consolidated drop record.
 
     NOTE: It is assumed that the first column is the Dropmate UID and second column is the flight
     index.
     """
     split_record = short_record.split(",")
-    return (split_record[0], split_record[1])
+    return (split_record[0], int(split_record[1]))
 
 
 def consolidate_drop_records(
     log_dir: Path,
     log_pattern: str,
     out_filepath: Path,
     keep_headers: abc.Sequence[str] = CONSOLIDATED_HEADERS,
+    write_file: bool = True,
 ) -> list[str]:
     """
     Merge a directory of Dropmate drop record outputs into a deduplicated, simplified drop record.
@@ -61,8 +62,9 @@ def consolidate_drop_records(
 
     consolidated_records.sort(key=_keyer)
 
-    with out_filepath.open("w") as f:
-        f.write(f"{','.join(keep_headers)}\n")
-        f.writelines(f"{record}\n" for record in consolidated_records)
+    if write_file:
+        with out_filepath.open("w") as f:
+            f.write(f"{','.join(keep_headers)}\n")
+            f.writelines(f"{record}\n" for record in consolidated_records)
 
     return consolidated_records
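
Why the key change matters, as a standalone sketch (illustrative values only, not part of the library): Python compares strings character by character, so with the old all-string key a flight index of "10" sorts between "1" and "2". Casting the index to int restores numeric ordering for devices with more than 9 logs.

records = [
    "ABC123,1,2023-04-20T13:00:00Z",
    "ABC123,2,2023-04-20T16:00:00Z",
    "ABC123,10,2023-04-20T13:00:00Z",
]

# Old key: both tuple members are strings, so "10" < "2" lexicographically.
by_str = sorted(records, key=lambda r: (r.split(",")[0], r.split(",")[1]))
print(by_str)  # index 10 lands between 1 and 2

# New key: the flight index is cast to int, restoring 1, 2, 10 ordering.
by_int = sorted(records, key=lambda r: (r.split(",")[0], int(r.split(",")[1])))
print(by_int)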
40 changes: 40 additions & 0 deletions tests/test_consolidate.py
@@ -45,3 +45,43 @@ def test_consolidate_drop_records(tmp_path: Path) -> None:

out_data = out_log.read_text()
assert out_data == TRUTH_CONSOLIDATED


SAMPLE_LOG_TEN_RECORDS = dedent(
"""\
serial_number,uid,battery,device_health,firmware_version,log_timestamp,log_altitude,total_flights,flights_over_18kft,recorded_flights,flight_index,start_time_utc,end_time_utc,start_barometric_altitude_msl_ft,end_barometric_altitude_msl_ft,dropmate_internal_time_utc,last_scanned_time_utc,scan_device_type,scan_device_os,dropmate_app_version
0,ABC123,Good,good,5.0,true,true,12,0,12,1,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,2,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,3,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,4,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,5,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,6,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,7,2023-04-20T13:00:00Z,2023-04-20T14:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,8,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,9,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
0,ABC123,Good,good,5.0,true,true,12,0,12,10,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,1000,0,2023-04-20T14:00:00Z,2023-04-20T14:00:00.003Z,SM G965U1,29,1.5.16
"""
)

TRUTH_CONSOLIDATED_TEN_RECORDS = [
"ABC123,1,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,10000,0",
"ABC123,2,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0",
"ABC123,3,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0",
"ABC123,4,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0",
"ABC123,5,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0",
"ABC123,6,2023-04-20T14:00:00Z,2023-04-20T14:00:00Z,10000,0",
"ABC123,7,2023-04-20T13:00:00Z,2023-04-20T14:00:00Z,10000,0",
"ABC123,8,2023-04-20T18:00:00Z,2023-04-20T18:00:00Z,10000,0",
"ABC123,9,2023-04-20T16:00:00Z,2023-04-20T16:00:00Z,10000,0",
"ABC123,10,2023-04-20T13:00:00Z,2023-04-20T13:00:00Z,1000,0",
]


def test_consolidated_record_sorting(tmp_path: Path) -> None:
sample_log = tmp_path / "dropmate_records_ten_records.csv"
sample_log.write_text(SAMPLE_LOG_TEN_RECORDS)

consolidated = consolidate_drop_records(
tmp_path, log_pattern="dropmate_records_*", out_filepath=Path(), write_file=False
)
assert consolidated == TRUTH_CONSOLIDATED_TEN_RECORDS
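
For reference, a hedged usage sketch of the updated consolidate_drop_records signature (the directory and file names below are hypothetical): the new write_file flag lets a caller, like the test above, get the sorted records back without touching disk, while the default still writes the consolidated CSV.

from pathlib import Path

from dropmate_py.log_utils import consolidate_drop_records

# Default behavior: write the consolidated CSV and also return the records.
records = consolidate_drop_records(
    log_dir=Path("./drop_logs"),  # hypothetical log directory
    log_pattern="dropmate_records_*",
    out_filepath=Path("./consolidated.csv"),
)

# In-memory only: skip the file write and just use the sorted records.
records = consolidate_drop_records(
    log_dir=Path("./drop_logs"),
    log_pattern="dropmate_records_*",
    out_filepath=Path(),  # ignored when write_file=False
    write_file=False,
)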
