Skip to content

Commit

Permalink
Add audit for drop records that may be stopped & started in midair
Browse files Browse the repository at this point in the history
Closes: #4
  • Loading branch information
sco1 committed Aug 31, 2023
1 parent 5313533 commit e750606
Show file tree
Hide file tree
Showing 4 changed files with 101 additions and 63 deletions.
28 changes: 15 additions & 13 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -50,23 +50,25 @@ The following environment variables are provided to help customize pipeline beha
### `dropmate audit`
Process a consolidated Dropmate log CSV.
#### Input Parameters
| Parameter | Description | Type | Default |
|------------------------|-----------------------------------------------|--------------|------------|
| `--log-filepath` | Path to Dropmate log CSV to parse. | `Path\|None` | GUI Prompt |
| `--min-alt-loss-ft` | Threshold altitude delta, feet. | `int` | `200` |
| `--min-firmware` | Threshold firmware version. | `int\|float` | `5` |
| `--time-delta-minutes` | Dropmate internal clock delta from real-time. | `int` | `60` |
| Parameter | Description | Type | Default |
|---------------------------------|------------------------------------------------------------------|--------------|------------|
| `--log-filepath` | Path to Dropmate log CSV to parse. | `Path\|None` | GUI Prompt |
| `--min-alt-loss-ft` | Threshold altitude delta, feet. | `int` | `200` |
| `--min-firmware` | Threshold firmware version. | `int\|float` | `5` |
| `--internal-time-delta-minutes` | Dropmate internal clock delta from real-time. | `int` | `60` |
| `--time-between-delta-minutes` | Delta between the start of a drop record and the end of the previous. | `int` | `10` |

### `dropmate audit-bulk`
Batch process a directory of consolidated Dropmate log CSVs.
#### Input Parameters
| Parameter | Description | Type | Default |
|------------------------|-----------------------------------------------|--------------|------------|
| `--log-dir` | Path to Dropmate log directory to parse. | `Path\|None` | GUI Prompt |
| `--log-pattern` | Dropmate log file glob pattern.<sup>1,2</sup> | `str` | `"*.csv"` |
| `--min-alt-loss-ft` | Threshold altitude delta, feet. | `int` | `200` |
| `--min-firmware` | Threshold firmware version. | `int\|float` | `5` |
| `--time-delta-minutes` | Dropmate internal clock delta from real-time. | `int` | `60` |
| Parameter | Description | Type | Default |
|---------------------------------|------------------------------------------------------------------|--------------|------------|
| `--log-dir` | Path to Dropmate log directory to parse. | `Path\|None` | GUI Prompt |
| `--log-pattern` | Dropmate log file glob pattern.<sup>1,2</sup> | `str` | `"*.csv"` |
| `--min-alt-loss-ft` | Threshold altitude delta, feet. | `int` | `200` |
| `--min-firmware` | Threshold firmware version. | `int\|float` | `5` |
| `--internal-time-delta-minutes` | Dropmate internal clock delta from real-time. | `int` | `60` |
| `--time-between-delta-minutes` | Delta between the start of a drop record and the end of the previous. | `int` | `10` |

1. Case sensitivity is deferred to the host OS
2. Recursive globbing requires manual specification (e.g. `**/*.csv`)
Expand Down
60 changes: 45 additions & 15 deletions dropmate_py/audits.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,36 +3,59 @@
from dropmate_py.parser import Dropmate


def _audit_drops(dropmate: Dropmate, min_alt_loss_ft: int) -> list[str]:
"""Audit for missing drop records and for issues with total altitude loss."""
def _audit_drops(
dropmate: Dropmate,
min_alt_loss_ft: int,
min_delta_to_next_sec: int,
) -> list[str]:
"""
Audit the drop records for the provided Dropmate.
Dropmate is audited for the following issues:
* Empty drop records
* Altitude loss below the provided threshold
* For Dropmates with more than one record, check that the beginning of the drop record is
far enough away from the end of the previous record, which may indicate that a drop has been
double counted
"""
found_issues = []

if len(dropmate.drops) == 0:
found_issues.append(f"UID {dropmate.uid} contains no drop records.")
return found_issues

# Once we've gotten here then we've already exited on an empty log so we can't have any None
# values in our DropRecords

for drop_record in dropmate.drops:
# Type guard, we should have already returned early before we get to an empty drop record
if (
drop_record.start_barometric_altitude_msl_ft is None
or drop_record.end_barometric_altitude_msl_ft is None
):
continue # pragma: no cover

altitude_loss = (
drop_record.start_barometric_altitude_msl_ft
- drop_record.end_barometric_altitude_msl_ft
)
start = drop_record.start_barometric_altitude_msl_ft
end = drop_record.end_barometric_altitude_msl_ft
altitude_loss = start - end # type: ignore[operator]
if altitude_loss < min_alt_loss_ft:
found_issues.append(
f"UID {dropmate.uid} drop #{drop_record.flight_index} below threshold altitude loss: {altitude_loss} feet." # noqa: E501
)

# Check for log start timestamps that are in close proximity to the end of the previous log,
# indicating that a jump may have ended prematurely
# We shouldn't have enough records where performance is critical, so we can just do a new loop
# rather than complicating the altitude checking one
for prev_rec, next_rec in zip(dropmate.drops, dropmate.drops[1:]):
next_start = next_rec.start_time_utc
prev_end = prev_rec.end_time_utc
start_delta = (next_start - prev_end).total_seconds() # type: ignore[operator]
if abs(start_delta) < min_delta_to_next_sec:
found_issues.append(
f"UID {dropmate.uid} drop #{next_rec.flight_index} start time below threshold from previous flight: {start_delta} seconds" # noqa: E501
)

return found_issues


def _audit_dropmate(
dropmate: Dropmate, min_firmware: float, max_scanned_time_delta_sec: int
dropmate: Dropmate,
min_firmware: float,
max_scanned_time_delta_sec: int,
) -> list[str]:
"""Audit for issues with firmware version and delta between internal and external clocks."""
found_issues = []
Expand All @@ -54,6 +77,7 @@ def _audit_dropmate(
def audit_pipeline(
consolidated_log: abc.Iterable[Dropmate],
min_alt_loss_ft: int,
min_delta_to_next_sec: int,
min_firmware: float,
max_scanned_time_delta_sec: int,
) -> list[str]:
Expand All @@ -68,6 +92,12 @@ def audit_pipeline(
max_scanned_time_delta_sec=max_scanned_time_delta_sec,
)
)
found_issues.extend(_audit_drops(dropmate, min_alt_loss_ft=min_alt_loss_ft))
found_issues.extend(
_audit_drops(
dropmate,
min_alt_loss_ft=min_alt_loss_ft,
min_delta_to_next_sec=min_delta_to_next_sec,
)
)

return found_issues
13 changes: 9 additions & 4 deletions dropmate_py/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
MIN_ALT_LOSS = 200 # feet
MIN_FIRMWARE = 5
MIN_TIME_DELTA_MINUTES = 60
MIN_DELTA_BETWEEN_MINUTES = 10

load_dotenv()
start_dir = os.environ.get("PROMPT_START_DIR", ".")
Expand All @@ -25,7 +26,8 @@ def audit(
log_filepath: Path = typer.Option(None, exists=True, file_okay=True, dir_okay=False),
min_alt_loss_ft: int = typer.Option(default=MIN_ALT_LOSS),
min_firmware: float = typer.Option(default=MIN_FIRMWARE),
time_delta_minutes: int = typer.Option(default=MIN_TIME_DELTA_MINUTES),
internal_time_delta_minutes: int = typer.Option(default=MIN_TIME_DELTA_MINUTES),
time_delta_between_minutes: int = typer.Option(default=MIN_DELTA_BETWEEN_MINUTES),
) -> None:
"""Audit a consolidated Dropmate log."""
if log_filepath is None:
Expand All @@ -46,7 +48,8 @@ def audit(
consolidated_log=conslidated_log,
min_alt_loss_ft=min_alt_loss_ft,
min_firmware=min_firmware,
max_scanned_time_delta_sec=time_delta_minutes * 60,
max_scanned_time_delta_sec=internal_time_delta_minutes * 60,
min_delta_to_next_sec=time_delta_between_minutes * 60,
)

print(f"Found {len(found_errs)} errors.")
Expand All @@ -61,7 +64,8 @@ def audit_bulk(
log_pattern: str = typer.Option("*.csv"),
min_alt_loss_ft: int = typer.Option(default=MIN_ALT_LOSS),
min_firmware: float = typer.Option(default=MIN_FIRMWARE),
time_delta_minutes: int = typer.Option(default=MIN_TIME_DELTA_MINUTES),
internal_time_delta_minutes: int = typer.Option(default=MIN_TIME_DELTA_MINUTES),
time_delta_between_minutes: int = typer.Option(default=MIN_DELTA_BETWEEN_MINUTES),
) -> None:
"""Audit a directory of consolidated Dropmate logs."""
if log_dir is None:
Expand All @@ -84,7 +88,8 @@ def audit_bulk(
consolidated_log=compiled_logs,
min_alt_loss_ft=min_alt_loss_ft,
min_firmware=min_firmware,
max_scanned_time_delta_sec=time_delta_minutes * 60,
max_scanned_time_delta_sec=internal_time_delta_minutes * 60,
min_delta_to_next_sec=time_delta_between_minutes * 60,
)

print(f"Found {len(found_errs)} errors.")
Expand Down
63 changes: 32 additions & 31 deletions tests/test_audits.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,15 @@

from dropmate_py import audits, parser

DATE_P = partial(dt.datetime, year=2023, month=4, day=20, second=0, tzinfo=dt.timezone.utc)

DROPMATE_P = partial(
parser.Dropmate,
uid="ABC123",
drops=[],
firmware_version=5.1,
dropmate_internal_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=12, minute=30, second=0, tzinfo=dt.timezone.utc
),
last_scanned_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=12, minute=30, second=0, tzinfo=dt.timezone.utc
),
dropmate_internal_time_utc=DATE_P(hour=12, minute=30),
last_scanned_time_utc=DATE_P(hour=12, minute=30),
)


Expand All @@ -27,20 +25,12 @@
device_health=parser.Health.GOOD,
firmware_version=5.1,
flight_index=1,
start_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=11, minute=00, second=0, tzinfo=dt.timezone.utc
),
end_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=11, minute=30, second=0, tzinfo=dt.timezone.utc
),
start_time_utc=DATE_P(hour=11, minute=00),
end_time_utc=DATE_P(hour=11, minute=30),
start_barometric_altitude_msl_ft=1000,
end_barometric_altitude_msl_ft=0,
dropmate_internal_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=12, minute=30, second=0, tzinfo=dt.timezone.utc
),
last_scanned_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=12, minute=30, second=0, tzinfo=dt.timezone.utc
),
dropmate_internal_time_utc=DATE_P(hour=12, minute=30),
last_scanned_time_utc=DATE_P(hour=12, minute=30),
)

DROP_RECORD_AUDIT_CASES = (
Expand Down Expand Up @@ -95,12 +85,30 @@
),
1,
),
(
DROPMATE_P(
drops=[
DROP_RECORD_P(
start_barometric_altitude_msl_ft=1000, end_barometric_altitude_msl_ft=0
),
DROP_RECORD_P(
flight_index=2,
start_barometric_altitude_msl_ft=1000,
end_barometric_altitude_msl_ft=0,
start_time_utc=DATE_P(hour=11, minute=31),
),
]
),
1,
),
)


@pytest.mark.parametrize(("dropmate", "n_expected_errors"), DROP_RECORD_AUDIT_CASES)
def test_audit_drops(dropmate: parser.Dropmate, n_expected_errors: int) -> None:
reported_errors = audits._audit_drops(dropmate=dropmate, min_alt_loss_ft=200)
reported_errors = audits._audit_drops(
dropmate=dropmate, min_alt_loss_ft=200, min_delta_to_next_sec=600
)
assert len(reported_errors) == n_expected_errors


Expand All @@ -109,24 +117,16 @@ def test_audit_drops(dropmate: parser.Dropmate, n_expected_errors: int) -> None:
(DROPMATE_P(firmware_version=1.0), 1),
(
DROPMATE_P(
dropmate_internal_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=14, minute=30, second=0, tzinfo=dt.timezone.utc
),
last_scanned_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=12, minute=30, second=0, tzinfo=dt.timezone.utc
),
dropmate_internal_time_utc=DATE_P(hour=14, minute=30),
last_scanned_time_utc=DATE_P(hour=12, minute=30),
),
1,
),
(
DROPMATE_P(
firmware_version=1.0,
dropmate_internal_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=14, minute=30, second=0, tzinfo=dt.timezone.utc
),
last_scanned_time_utc=dt.datetime(
year=2023, month=4, day=20, hour=12, minute=30, second=0, tzinfo=dt.timezone.utc
),
dropmate_internal_time_utc=DATE_P(hour=14, minute=30),
last_scanned_time_utc=DATE_P(hour=12, minute=30),
),
2,
),
Expand Down Expand Up @@ -165,5 +165,6 @@ def test_audit_pipeline() -> None:
min_alt_loss_ft=200,
min_firmware=5.1,
max_scanned_time_delta_sec=3600,
min_delta_to_next_sec=600,
)
assert len(reported_errors) == 2

0 comments on commit e750606

Please sign in to comment.