From af943fa1309d8279d7496498c3c24e08d166ace9 Mon Sep 17 00:00:00 2001 From: Simon Lloyd Date: Fri, 20 Mar 2026 14:39:42 +0000 Subject: [PATCH] refactor: quality stats integrated into read models and tui --- CHANGELOG.md | 2 + docs/development/database-models.md | 3 +- docs/usage/alignment.md | 42 +- docs/usage/api.md | 125 ++++- docs/usage/event-selection.md | 45 +- docs/usage/iccs-stack.md | 59 ++- docs/usage/index.md | 85 +++- docs/usage/inspection.md | 26 +- docs/usage/mccc.md | 13 +- docs/usage/project.md | 8 +- docs/usage/snapshots.md | 96 +++- pyproject.toml | 1 + src/aimbat/_cli/common/_parameters.py | 109 +++++ src/aimbat/_cli/event.py | 141 ++++++ src/aimbat/_cli/seismogram.py | 53 ++ src/aimbat/_cli/snapshot.py | 137 ++++++ src/aimbat/_cli/station.py | 138 ++++++ src/aimbat/_tui/_format.py | 118 +++-- src/aimbat/_tui/_widgets.py | 178 ++++++- src/aimbat/_tui/aimbat.tcss | 98 ++-- src/aimbat/_tui/app.py | 334 ++++++++++--- src/aimbat/_tui/help/tab-project.md | 101 ++++ src/aimbat/_tui/help/tab-seismograms.md | 111 +++++ src/aimbat/_tui/help/tab-snapshots.md | 95 ++++ src/aimbat/_tui/modals.py | 149 ++++-- src/aimbat/core/__init__.py | 7 +- src/aimbat/core/_event.py | 239 +++++++--- src/aimbat/core/_note.py | 49 ++ src/aimbat/core/_quality.py | 610 ------------------------ src/aimbat/core/_snapshot.py | 78 +++ src/aimbat/core/_station.py | 90 ++++ src/aimbat/models/_models.py | 166 ++++++- src/aimbat/models/_readers.py | 212 +++++++- tests/functional/test_tui.py | 16 +- tests/integration/core/test_views.py | 349 +++++++------- tests/integration/models/test_note.py | 88 ++++ uv.lock | 102 ++-- 37 files changed, 3036 insertions(+), 1237 deletions(-) create mode 100644 src/aimbat/_tui/help/tab-project.md create mode 100644 src/aimbat/_tui/help/tab-seismograms.md create mode 100644 src/aimbat/_tui/help/tab-snapshots.md create mode 100644 src/aimbat/core/_note.py delete mode 100644 src/aimbat/core/_quality.py create mode 100644 
tests/integration/models/test_note.py diff --git a/CHANGELOG.md b/CHANGELOG.md index c83fcd2..72cd634 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -90,6 +90,8 @@ All notable changes to the **AIMBAT** project will be documented in this file. - Active event -> default event - Use default event concept only for cli commands - Update plot seismograms. +- Remove dead code ([#231](https://github.com/pysmo/aimbat/issues/231)) +- Quality stats integrated into read models and tui ### πŸš€ New Features diff --git a/docs/development/database-models.md b/docs/development/database-models.md index eedde20..9d675d8 100644 --- a/docs/development/database-models.md +++ b/docs/development/database-models.md @@ -34,7 +34,6 @@ erDiagram AimbatEvent { uuid id PK - bool is_default UK timestamp time UK float latitude float longitude @@ -81,7 +80,7 @@ erDiagram AimbatSnapshot { uuid id PK - timestamp date UK + timestamp time UK string comment string parameters_hash uuid event_id FK diff --git a/docs/usage/alignment.md b/docs/usage/alignment.md index 3057694..26ceb39 100644 --- a/docs/usage/alignment.md +++ b/docs/usage/alignment.md @@ -17,13 +17,22 @@ making further adjustments. 
## Running ICCS -=== "CLI / Shell" +=== "CLI" ```bash - aimbat align iccs # basic run - aimbat align iccs --autoflip # flip inverted polarity automatically - aimbat align iccs --autoselect # deselect poor-quality seismograms automatically - aimbat align iccs --autoflip --autoselect # both + aimbat align iccs # basic run + aimbat align iccs --autoflip # flip inverted polarity automatically + aimbat align iccs --autoselect # deselect poor-quality seismograms automatically + aimbat align iccs --autoflip --autoselect # both + ``` + +=== "Shell" + + ```bash + align iccs # basic run + align iccs --autoflip # flip inverted polarity automatically + align iccs --autoselect # deselect poor-quality seismograms automatically + align iccs --autoflip --autoselect # both ``` === "TUI" @@ -101,19 +110,26 @@ In addition to setting parameters directly, three tools let you adjust values by interacting with the plot β€” clicking or scrolling in a waveform display rather than typing numbers. -=== "CLI / Shell" +=== "CLI" + + ```bash + aimbat pick phase # adjust t1 by clicking on the stack + aimbat pick window # set window_pre / window_post by clicking + aimbat pick ccnorm # set min_ccnorm by scrolling the matrix image + ``` + +=== "Shell" ```bash - aimbat pick phase # adjust t1 by clicking on the stack - aimbat pick window # set window_pre / window_post by clicking - aimbat pick ccnorm # set min_ccnorm by scrolling the matrix image + pick phase # adjust t1 by clicking on the stack + pick window # set window_pre / window_post by clicking + pick ccnorm # set min_ccnorm by scrolling the matrix image ``` - Each command opens a matplotlib window. Click (or scroll, for ccnorm) to - set the value, then close the window to save it. +Each command opens a matplotlib window. Click (or scroll, for ccnorm) to +set the value, then close the window to save it. - All three accept `--no-context` and `--all` (include deselected - seismograms). 
+All three accept `--no-context` and `--all` (include deselected seismograms). === "TUI" diff --git a/docs/usage/api.md b/docs/usage/api.md index 00e1465..d04abfa 100644 --- a/docs/usage/api.md +++ b/docs/usage/api.md @@ -1,9 +1,16 @@ # Python API -The CLI, shell, TUI, and GUI all use the same underlying Python library. You -can use it directly for custom scripts, automation, or workflows that go beyond -what the other interfaces expose. See the full [API reference](../api/aimbat.md) -for a complete listing. +After running ICCS and MCCC alignment across many events, the accumulated +quality metrics span stations and events in ways that are natural to analyse +with pandas and matplotlib but impossible from the CLI or TUI. The Python API +is the primary interface for that kind of post-processing quality analysis: you +query `AimbatSeismogram`, `AimbatStation`, and `AimbatEvent` records directly, +build DataFrames, and apply whatever aggregation or visualisation you need. + +The same API also drives the CLI and TUI internally, so it covers the full +workflow β€” data ingestion, parameter management, alignment, snapshots β€” not +just quality analysis. See the full [API reference](../api/aimbat.md) for a +complete listing. !!! note "Writing seismogram data" [`AimbatSeismogram.data`][aimbat.models.AimbatSeismogram.data] is backed by @@ -149,6 +156,116 @@ with Session(engine) as session: ) ``` +## Quality Analysis + +After alignment has been run across a set of events, each seismogram carries +quality metrics that can be queried directly from the database. The sections +below show the most common patterns. 
+ +### Quality data model + +Per-seismogram metrics are stored in `AimbatSeismogramQuality` and accessed via +`seismogram.quality`: + +| Attribute | Description | +|---|---| +| `iccs_cc` | ICCS cross-correlation with the stack | +| `mccc_cc_mean` | MCCC waveform quality β€” mean CC across seismogram pairs | +| `mccc_cc_std` | MCCC waveform consistency β€” std of CC across pairs | +| `mccc_error` | MCCC timing precision (`pd.Timedelta`, SEM from covariance matrix) | + +The per-event MCCC global array fit is stored in `AimbatEventQuality` and +accessed via `event.quality`: + +| Attribute | Description | +|---|---| +| `mccc_rmse` | Global array fit (`pd.Timedelta`) | + +### Build a per-seismogram DataFrame across all events + +The most flexible starting point is a flat DataFrame with one row per +seismogram: + +```python +from sqlalchemy.orm import selectinload +from sqlmodel import Session, select +import pandas as pd + +from aimbat.db import engine +from aimbat.models import AimbatSeismogram +from aimbat.utils import rel + +with Session(engine) as session: + seismograms = session.exec( + select(AimbatSeismogram).options( + selectinload(rel(AimbatSeismogram.station)), + selectinload(rel(AimbatSeismogram.event)), + selectinload(rel(AimbatSeismogram.quality)), + ) + ).all() + + rows = [] + for seis in seismograms: + q = seis.quality + rows.append({ + "station": f"{seis.station.network}.{seis.station.name}", + "event_time": seis.event.time, + "iccs_cc": q.iccs_cc if q else None, + "mccc_cc_mean": q.mccc_cc_mean if q else None, + "mccc_error_s": q.mccc_error.total_seconds() if (q and q.mccc_error) else None, + }) + +df = pd.DataFrame(rows) +``` + +From here you can groupby station, pivot on event, filter by quality threshold, +or feed the result directly into matplotlib. 
+ +### Station-level quality summary + +`SeismogramQualityStats.from_station` aggregates all per-seismogram metrics +across every event recorded at a station: + +```python +from aimbat.models import AimbatSeismogram, AimbatStation, SeismogramQualityStats + +with Session(engine) as session: + stations = session.exec( + select(AimbatStation).options( + selectinload(rel(AimbatStation.seismograms)).selectinload( + rel(AimbatSeismogram.quality) + ) + ) + ).all() + stats = [SeismogramQualityStats.from_station(s) for s in stations] +``` + +Each `stats` item exposes `cc_mean`, `mccc_cc_mean`, and `mccc_error` as +(mean, SEM) pairs aggregated across all events at that station. + +### Event-level quality summary + +`SeismogramQualityStats.from_event` aggregates per-seismogram metrics for a +single event and also carries the global `mccc_rmse` array-fit value: + +```python +from aimbat.models import AimbatEvent, AimbatSeismogram, SeismogramQualityStats + +with Session(engine) as session: + events = session.exec( + select(AimbatEvent).options( + selectinload(rel(AimbatEvent.seismograms)).selectinload( + rel(AimbatSeismogram.quality) + ), + selectinload(rel(AimbatEvent.quality)), + ) + ).all() + stats = [SeismogramQualityStats.from_event(e) for e in events] +``` + +`mccc_rmse` on each stats object is the global array fit for that event β€” +useful for comparing event difficulty across a dataset. + ## Worked Example The script below builds a complete project from scratch. It loads **3 events**, diff --git a/docs/usage/event-selection.md b/docs/usage/event-selection.md index 8a6eeb4..c98fccd 100644 --- a/docs/usage/event-selection.md +++ b/docs/usage/event-selection.md @@ -6,15 +6,21 @@ one event at a time. ## Listing events -=== "CLI / Shell" +=== "CLI" ```bash aimbat event list ``` - The table shows each event's ID, time, and location. IDs are displayed in - their shortest unambiguous form β€” use any unique prefix when passing an - ID to other commands. 
+=== "Shell" + + ```bash + event list + ``` + +The table shows each event's ID, time, and location. IDs are displayed in +their shortest unambiguous form β€” use any unique prefix when passing an +ID to other commands. === "TUI" @@ -26,25 +32,32 @@ one event at a time. --- -## Selecting an Event for CLI / Shell +## Selecting an Event for the CLI and Shell Most processing commands (like `aimbat align iccs` or `aimbat snapshot create`) operate on a single event. You can specify the target event in two ways: -### 1. The `--event-id` flag (or `--event`) +### 1. Positional argument -Pass the ID directly to any command. You can use the full UUID or any unique -prefix: +Pass the ID directly as the first argument. You can use the full UUID or any +unique prefix: ```bash -aimbat align iccs --event-id 6a4a +aimbat align iccs 6a4a +``` + +The named forms `--event` and `--event-id` are also accepted and behave +identically: + +```bash +aimbat align iccs --event 6a4a ``` ### 2. The `DEFAULT_EVENT_ID` environment variable -If you are working on the same event for multiple commands, you can set the -`DEFAULT_EVENT_ID` environment variable in your shell. This tells AIMBAT to -use that event whenever the `--event-id` flag is omitted: +If you are working on the same event for multiple commands, set the +`DEFAULT_EVENT_ID` environment variable in your shell. AIMBAT uses it +whenever no explicit ID is provided: ```bash export DEFAULT_EVENT_ID=6a4a @@ -52,9 +65,15 @@ aimbat align iccs aimbat snapshot create "post-ICCS" ``` -The shell prompt also reflects this ID when set. To clear it, simply unset the +The shell prompt also reflects this ID when set. To clear it, unset the variable: `unset DEFAULT_EVENT_ID`. +!!! note + `DEFAULT_EVENT_ID` is a plain shell environment variable consumed directly + by the CLI argument parser. It is **not** an AIMBAT setting: it has no + `AIMBAT_` prefix, cannot be set in `.env`, and does not appear in + `aimbat settings list`. 
+ --- ## Selecting an event for processing (TUI / GUI) diff --git a/docs/usage/iccs-stack.md b/docs/usage/iccs-stack.md index e9c1dbc..74eed42 100644 --- a/docs/usage/iccs-stack.md +++ b/docs/usage/iccs-stack.md @@ -1,5 +1,34 @@ # The ICCS Stack +## Live data + +The TUI's **Live data** tab shows the seismogram table for the currently +selected event. The name reflects the data's nature: it is always derived +directly from the in-memory ICCS instance and therefore always reflects the +current working state of the project. + +Concretely, this means: + +- **CC values** shown in the table come from `ICCS.ccs` β€” a cached property + that cross-correlates each seismogram against the current stack on first + access and clears automatically whenever parameters change. You do not need + to run `align iccs` to see CC values; they exist as soon as seismograms are + loaded. +- **Picks** (`t1`), **select** and **flip** flags all reflect the values + stored in the database and loaded into the ICCS instance. Any change made + from the CLI, shell, or TUI row-action menu is reflected immediately, without + restarting any interface. + +This is deliberately different from **Snapshots**, which capture a frozen copy +of all parameters at a point in time. Live data is the working set you are +actively adjusting; snapshots are the checkpoints you save along the way. + +The TUI polls the database every five seconds to detect changes made externally +(e.g. from the CLI or shell) and silently rebuilds the ICCS instance if +necessary, keeping the Live data tab in sync. + +--- + ## How the stack is assembled At the start of each ICCS run, each seismogram is windowed around the current @@ -13,7 +42,7 @@ Each seismogram is then cross-correlated with this stack to determine the time shift that aligns it most closely. The picks (`t1`) are updated with these refined shifts and the stack is rebuilt from the newly aligned seismograms. 
This process repeats iteratively β€” each new stack is better aligned than the -last β€” until the picks converge. The CC norm produced at each iteration +last β€” until the picks converge. The CC value produced at each iteration quantifies how closely each seismogram matches the current stack. Because every seismogram is correlated against the stack rather than against @@ -47,13 +76,13 @@ the two representations is always visible. ## Viewing the stack The **stack view** overlays all individual seismograms as thin lines on top of -the bold stack waveform. Lines are coloured by their CC norm on a light-blue-to-pink scale using a +the bold stack waveform. Lines are coloured by their CC on a light-blue-to-pink scale using a power-law normalisation (Ξ³ = 2), which compresses the low end and spreads out the high end. Differences among well-aligned seismograms are therefore more visually distinct than differences among poorly-matching ones, making it easy to identify which traces are contributing most to the stack. -=== "CLI / Shell" +=== "CLI" ```bash aimbat plot stack # context mode (default) @@ -61,6 +90,14 @@ it easy to identify which traces are contributing most to the stack. aimbat plot stack --all # include deselected seismograms ``` +=== "Shell" + + ```bash + plot stack + plot stack --no-context + plot stack --all + ``` + === "TUI" Press `t` to open the Tools menu and choose **Plot stack**. Before @@ -78,14 +115,14 @@ it easy to identify which traces are contributing most to the stack. The **matrix image** plots each seismogram as a horizontal row in a 2-D colour image, with time on the x-axis and one row per seismogram. Rows are -sorted by CC norm, so the best-aligned seismograms appear at the top and the +sorted by CC, so the best-aligned seismograms appear at the top and the worst at the bottom. This layout makes it easy to spot systematic misalignment or outlier traces that stand out from the rest of the array. 
The same time window highlight and `context` / `--no-context` toggle apply as in the stack view. -=== "CLI / Shell" +=== "CLI" ```bash aimbat plot matrix @@ -93,6 +130,14 @@ in the stack view. aimbat plot matrix --all ``` +=== "Shell" + + ```bash + plot matrix + plot matrix --no-context + plot matrix --all + ``` + === "TUI" Press `t` and choose **Plot matrix image**. @@ -111,7 +156,7 @@ The two views complement each other: phase arrival β€” the waveform shape of the stack and its coherence with individual traces is immediately apparent. - **Matrix image** is better for spotting patterns: a cluster of rows at the - bottom with poor CC norms, a seismogram whose polarity is inverted (shows as + bottom with poor CCs, a seismogram whose polarity is inverted (shows as an opposite-coloured band), or a group of traces that are consistently shifted in one direction. @@ -127,7 +172,7 @@ used when interactively adjusting the phase pick, time window, and minimum CC norm threshold. Which view is presented depends on the tool and can usually be chosen before launching it. -During interactive adjustment of the minimum CC norm, the matrix image gains +During interactive adjustment of the minimum CC, the matrix image gains an additional behaviour: scrolling the mouse wheel removes rows from the top, progressively revealing where the well-aligned seismograms end and the poor ones begin. The point where the remaining rows stop looking coherent is a diff --git a/docs/usage/index.md b/docs/usage/index.md index 7d2aebb..914acc3 100644 --- a/docs/usage/index.md +++ b/docs/usage/index.md @@ -21,15 +21,21 @@ runs, prints its result, and exits. It is the natural choice for scripting, batch jobs, and any task where you already know what you want to do. Every command accepts `--help` for a full option listing. Most processing -commands require an event to operate on. You can pass an explicit `--event` -flag: +commands require an event to operate on. 
Pass the event ID as a positional +argument: + +```bash +aimbat align iccs 6a4a +``` + +You can also use the named form (`--event` or `--event-id`) if you prefer: ```bash aimbat align iccs --event 6a4a ``` -Alternatively, you can set the `DEFAULT_EVENT_ID` environment variable to -avoid passing the flag every time: +Alternatively, set the `DEFAULT_EVENT_ID` environment variable to avoid +repeating the ID every time: ```bash export DEFAULT_EVENT_ID=6a4a @@ -44,7 +50,7 @@ in shell scripts: ```bash aimbat project create aimbat data add *.sac -aimbat event default $(aimbat event dump | jq -r '.[0].id') +export DEFAULT_EVENT_ID=$(aimbat event dump | jq -r '.[0].id') aimbat snapshot create "initial import" aimbat align iccs --autoflip --autoselect aimbat align mccc @@ -101,16 +107,16 @@ without leaving the terminal. #### Layout ``` -β”Œβ”€ AIMBAT ───────────────────────────────────────────────────────┐ -β”‚ β–Ά 2000-01-01 12:00 | 45.1Β°, 120.4Β° ● ICCS ready (abc12345) β”‚ ← event bar -β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€ -β”‚ Project β”‚ Seismograms β”‚ Snapshots β”‚ ← tabs -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ ... 
β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€ -β”‚ e Events a Align t Tools p Parameters n Snapshot q Quit β”‚ ← footer -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +β”Œβ”€ AIMBAT ────────────────────────────────────────────────────────┐ +β”‚ β–Ά 2000-01-01 12:00:00 | 45.100Β°, 120.400Β° ● ICCS ready β”‚ ← event bar +β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€ +β”‚ Project β”‚ Live data β”‚ Snapshots β”‚ ← tabs +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ ... β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€ +β”‚ e Events d Add Data a Align t Tools p Parameters ... 
q Quitβ”‚ ← footer +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ ``` The **event bar** shows the event currently selected for processing and the @@ -120,6 +126,14 @@ The **footer** lists the available key bindings. Actions that require an event to be selected (Align, Tools, Parameters, New Snapshot) only appear once one is chosen. +#### Tabs + +The TUI has three tabs: + +- **Project** β€” two tables side by side: the events in the project and the stations. Pressing `Enter` on an event row lets you select it, mark it completed, view its seismograms, or delete it. +- **Live data** β€” the seismogram table for the currently selected event. "Live" means the table always reflects the current in-memory ICCS state: picks, CC norms, and select/flip flags update immediately as you run alignment or change parameters, without any manual refresh. See [The ICCS Stack](iccs-stack.md) for a detailed explanation. +- **Snapshots** β€” a list of saved parameter snapshots for the selected event with a quality summary panel. + #### Navigation Switch tabs with `H` / `L` (vim-style) or with the mouse. All tables support: @@ -131,10 +145,27 @@ Switch tabs with `H` / `L` (vim-style) or with the mouse. All tables support: | `g` / `G` | Jump to top / bottom | | `Enter` | Open row action menu | -#### Seismogram row actions +#### Row actions + +Pressing `Enter` on any table row opens a context menu. 
Available actions depend on the tab: -Pressing `Enter` on a seismogram row opens a context menu with the following -actions: +**Project β€” Events table:** + +| Action | Description | +|--------|-------------| +| Select event | Make this the active event for processing | +| Toggle completed | Mark or unmark the event as completed | +| View seismograms | Switch to the Live data tab filtered to this event | +| Delete event | Remove the event and its seismograms from the project | + +**Project β€” Stations table:** + +| Action | Description | +|--------|-------------| +| View seismograms | Switch to the Live data tab filtered to this station | +| Delete station | Remove the station from the project | + +**Live data β€” Seismograms table:** | Action | Description | |--------|-------------| @@ -212,10 +243,17 @@ export AIMBAT_PROJECT=/path/to/my/project.db ### Event selection Projects can contain multiple seismic events. Most commands operate on a single -event at a time. You can choose the target event by passing the `--event-id` -(or `--event`) flag to any command. +event at a time. Pass the event ID as a positional argument: -For convenience, you can also set the `DEFAULT_EVENT_ID` environment variable: +```bash +aimbat align iccs 6a4a +``` + +The named forms `--event` and `--event-id` are also accepted and behave +identically. IDs can be the full UUID or any unique prefix. + +For convenience, set the `DEFAULT_EVENT_ID` environment variable to avoid +repeating the ID: ```bash export DEFAULT_EVENT_ID=6a4a @@ -226,6 +264,11 @@ whenever an explicit ID is omitted. The shell prompt also reflects this ID. The TUI and GUI maintain their own event selection independently and never change it. +Note that `DEFAULT_EVENT_ID` is a plain shell environment variable consumed +directly by the CLI argument parser β€” it has no `AIMBAT_` prefix, cannot be +set in `.env`, and does not appear in `aimbat settings list`. See +[Selecting an Event](event-selection.md) for details. 
+ ### The ICCS instance When you work on an event, AIMBAT builds an **ICCS instance** β€” an in-memory diff --git a/docs/usage/inspection.md b/docs/usage/inspection.md index 51f5002..e2aa21e 100644 --- a/docs/usage/inspection.md +++ b/docs/usage/inspection.md @@ -16,11 +16,16 @@ epicentral distance, with absolute time on the x-axis. This gives an immediate overview of the array β€” coherent arrivals should appear as a roughly linear moveout across the traces. -=== "CLI / Shell" +=== "CLI" ```bash - aimbat plot seismograms - aimbat plot seismograms --event # specific event + aimbat plot seismograms + ``` + +=== "Shell" + + ```bash + plot seismograms ``` === "TUI" @@ -32,6 +37,10 @@ moveout across the traces. Select an event in the **Project** tab and click **View seismograms**. +When there are many traces, only a subset is shown initially. Scroll the mouse +wheel to pan through the remaining traces; hold **Shift** and scroll to pan +along the time axis. + **What to look for:** - Traces that are flat, clipped, or visually incoherent with the rest of the array @@ -49,17 +58,26 @@ to the pick; traces are stacked vertically in chronological order. This view is useful for checking whether a station is consistently problematic across multiple events, or whether an issue is isolated to one. -=== "CLI / Shell" +=== "CLI" ```bash aimbat station plotseis ``` +=== "Shell" + + ```bash + station plotseis + ``` + === "TUI" In the **Project** tab, navigate to the **Stations** table, press `Enter` on a row, and choose **View seismograms**. +The same scroll behaviour applies: scroll to pan through traces, shift+scroll +to pan the time axis. + === "API" ```python diff --git a/docs/usage/mccc.md b/docs/usage/mccc.md index d621f94..c36d9ac 100644 --- a/docs/usage/mccc.md +++ b/docs/usage/mccc.md @@ -58,11 +58,18 @@ a filter β€” is done beforehand with ICCS. 
## Running MCCC -=== "CLI / Shell" +=== "CLI" ```bash - aimbat align mccc # selected seismograms only - aimbat align mccc --all # include deselected seismograms + aimbat align mccc # selected seismograms only + aimbat align mccc --all # include deselected seismograms + ``` + +=== "Shell" + + ```bash + align mccc # selected seismograms only + align mccc --all # include deselected seismograms ``` === "TUI" diff --git a/docs/usage/project.md b/docs/usage/project.md index 4b7fb76..cde1760 100644 --- a/docs/usage/project.md +++ b/docs/usage/project.md @@ -5,12 +5,18 @@ Before adding data, a project must be initialised. This creates the database schema in a new SQLite file. -=== "CLI / Shell" +=== "CLI" ```bash aimbat project create ``` +=== "Shell" + + ```bash + project create + ``` + === "TUI" Launch the TUI β€” if no project is found in the current directory, a prompt diff --git a/docs/usage/snapshots.md b/docs/usage/snapshots.md index cc50238..0f5434c 100644 --- a/docs/usage/snapshots.md +++ b/docs/usage/snapshots.md @@ -48,14 +48,21 @@ point available is worth it. ## Creating a snapshot -=== "CLI / Shell" +=== "CLI" ```bash - aimbat snapshot create # no comment - aimbat snapshot create "after bandpass 1–3Hz" # with comment + aimbat snapshot create # no comment + aimbat snapshot create "after bandpass 1–3Hz" # with comment ``` - The comment is optional but useful for identifying the snapshot later. +=== "Shell" + + ```bash + snapshot create # no comment + snapshot create "after bandpass 1–3Hz" # with comment + ``` + +The comment is optional but useful for identifying the snapshot later. === "TUI" @@ -71,15 +78,22 @@ point available is worth it. 
## Listing snapshots -=== "CLI / Shell" +=== "CLI" ```bash - aimbat snapshot list # for the default event - aimbat snapshot list --all-events # across all events + aimbat snapshot list # for a specific event + aimbat snapshot list --all-events # across all events ``` - The table shows the snapshot ID, date and time, comment, and number of - seismograms captured. +=== "Shell" + + ```bash + snapshot list # uses the current event context + snapshot list --all-events + ``` + +The table shows the snapshot ID, date and time, comment, and number of +seismograms captured. === "TUI" @@ -97,18 +111,26 @@ point available is worth it. Before rolling back, it can be useful to see what a snapshot contains. -=== "CLI / Shell" +=== "CLI" + + ```bash + aimbat snapshot details # view saved event parameters + aimbat snapshot preview # view stack plot + aimbat snapshot preview --matrix # view matrix image + ``` + +=== "Shell" ```bash - aimbat snapshot details # view saved event parameters - aimbat snapshot preview # view stack plot - aimbat snapshot preview --matrix # view matrix image + snapshot details + snapshot preview + snapshot preview --matrix ``` - `details` shows the event-level parameters (window, filter, min_ccnorm) as - they were when the snapshot was taken. `preview` builds the ICCS stack from - the snapshot's parameters and displays it β€” without modifying anything in - the database. +`details` shows the event-level parameters (window, filter, min_ccnorm) as +they were when the snapshot was taken. `preview` builds the ICCS stack from +the snapshot's parameters and displays it β€” without modifying anything in +the database. === "TUI" @@ -134,12 +156,18 @@ Before rolling back, it can be useful to see what a snapshot contains. Rolling back restores the snapshot's parameters as the current live values. This overwrites the current event and seismogram parameters for this event. 
-=== "CLI / Shell" +=== "CLI" ```bash aimbat snapshot rollback ``` +=== "Shell" + + ```bash + snapshot rollback + ``` + === "TUI" Press `Enter` on a snapshot row and choose **Rollback to this snapshot**. @@ -163,12 +191,18 @@ the most recent snapshot with the same parameters and MCCC data is used instead. ## Deleting a snapshot -=== "CLI / Shell" +=== "CLI" ```bash aimbat snapshot delete ``` +=== "Shell" + + ```bash + snapshot delete + ``` + === "TUI" Press `Enter` on a snapshot row and choose **Delete snapshot**. A @@ -186,13 +220,27 @@ Deletion is permanent. The snapshot cannot be recovered after deletion. For archiving or scripting purposes, snapshot data can be exported to JSON: -=== "CLI / Shell" +=== "CLI" ```bash - aimbat snapshot dump # default event + aimbat snapshot dump # specific event aimbat snapshot dump --all-events # all events ``` - The output contains three sections: snapshot metadata, event parameter - snapshots, and seismogram parameter snapshots, cross-referenced by - snapshot ID. +=== "Shell" + + ```bash + snapshot dump # uses the current event context + snapshot dump --all-events + ``` + +The output is a JSON object with five keys, all cross-referenced by +`snapshot_id`: + +| Key | Contents | Always present? 
| +|-----|----------|----------------| +| `snapshots` | Snapshot metadata (ID, time, comment, hash) | Yes | +| `event_parameters` | Event parameter snapshots | Yes | +| `seismogram_parameters` | Per-seismogram parameter snapshots | Yes | +| `event_quality` | Event quality snapshots (MCCC RMSE) | Only if MCCC has been run | +| `seismogram_quality` | Per-seismogram quality snapshots (ICCS CC, MCCC metrics) | Only if quality metrics exist | diff --git a/pyproject.toml b/pyproject.toml index e9a39e2..a7e2b83 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ dependencies = [ "prompt-toolkit>=3.0.52", "mplcursors>=0.7", "typing-extensions>=4.15.0", + "textual-plotext>=1.0.1", ] [project.urls] diff --git a/src/aimbat/_cli/common/_parameters.py b/src/aimbat/_cli/common/_parameters.py index 1e128ef..75cfaf6 100644 --- a/src/aimbat/_cli/common/_parameters.py +++ b/src/aimbat/_cli/common/_parameters.py @@ -20,9 +20,12 @@ "event_parameter", "event_parameter_with_all", "event_parameter_is_all", + "station_parameter_with_all", + "station_parameter_is_all", "use_station_parameter", "use_event_parameter", "use_matrix_image", + "open_in_editor", "DebugParameter", "EventDebugParameters", "IccsPlotParameters", @@ -80,6 +83,12 @@ def _converter(hint: type, tokens: tuple[Token, ...]) -> UUID | Literal["all"]: def id_parameter(model_class: type, help: str = "") -> Parameter: + """Return a cyclopts `Parameter` for selecting a record by UUID or unique prefix. + + Args: + model_class: AIMBAT model class used to resolve short UUID prefixes. + help: Custom help string; falls back to a generic UUID prompt if empty. + """ return Parameter( name="id", help=help or "UUID (or any unique prefix).", @@ -88,6 +97,11 @@ def id_parameter(model_class: type, help: str = "") -> Parameter: def event_parameter(help: str | None = None) -> Parameter: + """Return a cyclopts `Parameter` for selecting a single event by UUID or prefix. 
+ + Args: + help: Custom help string; falls back to a generic event UUID prompt. + """ from aimbat.models import AimbatEvent return Parameter( @@ -99,6 +113,11 @@ def event_parameter(help: str | None = None) -> Parameter: def event_parameter_with_all(help: str | None = None) -> Parameter: + """Return a cyclopts `Parameter` for selecting an event or the literal `"all"`. + + Args: + help: Custom help string; falls back to a generic prompt. + """ from aimbat.models import AimbatEvent return Parameter( @@ -112,12 +131,40 @@ def event_parameter_with_all(help: str | None = None) -> Parameter: def event_parameter_is_all(event_id: UUID | Literal["all"]) -> TypeIs[Literal["all"]]: + """Return `True` if `event_id` is the literal string `"all"` (case-insensitive).""" if isinstance(event_id, str) and event_id.lower() == "all": return True return False +def station_parameter_with_all(help: str | None = None) -> Parameter: + """Return a cyclopts `Parameter` for selecting a station or the literal `"all"`. + + Args: + help: Custom help string; falls back to a generic prompt. 
+ """ + from aimbat.models import AimbatStation + + return Parameter( + name=["station", "station-id"], + help=help + or '"all" for all stations, or UUID (or unique prefix) of station to process.', + converter=_make_uuid_converter(AimbatStation, allow_all=True), + show_choices=False, + ) + + +def station_parameter_is_all( + station_id: UUID | Literal["all"], +) -> TypeIs[Literal["all"]]: + """Return `True` if `station_id` is the literal string `"all"` (case-insensitive).""" + if isinstance(station_id, str) and station_id.lower() == "all": + return True + return False + + def use_station_parameter() -> Parameter: + """Return a cyclopts `Parameter` for linking data to an existing station record.""" from aimbat.models import AimbatStation return Parameter( @@ -129,6 +176,7 @@ def use_station_parameter() -> Parameter: def use_event_parameter() -> Parameter: + """Return a cyclopts `Parameter` for linking data to an existing event record.""" from aimbat.models import AimbatEvent return Parameter( @@ -140,12 +188,57 @@ def use_event_parameter() -> Parameter: def use_matrix_image() -> Parameter: + """Return a cyclopts `Parameter` for switching from stack to matrix image plots.""" return Parameter( name="matrix", help="Use matrix image instead of stack plot.", ) +# ----------------------------------------------------------------------- +# Editor helper +# ----------------------------------------------------------------------- + + +def open_in_editor(initial_content: str) -> str: + """Write `initial_content` to a temporary Markdown file, open it in `$EDITOR`, + and return the (possibly updated) content after the editor exits. + + The temporary file uses `delete=False` so that it can be opened by a + second process on Windows (which prohibits opening a file that is already + open). It is always removed in a `finally` block. + + The editor command is taken from `$EDITOR` or `$VISUAL`. If neither is + set, `notepad` is used on Windows and `vi` elsewhere. 
To use a GUI editor + that does not block by default (e.g. VS Code), set + ``EDITOR="code --wait"``. + """ + import os + import shlex + import subprocess + import tempfile + + editor = os.environ.get("EDITOR") or os.environ.get("VISUAL") + if not editor: + editor = "notepad" if sys.platform == "win32" else "vi" + + with tempfile.NamedTemporaryFile( + mode="w", + suffix=".md", + delete=False, + encoding="utf-8", + ) as tmp: + tmp.write(initial_content) + tmp_path = tmp.name + + try: + subprocess.run([*shlex.split(editor), tmp_path], check=False) + with open(tmp_path, encoding="utf-8") as f: + return f.read() + finally: + os.unlink(tmp_path) + + # ----------------------------------------------------------------------- # Common parameters # ----------------------------------------------------------------------- @@ -153,6 +246,8 @@ def use_matrix_image() -> Parameter: @dataclass class _DebugTrait: + """Mixin that adds an optional `--debug` flag to a CLI command.""" + debug: bool = False """Enable verbose logging for troubleshooting.""" @@ -167,16 +262,22 @@ def __post_init__(self) -> None: @dataclass class _EventContextTrait: + """Mixin that adds a required `--event` argument to a CLI command.""" + event_id: Annotated[UUID, event_parameter()] @dataclass class _TableParametersTrait: + """Mixin that adds a `--raw` flag for unformatted table output.""" + raw: bool = False @dataclass class _ByAliasTrait: + """Mixin that adds a `--alias` flag for alias-keyed JSON output.""" + by_alias: Annotated[ bool, Parameter( @@ -197,24 +298,32 @@ class EventDebugParameters(_DebugTrait, _EventContextTrait): @Parameter(name="*") @dataclass class JsonDumpParameters(_ByAliasTrait, _DebugTrait): + """Shared parameters for JSON dump commands (`--alias`, `--debug`).""" + pass @Parameter(name="*") @dataclass class TableParameters(_TableParametersTrait, _DebugTrait): + """Shared parameters for table display commands (`--raw`, `--debug`).""" + pass @Parameter(name="*") @dataclass class 
DebugParameter(_DebugTrait): + """Shared parameter that adds `--debug` to any CLI command.""" + pass @Parameter(name="*") @dataclass class IccsPlotParameters: + """Shared parameters for ICCS plot commands (`--context`, `--all`).""" + context: Annotated[ bool, Parameter( diff --git a/src/aimbat/_cli/event.py b/src/aimbat/_cli/event.py index cfd599a..2ecd59e 100644 --- a/src/aimbat/_cli/event.py +++ b/src/aimbat/_cli/event.py @@ -17,6 +17,7 @@ event_parameter, event_parameter_is_all, event_parameter_with_all, + open_in_editor, simple_exception, ) @@ -24,17 +25,27 @@ "cli_event_delete", "cli_event_dump", "cli_event_list", + "cli_event_note_read", + "cli_event_note_edit", "cli_event_parameter_get", "cli_event_parameter_set", "cli_event_parameter_dump", "cli_event_parameter_list", + "cli_event_quality_dump", + "cli_event_quality_list", ] app = App(name="event", help=__doc__, help_format="markdown") +_note = App(name="note", help="Read and edit event notes.", help_format="markdown") _parameter = App( name="parameter", help="Manage event parameters.", help_format="markdown" ) +_quality = App( + name="quality", help="View event quality metrics.", help_format="markdown" +) +app.command(_note) app.command(_parameter) +app.command(_quality) @app.command(name="delete") @@ -104,6 +115,60 @@ def cli_event_list( ) +@_note.command(name="read") +@simple_exception +def cli_event_note_read( + event_id: Annotated[uuid.UUID, event_parameter()], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Display the note attached to an event, rendered as Markdown.""" + from rich.console import Console + from rich.markdown import Markdown + + from aimbat.core import get_note_content, resolve_event + from aimbat.db import engine + + with Session(engine) as session: + event = resolve_event(session, event_id) + content = get_note_content(session, "event", event.id) + + Console().print(Markdown(content) if content else "(no note)") + + +@_note.command(name="edit") +@simple_exception +def 
cli_event_note_edit( + event_id: Annotated[uuid.UUID, event_parameter()], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Open the event note in `$EDITOR` and save changes on exit. + + The note is written to a temporary Markdown file. When the editor closes, + the updated content is saved back to the database. If the file is left + unchanged, no write is performed. + + On Windows, set the `EDITOR` environment variable to your preferred editor + (e.g. `notepad`, `notepad++`). The editor must be a blocking process; for + GUI editors that do not block by default (such as VS Code), pass the + appropriate wait flag (e.g. `EDITOR="code --wait"`). + """ + from aimbat.core import get_note_content, resolve_event, save_note + from aimbat.db import engine + + with Session(engine) as session: + event = resolve_event(session, event_id) + original = get_note_content(session, "event", event.id) + + updated = open_in_editor(original) + + if updated != original: + with Session(engine) as session: + event = resolve_event(session, event_id) + save_note(session, "event", event.id, updated) + + @_parameter.command(name="get") @simple_exception def cli_event_parameter_get( @@ -225,5 +290,81 @@ def cli_event_parameter_list( ) +@_quality.command(name="dump") +@simple_exception +def cli_event_quality_dump( + *, dump_parameters: JsonDumpParameters = JsonDumpParameters() +) -> None: + """Dump event quality statistics to JSON. + + Output can be piped or redirected for use in external tools or scripts. 
+ """ + from rich import print_json + + from aimbat.core import dump_event_quality_table + from aimbat.db import engine + + with Session(engine) as session: + data = dump_event_quality_table(session, by_alias=dump_parameters.by_alias) + + print_json(data=data) + + +@_quality.command(name="list") +@simple_exception +def cli_event_quality_list( + event_id: Annotated[uuid.UUID | Literal["all"], event_parameter_with_all()], + *, + table_parameters: TableParameters = TableParameters(), +) -> None: + """Show aggregated quality statistics for an event or all events. + + Displays ICCS and MCCC quality metrics (means, SEMs, RMSE) aggregated + across the seismograms of each event. + """ + from aimbat.core import dump_event_quality_table + from aimbat.db import engine + from aimbat.models import RichColSpec, SeismogramQualityStats + from aimbat.utils import uuid_shortener + + from .common import json_to_table + + raw = table_parameters.raw + is_all = event_parameter_is_all(event_id) + + with Session(engine) as session: + if is_all: + title = "Quality statistics for all events" + exclude = None + else: + label = ( + str(event_id) + if raw + else uuid_shortener(session, AimbatEvent, str_uuid=str(event_id)) + ) + title = f"Quality statistics for event: {label}" + exclude = {"event_id"} + + col_specs = { + "event_id": RichColSpec( + formatter=lambda x: uuid_shortener(session, AimbatEvent, str_uuid=x), + ), + } + + data = dump_event_quality_table( + session, + event_id=None if event_parameter_is_all(event_id) else event_id, + exclude=exclude, + ) + + json_to_table( + data=data, + model=SeismogramQualityStats, + title=title, + raw=raw, + col_specs=col_specs, + ) + + if __name__ == "__main__": app() diff --git a/src/aimbat/_cli/seismogram.py b/src/aimbat/_cli/seismogram.py index 4abce65..e98a2c2 100644 --- a/src/aimbat/_cli/seismogram.py +++ b/src/aimbat/_cli/seismogram.py @@ -15,16 +15,69 @@ event_parameter_is_all, event_parameter_with_all, id_parameter, + open_in_editor, 
simple_exception, ) app = App(name="seismogram", help=__doc__, help_format="markdown") +_note = App(name="note", help="Read and edit seismogram notes.", help_format="markdown") parameter = App( name="parameter", help="Manage seismogram parameters.", help_format="markdown" ) +app.command(_note) app.command(parameter) +@_note.command(name="read") +@simple_exception +def cli_seismogram_note_read( + seismogram_id: Annotated[ + UUID, + id_parameter(AimbatSeismogram, help="UUID (or unique prefix) of seismogram."), + ], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Display the note attached to a seismogram, rendered as Markdown.""" + from rich.console import Console + from rich.markdown import Markdown + from sqlmodel import Session + + from aimbat.core import get_note_content + from aimbat.db import engine + + with Session(engine) as session: + content = get_note_content(session, "seismogram", seismogram_id) + + Console().print(Markdown(content) if content else "(no note)") + + +@_note.command(name="edit") +@simple_exception +def cli_seismogram_note_edit( + seismogram_id: Annotated[ + UUID, + id_parameter(AimbatSeismogram, help="UUID (or unique prefix) of seismogram."), + ], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Open the seismogram note in `$EDITOR` and save changes on exit.""" + from sqlmodel import Session + + from aimbat.core import get_note_content, save_note + from aimbat.db import engine + + with Session(engine) as session: + original = get_note_content(session, "seismogram", seismogram_id) + + updated = open_in_editor(original) + + if updated != original: + with Session(engine) as session: + save_note(session, "seismogram", seismogram_id, updated) + + @app.command(name="delete") @simple_exception def cli_seismogram_delete( diff --git a/src/aimbat/_cli/snapshot.py b/src/aimbat/_cli/snapshot.py index 6092096..9203e09 100644 --- a/src/aimbat/_cli/snapshot.py +++ b/src/aimbat/_cli/snapshot.py @@ -22,10 +22,67 @@ 
event_parameter_is_all, event_parameter_with_all, id_parameter, + open_in_editor, simple_exception, ) app = App(name="snapshot", help=__doc__, help_format="markdown") +_note = App(name="note", help="Read and edit snapshot notes.", help_format="markdown") +_quality = App( + name="quality", help="View snapshot quality metrics.", help_format="markdown" +) +app.command(_note) +app.command(_quality) + + +@_note.command(name="read") +@simple_exception +def cli_snapshot_note_read( + snapshot_id: Annotated[ + UUID, + id_parameter(AimbatSnapshot, help="UUID (or unique prefix) of snapshot."), + ], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Display the note attached to a snapshot, rendered as Markdown.""" + from rich.console import Console + from rich.markdown import Markdown + from sqlmodel import Session + + from aimbat.core import get_note_content + from aimbat.db import engine + + with Session(engine) as session: + content = get_note_content(session, "snapshot", snapshot_id) + + Console().print(Markdown(content) if content else "(no note)") + + +@_note.command(name="edit") +@simple_exception +def cli_snapshot_note_edit( + snapshot_id: Annotated[ + UUID, + id_parameter(AimbatSnapshot, help="UUID (or unique prefix) of snapshot."), + ], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Open the snapshot note in `$EDITOR` and save changes on exit.""" + from sqlmodel import Session + + from aimbat.core import get_note_content, save_note + from aimbat.db import engine + + with Session(engine) as session: + original = get_note_content(session, "snapshot", snapshot_id) + + updated = open_in_editor(original) + + if updated != original: + with Session(engine) as session: + save_note(session, "snapshot", snapshot_id, updated) @app.command(name="create") @@ -281,5 +338,85 @@ def cli_snapshot_details( ) +@_quality.command(name="dump") +@simple_exception +def cli_snapshot_quality_dump( + *, dump_parameters: JsonDumpParameters = JsonDumpParameters() +) -> 
None: + """Dump snapshot quality statistics to JSON. + + Output can be piped or redirected for use in external tools or scripts. + """ + from rich import print_json + from sqlmodel import Session + + from aimbat.core import dump_snapshot_quality_table + from aimbat.db import engine + + with Session(engine) as session: + data = dump_snapshot_quality_table(session, by_alias=dump_parameters.by_alias) + + print_json(data=data) + + +@_quality.command(name="list") +@simple_exception +def cli_snapshot_quality_list( + event_id: Annotated[UUID | Literal["all"], event_parameter_with_all()], + *, + table_parameters: TableParameters = TableParameters(), +) -> None: + """Show aggregated quality statistics for snapshots of an event or all events. + + Displays ICCS and MCCC quality metrics (means, SEMs, RMSE) from the frozen + quality records of each snapshot. + """ + from sqlmodel import Session + + from aimbat.core import dump_snapshot_quality_table, resolve_event + from aimbat.db import engine + from aimbat.models import AimbatEvent, RichColSpec, SeismogramQualityStats + from aimbat.utils import uuid_shortener + + from .common import json_to_table + + raw = table_parameters.raw + + with Session(engine) as session: + if event_parameter_is_all(event_id): + title = "Quality statistics for all snapshots" + exclude = None + filter_event_id = None + else: + event = resolve_event(session, event_id) + label = str(event.id) if raw else uuid_shortener(session, event) + title = f"Quality statistics for snapshots of event: {label}" + exclude = {"event_id"} + filter_event_id = event.id + + col_specs = { + "event_id": RichColSpec( + formatter=lambda x: uuid_shortener(session, AimbatEvent, str_uuid=x), + ), + "snapshot_id": RichColSpec( + formatter=lambda x: uuid_shortener(session, AimbatSnapshot, str_uuid=x), + ), + } + + data = dump_snapshot_quality_table( + session, + event_id=filter_event_id, + exclude=exclude, + ) + + json_to_table( + data=data, + model=SeismogramQualityStats, + 
title=title, + raw=raw, + col_specs=col_specs, + ) + + if __name__ == "__main__": app() diff --git a/src/aimbat/_cli/station.py b/src/aimbat/_cli/station.py index fcea13f..6383a9b 100644 --- a/src/aimbat/_cli/station.py +++ b/src/aimbat/_cli/station.py @@ -14,10 +14,69 @@ event_parameter_is_all, event_parameter_with_all, id_parameter, + open_in_editor, simple_exception, + station_parameter_is_all, + station_parameter_with_all, ) app = App(name="station", help=__doc__, help_format="markdown") +_note = App(name="note", help="Read and edit station notes.", help_format="markdown") +_quality = App( + name="quality", help="View station quality metrics.", help_format="markdown" +) +app.command(_note) +app.command(_quality) + + +@_note.command(name="read") +@simple_exception +def cli_station_note_read( + station_id: Annotated[ + UUID, + id_parameter(AimbatStation, help="UUID (or unique prefix) of station."), + ], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Display the note attached to a station, rendered as Markdown.""" + from rich.console import Console + from rich.markdown import Markdown + from sqlmodel import Session + + from aimbat.core import get_note_content + from aimbat.db import engine + + with Session(engine) as session: + content = get_note_content(session, "station", station_id) + + Console().print(Markdown(content) if content else "(no note)") + + +@_note.command(name="edit") +@simple_exception +def cli_station_note_edit( + station_id: Annotated[ + UUID, + id_parameter(AimbatStation, help="UUID (or unique prefix) of station."), + ], + *, + _: DebugParameter = DebugParameter(), +) -> None: + """Open the station note in `$EDITOR` and save changes on exit.""" + from sqlmodel import Session + + from aimbat.core import get_note_content, save_note + from aimbat.db import engine + + with Session(engine) as session: + original = get_note_content(session, "station", station_id) + + updated = open_in_editor(original) + + if updated != original: + with 
Session(engine) as session: + save_note(session, "station", station_id, updated) @app.command(name="delete") @@ -134,5 +193,84 @@ def cli_station_list( json_to_table(data, model=AimbatStationRead, title=title, raw=raw) +@_quality.command(name="dump") +@simple_exception +def cli_station_quality_dump( + *, dump_parameters: JsonDumpParameters = JsonDumpParameters() +) -> None: + """Dump station quality statistics to JSON. + + Output can be piped or redirected for use in external tools or scripts. + """ + from rich import print_json + from sqlmodel import Session + + from aimbat.core import dump_station_quality_table + from aimbat.db import engine + + with Session(engine) as session: + data = dump_station_quality_table(session, by_alias=dump_parameters.by_alias) + + print_json(data=data) + + +@_quality.command(name="list") +@simple_exception +def cli_station_quality_list( + station_id: Annotated[UUID | Literal["all"], station_parameter_with_all()], + *, + table_parameters: TableParameters = TableParameters(), +) -> None: + """Show aggregated quality statistics for a station or all stations. + + Displays ICCS and MCCC quality metrics (means, SEMs) aggregated across + all seismograms of each station. 
+ """ + from sqlmodel import Session + + from aimbat.core import dump_station_quality_table + from aimbat.db import engine + from aimbat.models import RichColSpec, SeismogramQualityStats + from aimbat.utils import uuid_shortener + + from .common import json_to_table + + raw = table_parameters.raw + is_all = station_parameter_is_all(station_id) + + with Session(engine) as session: + if is_all: + title = "Quality statistics for all stations" + exclude = None + else: + label = ( + str(station_id) + if raw + else uuid_shortener(session, AimbatStation, str_uuid=str(station_id)) + ) + title = f"Quality statistics for station: {label}" + exclude = {"station_id"} + + col_specs = { + "station_id": RichColSpec( + formatter=lambda x: uuid_shortener(session, AimbatStation, str_uuid=x), + ), + } + + data = dump_station_quality_table( + session, + station_id=None if station_parameter_is_all(station_id) else station_id, + exclude=exclude, + ) + + json_to_table( + data=data, + model=SeismogramQualityStats, + title=title, + raw=raw, + col_specs=col_specs, + ) + + if __name__ == "__main__": app() diff --git a/src/aimbat/_tui/_format.py b/src/aimbat/_tui/_format.py index 265483e..a5af8e6 100644 --- a/src/aimbat/_tui/_format.py +++ b/src/aimbat/_tui/_format.py @@ -3,29 +3,75 @@ from __future__ import annotations from functools import lru_cache -from typing import TYPE_CHECKING from pandas import Timedelta from pydantic import BaseModel +from rich.console import Group, RenderableType +from rich.panel import Panel from rich.text import Text +from aimbat.models import SeismogramQualityStats from aimbat.models._format import TuiColSpec from aimbat.utils.formatters import fmt_bool, fmt_float -if TYPE_CHECKING: - from aimbat.core import FieldGroup - __all__ = [ "fmt_float_sem", - "fmt_groups", - "fmt_td_sem", - "fmt_val", + "format_quality_panel", "tui_cell", "tui_display_title", "tui_fmt", ] +def format_quality_panel( + stats: SeismogramQualityStats | None, +) -> tuple[RenderableType, 
str]: + """Format quality stats for a Static quality panel. + + Returns `(body, subtitle)` where `body` is a Rich renderable suitable for + `Static.update()` and `subtitle` is intended for `Widget.border_subtitle`. + """ + if stats is None: + return "[dim]No row selected[/dim]", "" + + def _fmt_td(td: Timedelta | None) -> str: + if td is None: + return "[dim]β€”[/dim]" + return f"{td.total_seconds() * 1000:.3f} ms" + + def _fmt_td_sem(mean: Timedelta | None, sem: Timedelta | None) -> str: + if mean is None: + return "[dim]β€”[/dim]" + ms = mean.total_seconds() * 1000 + if sem is not None: + return f"{ms:.3f} Β± {sem.total_seconds() * 1000:.3f} ms" + return f"{ms:.3f} ms" + + iccs_body = f"CC {fmt_float_sem(stats.cc_mean, stats.cc_mean_sem)}" + panels: list[Panel] = [ + Panel(iccs_body, title="ICCS", title_align="left", padding=(0, 1)) + ] + + has_mccc = any( + v is not None for v in [stats.mccc_cc_mean, stats.mccc_error, stats.mccc_rmse] + ) + if has_mccc: + mccc_lines = [ + f"CC {fmt_float_sem(stats.mccc_cc_mean, stats.mccc_cc_mean_sem)}", + f"CC std {fmt_float_sem(stats.mccc_cc_std, stats.mccc_cc_std_sem)}", + f"Error {_fmt_td_sem(stats.mccc_error, stats.mccc_error_sem)}", + ] + if stats.mccc_rmse is not None: + mccc_lines.append(f"RMSE {_fmt_td(stats.mccc_rmse)}") + panels.append( + Panel( + "\n".join(mccc_lines), title="MCCC", title_align="left", padding=(0, 1) + ) + ) + + return Group(*panels), f"n = {stats.count}" + + def tui_display_title(model: type[BaseModel], field_name: str) -> str: """Return the TUI display title for a model field. 
@@ -52,55 +98,6 @@ def fmt_float_sem(v: float | None, sem: float | None, decimals: int = 4) -> str: return f"{v:.{decimals}f}" -def fmt_td_sem(td: Timedelta | None, sem: Timedelta | None, decimals: int = 5) -> str: - """Format a Timedelta in seconds with an optional SEM, or `β€”` if None.""" - if td is None: - return "β€”" - s = f"{td.total_seconds():.{decimals}f}" - if sem is not None: - s += f" Β± {sem.total_seconds():.{decimals}f}" - return s + " s" - - -def fmt_val(val: object, sem: object = None) -> str: - """Format a model field value for display in a quality panel. - - Dispatches to `fmt_float_sem` or `fmt_td_sem` for numeric types so that an - optional `sem` sibling is rendered as `value Β± sem`. Booleans render as βœ“/βœ—. - Returns `β€”` for None.""" - if val is None: - return "β€”" - if isinstance(val, bool): - return "βœ“" if val else "βœ—" - if isinstance(val, Timedelta): - return fmt_td_sem(val, sem if isinstance(sem, Timedelta) else None) - if isinstance(val, float): - return fmt_float_sem(val, sem if isinstance(sem, float) else None) - return str(val) - - -def fmt_groups( - groups: list[FieldGroup], -) -> list[tuple[str, list[tuple[str, str]]]]: - """Format a list of `FieldGroup` instances for `QualityModal`. - - Returns a list of `(group_title, rows)` pairs, skipping groups with no - content. Each `rows` element is a pre-formatted `(label, value)` pair. 
- """ - result = [] - for group in groups: - rows: list[tuple[str, str]] = [] - if group.fields: - rows = [ - (spec.title, fmt_val(spec.value, spec.sem)) for spec in group.fields - ] - elif group.empty_message: - rows = [(group.empty_message, "")] - if rows: - result.append((group.title, rows)) - return result - - @lru_cache(maxsize=None) def _col_spec_map(model: type[BaseModel]) -> dict[str, TuiColSpec]: """Return a map of Pydantic field title β†’ `TuiColSpec` for fields that carry one.""" @@ -137,10 +134,11 @@ def tui_cell(model: type[BaseModel], title: str, val: object) -> str | Text: def tui_fmt(val: object) -> str: """Format a raw field value for display in a Textual DataTable cell. - Applies generic type-based rules (bool via ``fmt_bool``, float via - ``fmt_float``, ISO timestamp truncation) before falling back to ``str``. - Field-specific formatting should be handled via ``TuiColSpec.formatter`` - instead. Returns ``β€”`` for ``None``.""" + Applies generic type-based rules (`bool` via `fmt_bool`, `float` via + `fmt_float`, ISO timestamp truncation) before falling back to `str`. + Field-specific formatting should be handled via `TuiColSpec.formatter`. + Returns `β€”` for `None`. 
+ """ if val is None: return "β€”" if isinstance(val, bool): diff --git a/src/aimbat/_tui/_widgets.py b/src/aimbat/_tui/_widgets.py index aea768f..f1b4b40 100644 --- a/src/aimbat/_tui/_widgets.py +++ b/src/aimbat/_tui/_widgets.py @@ -1,9 +1,22 @@ """Reusable Textual widgets for the AIMBAT TUI.""" +import uuid +from contextlib import suppress + +from sqlmodel import Session +from textual import on +from textual.app import ComposeResult from textual.binding import Binding -from textual.widgets import DataTable +from textual.css.query import NoMatches +from textual.message import Message +from textual.widget import Widget +from textual.widgets import DataTable, Markdown, TabbedContent, TabPane, TextArea +from textual_plotext import PlotextPlot + +from aimbat.core import get_note_content, save_note +from aimbat.db import engine -__all__ = ["VimDataTable"] +__all__ = ["NoteWidget", "SeismogramPlotWidget", "VimDataTable"] class VimDataTable(DataTable): @@ -13,6 +26,17 @@ class VimDataTable(DataTable): existing cursor actions) and g/G for jumping to the first/last row. """ + class Focused(Message): + """Posted when this table gains keyboard focus.""" + + def __init__(self, table: "VimDataTable") -> None: + super().__init__() + self.table = table + + @property + def control(self) -> "VimDataTable": + return self.table + BINDINGS = [ Binding("h", "cursor_left", "Cursor left", show=False), Binding("j", "cursor_down", "Cursor down", show=False), @@ -22,9 +46,159 @@ class VimDataTable(DataTable): Binding("G", "scroll_end", "Scroll to bottom", show=False), ] + def on_focus(self) -> None: + self.post_message(self.Focused(self)) + def action_scroll_home(self) -> None: self.move_cursor(row=0) def action_scroll_end(self) -> None: if self.row_count > 0: self.move_cursor(row=self.row_count - 1) + + +class SeismogramPlotWidget(Widget): + """Displays CC and context seismograms for a highlighted seismogram. 
+ + Call `update_plots` to load new data, or `clear` to reset the widget + when no seismogram is selected. + """ + + BORDER_TITLE = "Seismogram" + + def compose(self) -> ComposeResult: + with TabbedContent(initial="seis-plot-tab-cc"): + with TabPane("CC", id="seis-plot-tab-cc"): + yield PlotextPlot(id="seis-cc-plot") + with TabPane("Context", id="seis-plot-tab-context"): + yield PlotextPlot(id="seis-context-plot") + + def update_plots( + self, + cc_times: list[float], + cc_data: list[float], + context_times: list[float], + context_data: list[float], + ) -> None: + """Update both plot tabs with new seismogram data. + + Args: + cc_times: Time values relative to pick (seconds) for the CC seismogram. + cc_data: Amplitude data for the CC seismogram. + context_times: Time values relative to pick (seconds) for the context seismogram. + context_data: Amplitude data for the context seismogram. + """ + for plot_id, times, data in ( + ("#seis-cc-plot", cc_times, cc_data), + ("#seis-context-plot", context_times, context_data), + ): + with suppress(NoMatches): + p = self.query_one(plot_id, PlotextPlot) + p.plt.clf() + p.plt.xlabel("Time relative to pick (s)") + p.plt.yfrequency(0) + p.plt.plot(times, data, marker="braille") + p.refresh() + + def clear(self) -> None: + """Clear both plots.""" + for plot_id in ("#seis-cc-plot", "#seis-context-plot"): + with suppress(NoMatches): + p = self.query_one(plot_id, PlotextPlot) + p.plt.clf() + p.refresh() + + +class _NoteTextArea(TextArea): + """TextArea that posts a bubbling message when it loses focus.""" + + class Blurred(Message): + """Posted when the edit area loses keyboard focus.""" + + def on_blur(self) -> None: + self.post_message(self.Blurred()) + + +class NoteWidget(Widget): + """View/edit Markdown note for an event, station, seismogram, or snapshot. + + Call `set_entity` to load the note for an entity, or `clear` to reset the + widget when no entity is selected. 
Changes are auto-saved whenever the edit + area loses focus or the View tab is activated. + """ + + BORDER_TITLE = "Note" + + def __init__( + self, + *, + name: str | None = None, + id: str | None = None, + classes: str | None = None, + disabled: bool = False, + ) -> None: + super().__init__(name=name, id=id, classes=classes, disabled=disabled) + self._target_type: str | None = None + self._target_id: uuid.UUID | None = None + self._saved_content: str = "" + + def compose(self) -> ComposeResult: + with TabbedContent(id="note-tabs"): + with TabPane("View", id="note-tab-view"): + yield Markdown("", id="note-markdown") + with TabPane("Edit", id="note-tab-edit"): + yield _NoteTextArea("", id="note-textarea") + + def set_entity(self, target_type: str, target_id: uuid.UUID) -> None: + """Load the note for the given entity and display it. + + Args: + target_type: One of `event`, `station`, `seismogram`, `snapshot`. + target_id: UUID of the target entity. + """ + self._target_type = target_type + self._target_id = target_id + with Session(engine) as session: + content = get_note_content(session, target_type, target_id) # type: ignore[arg-type] + self._saved_content = content + with suppress(NoMatches): + placeholder = "_No note yet. 
Switch to Edit to add one._" + self.query_one("#note-markdown", Markdown).update(content or placeholder) + self.query_one("#note-textarea", _NoteTextArea).load_text(content) + self.query_one("#note-tabs", TabbedContent).active = "note-tab-view" + + def clear(self) -> None: + """Clear the note display β€” call when no entity is selected.""" + self._target_type = None + self._target_id = None + self._saved_content = "" + with suppress(NoMatches): + self.query_one("#note-markdown", Markdown).update("") + self.query_one("#note-textarea", _NoteTextArea).load_text("") + self.query_one("#note-tabs", TabbedContent).active = "note-tab-view" + + @on(_NoteTextArea.Blurred) + def _on_textarea_blur(self) -> None: + self._auto_save() + + @on(TabbedContent.TabActivated, "#note-tabs") + def _on_note_tab_switch(self, event: TabbedContent.TabActivated) -> None: + if event.pane.id == "note-tab-view": + self._auto_save() + with suppress(NoMatches): + content = self.query_one("#note-textarea", _NoteTextArea).text + placeholder = "_No note yet. 
Switch to Edit to add one._" + self.query_one("#note-markdown", Markdown).update( + content or placeholder + ) + + def _auto_save(self) -> None: + if self._target_type is None or self._target_id is None: + return + content = self._saved_content + with suppress(NoMatches): + content = self.query_one("#note-textarea", _NoteTextArea).text + if content != self._saved_content: + with Session(engine) as session: + save_note(session, self._target_type, self._target_id, content) # type: ignore[arg-type] + self._saved_content = content diff --git a/src/aimbat/_tui/aimbat.tcss b/src/aimbat/_tui/aimbat.tcss index 36f9d9e..5ef5233 100644 --- a/src/aimbat/_tui/aimbat.tcss +++ b/src/aimbat/_tui/aimbat.tcss @@ -31,24 +31,52 @@ TabbedContent { /* ---- Project tab ---- */ +#project-layout, +#snapshot-layout, +#seismogram-layout { + height: 1fr; +} -.project-divider { - margin: 0; - height: 1; +#project-tables, +#snapshot-table { + width: 3fr; } -.project-table-title { - height: 1; - padding: 0 2; - color: $secondary; - text-style: bold; +#project-right-panel, +#seismogram-right-panel, +#snapshot-right-panel { + width: 1fr; +} + +.quality-panel { + width: 1fr; + height: auto; + border: solid $primary; + padding: 0 1; +} + +NoteWidget { + height: 1fr; + border: solid $primary; + padding: 0 1; } #project-event-table, #project-station-table, -#seismogram-table, #snapshot-table { height: 1fr; + border: solid $primary; +} + +#seismogram-table { + width: 3fr; + height: 1fr; + border: solid $primary; +} + +SeismogramPlotWidget { + height: 2fr; + border: solid $primary; } /* ---- No-project modal ---- */ @@ -231,34 +259,6 @@ SnapshotDetailsModal { border: none; } -QualityModal { - align: center middle; -} - -#quality-dialog { - background: $background; - border: solid $primary; - width: 70; - height: auto; - max-height: 80vh; - padding: 1 2; -} - -#quality-dialog .modal-title { - color: $secondary; - text-style: bold; -} - -#quality-dialog .quality-section { - color: $secondary; - 
margin-top: 1; -} - -#quality-dialog DataTable { - background: $background; - border: none; -} - /* ---- Parameters modal ---- */ ParametersModal { align: center middle; @@ -299,3 +299,27 @@ ActionMenuModal { background: $background; border: none; } + +/* ---- Help modal ---- */ +HelpModal { + align: center middle; +} + +#help-dialog { + background: $background; + border: solid $secondary; + width: 80%; + max-width: 100; + height: 80%; + padding: 1 2; +} + +#help-dialog .modal-title { + color: $secondary; + text-style: bold; +} + +#help-content { + height: 1fr; + overflow-y: auto; +} diff --git a/src/aimbat/_tui/app.py b/src/aimbat/_tui/app.py index a9d5b98..af6941a 100644 --- a/src/aimbat/_tui/app.py +++ b/src/aimbat/_tui/app.py @@ -9,20 +9,22 @@ from pathlib import Path from typing import Literal +import numpy as np from pandas import Timedelta, Timestamp from rich.console import Console from rich.panel import Panel -from sqlalchemy.exc import NoResultFound +from sqlalchemy.exc import NoResultFound, SQLAlchemyError from sqlmodel import Session, select from textual import on, work from textual.app import App, ComposeResult from textual.binding import Binding +from textual.containers import Horizontal, Vertical +from textual.css.query import NoMatches from textual.screen import ModalScreen from textual.widgets import ( DataTable, Footer, Header, - Rule, Static, TabbedContent, TabPane, @@ -33,16 +35,16 @@ from pysmo.tools.iccs import ICCS from aimbat import settings -from aimbat._tui._widgets import VimDataTable +from aimbat._tui._widgets import NoteWidget, SeismogramPlotWidget, VimDataTable from aimbat._tui.modals import ( ActionMenuModal, AlignModal, ConfirmModal, EventSwitcherModal, + HelpModal, InteractiveToolsModal, NoProjectModal, ParametersModal, - QualityModal, SnapshotActionMenuModal, SnapshotCommentModal, SnapshotDetailsModal, @@ -63,11 +65,13 @@ dump_seismogram_table, dump_snapshot_table, dump_station_table, + get_event_quality, + get_snapshot_quality, + 
get_station_quality, reset_seismogram_parameters, rollback_to_snapshot, run_iccs, run_mccc, - snapshot_quality_groups, ) from aimbat.core._project import _project_exists from aimbat.db import engine @@ -94,14 +98,13 @@ update_timewindow, ) -from ._format import fmt_float_sem as _fmt_float_sem -from ._format import fmt_groups as _format_groups -from ._format import tui_cell as _tui_cell -from ._format import tui_display_title as _tui_display_title +from ._format import fmt_float_sem, format_quality_panel, tui_cell, tui_display_title _DEFAULT_THEME = settings.tui_dark_theme _LIGHT_THEME = settings.tui_light_theme +_MAIN_TABS = {"tab-project", "tab-seismograms", "tab-snapshots"} + # Extend this dict to add new per-row actions to any tab. _TAB_ROW_ACTIONS: dict[str, list[tuple[str, str]]] = { @@ -236,6 +239,7 @@ class AimbatTUI(App[None]): Binding("n", "new_snapshot", "New Snapshot", show=True), Binding("r", "refresh", "Refresh", show=True), Binding("c", "toggle_theme", "Theme", show=True), + Binding("?", "show_help", "Help", show=True), Binding("H", "vim_left", "Vim left", show=False), Binding("L", "vim_right", "Vim right", show=False), Binding("q", "quit", "Quit", show=True), @@ -246,20 +250,29 @@ def compose(self) -> ComposeResult: yield Static(id="event-bar") with TabbedContent(initial="tab-project"): with TabPane("Project", id="tab-project"): - yield Static("Events", id="events-title", classes="project-table-title") - yield VimDataTable(id="project-event-table") - yield Rule(classes="project-divider") - yield Static( - "Stations", id="stations-title", classes="project-table-title" - ) - yield VimDataTable(id="project-station-table") + with Horizontal(id="project-layout"): + with Vertical(id="project-tables"): + yield VimDataTable(id="project-event-table") + yield VimDataTable(id="project-station-table") + with Vertical(id="project-right-panel"): + yield Static( + id="project-quality-panel", classes="quality-panel" + ) + yield NoteWidget(id="project-note") with 
TabPane("Live data", id="tab-seismograms"): - yield Static( - "Seismograms", id="seismogram-title", classes="project-table-title" - ) - yield VimDataTable(id="seismogram-table") + with Horizontal(id="seismogram-layout"): + yield VimDataTable(id="seismogram-table") + with Vertical(id="seismogram-right-panel"): + yield SeismogramPlotWidget(id="seismogram-plot") + yield NoteWidget(id="seismogram-note") with TabPane("Snapshots", id="tab-snapshots"): - yield VimDataTable(id="snapshot-table") + with Horizontal(id="snapshot-layout"): + yield VimDataTable(id="snapshot-table") + with Vertical(id="snapshot-right-panel"): + yield Static( + id="snapshot-quality-panel", classes="quality-panel" + ) + yield NoteWidget(id="snapshot-note") yield Footer() def on_mount(self) -> None: @@ -267,7 +280,14 @@ def on_mount(self) -> None: self._iccs_creating: bool = False self._iccs_last_modified_seen: Timestamp | None = None self._current_event_id: uuid.UUID | None = None - self._active_tab: str = "tab-seismograms" + self._active_tab: str = "tab-project" + self._highlighted_event_id: str | None = None + self._highlighted_station_id: str | None = None + self._highlighted_seismogram_id: str | None = None + self._highlighted_snapshot_id: str | None = None + self._quality_source: Literal["event", "station"] = "event" + self._project_refreshing: bool = False + self._seismogram_refreshing: bool = False self.theme = _DEFAULT_THEME @@ -293,12 +313,20 @@ def _on_no_project_modal(self, create: bool | None) -> None: @on(TabbedContent.TabActivated) def on_tab_activated(self, event: TabbedContent.TabActivated) -> None: - if event.pane.id: - self._active_tab = event.pane.id - self.refresh_bindings() - if not isinstance(self.focused, Tabs): - with suppress(Exception): - event.pane.query_one(DataTable).focus() + if event.pane.id not in _MAIN_TABS: + return + self._active_tab = event.pane.id + self.refresh_bindings() + if not isinstance(self.focused, Tabs): + with suppress(NoMatches): + 
event.pane.query_one(DataTable).focus() + if event.pane.id == "tab-seismograms": + if self.query_one("#seismogram-table", DataTable).row_count == 0: + self._update_seismogram_note(None) + self._update_seismogram_plot(None) + elif event.pane.id == "tab-snapshots": + if self.query_one("#snapshot-table", DataTable).row_count == 0: + self._update_snapshot_quality(None) def check_action(self, action: str, parameters: tuple[object, ...]) -> bool | None: if action in { @@ -412,39 +440,43 @@ def _assign_iccs(self, bound_iccs: BoundICCS) -> None: def _setup_project_tables(self) -> None: et_headers = [ - _tui_display_title(AimbatEventRead, f) + tui_display_title(AimbatEventRead, f) for f in AimbatEventRead.model_fields if f not in _EVENT_TABLE_EXCLUDE | {"id"} ] et = self.query_one("#project-event-table", DataTable) + et.border_title = "Events" et.cursor_type = "row" et.add_columns(" ", *et_headers) station_headers = [ - _tui_display_title(AimbatStationRead, f) + tui_display_title(AimbatStationRead, f) for f in AimbatStationRead.model_fields if f not in _STATION_TABLE_EXCLUDE | {"id"} ] st = self.query_one("#project-station-table", DataTable) + st.border_title = "Stations" st.cursor_type = "row" st.add_columns(*station_headers) def _setup_seismogram_table(self) -> None: seis_headers = [ - _tui_display_title(AimbatSeismogramRead, f) + tui_display_title(AimbatSeismogramRead, f) for f in AimbatSeismogramRead.model_fields if f not in _SEISMOGRAM_TABLE_EXCLUDE | {"id"} ] t = self.query_one("#seismogram-table", DataTable) + t.border_title = "Seismograms" t.cursor_type = "row" t.add_columns(*seis_headers) def _setup_snapshot_table(self) -> None: snap_headers = [ - _tui_display_title(AimbatSnapshotRead, f) + tui_display_title(AimbatSnapshotRead, f) for f in AimbatSnapshotRead.model_fields if f not in _SNAPSHOT_TABLE_EXCLUDE | {"id"} ] t = self.query_one("#snapshot-table", DataTable) + t.border_title = "Snapshots" t.cursor_type = "row" t.add_columns(*snap_headers) @@ -482,7 +514,7 @@ 
def _refresh_project(self) -> None: total = len(event_rows) completed = sum(1 for r in event_rows if r.get("Completed")) - self.query_one("#events-title", Static).update( + et.border_title = ( f"Events [dim]{total} total Β· {completed} completed[/dim]" ) @@ -496,26 +528,40 @@ def _refresh_project(self) -> None: None, ) if active is not None: - _sc_key = _tui_display_title(AimbatEventRead, "station_count") - self.query_one("#stations-title", Static).update( - f"Stations [dim]{active.get(_sc_key, '?')} in active event[/dim]" - ) + _sc_key = tui_display_title(AimbatEventRead, "station_count") + st.border_title = f"Stations [dim]{active.get(_sc_key, '?')} in active event[/dim]" for row in event_rows: row_id = str(row.pop("ID")) marker = "β–Ά" if row_id == str(self._current_event_id) else " " - cells = [_tui_cell(AimbatEventRead, k, v) for k, v in row.items()] + cells = [tui_cell(AimbatEventRead, k, v) for k, v in row.items()] et.add_row(marker, *cells, key=row_id) for row in station_rows: row_id = str(row.pop("ID")) - cells = [_tui_cell(AimbatStationRead, k, v) for k, v in row.items()] + cells = [tui_cell(AimbatStationRead, k, v) for k, v in row.items()] st.add_row(*cells, key=row_id) + self._project_refreshing = True if et.row_count > 0: et.move_cursor(row=min(et_saved, et.row_count - 1)) if st.row_count > 0: st.move_cursor(row=min(st_saved, st.row_count - 1)) + self.call_after_refresh(self._end_project_refresh) + + def _end_project_refresh(self) -> None: + """Runs after pending RowHighlighted events from move_cursor have been processed.""" + self._project_refreshing = False + if self._quality_source == "station": + self._update_station_quality(self._highlighted_station_id) + else: + self._update_event_quality(self._highlighted_event_id) + + def _end_seismogram_refresh(self) -> None: + """Runs after pending RowHighlighted events from move_cursor have been processed.""" + self._seismogram_refreshing = False + 
self._update_seismogram_note(self._highlighted_seismogram_id) + self._update_seismogram_plot(self._highlighted_seismogram_id) def _check_iccs_staleness(self) -> None: """Trigger ICCS recreation if the current event has been modified externally. @@ -527,15 +573,16 @@ def _check_iccs_staleness(self) -> None: try: with Session(engine) as session: event = self._get_current_event(session) + last_modified = event.last_modified stale = ( self._bound_iccs.is_stale(event) if self._bound_iccs is not None - else event.last_modified != self._iccs_last_modified_seen + else last_modified != self._iccs_last_modified_seen ) except (NoResultFound, RuntimeError): return if stale: - self._iccs_last_modified_seen = event.last_modified + self._iccs_last_modified_seen = last_modified self._create_iccs() self.refresh_all() @@ -613,10 +660,9 @@ def _refresh_seismograms(self) -> None: if row.get("Select"): selected_ccs.append(float(cc_val)) row_id = str(row.pop("ID")) - cells = [_tui_cell(AimbatSeismogramRead, k, v) for k, v in row.items()] + cells = [tui_cell(AimbatSeismogramRead, k, v) for k, v in row.items()] table.add_row(*cells, key=row_id) - title = self.query_one("#seismogram-title", Static) if all_ccs: n_all = len(all_ccs) mean_all = statistics.mean(all_ccs) @@ -626,15 +672,22 @@ def _refresh_seismograms(self) -> None: sem_sel = ( statistics.stdev(selected_ccs) / n_sel**0.5 if n_sel >= 2 else None ) - title.update( - f"Seismograms [dim]CC: selected {_fmt_float_sem(mean_sel, sem_sel)}" - f" Β· all {_fmt_float_sem(mean_all, sem_all)}[/dim]" + table.border_title = ( + f"Seismograms [dim]CC: selected {fmt_float_sem(mean_sel, sem_sel)}" + f" Β· all {fmt_float_sem(mean_all, sem_all)}[/dim]" ) else: - title.update("Seismograms") + table.border_title = "Seismograms" + self._seismogram_refreshing = True if table.row_count > 0: table.move_cursor(row=min(saved_row, table.row_count - 1)) + self.call_after_refresh(self._end_seismogram_refresh) + else: + self._highlighted_seismogram_id = None + 
self._seismogram_refreshing = False + self._update_seismogram_note(None) + self._update_seismogram_plot(None) def _refresh_snapshots(self) -> None: table = self.query_one("#snapshot-table", DataTable) @@ -652,10 +705,15 @@ def _refresh_snapshots(self) -> None: ) for row in snapshots: row_id = str(row.pop("ID")) - cells = [_tui_cell(AimbatSnapshotRead, k, v) for k, v in row.items()] + cells = [tui_cell(AimbatSnapshotRead, k, v) for k, v in row.items()] table.add_row(*cells, key=row_id) if table.row_count > 0: table.move_cursor(row=min(saved_row, table.row_count - 1)) + else: + self._highlighted_snapshot_id = None + self.call_after_refresh( + lambda: self._update_snapshot_quality(self._highlighted_snapshot_id) + ) # ------------------------------------------------------------------ # Row event handlers @@ -702,13 +760,176 @@ def on_action(result: tuple[str, bool, bool] | None) -> None: self._preview_snapshot_plot(snap_id, "stack", context, all_seis) elif action == "preview_image": self._preview_snapshot_plot(snap_id, "image", context, all_seis) - elif action == "show_quality": - self._show_quality_snapshot(snap_id) else: self._handle_row_action("tab-snapshots", snap_id, action) self.push_screen(SnapshotActionMenuModal(f"Snapshot {snap_id[:8]}"), on_action) + @on(DataTable.RowHighlighted, "#project-event-table") + def project_event_row_highlighted(self, event: DataTable.RowHighlighted) -> None: + self._highlighted_event_id = event.row_key.value if event.row_key else None + if not self._project_refreshing: + self._quality_source = "event" + self._update_event_quality(self._highlighted_event_id) + + @on(DataTable.RowHighlighted, "#project-station-table") + def project_station_row_highlighted(self, event: DataTable.RowHighlighted) -> None: + self._highlighted_station_id = event.row_key.value if event.row_key else None + if not self._project_refreshing: + self._quality_source = "station" + self._update_station_quality(self._highlighted_station_id) + + 
@on(VimDataTable.Focused, "#project-event-table") + def _project_event_table_focused(self) -> None: + if not self._project_refreshing: + self._quality_source = "event" + self._update_event_quality(self._highlighted_event_id) + + @on(VimDataTable.Focused, "#project-station-table") + def _project_station_table_focused(self) -> None: + if not self._project_refreshing: + self._quality_source = "station" + self._update_station_quality(self._highlighted_station_id) + + @on(DataTable.RowHighlighted, "#seismogram-table") + def seismogram_row_highlighted(self, event: DataTable.RowHighlighted) -> None: + self._highlighted_seismogram_id = event.row_key.value if event.row_key else None + if not self._seismogram_refreshing: + self._update_seismogram_note(self._highlighted_seismogram_id) + self._update_seismogram_plot(self._highlighted_seismogram_id) + + @on(DataTable.RowHighlighted, "#snapshot-table") + def snapshot_row_highlighted(self, event: DataTable.RowHighlighted) -> None: + self._highlighted_snapshot_id = event.row_key.value if event.row_key else None + self._update_snapshot_quality(self._highlighted_snapshot_id) + + # ------------------------------------------------------------------ + # Quality panel updates + # ------------------------------------------------------------------ + + def _update_event_quality(self, item_id: str | None) -> None: + panel = self.query_one("#project-quality-panel", Static) + panel.border_title = "Live event statistics" + stats = None + if item_id is not None: + try: + with Session(engine) as session: + stats = get_event_quality(session, uuid.UUID(item_id)) + except (ValueError, SQLAlchemyError): + pass + body, subtitle = format_quality_panel(stats) + panel.update(body) + panel.border_subtitle = subtitle + note_widget = self.query_one("#project-note", NoteWidget) + if item_id is None: + note_widget.clear() + else: + with suppress(ValueError): + note_widget.set_entity("event", uuid.UUID(item_id)) + + def _update_station_quality(self, item_id: 
str | None) -> None: + panel = self.query_one("#project-quality-panel", Static) + panel.border_title = "Live station statistics" + stats = None + if item_id is not None: + try: + with Session(engine) as session: + stats = get_station_quality(session, uuid.UUID(item_id)) + except (ValueError, SQLAlchemyError): + pass + body, subtitle = format_quality_panel(stats) + panel.update(body) + panel.border_subtitle = subtitle + note_widget = self.query_one("#project-note", NoteWidget) + if item_id is None: + note_widget.clear() + else: + with suppress(ValueError): + note_widget.set_entity("station", uuid.UUID(item_id)) + + def _update_seismogram_note(self, item_id: str | None) -> None: + note_widget = self.query_one("#seismogram-note", NoteWidget) + if item_id is None: + note_widget.clear() + else: + with suppress(ValueError): + note_widget.set_entity("seismogram", uuid.UUID(item_id)) + + def _update_seismogram_plot(self, item_id: str | None) -> None: + try: + plot_widget = self.query_one("#seismogram-plot", SeismogramPlotWidget) + except NoMatches: + return + if item_id is None or self._bound_iccs is None: + plot_widget.clear() + return + seis_uuid = uuid.UUID(item_id) + iccs = self._bound_iccs.iccs + idx = next( + ( + i + for i, s in enumerate(iccs.seismograms) + if s.extra.get("id") == seis_uuid + ), + None, + ) + if idx is None: + plot_widget.clear() + return + parent = iccs.seismograms[idx] + pick = parent.t1 if parent.t1 is not None else parent.t0 + try: + cc_seis = iccs.cc_seismograms[idx] + ctx_seis = iccs.context_seismograms[idx] + except Exception: + plot_widget.clear() + return + pick_ns: int = pick.value + cc_n = len(cc_seis.data) + cc_times = ( + ( + cc_seis.begin_time.value + + np.arange(cc_n, dtype=np.int64) * cc_seis.delta.value + - pick_ns + ) + / 1e9 + ).tolist() + ctx_n = len(ctx_seis.data) + ctx_times = ( + ( + ctx_seis.begin_time.value + + np.arange(ctx_n, dtype=np.int64) * ctx_seis.delta.value + - pick_ns + ) + / 1e9 + ).tolist() + 
plot_widget.update_plots( + cc_times, + cc_seis.data.tolist(), + ctx_times, + ctx_seis.data.tolist(), + ) + + def _update_snapshot_quality(self, item_id: str | None) -> None: + panel = self.query_one("#snapshot-quality-panel", Static) + panel.border_title = "Snapshot statistics" + stats = None + if item_id is not None: + try: + with Session(engine) as session: + stats = get_snapshot_quality(session, uuid.UUID(item_id)) + except (ValueError, SQLAlchemyError): + pass + body, subtitle = format_quality_panel(stats) + panel.update(body) + panel.border_subtitle = subtitle + note_widget = self.query_one("#snapshot-note", NoteWidget) + if item_id is None: + note_widget.clear() + else: + with suppress(ValueError): + note_widget.set_entity("snapshot", uuid.UUID(item_id)) + # ------------------------------------------------------------------ # Row-action menu helpers # ------------------------------------------------------------------ @@ -858,16 +1079,6 @@ def on_confirm(confirmed: bool | None) -> None: self.push_screen(ConfirmModal(msg), on_confirm) - def _show_quality_snapshot(self, snap_id: str) -> None: - try: - with Session(engine) as session: - groups = snapshot_quality_groups(session, uuid.UUID(snap_id)) - self.push_screen( - QualityModal(f"Quality {snap_id[:8]}", _format_groups(groups)) - ) - except Exception as exc: - self.notify(str(exc), severity="error") - def _show_snapshot_details(self, snap_id: str) -> None: try: with Session(engine) as session: @@ -1114,6 +1325,9 @@ def action_vim_right(self) -> None: def action_toggle_theme(self) -> None: self.theme = _LIGHT_THEME if self.theme == _DEFAULT_THEME else _DEFAULT_THEME + def action_show_help(self) -> None: + self.push_screen(HelpModal(self._active_tab)) + def action_refresh(self) -> None: self.refresh_all() self.notify("Refreshed", timeout=1) diff --git a/src/aimbat/_tui/help/tab-project.md b/src/aimbat/_tui/help/tab-project.md new file mode 100644 index 0000000..6e86c79 --- /dev/null +++ 
b/src/aimbat/_tui/help/tab-project.md @@ -0,0 +1,101 @@ +# Project tab + +This is the starting point. The Project tab gives you an overview of +everything in the database: the seismic events and the recording stations. +**Most processing in AIMBAT is per-event** β€” you need to select an event +here before the Live data and Snapshots tabs show anything useful. + +--- + +## What you see + +### Event bar (top of screen) + +The bar above the tabs always shows the currently selected event and its +ICCS status: + +- **● ICCS ready** β€” the event's seismograms are loaded in memory and + alignment can run. This is the normal working state. +- **β—‹ no ICCS** β€” the ICCS instance could not be built. Usually this means + a parameter combination is invalid (e.g. the time window is too wide) + or a waveform file is missing. Fix the problem and the status updates + automatically. + +### Events table (top) + +Lists every seismic event in the project. Each row shows the event's +origin time, location, depth, and completion status. The highlighted row +drives the quality panel and note on the right. + +Press `Enter` on an event row to open the action menu. The most important +action is **Select event** β€” this loads the event's seismograms into +memory and makes it the target for all processing commands (`a` Align, +`t` Tools, `p` Parameters, `n` New Snapshot). + +### Stations table (bottom) + +Lists every recording station. Highlighting a station row switches the +quality panel and note to show that station's data. + +### Quality panel (right) + +Shows a summary of ICCS and MCCC quality metrics for the highlighted event +or station. The panel updates as you move through the tables. + +### Note (below quality panel) + +A free-text Markdown note for the highlighted event or station. Switch to +**Edit** to type, then back to **View** to render the Markdown. Notes are +saved automatically whenever the editor loses focus β€” no explicit save +action is needed. 
Each event and station has its own note, which persists +in the database. + +--- + +## Row actions — Events + +| Action | Description | +|--------|-------------| +| Select event | Load this event for processing (populates Live data and Snapshots tabs) | +| Toggle completed | Mark or unmark the event as done | +| View seismograms | Switch to the Live data tab showing only this event's seismograms | +| Delete event | Remove the event and all its seismograms from the project | + +## Row actions — Stations + +| Action | Description | +|--------|-------------| +| View seismograms | Switch to the Live data tab filtered to this station | +| Delete station | Remove the station from the project | + +--- + +## Navigation + +| Key | Action | +|-----|--------| +| `j` / `↓` | Move down | +| `k` / `↑` | Move up | +| `g` / `G` | Jump to top / bottom | +| `Enter` | Open row action menu | +| `Tab` | Switch focus between Events and Stations tables | + +--- + +## Global key bindings + +These work from any tab: + +| Key | Action | +|-----|--------| +| `e` | Open event switcher (quick select without leaving current tab) | +| `d` | Add data files to the project | +| `p` | Edit processing parameters for the selected event | +| `a` | Run alignment (ICCS or MCCC) | +| `t` | Open interactive tools (matplotlib picking, stack/matrix plots) | +| `n` | Create a new snapshot for the selected event | +| `r` | Refresh all panels | +| `c` | Toggle light/dark colour theme | +| `H` / `L` | Switch tabs (vim-style left/right) | +| `?` | Show this help | +| `q` | Quit | diff --git a/src/aimbat/_tui/help/tab-seismograms.md b/src/aimbat/_tui/help/tab-seismograms.md new file mode 100644 index 0000000..14373c9 --- /dev/null +++ b/src/aimbat/_tui/help/tab-seismograms.md @@ -0,0 +1,111 @@ +# Live data tab + +This tab shows the seismograms for the currently selected event.
If the +table is empty, go to the **Project** tab and select an event first (`e` +to open the event switcher, or `Enter` β†’ **Select event** on any event row). + +**Everything here is synced directly to the database.** Row actions (toggle +select, toggle flip, reset, delete) take effect immediately β€” there is no +separate save step. The table reflects the current state at all times. + +--- + +## What you see + +Each row is one seismogram (one station recording of one event). The columns are: + +| Column | Description | +|--------|-------------| +| Name | Recording station name (network.station) | +| Channel | Station channel code | +| Select | `βœ“` if this seismogram is included in the ICCS stack, `βœ—` if excluded | +| Flip | `βœ“` if the waveform's polarity has been inverted (multiplied by βˆ’1) | +| Ξ”t (s) | Arrival time residual (t1 βˆ’ t0) in seconds. Empty until a phase-arrival pick has been set. | +| MCCC err Ξ”t (s) | Per-seismogram timing uncertainty from the last MCCC run. Only shown after MCCC has been run. | +| Stack CC | Correlation coefficient against the current ICCS stack. Higher is better. Seismograms below `min_cc` are excluded automatically if autoselect is on. | +| MCCC CC | Mean cross-correlation coefficient from the MCCC cluster. Only shown after MCCC has been run. | +| MCCC CC std | Standard deviation of cross-correlation coefficients in the MCCC cluster. Only shown after MCCC has been run. | + +### The ICCS stack + +ICCS (Iterative Cross-Correlation and Stack) is the core alignment +algorithm. It cross-correlates each selected seismogram against the current +stack waveform, adjusts the picks, rebuilds the stack, and repeats until +convergence. Only seismograms with `Select = βœ“` contribute to the stack. + +The Stack CC column updates live as soon as the event is loaded β€” it shows +how well each seismogram matches the current stack, even before you run +alignment. 
After running ICCS (`a`), the picks (Ξ”t) and Stack CC values are +updated and written to the database immediately. + +### Seismogram plot (right panel) + +When a row is highlighted the right panel shows the processed waveform for +that seismogram in two tabs: + +- **CC** β€” the cross-correlation seismogram: tapered and normalised to the + window used for alignment. This is exactly what ICCS correlates against + the stack. +- **Context** β€” the same trace with extra padding beyond the time window, + normalised within the window. Use this to judge whether the window + boundaries make sense in relation to the surrounding signal. + +The x-axis shows time in seconds relative to the phase-arrival pick (t1), or +relative to t0 if t1 has not yet been set. The y-axis shows normalised +amplitude β€” tick labels are hidden since the absolute scale is arbitrary. + +### Note (right panel) + +A free-text Markdown note for the highlighted seismogram. Switch to **Edit** +to type, then back to **View** to render the Markdown. Notes are saved +automatically whenever the editor loses focus. Each seismogram has its own +note, which persists in the database. + +### Typical workflow + +1. Select an event in the Project tab. +2. Check the seismograms here β€” look for outliers (very low Stack CC) or + wrongly-polarised traces (flip flag). +3. Run ICCS (`a`) to align all selected seismograms. +4. Use the interactive tools (`t`) to manually adjust picks or the time + window if needed. +5. Exclude obvious outliers with **Toggle select** (`Enter` on the row). +6. Take a snapshot (`n`) to save a checkpoint. +7. Run MCCC (`a` β†’ MCCC) for the final high-precision picks. 
+ +--- + +## Row actions + +| Action | Description | +|--------|-------------| +| Toggle select | Include or exclude this seismogram from the ICCS stack | +| Toggle flip | Invert polarity β€” use this if a seismogram is clearly upside down | +| Reset parameters | Restore all per-seismogram parameters (t1, select, flip) to their defaults | +| Delete seismogram | Remove this seismogram from the project permanently | + +--- + +## Navigation + +| Key | Action | +|-----|--------| +| `j` / `↓` | Move down | +| `k` / `↑` | Move up | +| `g` / `G` | Jump to top / bottom | +| `Enter` | Open row action menu | + +--- + +## Global key bindings + +| Key | Action | +|-----|--------| +| `e` | Open event switcher | +| `a` | Run alignment (opens ICCS / MCCC menu) | +| `t` | Open interactive tools | +| `p` | Edit processing parameters | +| `n` | Create a new snapshot | +| `r` | Refresh all panels | +| `?` | Show this help | +| `q` | Quit | diff --git a/src/aimbat/_tui/help/tab-snapshots.md b/src/aimbat/_tui/help/tab-snapshots.md new file mode 100644 index 0000000..26b1240 --- /dev/null +++ b/src/aimbat/_tui/help/tab-snapshots.md @@ -0,0 +1,95 @@ +# Snapshots tab + +A snapshot is a saved checkpoint of the current processing state β€” the +time window, bandpass filter, picks, and per-seismogram flags. Snapshots +let you experiment freely: take one before making changes, and roll back +if things go wrong. + +If the list is empty, go to the **Project** tab and select an event first, +then press `n` to create a snapshot. + +--- + +## What you see + +### Snapshot list (left) + +Each row is one snapshot for the selected event, showing when it was taken +and an optional comment you can add at creation time. The most recent +snapshot is at the bottom. + +### Quality panel (right) + +Shows ICCS CC and MCCC quality metrics as they were at snapshot time. 
This +lets you compare quality across snapshots — for example to see whether a +parameter change improved alignment — without loading each one. + +### Note (below quality panel) + +A free-text Markdown note for the highlighted snapshot. Switch to **Edit** +to type, then back to **View** to render the Markdown. Notes are saved +automatically whenever the editor loses focus — no explicit save action is +needed. Each snapshot has its own note, which persists in the database. + +--- + +## What a snapshot captures + +- **Event parameters** — time window (`t0`/`t1` window bounds), bandpass + filter settings, and Min CC threshold +- **Per-seismogram parameters** — the `t1` pick, `select` flag, and `flip` + flag for every seismogram +- **Quality metrics** — ICCS correlation coefficients per seismogram (always captured); + MCCC metrics (only if MCCC has been run with the current parameters) + +Waveform data is not copied — snapshots are lightweight records of where +you are in the parameter space. + +--- + +## Row actions + +| Action | Description | +|--------|-------------| +| Show details | View the event parameters (window, filter, min CC) as saved | +| Preview stack | Open the ICCS stack plot built from this snapshot's parameters, without changing anything in the database | +| Preview matrix image | Open the cross-correlation matrix image from this snapshot | +| Rollback to this snapshot | Restore these parameters as the current live values — overwrites the current parameters for this event | +| Delete snapshot | Permanently remove the snapshot (the live parameters are not affected) | + +### About rollback + +Rolling back restores the snapshot's parameters to the live state. Any +ICCS runs or parameter changes made after that snapshot are undone. The +snapshot itself is not deleted — you can roll back to it again or compare +it against other snapshots.
+ +If the snapshot contains MCCC quality data, the live quality metrics are +also restored from the best matching snapshot. + +--- + +## Navigation + +| Key | Action | +|-----|--------| +| `j` / `↓` | Move down | +| `k` / `↑` | Move up | +| `g` / `G` | Jump to top / bottom | +| `Enter` | Open row action menu | + +When previewing a stack or matrix image, two extra toggles appear: +- `c` β€” include context seismograms (wider view around the pick window) +- `a` β€” include all seismograms, even those with `Select = βœ—` + +--- + +## Global key bindings + +| Key | Action | +|-----|--------| +| `e` | Open event switcher | +| `n` | Create a new snapshot for the current event | +| `r` | Refresh all panels | +| `?` | Show this help | +| `q` | Quit | diff --git a/src/aimbat/_tui/modals.py b/src/aimbat/_tui/modals.py index f2166a1..c6787d5 100644 --- a/src/aimbat/_tui/modals.py +++ b/src/aimbat/_tui/modals.py @@ -4,6 +4,7 @@ import uuid from enum import StrEnum +from pathlib import Path from pandas import Timedelta from pydantic import ValidationError @@ -13,7 +14,7 @@ from textual.binding import Binding from textual.containers import Container from textual.screen import ModalScreen -from textual.widgets import DataTable, Input, Label, Static +from textual.widgets import DataTable, Input, Label, Markdown, Static from aimbat._tui._format import tui_cell, tui_display_title from aimbat._tui._widgets import VimDataTable @@ -52,11 +53,11 @@ class _Hint(StrEnum): "AlignModal", "ConfirmModal", "EventSwitcherModal", + "HelpModal", "InteractiveToolsModal", "NoProjectModal", "ParameterInputModal", "ParametersModal", - "QualityModal", "SnapshotActionMenuModal", "SnapshotCommentModal", "SnapshotDetailsModal", @@ -78,11 +79,18 @@ class EventSwitcherModal(ModalScreen[uuid.UUID | None]): ] def __init__(self, current_event_id: uuid.UUID | None = None) -> None: + """Initialise the modal. 
+ + Args: + current_event_id: ID of the currently active event, used to mark + the active row with a `β–Ά` indicator. + """ super().__init__() self._current_event_id = current_event_id self._selected_event_id: str | None = None def check_action(self, action: str, parameters: tuple[object, ...]) -> bool | None: + """Disable destructive actions when no row is highlighted.""" if action in {"delete_event", "toggle_completed"}: return True if self._selected_event_id else False return True @@ -105,6 +113,7 @@ def on_mount(self) -> None: self._populate(table) def _populate(self, table: DataTable) -> None: + """Fetch events from the database and populate `table` with rows.""" try: with Session(engine) as session: rows = dump_event_table( @@ -123,6 +132,7 @@ def _populate(self, table: DataTable) -> None: self.dismiss(None) def _refresh_table(self) -> None: + """Clear and repopulate the event table, preserving cursor position.""" table = self.query_one("#event-table", DataTable) saved_row = table.cursor_row table.clear() @@ -197,6 +207,13 @@ class ParameterInputModal(ModalScreen[str | None]): BINDINGS = [Binding("escape", "cancel", "Cancel", show=False)] def __init__(self, param_name: str, current: str, unit: str) -> None: + """Initialise the modal. + + Args: + param_name: Display name of the parameter being edited. + current: Current value shown as the default input text. + unit: Unit label appended to the hint (e.g. `"s"` for seconds). + """ super().__init__() self._param_name = param_name self._current = current @@ -283,6 +300,11 @@ class ConfirmModal(ModalScreen[bool | None]): ] def __init__(self, message: str) -> None: + """Initialise the modal. + + Args: + message: Confirmation prompt displayed to the user. + """ super().__init__() self._message = message @@ -352,6 +374,11 @@ class ParametersModal(ModalScreen[bool]): BINDINGS = [Binding("escape", "cancel", show=False)] def __init__(self, event_id: uuid.UUID) -> None: + """Initialise the modal. 
+ + Args: + event_id: ID of the event whose parameters are displayed. + """ super().__init__() self._event_id = event_id self._changed = False @@ -370,6 +397,7 @@ def on_mount(self) -> None: table.focus() def _populate(self) -> None: + """Reload the parameter table from the database, preserving cursor position.""" table = self.query_one("#param-modal-table", DataTable) saved_row = table.cursor_row table.clear() @@ -402,6 +430,7 @@ def row_selected(self, event: DataTable.RowSelected) -> None: self._edit_parameter(attr) def _edit_parameter(self, attr: str) -> None: + """Open an edit dialog for `attr`, toggling booleans inline.""" with Session(engine) as session: ev = session.get(AimbatEvent, self._event_id) if ev is None: @@ -435,6 +464,7 @@ def on_input(raw: str | None) -> None: self.app.push_screen(ParameterInputModal(label, current_str, unit), on_input) def _apply_parameter(self, attr: str, value: object) -> None: + """Persist a validated parameter change to the database.""" try: with Session(engine) as session: event = session.get(AimbatEvent, self._event_id) @@ -483,6 +513,12 @@ class ActionMenuModal(ModalScreen[str | None]): ] def __init__(self, title: str, actions: list[tuple[str, str]]) -> None: + """Initialise the modal. + + Args: + title: Heading displayed at the top of the menu. + actions: List of `(action_key, display_label)` pairs shown as rows. + """ super().__init__() self._title = title self._actions = actions # [(action_key, display_label), ...] @@ -522,7 +558,6 @@ def action_cancel(self) -> None: _SNAPSHOT_ACTIONS: list[tuple[str, str]] = [ ("show_details", "Show details"), - ("show_quality", "Show quality"), ("preview_stack", "Preview stack"), ("preview_image", "Preview matrix image"), ("rollback", "Rollback to this snapshot"), @@ -546,6 +581,11 @@ class SnapshotActionMenuModal(ModalScreen[tuple[str, bool, bool] | None]): ] def __init__(self, title: str) -> None: + """Initialise the modal. + + Args: + title: Heading displayed above the action list. 
+ """ super().__init__() self._title = title self._use_context = True @@ -569,6 +609,7 @@ def on_mount(self) -> None: table.focus() def _update_options(self) -> None: + """Refresh the context/all-seismograms toggle display.""" opts = self.query_one("#snapshot-action-options", Static) if self._highlighted in _PREVIEW_ACTIONS: ctx = "βœ“" if self._use_context else "βœ—" @@ -660,6 +701,7 @@ def on_mount(self) -> None: table.focus() def _update_options(self) -> None: + """Refresh the context/all-seismograms toggle display.""" ctx = "βœ“" if self._use_context else "βœ—" al = "βœ“" if self._all_seis else "βœ—" self.query_one("#tools-options", Static).update( @@ -740,6 +782,7 @@ def on_mount(self) -> None: table.focus() def _update_options(self) -> None: + """Refresh the algorithm-specific option toggles.""" opts = self.query_one("#align-options", Static) if self._highlighted_algorithm == "iccs": fl = "βœ“" if self._autoflip else "βœ—" @@ -788,83 +831,91 @@ def action_cancel(self) -> None: # --------------------------------------------------------------------------- -# Quality modal +# Snapshot details modal # --------------------------------------------------------------------------- -class QualityModal(ModalScreen[None]): - """Read-only quality metrics view with one headerless table per group. +class SnapshotDetailsModal(ModalScreen[None]): + """Read-only view of the event parameters captured in a snapshot.""" - Each element of `groups` is a `(title, rows)` pair where `rows` is - a list of pre-formatted `(label, value)` strings. An empty title - suppresses the section heading. - """ + BINDINGS = [ + Binding("escape", "cancel", show=False), + ] - BINDINGS = [Binding("escape", "cancel", show=False)] + def __init__(self, title: str, rows: list[tuple[str, str]]) -> None: + """Initialise the modal. - def __init__( - self, - title: str, - groups: list[tuple[str, list[tuple[str, str]]]], - ) -> None: + Args: + title: Heading displayed above the parameter table. 
+ rows: List of `(label, value)` pairs to display as read-only rows. + """ super().__init__() self._title = title - self._groups = groups + self._rows = rows # [(label, value), ...] def compose(self) -> ComposeResult: - with Container(id="quality-dialog"): + with Container(id="snapshot-details-dialog"): yield Label(self._title, classes=_CSS.TITLE) - for i, (group_title, _) in enumerate(self._groups): - if group_title: - yield Label( - f"[bold]{group_title}[/bold]", classes="quality-section" - ) - yield VimDataTable(id=f"quality-table-{i}", show_header=False) + yield VimDataTable(id="snapshot-details-table", show_header=True) yield Label(_Hint.CLOSE, classes=_CSS.HINT) def on_mount(self) -> None: - for i, (_, rows) in enumerate(self._groups): - table = self.query_one(f"#quality-table-{i}", DataTable) - table.cursor_type = "none" - table.add_columns("label", "value") - for row in rows: - table.add_row(*row) - table.styles.height = len(rows) + 1 + table = self.query_one(DataTable) + table.cursor_type = "none" + table.add_columns("Parameter", "Value") + for row in self._rows: + table.add_row(*row) + table.styles.height = len(self._rows) + 2 def action_cancel(self) -> None: self.dismiss(None) # --------------------------------------------------------------------------- -# Snapshot details modal +# Help modal # --------------------------------------------------------------------------- +_HELP_DIR = Path(__file__).parent / "help" +_HELP_FALLBACK = "No help available for this tab." -class SnapshotDetailsModal(ModalScreen[None]): - """Read-only view of the event parameters captured in a snapshot.""" + +def _load_help(tab_id: str) -> str: + """Load help Markdown for the given tab from a file. + + Args: + tab_id: The `TabPane` ID (e.g. `tab-project`). + + Returns: + Markdown text, or a fallback string if no file exists. 
+ """ + path = _HELP_DIR / f"{tab_id}.md" + if path.is_file(): + return path.read_text(encoding="utf-8") + return _HELP_FALLBACK + + +class HelpModal(ModalScreen[None]): + """Modal screen showing keyboard help for the current TUI tab.""" BINDINGS = [ - Binding("escape", "cancel", show=False), + Binding("escape", "cancel", "Close", show=False), + Binding("question_mark", "cancel", "Close", show=False), ] - def __init__(self, title: str, rows: list[tuple[str, str]]) -> None: + def __init__(self, tab_id: str) -> None: + """Initialise the modal for the given tab. + + Args: + tab_id: The ID of the active `TabPane` whose help to display. + """ super().__init__() - self._title = title - self._rows = rows # [(label, value), ...] + self._tab_id = tab_id def compose(self) -> ComposeResult: - with Container(id="snapshot-details-dialog"): - yield Label(self._title, classes=_CSS.TITLE) - yield VimDataTable(id="snapshot-details-table", show_header=True) + with Container(id="help-dialog"): + yield Label("Help", classes=_CSS.TITLE) + yield Markdown(_load_help(self._tab_id), id="help-content") yield Label(_Hint.CLOSE, classes=_CSS.HINT) - def on_mount(self) -> None: - table = self.query_one(DataTable) - table.cursor_type = "none" - table.add_columns("Parameter", "Value") - for row in self._rows: - table.add_row(*row) - table.styles.height = len(self._rows) + 2 - def action_cancel(self) -> None: self.dismiss(None) diff --git a/src/aimbat/core/__init__.py b/src/aimbat/core/__init__.py index b4ec035..38c44e7 100644 --- a/src/aimbat/core/__init__.py +++ b/src/aimbat/core/__init__.py @@ -11,11 +11,6 @@ - **ICCS / MCCC** β€” run the Iterative Cross-Correlation and Stack (`run_iccs`) and Multi-Channel Cross-Correlation (`run_mccc`) algorithms; update picks, time windows, and correlation thresholds. 
-- **Quality / views** β€” retrieve and aggregate alignment quality metrics - (`get_quality_seismogram`, `get_quality_event`, `get_quality_station`, - `dump_quality_event`, `dump_quality_station`); structured view data for - rendering (`FieldSpec`, `FieldGroup`, `seismogram_quality_groups`, - `event_quality_groups`, `station_quality_groups`). - **Snapshots** β€” save, restore, and delete parameter snapshots (`create_snapshot`, `rollback_to_snapshot`). - **Project** β€” create and delete the project database (`create_project`, @@ -29,8 +24,8 @@ from ._data import * from ._event import * from ._iccs import * +from ._note import * from ._project import * -from ._quality import * from ._seismogram import * from ._snapshot import * from ._station import * diff --git a/src/aimbat/core/_event.py b/src/aimbat/core/_event.py index ca1b0d1..4c8e6b6 100644 --- a/src/aimbat/core/_event.py +++ b/src/aimbat/core/_event.py @@ -18,6 +18,7 @@ AimbatEventRead, AimbatSeismogram, AimbatStation, + SeismogramQualityStats, ) from aimbat.models._parameters import AimbatEventParametersBase from aimbat.utils import get_title_map, rel @@ -25,17 +26,18 @@ __all__ = [ "delete_event", "get_completed_events", + "get_event_quality", "get_events_using_station", "resolve_event", "set_event_parameter", "dump_event_table", "dump_event_parameter_table", + "dump_event_quality_table", ] def resolve_event(session: Session, event_id: UUID | None = None) -> AimbatEvent: - """ - Resolve an event from an explicit ID. + """Resolve an event from an explicit ID. Args: session: SQL session. @@ -96,6 +98,9 @@ def get_completed_events(session: Session) -> Sequence[AimbatEvent]: Args: session: SQL session. + + Returns: + All events where the `completed` parameter is set. 
""" logger.debug("Getting completed events from project.") @@ -144,6 +149,119 @@ def get_events_using_station( return events +def get_event_quality(session: Session, event_id: UUID) -> SeismogramQualityStats: + """Get aggregated quality statistics for an event. + + Args: + session: Database session. + event_id: UUID of the event. + + Returns: + Aggregated seismogram quality statistics. + + Raises: + NoResultFound: If no event with the given ID is found. + """ + logger.debug(f"Getting quality stats for event {event_id}.") + + event = session.exec( + select(AimbatEvent) + .where(AimbatEvent.id == event_id) + .options( + selectinload(rel(AimbatEvent.seismograms)).selectinload( + rel(AimbatSeismogram.quality) + ), + selectinload(rel(AimbatEvent.quality)), + ) + ).one_or_none() + + if event is None: + raise NoResultFound(f"No AimbatEvent found with id: {event_id}.") + + return SeismogramQualityStats.from_event(event) + + +@overload +def dump_event_table( + session: Session, + from_read_model: Literal[False] = ..., + by_alias: bool = ..., + by_title: bool = ..., + exclude: set[str] | None = ..., +) -> str: ... + + +@overload +def dump_event_table( + session: Session, + from_read_model: Literal[True], + by_alias: bool = ..., + by_title: bool = ..., + exclude: set[str] | None = ..., +) -> list[dict[str, Any]]: ... + + +def dump_event_table( + session: Session, + from_read_model: bool = False, + by_alias: bool = False, + by_title: bool = False, + exclude: set[str] | None = None, +) -> list[dict[str, Any]] | str: + """Dump the table data to json serialisable list of dicts. + + Args: + session: Database session. + from_read_model: Whether to dump from the read model (True) or the ORM model. + by_alias: Whether to use serialization aliases for the field names. + by_title: Whether to use the field title metadata for the field names in the + output (only applicable when from_read_model is True). Mutually + exclusive with by_alias. 
+ exclude: Set of field names to exclude from the output. + + Raises: + ValueError: If both `by_alias` and `by_title` are True. + ValueError: If `by_title` is True but `from_read_model` is False. + """ + logger.debug("Dumping AIMBAT event table to json.") + + if by_alias and by_title: + raise ValueError("Arguments 'by_alias' and 'by_title' are mutually exclusive.") + + if not from_read_model and by_title: + raise ValueError("'by_title' is only supported when 'from_read_model' is True.") + + if exclude is not None: + exclude: dict[str, set] = {"__all__": exclude} # type: ignore[no-redef] + + statement = select(AimbatEvent).options( + selectinload(rel(AimbatEvent.seismograms)).selectinload( + rel(AimbatSeismogram.parameters) + ), + selectinload(rel(AimbatEvent.parameters)), + selectinload(rel(AimbatEvent.quality)), + ) + events = session.exec(statement).all() + + if from_read_model: + event_reads = [AimbatEventRead.from_event(e, session=session) for e in events] + adapter_reads: TypeAdapter[Sequence[AimbatEventRead]] = TypeAdapter( + Sequence[AimbatEventRead] + ) + data = adapter_reads.dump_python( + event_reads, exclude=exclude, by_alias=by_alias, mode="json" + ) + + if by_title: + title_map = get_title_map(AimbatEventRead) + return [{title_map.get(k, k): v for k, v in row.items()} for row in data] + + return data + + adapter: TypeAdapter[Sequence[AimbatEvent]] = TypeAdapter(Sequence[AimbatEvent]) + return adapter.dump_json(events, exclude=exclude, by_alias=by_alias).decode() + + @overload def set_event_parameter( session: Session, @@ -238,95 +356,68 @@ def set_event_parameter( clear_mccc_quality(session, event) -@overload -def dump_event_table( - session: Session, - from_read_model: Literal[False] = ..., - by_alias: bool = ..., - by_title: bool = ..., - exclude: set[str] | None = ..., -) -> str: ... 
- - -@overload -def dump_event_table( - session: Session, - from_read_model: Literal[True], - by_alias: bool = ..., - by_title: bool = ..., - exclude: set[str] | None = ..., -) -> list[dict[str, Any]]: ... - - -def dump_event_table( +def dump_event_parameter_table( session: Session, - from_read_model: bool = False, by_alias: bool = False, by_title: bool = False, exclude: set[str] | None = None, -) -> list[dict[str, Any]] | str: - """Dump the table data to json serialisable list of dicts. + event_id: UUID | None = None, +) -> list[dict[str, Any]]: + """Dump the event parameter table data to json. Args: session: Database session. - from_read_model: Whether to dump from the read model (True) or the ORM model. by_alias: Whether to use serialization aliases for the field names. - by_title: Whether to use the field title metadata for the field names in the - output (only applicable when from_read_model is True). Mutually - exclusive with by_alias. + by_title: Whether to use the field title metadata for the field names. + Mutually exclusive with by_alias. exclude: Set of field names to exclude from the output. + event_id: Event ID to filter parameters by (if none is provided, + parameters for all events are dumped). Raises: ValueError: If both `by_alias` and `by_title` are True. - ValueError: If `by_title` is True but `from_read_model` is False. 
""" - logger.debug("Dumping AIMBAT event table to json.") + + logger.debug("Dumping AIMBAT event parameter table to json.") if by_alias and by_title: raise ValueError("Arguments 'by_alias' and 'by_title' are mutually exclusive.") - if not from_read_model and by_title: - raise ValueError("'by_title' is only supported when 'from_read_model' is True.") - if exclude is not None: exclude: dict[str, set] = {"__all__": exclude} # type: ignore[no-redef] - statement = select(AimbatEvent).options( - selectinload(rel(AimbatEvent.seismograms)).selectinload( - rel(AimbatSeismogram.parameters) - ), - selectinload(rel(AimbatEvent.parameters)), - selectinload(rel(AimbatEvent.quality)), + adapter: TypeAdapter[Sequence[AimbatEventParameters]] = TypeAdapter( + Sequence[AimbatEventParameters] ) - events = session.exec(statement).all() - if from_read_model: - event_reads = [AimbatEventRead.from_event(e, session=session) for e in events] - adapter_reads: TypeAdapter[Sequence[AimbatEventRead]] = TypeAdapter( - Sequence[AimbatEventRead] - ) - data = adapter_reads.dump_python( - event_reads, exclude=exclude, by_alias=by_alias, mode="json" + if event_id is not None: + statement = select(AimbatEventParameters).where( + AimbatEventParameters.event_id == event_id ) + else: + statement = select(AimbatEventParameters) - if by_title: - title_map = get_title_map(AimbatEventRead) - return [{title_map.get(k, k): v for k, v in row.items()} for row in data] + parameters = session.exec(statement).all() - return data + data = adapter.dump_python( + parameters, mode="json", exclude=exclude, by_alias=by_alias + ) - adapter: TypeAdapter[Sequence[AimbatEvent]] = TypeAdapter(Sequence[AimbatEvent]) - return adapter.dump_json(events, exclude=exclude, by_alias=by_alias).decode() + if by_title: + title_map = get_title_map(AimbatEventParameters) + return [{title_map.get(k, k): v for k, v in row.items()} for row in data] + return data -def dump_event_parameter_table( + +def dump_event_quality_table( session: 
Session, by_alias: bool = False, by_title: bool = False, exclude: set[str] | None = None, event_id: UUID | None = None, ) -> list[dict[str, Any]]: - """Dump the event parameter table data to json. + """Dump event quality statistics to json. Args: session: Database session. @@ -334,40 +425,40 @@ def dump_event_parameter_table( by_title: Whether to use the field title metadata for the field names. Mutually exclusive with by_alias. exclude: Set of field names to exclude from the output. - event_id: Event ID to filter parameters by (if none is provided, - parameters for all events are dumped). + event_id: Event ID to filter by (if none is provided, quality for all + events is dumped). Raises: ValueError: If both `by_alias` and `by_title` are True. """ - logger.debug("Dumping AIMBAT event parameter table to json.") + logger.debug("Dumping AIMBAT event quality table to json.") if by_alias and by_title: raise ValueError("Arguments 'by_alias' and 'by_title' are mutually exclusive.") - if exclude is not None: - exclude: dict[str, set] = {"__all__": exclude} # type: ignore[no-redef] + exclude = (exclude or set()) | {"station_id", "snapshot_id"} + exclude: dict[str, set] = {"__all__": exclude} # type: ignore[no-redef] - adapter: TypeAdapter[Sequence[AimbatEventParameters]] = TypeAdapter( - Sequence[AimbatEventParameters] + statement = select(AimbatEvent).options( + selectinload(rel(AimbatEvent.seismograms)).selectinload( + rel(AimbatSeismogram.quality) + ), + selectinload(rel(AimbatEvent.quality)), ) - if event_id is not None: - statement = select(AimbatEventParameters).where( - AimbatEventParameters.event_id == event_id - ) - else: - statement = select(AimbatEventParameters) + statement = statement.where(AimbatEvent.id == event_id) - parameters = session.exec(statement).all() + events = session.exec(statement).all() + stats = [SeismogramQualityStats.from_event(e) for e in events] - data = adapter.dump_python( - parameters, mode="json", exclude=exclude, by_alias=by_alias + 
adapter: TypeAdapter[Sequence[SeismogramQualityStats]] = TypeAdapter( + Sequence[SeismogramQualityStats] ) + data = adapter.dump_python(stats, mode="json", exclude=exclude, by_alias=by_alias) if by_title: - title_map = get_title_map(AimbatEventParameters) + title_map = get_title_map(SeismogramQualityStats) return [{title_map.get(k, k): v for k, v in row.items()} for row in data] return data diff --git a/src/aimbat/core/_note.py b/src/aimbat/core/_note.py new file mode 100644 index 0000000..de4e6ad --- /dev/null +++ b/src/aimbat/core/_note.py @@ -0,0 +1,49 @@ +"""Read and write notes attached to events, stations, seismograms, or snapshots.""" + +import uuid +from typing import Literal + +from sqlmodel import Session, select + +from aimbat.models import AimbatNote + +__all__ = ["get_note_content", "save_note"] + +NoteTarget = Literal["event", "station", "seismogram", "snapshot"] + + +def get_note_content(session: Session, target: NoteTarget, target_id: uuid.UUID) -> str: + """Return the note content for the given entity. + + Args: + session: Active database session. + target: Entity type β€” one of `event`, `station`, `seismogram`, `snapshot`. + target_id: UUID of the target entity. + + Returns: + Markdown note content, or an empty string if no note exists yet. + """ + attr = getattr(AimbatNote, f"{target}_id") + note = session.exec(select(AimbatNote).where(attr == target_id)).first() + return note.content if note is not None else "" + + +def save_note( + session: Session, target: NoteTarget, target_id: uuid.UUID, content: str +) -> None: + """Save note content for the given entity, creating the note record if needed. + + Args: + session: Active database session. + target: Entity type β€” one of `event`, `station`, `seismogram`, `snapshot`. + target_id: UUID of the target entity. + content: Markdown note content to save. 
+ """ + attr = getattr(AimbatNote, f"{target}_id") + note = session.exec(select(AimbatNote).where(attr == target_id)).first() + if note is None: + note = AimbatNote(**{f"{target}_id": target_id, "content": content}) + else: + note.content = content + session.add(note) + session.commit() diff --git a/src/aimbat/core/_quality.py b/src/aimbat/core/_quality.py deleted file mode 100644 index d2753ee..0000000 --- a/src/aimbat/core/_quality.py +++ /dev/null @@ -1,610 +0,0 @@ -"""Display views and quality metrics for AIMBAT. - -Provides two layers: - -- **Raw quality data** β€” `SeismogramQualityStats`, `get_quality_*`, - `dump_quality_*`: SQL retrieval, aggregation, and JSON-serialisable export. -- **Structured view data** β€” `FieldSpec`, `FieldGroup`, and - `*_quality_groups` functions: ready-to-render lists of labelled field values - consumed by the TUI, GUI, and CLI display layers. -""" - -import uuid -from dataclasses import dataclass, field -from typing import Any - -import pandas as pd -from pydantic import BaseModel, ConfigDict, Field -from sqlalchemy import func -from sqlalchemy.orm import selectinload -from sqlmodel import Session, col, select - -from aimbat._types import PydanticTimedelta -from aimbat.logger import logger -from aimbat.models import ( - AimbatEvent, - AimbatEventQualityBase, - AimbatEventQualitySnapshot, - AimbatSeismogram, - AimbatSeismogramParametersSnapshot, - AimbatSeismogramQualityBase, - AimbatSeismogramQualitySnapshot, - AimbatSnapshot, -) -from aimbat.utils import mean_and_sem, mean_and_sem_timedelta, rel - -__all__ = [ - "FieldSpec", - "FieldGroup", - "SeismogramQualityStats", - "get_quality_seismogram", - "get_quality_event", - "get_quality_station", - "dump_quality_event", - "dump_quality_station", - "get_seismogram_mccc_map", - "seismogram_quality_groups", - "event_quality_groups", - "station_quality_groups", - "snapshot_quality_groups", -] - - -# --------------------------------------------------------------------------- -# View 
data structures -# --------------------------------------------------------------------------- - - -@dataclass -class FieldSpec: - """A single labelled field value for display. - - The `name` is the canonical key used in JSON and enum lookups. - The `title` is the human-readable label sourced from the model's - `Field(title=...)`. `value` and `sem` are raw Python values; - formatters live in the rendering layer. - """ - - name: str - title: str - value: Any - sem: Any = None - - -@dataclass -class FieldGroup: - """A labelled group of `FieldSpec` instances for display. - - When `fields` is empty the rendering layer should show - `empty_message` if provided. - """ - - title: str - fields: list[FieldSpec] = field(default_factory=list) - empty_message: str | None = None - - -# --------------------------------------------------------------------------- -# Aggregated seismogram quality stats (Pydantic model) -# --------------------------------------------------------------------------- - - -class SeismogramQualityStats(BaseModel): - """Aggregated seismogram quality statistics computed from one or more seismograms. - - All mean fields are None when no seismograms in the group have quality data. - SEM fields are None when fewer than two values are available. 
- """ - - model_config = ConfigDict(frozen=True) - - count: int = Field(title="Count") - cc_mean: float | None = Field(default=None, title="ICCS CC mean") - cc_mean_sem: float | None = Field(default=None, title="ICCS CC mean SEM") - mccc_cc_mean: float | None = Field(default=None, title="MCCC CC mean") - mccc_cc_mean_sem: float | None = Field(default=None, title="MCCC CC mean SEM") - mccc_cc_std: float | None = Field(default=None, title="MCCC CC std") - mccc_cc_std_sem: float | None = Field(default=None, title="MCCC CC std SEM") - mccc_error: PydanticTimedelta | None = Field(default=None, title="MCCC error") - mccc_error_sem: PydanticTimedelta | None = Field( - default=None, title="MCCC error SEM" - ) - - -# --------------------------------------------------------------------------- -# Internal helpers -# --------------------------------------------------------------------------- - - -def _stats_from_quality_snapshots( - records: list[AimbatSeismogramQualitySnapshot], -) -> SeismogramQualityStats: - """Aggregate seismogram quality snapshot records into a `SeismogramQualityStats`.""" - iccs_cc_vals = [r.iccs_cc for r in records if r.iccs_cc is not None] - mccc_cc_mean_vals = [r.mccc_cc_mean for r in records if r.mccc_cc_mean is not None] - mccc_cc_std_vals = [r.mccc_cc_std for r in records if r.mccc_cc_std is not None] - mccc_error_vals = [r.mccc_error for r in records if r.mccc_error is not None] - - cc_mean, cc_mean_sem = mean_and_sem(iccs_cc_vals) - mccc_cc_mean, mccc_cc_mean_sem = mean_and_sem(mccc_cc_mean_vals) - cc_std_mean, cc_std_sem = mean_and_sem(mccc_cc_std_vals) - error_mean, error_sem = mean_and_sem_timedelta(mccc_error_vals) - - return SeismogramQualityStats( - count=len(records), - cc_mean=cc_mean, - cc_mean_sem=cc_mean_sem, - mccc_cc_mean=mccc_cc_mean, - mccc_cc_mean_sem=mccc_cc_mean_sem, - mccc_cc_std=cc_std_mean, - mccc_cc_std_sem=cc_std_sem, - mccc_error=error_mean, - mccc_error_sem=error_sem, - ) - - -def _stats_dump(stats: 
SeismogramQualityStats, prefix: str = "") -> dict[str, Any]: - """Serialise SeismogramQualityStats to a flat JSON-serialisable dict. - - Timedelta values are serialised as total seconds (float) via PydanticTimedelta. - An optional prefix is prepended to every key. - """ - raw = stats.model_dump(mode="json") - if not prefix: - return raw - return {f"{prefix}{k}": v for k, v in raw.items()} - - -def _specs_from_model( - obj: BaseModel, - fields_from: type[BaseModel], -) -> list[FieldSpec]: - """Build a `FieldSpec` list from a model instance. - - Iterates `fields_from.model_fields` so that id and foreign-key columns - added by table subclasses are excluded. Fields whose names end in `_sem` - are treated as SEM companions and paired with their parent field. - """ - fields = fields_from.model_fields - specs = [] - for name, field_info in fields.items(): - if name.endswith("_sem"): - continue - val = getattr(obj, name) - sem_name = f"{name}_sem" - sem = getattr(obj, sem_name, None) if sem_name in fields else None - specs.append( - FieldSpec( - name=name, - title=field_info.title or name, - value=val, - sem=sem, - ) - ) - return specs - - -def _latest_snapshot_seis_quality( - session: Session, event_id: uuid.UUID -) -> list[AimbatSeismogramQualitySnapshot]: - """Return seismogram quality records from the most recent snapshot with MCCC data.""" - stmt = ( - select(AimbatSnapshot) - .join( - AimbatEventQualitySnapshot, - col(AimbatEventQualitySnapshot.snapshot_id) == col(AimbatSnapshot.id), - ) - .where(col(AimbatSnapshot.event_id) == event_id) - .order_by(col(AimbatSnapshot.time).desc()) - .limit(1) - ) - snapshot = session.exec(stmt).first() - if snapshot is None: - return [] - return snapshot.seismogram_quality_snapshots - - -# --------------------------------------------------------------------------- -# Per-seismogram MCCC display map -# --------------------------------------------------------------------------- - - -def get_seismogram_mccc_map( - event: AimbatEvent, 
-) -> dict[uuid.UUID, tuple[pd.Timedelta | None, float, float | None]]: - """Return per-seismogram MCCC quality values for display from the live quality table. - - Reads directly from the `AimbatSeismogramQuality` live records for the - event's seismograms. Only seismograms with a non-None `mccc_cc_mean` are - included (i.e. those for which MCCC has been run). - - Must be called within an active SQLModel session so that ORM relationships - on `event` can lazy-load. - - Warning: - This function can cause an N+1 query issue. It iterates over - `event.seismograms` and accesses `seis.quality`, which may trigger - lazy loading. To avoid performance problems, the `AimbatEvent` object - passed to this function should be queried with `selectinload` for the - `seismograms` and their nested `quality` relationships. - - Args: - event: Default AimbatEvent. - - Returns: - Mapping from seismogram ID to `(mccc_error, mccc_cc_mean, mccc_cc_std)`. - Empty when MCCC has not been run. - """ - result: dict[uuid.UUID, tuple[pd.Timedelta | None, float, float | None]] = {} - for seis in event.seismograms: - sq = seis.quality - if sq is not None and sq.mccc_cc_mean is not None: - result[seis.id] = (sq.mccc_error, sq.mccc_cc_mean, sq.mccc_cc_std) - return result - - -# --------------------------------------------------------------------------- -# Raw quality retrieval -# --------------------------------------------------------------------------- - - -def get_quality_seismogram( - session: Session, seismogram_id: uuid.UUID -) -> AimbatSeismogramQualitySnapshot | None: - """Get the quality snapshot for a seismogram from the most recent MCCC run. - - Returns the seismogram's quality record from the most recent snapshot that - has event-level quality data. Returns None if no MCCC has been run, if - the seismogram has no live quality record, or if the most recent MCCC run - excluded this seismogram. - - Args: - session: Database session. - seismogram_id: Seismogram UUID. 
- - Returns: - The `AimbatSeismogramQualitySnapshot` from the most recent MCCC snapshot - that includes this seismogram, or None. - """ - logger.debug(f"Getting quality for seismogram {seismogram_id}.") - - seismogram = session.get(AimbatSeismogram, seismogram_id) - if seismogram is None: - return None - if seismogram.quality is None: - return None - quality_id = seismogram.quality.id - - snap_stmt = ( - select(AimbatSnapshot) - .join( - AimbatEventQualitySnapshot, - col(AimbatEventQualitySnapshot.snapshot_id) == col(AimbatSnapshot.id), - ) - .where(col(AimbatSnapshot.event_id) == seismogram.event_id) - .order_by(col(AimbatSnapshot.time).desc()) - .limit(1) - ) - latest = session.exec(snap_stmt).first() - if latest is None: - return None - - stmt = select(AimbatSeismogramQualitySnapshot).where( - col(AimbatSeismogramQualitySnapshot.snapshot_id) == latest.id, - col(AimbatSeismogramQualitySnapshot.seismogram_quality_id) == quality_id, - ) - return session.exec(stmt).first() - - -def get_quality_event( - session: Session, event_id: uuid.UUID -) -> tuple[AimbatEventQualitySnapshot | None, SeismogramQualityStats]: - """Get MCCC quality metrics for an event from the most recent snapshot. - - Returns the event-level quality record together with aggregated seismogram - quality statistics across all seismograms included in that MCCC run. - - Args: - session: Database session. - event_id: Event UUID. - - Returns: - A tuple of `(event_quality_snapshot, stats)`. - `event_quality_snapshot` is None if no MCCC has been run yet. 
- """ - logger.debug(f"Getting quality for event {event_id}.") - - stmt = ( - select(AimbatSnapshot) - .join( - AimbatEventQualitySnapshot, - col(AimbatEventQualitySnapshot.snapshot_id) == col(AimbatSnapshot.id), - ) - .where(col(AimbatSnapshot.event_id) == event_id) - .order_by(col(AimbatSnapshot.time).desc()) - .limit(1) - ) - latest = session.exec(stmt).first() - - if latest is None: - return None, SeismogramQualityStats(count=0) - - event_quality = latest.event_quality_snapshot - stats = _stats_from_quality_snapshots(latest.seismogram_quality_snapshots) - return event_quality, stats - - -def get_quality_station( - session: Session, station_id: uuid.UUID -) -> tuple[SeismogramQualityStats, SeismogramQualityStats]: - """Get aggregated MCCC quality metrics for a station from the most recent snapshots. - - Args: - session: Database session. - station_id: Station UUID. - - Returns: - A tuple of `(all_stats, selected_stats)`. - """ - logger.debug(f"Getting quality for station {station_id}.") - - # 1. Get all event IDs for the station - stmt = ( - select(AimbatSeismogram.event_id) - .where(col(AimbatSeismogram.station_id) == station_id) - .distinct() - ) - event_ids = session.exec(stmt).all() - - if not event_ids: - return SeismogramQualityStats(count=0), SeismogramQualityStats(count=0) - - # 2. Get the latest snapshot for each of these events that has quality data. 
- # Using a subquery to get the max time for each event_id - subq = ( - select( - AimbatSnapshot.event_id, - func.max(AimbatSnapshot.time).label("max_time"), - ) - .join(AimbatEventQualitySnapshot) - .where(col(AimbatSnapshot.event_id).in_(event_ids)) - .group_by(col(AimbatSnapshot.event_id)) - .subquery() - ) - - # Now join the snapshot table with the subquery to get the latest snapshots - snap_stmt = ( - select(AimbatSnapshot) - .join( - subq, - (col(AimbatSnapshot.event_id) == subq.c.event_id) - & (col(AimbatSnapshot.time) == subq.c.max_time), - ) - .options( - selectinload(rel(AimbatSnapshot.event)).selectinload( - rel(AimbatEvent.seismograms) - ), - selectinload( - rel(AimbatSnapshot.seismogram_parameters_snapshots) - ).selectinload(rel(AimbatSeismogramParametersSnapshot.parameters)), - selectinload(rel(AimbatSnapshot.seismogram_quality_snapshots)).selectinload( - rel(AimbatSeismogramQualitySnapshot.quality) - ), - ) - ) - - snaps = session.exec(snap_stmt).all() - - all_records: list[AimbatSeismogramQualitySnapshot] = [] - selected_records: list[AimbatSeismogramQualitySnapshot] = [] - - for snap in snaps: - # Seismograms at this station in this snapshot. - station_seis_ids = { - seis.id for seis in snap.event.seismograms if seis.station_id == station_id - } - select_map = { - sp.parameters.seismogram_id: sp.select - for sp in snap.seismogram_parameters_snapshots - } - - for sq in snap.seismogram_quality_snapshots: - seis_id = sq.quality.seismogram_id - if seis_id in station_seis_ids: - all_records.append(sq) - if select_map.get(seis_id, False): - selected_records.append(sq) - - return _stats_from_quality_snapshots(all_records), _stats_from_quality_snapshots( - selected_records - ) - - -def dump_quality_event(session: Session, event_id: uuid.UUID) -> dict[str, Any]: - """Return event MCCC quality as a JSON-serialisable dict. - - Reads from the most recent snapshot that has quality data. Returns null - values for all fields when no MCCC has been run. 
- - Args: - session: Database session. - event_id: Event UUID. - - Returns: - Flat dict with event quality and seismogram aggregate statistics. - Timedelta values are serialised as total seconds (float). - """ - event_quality, stats = get_quality_event(session, event_id) - - if event_quality is not None: - result: dict[str, Any] = event_quality.model_dump(mode="json") - else: - result = { - "event_id": str(event_id), - **{k: None for k in AimbatEventQualityBase.model_fields}, - } - - result.update(_stats_dump(stats)) - return result - - -def dump_quality_station(session: Session, station_id: uuid.UUID) -> dict[str, Any]: - """Return station quality as a JSON-serialisable dict. - - Aggregates seismogram quality across all events recorded at the station, - with means and SEMs for all and selected seismograms. - - Args: - session: Database session. - station_id: Station UUID. - - Returns: - Flat dict with seismogram aggregate statistics. - Timedelta values are serialised as total seconds (float). - """ - all_stats, selected_stats = get_quality_station(session, station_id) - result: dict[str, Any] = {"station_id": str(station_id)} - result.update(_stats_dump(all_stats)) - result.update(_stats_dump(selected_stats, prefix="selected_")) - return result - - -# --------------------------------------------------------------------------- -# View functions -# --------------------------------------------------------------------------- - - -def seismogram_quality_groups( - session: Session, seismogram_id: uuid.UUID -) -> list[FieldGroup]: - """Return quality view data for a single seismogram. - - Args: - session: Database session. - seismogram_id: Seismogram UUID. - - Returns: - A single-element list containing one `FieldGroup` with per-seismogram - quality fields, or an empty group with a message if no quality data - exists yet. 
- """ - quality = get_quality_seismogram(session, seismogram_id) - if quality is None: - return [ - FieldGroup( - title="", - empty_message="No quality data β€” run MCCC first", - ) - ] - return [ - FieldGroup( - title="", - fields=_specs_from_model(quality, AimbatSeismogramQualityBase), - ) - ] - - -def event_quality_groups(session: Session, event_id: uuid.UUID) -> list[FieldGroup]: - """Return MCCC quality view data for an event. - - Args: - session: Database session. - event_id: Event UUID. - - Returns: - Two `FieldGroup` instances: event-level statistics and - averages across the seismograms used in the inversion. - """ - event_quality, stats = get_quality_event(session, event_id) - - event_group = FieldGroup(title="Event statistics") - if event_quality is not None: - event_group.fields = _specs_from_model(event_quality, AimbatEventQualityBase) - else: - event_group.empty_message = "No event quality data β€” run MCCC first" - - return [ - event_group, - FieldGroup( - title=f"Averages across {stats.count} seismograms", - fields=_specs_from_model(stats, SeismogramQualityStats), - ), - ] - - -def station_quality_groups(session: Session, station_id: uuid.UUID) -> list[FieldGroup]: - """Return quality view data for a station. - - Args: - session: Database session. - station_id: Station UUID. - - Returns: - Two `FieldGroup` instances: averages across selected seismograms - and averages across all seismograms. - """ - all_stats, selected_stats = get_quality_station(session, station_id) - return [ - FieldGroup( - title=f"Averages across {selected_stats.count} selected seismograms", - fields=_specs_from_model(selected_stats, SeismogramQualityStats), - ), - FieldGroup( - title=f"Averages across {all_stats.count} seismograms", - fields=_specs_from_model(all_stats, SeismogramQualityStats), - ), - ] - - -def snapshot_quality_groups( - session: Session, snapshot_id: uuid.UUID -) -> list[FieldGroup]: - """Return MCCC quality view data for a snapshot. 
- - The number of groups depends on whether MCCC was run on all seismograms or - only the selected ones, which is inferred from whether any non-selected - seismogram has MCCC data in the snapshot. - - Args: - session: Database session. - snapshot_id: Snapshot UUID. - - Returns: - Two `FieldGroup` instances: event-level MCCC statistics and - per-seismogram averages (scoped to selected or all seismograms - depending on how MCCC was run). Returns a single empty group - when no quality was captured. - - Raises: - ValueError: If no snapshot with the given ID is found. - """ - snapshot = session.get(AimbatSnapshot, snapshot_id) - if snapshot is None: - raise ValueError(f"Snapshot {snapshot_id} not found.") - - if snapshot.event_quality_snapshot is None: - return [ - FieldGroup( - title="", - empty_message="No quality data β€” run MCCC then create a snapshot", - ) - ] - - eq = snapshot.event_quality_snapshot - - event_specs = _specs_from_model(eq, AimbatEventQualityBase) - - all_sq = [ - sq - for sq in snapshot.seismogram_quality_snapshots - if sq.mccc_cc_mean is not None - ] - stats = _stats_from_quality_snapshots(all_sq) - - return [ - FieldGroup(title="Event statistics", fields=event_specs), - FieldGroup( - title="Averages across seismograms", - fields=_specs_from_model(stats, SeismogramQualityStats), - ), - ] diff --git a/src/aimbat/core/_snapshot.py b/src/aimbat/core/_snapshot.py index bbf7743..04f1aa3 100644 --- a/src/aimbat/core/_snapshot.py +++ b/src/aimbat/core/_snapshot.py @@ -21,6 +21,7 @@ AimbatSeismogramQualitySnapshot, AimbatSnapshot, AimbatSnapshotRead, + SeismogramQualityStats, ) from aimbat.models._parameters import ( AimbatEventParametersBase, @@ -39,7 +40,9 @@ "sync_from_matching_hash", "delete_snapshot", "get_snapshots", + "get_snapshot_quality", "dump_snapshot_table", + "dump_snapshot_quality_table", "dump_event_parameter_snapshot_table", "dump_seismogram_parameter_snapshot_table", "dump_event_quality_snapshot_table", @@ -467,6 +470,81 @@ def 
dump_snapshot_table( return snapshot_dicts +def get_snapshot_quality(session: Session, snapshot_id: UUID) -> SeismogramQualityStats: + """Get aggregated quality statistics for a snapshot. + + Args: + session: Database session. + snapshot_id: UUID of the snapshot. + + Returns: + Aggregated seismogram quality statistics from the frozen snapshot records. + + Raises: + NoResultFound: If no snapshot with the given ID is found. + """ + logger.debug(f"Getting quality stats for snapshot {snapshot_id}.") + + snapshot = session.exec( + select(AimbatSnapshot) + .where(AimbatSnapshot.id == snapshot_id) + .options( + selectinload(rel(AimbatSnapshot.seismogram_quality_snapshots)), + selectinload(rel(AimbatSnapshot.event_quality_snapshot)), + ) + ).one_or_none() + + if snapshot is None: + raise NoResultFound(f"No AimbatSnapshot found with id: {snapshot_id}.") + + return SeismogramQualityStats.from_snapshot(snapshot) + + +def dump_snapshot_quality_table( + session: Session, + by_alias: bool = False, + by_title: bool = False, + exclude: set[str] | None = None, + event_id: UUID | None = None, +) -> list[dict[str, Any]]: + """Dump snapshot quality statistics to json. + + Args: + session: Database session. + by_alias: Whether to use serialization aliases for the field names. + by_title: Whether to use the field title metadata for the field names. + Mutually exclusive with by_alias. + exclude: Set of field names to exclude from the output. + event_id: Event ID to filter snapshots by (if none is provided, quality + for all snapshots is dumped). + + Raises: + ValueError: If both `by_alias` and `by_title` are True. 
+ """ + + logger.debug("Dumping AIMBAT snapshot quality table to json.") + + if by_alias and by_title: + raise ValueError("Arguments 'by_alias' and 'by_title' are mutually exclusive.") + + exclude = (exclude or set()) | {"station_id"} + exclude: dict[str, set] = {"__all__": exclude} # type: ignore[no-redef] + + snapshots = get_snapshots(session, event_id) + stats = [SeismogramQualityStats.from_snapshot(s) for s in snapshots] + + adapter: TypeAdapter[Sequence[SeismogramQualityStats]] = TypeAdapter( + Sequence[SeismogramQualityStats] + ) + data = adapter.dump_python(stats, mode="json", exclude=exclude, by_alias=by_alias) + + if by_title: + title_map = get_title_map(SeismogramQualityStats) + return [{title_map.get(k, k): v for k, v in row.items()} for row in data] + + return data + + def dump_event_parameter_snapshot_table( session: Session, event_id: UUID | None = None, diff --git a/src/aimbat/core/_station.py b/src/aimbat/core/_station.py index a021a39..6ed23cc 100644 --- a/src/aimbat/core/_station.py +++ b/src/aimbat/core/_station.py @@ -1,3 +1,5 @@ +"""Functions for managing and querying stations in AIMBAT.""" + from collections.abc import Sequence from typing import Any, Literal, overload from uuid import UUID @@ -14,6 +16,7 @@ AimbatSeismogramQuality, AimbatStation, AimbatStationRead, + SeismogramQualityStats, ) from aimbat.utils import get_title_map, rel @@ -21,7 +24,9 @@ "delete_station", "get_stations_in_event", "get_station_iccs_ccs", + "get_station_quality", "dump_station_table", + "dump_station_quality_table", ] @@ -124,6 +129,37 @@ def get_station_iccs_ccs( return tuple(session.exec(statement).all()) +def get_station_quality(session: Session, station_id: UUID) -> SeismogramQualityStats: + """Get aggregated quality statistics for a station. + + Args: + session: Database session. + station_id: UUID of the station. + + Returns: + Aggregated seismogram quality statistics. + + Raises: + NoResultFound: If no station with the given ID is found. 
+ """ + logger.debug(f"Getting quality stats for station {station_id}.") + + station = session.exec( + select(AimbatStation) + .where(AimbatStation.id == station_id) + .options( + selectinload(rel(AimbatStation.seismograms)).selectinload( + rel(AimbatSeismogram.quality) + ), + ) + ).one_or_none() + + if station is None: + raise NoResultFound(f"No AimbatStation found with id: {station_id}.") + + return SeismogramQualityStats.from_station(station) + + def dump_station_table( session: Session, from_read_model: bool = False, @@ -133,6 +169,7 @@ def dump_station_table( event_id: UUID | None = None, ) -> list[dict[str, Any]]: """Create a JSON serialisable dict from the AimbatStation table data. + Args: session: Database session. from_read_model: Whether to dump from the read model (True) or the ORM model. @@ -208,3 +245,56 @@ def dump_station_table( return adapter.dump_python( stations, mode="json", by_alias=by_alias, exclude=exclude ) + + +def dump_station_quality_table( + session: Session, + by_alias: bool = False, + by_title: bool = False, + exclude: set[str] | None = None, + station_id: UUID | None = None, +) -> list[dict[str, Any]]: + """Dump station quality statistics to json. + + Args: + session: Database session. + by_alias: Whether to use serialization aliases for the field names. + by_title: Whether to use the field title metadata for the field names. + Mutually exclusive with by_alias. + exclude: Set of field names to exclude from the output. + station_id: Station ID to filter by (if none is provided, quality for + all stations is dumped). + + Raises: + ValueError: If both `by_alias` and `by_title` are True. 
+ """ + + logger.debug("Dumping AIMBAT station quality table to json.") + + if by_alias and by_title: + raise ValueError("Arguments 'by_alias' and 'by_title' are mutually exclusive.") + + exclude = (exclude or set()) | {"event_id", "snapshot_id"} + exclude: dict[str, set] = {"__all__": exclude} # type: ignore[no-redef] + + statement = select(AimbatStation).options( + selectinload(rel(AimbatStation.seismograms)).selectinload( + rel(AimbatSeismogram.quality) + ), + ) + if station_id is not None: + statement = statement.where(AimbatStation.id == station_id) + + stations = session.exec(statement).all() + stats = [SeismogramQualityStats.from_station(s) for s in stations] + + adapter: TypeAdapter[Sequence[SeismogramQualityStats]] = TypeAdapter( + Sequence[SeismogramQualityStats] + ) + data = adapter.dump_python(stats, mode="json", exclude=exclude, by_alias=by_alias) + + if by_title: + title_map = get_title_map(SeismogramQualityStats) + return [{title_map.get(k, k): v for k, v in row.items()} for row in data] + + return data diff --git a/src/aimbat/models/_models.py b/src/aimbat/models/_models.py index bfe3a52..fb37ac1 100644 --- a/src/aimbat/models/_models.py +++ b/src/aimbat/models/_models.py @@ -9,9 +9,9 @@ import numpy as np import numpy.typing as npt from pandas import Timestamp -from pydantic import computed_field +from pydantic import computed_field, model_validator from pydantic.alias_generators import to_camel -from sqlalchemy import Column, PickleType, func +from sqlalchemy import CheckConstraint, Column, PickleType, func from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.orm import column_property from sqlmodel import Field, Relationship, SQLModel, col, select @@ -35,6 +35,7 @@ __all__ = [ "AimbatTypes", "AimbatDataSource", + "AimbatNote", "AimbatStation", "AimbatEvent", "AimbatEventParameters", @@ -425,7 +426,13 @@ class AimbatSeismogram(SQLModel, table=True): populate_by_name=True, ) - id: uuid.UUID = Field(default_factory=uuid.uuid4, 
primary_key=True) + id: uuid.UUID = Field( + default_factory=uuid.uuid4, + primary_key=True, + title="ID", + description="Unique seismogram ID.", + schema_extra={"rich": RichColSpec(style="yellow", highlight=False)}, + ) begin_time: PydanticTimestamp = Field( sa_type=SAPandasTimestamp, title="Begin time", @@ -546,21 +553,49 @@ def data(self, value: npt.NDArray[np.float64]) -> None: class AimbatStation(SQLModel, table=True): - """Class to store station information.""" + """Recording station with network, location, and channel metadata.""" model_config = SQLModelConfig( alias_generator=to_camel, populate_by_name=True, ) - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - name: str = Field(allow_mutation=False) - network: str = Field(allow_mutation=False) - location: str = Field(allow_mutation=False) - channel: str = Field(allow_mutation=False) - latitude: float - longitude: float - elevation: float | None = None + id: uuid.UUID = Field( + default_factory=uuid.uuid4, + primary_key=True, + title="ID", + description="Unique station ID.", + schema_extra={"rich": RichColSpec(style="yellow", highlight=False)}, + ) + name: str = Field( + allow_mutation=False, + title="Name", + description="Station name (SEED station code).", + ) + network: str = Field( + allow_mutation=False, + title="Network", + description="Network code.", + ) + location: str = Field( + allow_mutation=False, + title="Location", + description="Location code.", + ) + channel: str = Field( + allow_mutation=False, + title="Channel", + description="Channel code (e.g. BHZ).", + ) + latitude: float = Field( + title="Latitude", description="Station latitude in degrees." + ) + longitude: float = Field( + title="Longitude", description="Station longitude in degrees." + ) + elevation: float | None = Field( + default=None, title="Elevation", description="Station elevation in metres." 
+ ) seismograms: list[AimbatSeismogram] = Relationship( back_populates="station", cascade_delete=True ) @@ -573,22 +608,39 @@ class AimbatStation(SQLModel, table=True): class AimbatEvent(SQLModel, table=True): - """Class to store seismic event information.""" + """Seismic event with origin time, location, and depth.""" model_config = SQLModelConfig( alias_generator=to_camel, populate_by_name=True, ) - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + id: uuid.UUID = Field( + default_factory=uuid.uuid4, + primary_key=True, + title="ID", + description="Unique event ID.", + schema_extra={"rich": RichColSpec(style="yellow", highlight=False)}, + ) time: PydanticTimestamp = Field( - unique=True, sa_type=SAPandasTimestamp, allow_mutation=False + unique=True, + sa_type=SAPandasTimestamp, + allow_mutation=False, + title="Time", + description="Event origin time (UTC).", + ) + latitude: float = Field(title="Latitude", description="Event latitude in degrees.") + longitude: float = Field( + title="Longitude", description="Event longitude in degrees." + ) + depth: float | None = Field( + default=None, title="Depth", description="Event depth in metres." ) - latitude: float - longitude: float - depth: float | None = None last_modified: PydanticTimestamp | None = Field( - default=None, sa_type=SAPandasTimestamp + default=None, + sa_type=SAPandasTimestamp, + title="Last modified", + description="Timestamp of the last parameter modification.", ) seismograms: list[AimbatSeismogram] = Relationship( back_populates="event", cascade_delete=True @@ -694,8 +746,84 @@ class AimbatEvent(SQLModel, table=True): "Number of seismogram parameter snapshots associated with this snapshot that are marked as flipped." +class AimbatNote(SQLModel, table=True): + """Free-text Markdown note attached to an event, station, seismogram, or snapshot. + + At most one of the four FK fields is set per row. 
Deletion of the parent + record cascades to delete the note via the DB-level foreign key constraint. + """ + + model_config = SQLModelConfig( + alias_generator=to_camel, + populate_by_name=True, + ) + + __table_args__ = ( + CheckConstraint( + "(CASE WHEN event_id IS NOT NULL THEN 1 ELSE 0 END" + " + CASE WHEN station_id IS NOT NULL THEN 1 ELSE 0 END" + " + CASE WHEN seismogram_id IS NOT NULL THEN 1 ELSE 0 END" + " + CASE WHEN snapshot_id IS NOT NULL THEN 1 ELSE 0 END) <= 1", + name="aimbat_note_at_most_one_parent", + ), + ) + + id: uuid.UUID = Field( + default_factory=uuid.uuid4, + primary_key=True, + description="Unique note ID.", + ) + content: str = Field( + default="", + description="Note content in Markdown format.", + ) + event_id: uuid.UUID | None = Field( + default=None, + foreign_key="aimbatevent.id", + ondelete="CASCADE", + description="Foreign key referencing the parent event.", + ) + station_id: uuid.UUID | None = Field( + default=None, + foreign_key="aimbatstation.id", + ondelete="CASCADE", + description="Foreign key referencing the parent station.", + ) + seismogram_id: uuid.UUID | None = Field( + default=None, + foreign_key="aimbatseismogram.id", + ondelete="CASCADE", + description="Foreign key referencing the parent seismogram.", + ) + snapshot_id: uuid.UUID | None = Field( + default=None, + foreign_key="aimbatsnapshot.id", + ondelete="CASCADE", + description="Foreign key referencing the parent snapshot.", + ) + + @model_validator(mode="after") + def _at_most_one_parent(self) -> "AimbatNote": + set_count = sum( + fk is not None + for fk in ( + self.event_id, + self.station_id, + self.seismogram_id, + self.snapshot_id, + ) + ) + if set_count > 1: + raise ValueError( + "At most one of event_id, station_id, seismogram_id, snapshot_id" + " may be set on AimbatNote." 
+ ) + return self + + type AimbatTypes = ( AimbatDataSource + | AimbatNote | AimbatStation | AimbatEvent | AimbatEventParameters diff --git a/src/aimbat/models/_readers.py b/src/aimbat/models/_readers.py index 582c1f7..aca0566 100644 --- a/src/aimbat/models/_readers.py +++ b/src/aimbat/models/_readers.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING, Self from uuid import UUID @@ -5,7 +7,8 @@ from pydantic.alias_generators import to_camel from aimbat._types import PydanticTimedelta, PydanticTimestamp -from aimbat.utils import mean_and_sem +from aimbat.logger import logger +from aimbat.utils import mean_and_sem, mean_and_sem_timedelta from aimbat.utils.formatters import fmt_depth_km, fmt_flip from ._format import RichColSpec, TuiColSpec @@ -18,11 +21,210 @@ __all__ = [ "AimbatEventRead", "AimbatSeismogramRead", + "SeismogramQualityStats", "AimbatSnapshotRead", "AimbatStationRead", ] +class SeismogramQualityStats(BaseModel): + """Aggregated seismogram quality statistics for an event or station. + + Built from live quality records. All mean fields are `None` when no + seismograms in the group have quality data. SEM fields are `None` when + fewer than two values are available. `mccc_rmse` is only populated by + `from_event` and `from_snapshot`; it is always `None` for `from_station`. + `event_id` is populated by `from_event` and `from_snapshot`; it is always + `None` for `from_station`. 
+ """ + + model_config = ConfigDict(frozen=True) + + event_id: UUID | None = Field( + default=None, + title="Event ID", + json_schema_extra={ + "rich": RichColSpec(style="magenta", no_wrap=True, highlight=False), # type: ignore[dict-item] + }, + ) + snapshot_id: UUID | None = Field( + default=None, + title="Snapshot ID", + json_schema_extra={ + "rich": RichColSpec(style="magenta", no_wrap=True, highlight=False), # type: ignore[dict-item] + }, + ) + station_id: UUID | None = Field( + default=None, + title="Station ID", + json_schema_extra={ + "rich": RichColSpec(style="magenta", no_wrap=True, highlight=False), # type: ignore[dict-item] + }, + ) + count: int = Field(title="Count") + cc_mean: float | None = Field(default=None, title="ICCS CC mean") + cc_mean_sem: float | None = Field(default=None, title="ICCS CC mean SEM") + mccc_cc_mean: float | None = Field(default=None, title="MCCC CC mean") + mccc_cc_mean_sem: float | None = Field(default=None, title="MCCC CC mean SEM") + mccc_cc_std: float | None = Field(default=None, title="MCCC CC std") + mccc_cc_std_sem: float | None = Field(default=None, title="MCCC CC std SEM") + mccc_error: PydanticTimedelta | None = Field(default=None, title="MCCC error") + mccc_error_sem: PydanticTimedelta | None = Field( + default=None, title="MCCC error SEM" + ) + mccc_rmse: PydanticTimedelta | None = Field(default=None, title="MCCC RMSE") + + @classmethod + def from_event(cls, event: AimbatEvent) -> Self: + """Build quality stats from live quality records for an event. + + Aggregates `iccs_cc` and MCCC metrics across all seismograms that + have live quality records. `mccc_rmse` is taken from the event-level + quality record. + + Warning: + This method may trigger lazy-loading of `event.seismograms` and + each `seis.quality` relationship. For performance, query `event` + with `selectinload` for `seismograms` and their nested `quality` + relationships before calling. 
+ + Args: + event: The event whose seismograms' live quality to aggregate. + + Returns: + Aggregated quality statistics. + """ + logger.debug(f"Building quality stats for event {event.id}.") + qualities = [ + seis.quality for seis in event.seismograms if seis.quality is not None + ] + + cc_mean, cc_mean_sem = mean_and_sem( + [q.iccs_cc for q in qualities if q.iccs_cc is not None] + ) + mccc_cc_mean, mccc_cc_mean_sem = mean_and_sem( + [q.mccc_cc_mean for q in qualities if q.mccc_cc_mean is not None] + ) + mccc_cc_std, mccc_cc_std_sem = mean_and_sem( + [q.mccc_cc_std for q in qualities if q.mccc_cc_std is not None] + ) + mccc_error, mccc_error_sem = mean_and_sem_timedelta( + [q.mccc_error for q in qualities if q.mccc_error is not None] + ) + mccc_rmse = event.quality.mccc_rmse if event.quality is not None else None + + return cls( + event_id=event.id, + count=len(event.seismograms), + cc_mean=cc_mean, + cc_mean_sem=cc_mean_sem, + mccc_cc_mean=mccc_cc_mean, + mccc_cc_mean_sem=mccc_cc_mean_sem, + mccc_cc_std=mccc_cc_std, + mccc_cc_std_sem=mccc_cc_std_sem, + mccc_error=mccc_error, + mccc_error_sem=mccc_error_sem, + mccc_rmse=mccc_rmse, + ) + + @classmethod + def from_station(cls, station: AimbatStation) -> Self: + """Build quality stats from live quality records for a station. + + Aggregates `iccs_cc` and MCCC metrics across all seismograms at the + station that have live quality records. + + Warning: + This method may trigger lazy-loading of `station.seismograms` and + each `seis.quality` relationship. For performance, query `station` + with `selectinload` for `seismograms` and their nested `quality` + relationships before calling. + + Args: + station: The station whose seismograms' live quality to aggregate. + + Returns: + Aggregated quality statistics. `mccc_rmse` is always `None`. 
+ """ + logger.debug(f"Building quality stats for station {station.id}.") + qualities = [ + seis.quality for seis in station.seismograms if seis.quality is not None + ] + + cc_mean, cc_mean_sem = mean_and_sem( + [q.iccs_cc for q in qualities if q.iccs_cc is not None] + ) + mccc_cc_mean, mccc_cc_mean_sem = mean_and_sem( + [q.mccc_cc_mean for q in qualities if q.mccc_cc_mean is not None] + ) + mccc_cc_std, mccc_cc_std_sem = mean_and_sem( + [q.mccc_cc_std for q in qualities if q.mccc_cc_std is not None] + ) + mccc_error, mccc_error_sem = mean_and_sem_timedelta( + [q.mccc_error for q in qualities if q.mccc_error is not None] + ) + + return cls( + station_id=station.id, + count=len(station.seismograms), + cc_mean=cc_mean, + cc_mean_sem=cc_mean_sem, + mccc_cc_mean=mccc_cc_mean, + mccc_cc_mean_sem=mccc_cc_mean_sem, + mccc_cc_std=mccc_cc_std, + mccc_cc_std_sem=mccc_cc_std_sem, + mccc_error=mccc_error, + mccc_error_sem=mccc_error_sem, + mccc_rmse=None, + ) + + @classmethod + def from_snapshot(cls, snapshot: AimbatSnapshot) -> Self: + """Build quality stats from the frozen quality records in a snapshot. + + Aggregates from `AimbatSeismogramQualitySnapshot` records rather than + live quality, so the result reflects the state at snapshot time. + + Args: + snapshot: The snapshot to aggregate quality from. + + Returns: + Aggregated quality statistics. 
+ """ + logger.debug(f"Building quality stats for snapshot {snapshot.id}.") + records = snapshot.seismogram_quality_snapshots + + cc_mean, cc_mean_sem = mean_and_sem( + [r.iccs_cc for r in records if r.iccs_cc is not None] + ) + mccc_cc_mean, mccc_cc_mean_sem = mean_and_sem( + [r.mccc_cc_mean for r in records if r.mccc_cc_mean is not None] + ) + mccc_cc_std, mccc_cc_std_sem = mean_and_sem( + [r.mccc_cc_std for r in records if r.mccc_cc_std is not None] + ) + mccc_error, mccc_error_sem = mean_and_sem_timedelta( + [r.mccc_error for r in records if r.mccc_error is not None] + ) + eq = snapshot.event_quality_snapshot + mccc_rmse = eq.mccc_rmse if eq is not None else None + + return cls( + event_id=snapshot.event_id, + snapshot_id=snapshot.id, + count=snapshot.seismogram_count, + cc_mean=cc_mean, + cc_mean_sem=cc_mean_sem, + mccc_cc_mean=mccc_cc_mean, + mccc_cc_mean_sem=mccc_cc_mean_sem, + mccc_cc_std=mccc_cc_std, + mccc_cc_std_sem=mccc_cc_std_sem, + mccc_error=mccc_error, + mccc_error_sem=mccc_error_sem, + mccc_rmse=mccc_rmse, + ) + + class AimbatEventRead(BaseModel): """Read model for AimbatEvent including computed counts.""" @@ -177,8 +379,8 @@ class AimbatStationRead(BaseModel): @classmethod def from_station( cls, - station: "AimbatStation", - session: "Session | None" = None, + station: AimbatStation, + session: Session | None = None, ) -> Self: data = station.model_dump() @@ -296,7 +498,7 @@ class AimbatSeismogramRead(BaseModel): @classmethod def from_seismogram( - cls, seismogram: "AimbatSeismogram", session: "Session | None" = None + cls, seismogram: AimbatSeismogram, session: Session | None = None ) -> Self: name = (f"{seismogram.station.network}." 
or "") + seismogram.station.name @@ -413,7 +615,7 @@ class AimbatSnapshotRead(BaseModel): @classmethod def from_snapshot( - cls, snapshot: "AimbatSnapshot", session: "Session | None" = None + cls, snapshot: AimbatSnapshot, session: Session | None = None ) -> Self: """Create an AimbatSnapshotRead from an AimbatSnapshot ORM instance.""" diff --git a/tests/functional/test_tui.py b/tests/functional/test_tui.py index 47d0a11..1cd86bd 100644 --- a/tests/functional/test_tui.py +++ b/tests/functional/test_tui.py @@ -1,9 +1,13 @@ """Functional tests for the AIMBAT Terminal User Interface. Each test runs the Textual app in headless mode via ``App.run_test()``. -Because ``aimbat._tui.app`` imports ``engine`` at module level, both -``aimbat.db.engine`` and ``aimbat._tui.app.engine`` must be monkeypatched -to the test fixture's database. +All TUI sub-modules that import ``engine`` at module level must be +monkeypatched to the test fixture's database: + +- ``aimbat.db.engine`` β€” the canonical engine attribute +- ``aimbat._tui.app.engine`` β€” top-level import in the app module +- ``aimbat._tui.modals.engine`` β€” top-level import in the modals module +- ``aimbat._tui._widgets.engine`` β€” top-level import in the widgets module """ import asyncio @@ -14,7 +18,9 @@ from sqlmodel import Session, select from textual.widgets import DataTable, Static, TabbedContent, TabPane +import aimbat._tui._widgets import aimbat._tui.app +import aimbat._tui.modals import aimbat.db from aimbat._tui.app import AimbatTUI from aimbat.models import AimbatEvent @@ -28,9 +34,11 @@ def _patch_engine(monkeypatch: pytest.MonkeyPatch, engine: Engine) -> None: - """Patch the engine in both the db module and the TUI app module.""" + """Patch the engine in all TUI modules that import it at module level.""" monkeypatch.setattr(aimbat.db, "engine", engine) monkeypatch.setattr(aimbat._tui.app, "engine", engine) + monkeypatch.setattr(aimbat._tui.modals, "engine", engine) + 
monkeypatch.setattr(aimbat._tui._widgets, "engine", engine) # =========================================================================== diff --git a/tests/integration/core/test_views.py b/tests/integration/core/test_views.py index 40690d3..045b2f7 100644 --- a/tests/integration/core/test_views.py +++ b/tests/integration/core/test_views.py @@ -1,4 +1,4 @@ -"""Integration tests for quality view functions in aimbat.core._quality.""" +"""Integration tests for SeismogramQualityStats in aimbat.models.""" import uuid @@ -6,53 +6,30 @@ import pytest from sqlmodel import Session, col, select -from aimbat.core._quality import get_quality_event, get_quality_seismogram from aimbat.core._snapshot import create_snapshot -from aimbat.models import AimbatEvent +from aimbat.models import ( + AimbatEvent, + AimbatEventQuality, + AimbatSeismogramQuality, + AimbatSnapshot, + AimbatStation, + SeismogramQualityStats, +) -def _write_mock_mccc_quality( +def _write_seismogram_quality( session: Session, - event_id: uuid.UUID, seismogram_ids: list[uuid.UUID], - select_flags: list[bool], - all_seismograms: bool, + *, + with_mccc: bool = True, ) -> None: - """Simulate an MCCC run by writing mock quality data to the live DB tables. - - Upserts `AimbatEventQuality` and per-seismogram `AimbatSeismogramQuality` - entries. Only seismograms indicated by `all_seismograms` / `select_flags` - receive MCCC metric values; the rest have their MCCC fields cleared. + """Write mock seismogram quality records to live DB tables. Args: session: Database session. - event_id: UUID of the event. - seismogram_ids: UUIDs of the seismograms in order. - select_flags: Per-seismogram select flag, same order as seismogram_ids. - all_seismograms: If True, all seismograms receive MCCC data. + seismogram_ids: UUIDs of the seismograms to populate. + with_mccc: If True, write MCCC fields alongside ICCS CC. 
""" - from aimbat.models import AimbatEventQuality, AimbatSeismogramQuality - - used_ids = { - sid for sid, sel in zip(seismogram_ids, select_flags) if all_seismograms or sel - } - - # Event quality - eq = session.exec( - select(AimbatEventQuality).where(col(AimbatEventQuality.event_id) == event_id) - ).first() - if eq is None: - eq = AimbatEventQuality( - id=uuid.uuid4(), - event_id=event_id, - mccc_rmse=pd.Timedelta(milliseconds=1), - ) - session.add(eq) - else: - eq.mccc_rmse = pd.Timedelta(milliseconds=1) - session.add(eq) - - # Per-seismogram quality for seis_id in seismogram_ids: sq = session.exec( select(AimbatSeismogramQuality).where( @@ -61,8 +38,8 @@ def _write_mock_mccc_quality( ).first() if sq is None: sq = AimbatSeismogramQuality(id=uuid.uuid4(), seismogram_id=seis_id) - session.add(sq) - if seis_id in used_ids: + sq.iccs_cc = 0.8 + if with_mccc: sq.mccc_error = pd.Timedelta(microseconds=100) sq.mccc_cc_mean = 0.9 sq.mccc_cc_std = 0.05 @@ -71,197 +48,209 @@ def _write_mock_mccc_quality( sq.mccc_cc_mean = None sq.mccc_cc_std = None session.add(sq) - session.commit() -class TestGetQualitySeismogram: - """Tests for get_quality_seismogram staleness fix.""" +def _write_event_quality(session: Session, event_id: uuid.UUID) -> None: + """Write a mock event-level quality record to the live DB table. + + Args: + session: Database session. + event_id: UUID of the event. + """ + eq = session.exec( + select(AimbatEventQuality).where(col(AimbatEventQuality.event_id) == event_id) + ).first() + if eq is None: + eq = AimbatEventQuality( + id=uuid.uuid4(), + event_id=event_id, + mccc_rmse=pd.Timedelta(milliseconds=1), + ) + else: + eq.mccc_rmse = pd.Timedelta(milliseconds=1) + session.add(eq) + session.commit() + - def test_returns_none_when_no_mccc_run(self, loaded_session: Session) -> None: - """Verifies that None is returned when no MCCC snapshot exists. 
+class TestSeismogramQualityStatsFromEvent: + """Tests for SeismogramQualityStats.from_event.""" - Args: - loaded_session: The database session. - """ + def test_all_none_when_no_quality(self, loaded_session: Session) -> None: + """Aggregate fields are None and count equals total seismograms when no quality exists.""" event = loaded_session.exec(select(AimbatEvent)).first() assert event is not None - seis = event.seismograms[0] - assert get_quality_seismogram(loaded_session, seis.id) is None + total = len(event.seismograms) - def test_returns_quality_for_selected_seismogram( - self, loaded_session: Session - ) -> None: - """Verifies that quality data is returned for a selected seismogram. + stats = SeismogramQualityStats.from_event(event) + + assert stats.count == total + assert stats.cc_mean is None + assert stats.mccc_cc_mean is None + assert stats.mccc_rmse is None - Args: - loaded_session: The database session. - """ + def test_aggregates_iccs_cc(self, loaded_session: Session) -> None: + """cc_mean is computed from seismograms with iccs_cc set.""" event = loaded_session.exec(select(AimbatEvent)).first() assert event is not None seis_ids = [s.id for s in event.seismograms] - select_flags = [True] * len(seis_ids) - _write_mock_mccc_quality( - loaded_session, - event.id, - seis_ids, - select_flags, - all_seismograms=False, - ) - loaded_session.refresh(event) - create_snapshot(loaded_session, event) - result = get_quality_seismogram(loaded_session, seis_ids[0]) - assert result is not None - assert result.mccc_cc_mean == pytest.approx(0.9) + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=False) + loaded_session.refresh(event) - def test_returns_none_for_deselected_seismogram_when_selected_only( - self, loaded_session: Session - ) -> None: - """Verifies that None is returned for a deselected seismogram when MCCC ran on selected only. + stats = SeismogramQualityStats.from_event(event) - The most recent MCCC snapshot excluded the deselected seismogram. 
- Returning its quality from an older snapshot would be misleading. + assert stats.count == len(seis_ids) + assert stats.cc_mean == pytest.approx(0.8) + assert stats.mccc_cc_mean is None + assert stats.mccc_rmse is None - Args: - loaded_session: The database session. - """ + def test_aggregates_mccc_fields(self, loaded_session: Session) -> None: + """MCCC aggregate fields are populated after an MCCC run.""" event = loaded_session.exec(select(AimbatEvent)).first() assert event is not None - seis_ids = [s.id for s in event.seismograms] - # Snapshot 1: all_seismograms=True β€” deselected seismogram gets quality data. - select_flags_all_deselected = [False] + [True] * (len(seis_ids) - 1) - for i, seis in enumerate(event.seismograms): - seis.parameters.select = select_flags_all_deselected[i] - loaded_session.commit() - _write_mock_mccc_quality( - loaded_session, - event.id, - seis_ids, - select_flags_all_deselected, - all_seismograms=True, - ) + + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=True) loaded_session.refresh(event) - create_snapshot(loaded_session, event) - # Snapshot 2 (most recent): all_seismograms=False β€” deselected seismogram is excluded. 
- _write_mock_mccc_quality( - loaded_session, - event.id, - seis_ids, - select_flags_all_deselected, - all_seismograms=False, - ) + stats = SeismogramQualityStats.from_event(event) + + assert stats.mccc_cc_mean == pytest.approx(0.9) + assert stats.mccc_cc_std == pytest.approx(0.05) + + def test_mccc_rmse_from_event_quality(self, loaded_session: Session) -> None: + """mccc_rmse is taken from the event-level quality record.""" + event = loaded_session.exec(select(AimbatEvent)).first() + assert event is not None + seis_ids = [s.id for s in event.seismograms] + + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=True) + _write_event_quality(loaded_session, event.id) loaded_session.refresh(event) - create_snapshot(loaded_session, event) - # The deselected seismogram should return None despite having data in snapshot 1. - deselected_id = seis_ids[0] - assert get_quality_seismogram(loaded_session, deselected_id) is None + stats = SeismogramQualityStats.from_event(event) - def test_returns_quality_for_deselected_seismogram_when_all_seismograms( + assert stats.mccc_rmse == pd.Timedelta(milliseconds=1) + + def test_count_is_total_not_just_with_quality( self, loaded_session: Session ) -> None: - """Verifies that quality data is returned for a deselected seismogram when MCCC ran on all. - - Args: - loaded_session: The database session. 
- """ + """count reflects all seismograms in the event, not just those with quality data.""" event = loaded_session.exec(select(AimbatEvent)).first() assert event is not None + total = len(event.seismograms) + partial_ids = [s.id for s in event.seismograms][: total // 2] - seis_ids = [s.id for s in event.seismograms] - select_flags = [False] + [True] * (len(seis_ids) - 1) - for i, seis in enumerate(event.seismograms): - seis.parameters.select = select_flags[i] - loaded_session.commit() - - _write_mock_mccc_quality( - loaded_session, - event.id, - seis_ids, - select_flags, - all_seismograms=True, - ) + _write_seismogram_quality(loaded_session, partial_ids, with_mccc=False) loaded_session.refresh(event) - create_snapshot(loaded_session, event) - deselected_id = seis_ids[0] - result = get_quality_seismogram(loaded_session, deselected_id) - assert result is not None - assert result.mccc_cc_mean == pytest.approx(0.9) + stats = SeismogramQualityStats.from_event(event) + assert stats.count == total -class TestGetQualityEvent: - """Tests for get_quality_event returning quality data from the most recent snapshot.""" - def test_returns_none_when_no_mccc(self, loaded_session: Session) -> None: - """Verifies that the event quality snapshot is None when no MCCC has been run. +class TestSeismogramQualityStatsFromStation: + """Tests for SeismogramQualityStats.from_station.""" - Args: - loaded_session: The database session. 
- """ - event = loaded_session.exec(select(AimbatEvent)).first() - assert event is not None - event_quality, stats = get_quality_event(loaded_session, event.id) - assert event_quality is None - assert stats.count == 0 + def test_all_none_when_no_quality(self, loaded_session: Session) -> None: + """Aggregate fields are None when no seismograms have quality records.""" + station = loaded_session.exec(select(AimbatStation)).first() + assert station is not None + + stats = SeismogramQualityStats.from_station(station) + + assert stats.count == len(station.seismograms) + assert stats.cc_mean is None + assert stats.mccc_cc_mean is None + + def test_aggregates_quality(self, loaded_session: Session) -> None: + """ICCS and MCCC fields are aggregated across all seismograms at the station.""" + station = loaded_session.exec(select(AimbatStation)).first() + assert station is not None + seis_ids = [s.id for s in station.seismograms] + + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=True) + loaded_session.refresh(station) + + stats = SeismogramQualityStats.from_station(station) + + assert stats.cc_mean == pytest.approx(0.8) + assert stats.mccc_cc_mean == pytest.approx(0.9) + + def test_mccc_rmse_is_always_none(self, loaded_session: Session) -> None: + """mccc_rmse is None for station stats β€” it is an event-level metric.""" + station = loaded_session.exec(select(AimbatStation)).first() + assert station is not None + seis_ids = [s.id for s in station.seismograms] + + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=True) + loaded_session.refresh(station) - def test_includes_all_quality_records_from_snapshot( + stats = SeismogramQualityStats.from_station(station) + + assert stats.mccc_rmse is None + + +class TestSeismogramQualityStatsFromSnapshot: + """Tests for SeismogramQualityStats.from_snapshot.""" + + def test_all_none_when_no_quality_in_snapshot( self, loaded_session: Session ) -> None: - """Verifies that stats aggregate all seismogram quality 
records in the snapshot. - - Args: - loaded_session: The database session. - """ + """Aggregate fields are None when the snapshot has no quality records.""" event = loaded_session.exec(select(AimbatEvent)).first() assert event is not None + create_snapshot(loaded_session, event) + snapshot = loaded_session.exec(select(AimbatSnapshot)).first() + assert snapshot is not None + + stats = SeismogramQualityStats.from_snapshot(snapshot) + + assert stats.count == snapshot.seismogram_count + assert stats.cc_mean is None + assert stats.mccc_cc_mean is None + assert stats.mccc_rmse is None + + def test_aggregates_frozen_quality(self, loaded_session: Session) -> None: + """Quality fields reflect values frozen at snapshot time, not live changes.""" + event = loaded_session.exec(select(AimbatEvent)).first() + assert event is not None seis_ids = [s.id for s in event.seismograms] - select_flags = [s.select for s in event.seismograms] - _write_mock_mccc_quality( - loaded_session, - event.id, - seis_ids, - select_flags, - all_seismograms=False, - ) + + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=True) + _write_event_quality(loaded_session, event.id) loaded_session.refresh(event) create_snapshot(loaded_session, event) - _, stats = get_quality_event(loaded_session, event.id) - assert stats.count == sum(select_flags) + snapshot = loaded_session.exec(select(AimbatSnapshot)).first() + assert snapshot is not None + stats = SeismogramQualityStats.from_snapshot(snapshot) + + assert stats.count == snapshot.seismogram_count + assert stats.cc_mean == pytest.approx(0.8) + assert stats.mccc_cc_mean == pytest.approx(0.9) + assert stats.mccc_rmse == pd.Timedelta(milliseconds=1) - def test_includes_deselected_seismograms_when_present_in_snapshot( + def test_is_independent_of_live_quality_changes( self, loaded_session: Session ) -> None: - """Verifies that deselected seismograms with quality data are included in stats. 
- - When MCCC ran with all_seismograms=True, quality records exist for - deselected seismograms too, and they should be counted. - - Args: - loaded_session: The database session. - """ + """Snapshot stats are unaffected by live quality changes made after snapshotting.""" event = loaded_session.exec(select(AimbatEvent)).first() assert event is not None - - event.seismograms[0].parameters.select = False - loaded_session.commit() - seis_ids = [s.id for s in event.seismograms] - select_flags = [s.select for s in event.seismograms] - _write_mock_mccc_quality( - loaded_session, - event.id, - seis_ids, - select_flags, - all_seismograms=True, - ) + + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=True) + _write_event_quality(loaded_session, event.id) loaded_session.refresh(event) create_snapshot(loaded_session, event) - _, stats = get_quality_event(loaded_session, event.id) - assert stats.count == len(seis_ids) + # Overwrite live quality after snapshotting. + _write_seismogram_quality(loaded_session, seis_ids, with_mccc=False) + + snapshot = loaded_session.exec(select(AimbatSnapshot)).first() + assert snapshot is not None + stats = SeismogramQualityStats.from_snapshot(snapshot) + + assert stats.mccc_cc_mean == pytest.approx(0.9) diff --git a/tests/integration/models/test_note.py b/tests/integration/models/test_note.py new file mode 100644 index 0000000..8e3231c --- /dev/null +++ b/tests/integration/models/test_note.py @@ -0,0 +1,88 @@ +"""Integration tests for the AimbatNote model's single-parent constraint.""" + +import uuid +from datetime import timezone + +import pytest +from pandas import Timestamp +from pydantic import ValidationError +from sqlalchemy.exc import IntegrityError +from sqlmodel import Session + +from aimbat.models import AimbatEvent, AimbatEventParameters, AimbatNote, AimbatStation + + +def _make_station(session: Session) -> AimbatStation: + sta = AimbatStation( + name="AAK", + network="II", + location="00", + channel="BHZ", + 
latitude=42.63, + longitude=74.49, + ) + session.add(sta) + session.flush() + return sta + + +def _make_event(session: Session) -> AimbatEvent: + ev = AimbatEvent( + time=Timestamp("2010-02-27T06:34:14", tz=timezone.utc), + latitude=-36.12, + longitude=-72.90, + ) + session.add(ev) + session.flush() + session.add(AimbatEventParameters(event=ev)) + session.flush() + return ev + + +class TestAimbatNoteAtMostOneParent: + """AimbatNote must have at most one FK set.""" + + def test_note_with_no_parent_is_valid(self, patched_session: Session) -> None: + note = AimbatNote.model_validate({"content": "orphan note"}) + patched_session.add(note) + patched_session.commit() + + def test_note_with_event_parent_is_valid(self, patched_session: Session) -> None: + ev = _make_event(patched_session) + note = AimbatNote.model_validate({"content": "event note", "event_id": ev.id}) + patched_session.add(note) + patched_session.commit() + + def test_note_with_station_parent_is_valid(self, patched_session: Session) -> None: + sta = _make_station(patched_session) + note = AimbatNote.model_validate( + {"content": "station note", "station_id": sta.id} + ) + patched_session.add(note) + patched_session.commit() + + def test_model_validator_rejects_two_parents(self) -> None: + """Pydantic model_validator raises when two FK fields are set.""" + + with pytest.raises(ValidationError, match="At most one"): + AimbatNote.model_validate( + { + "content": "bad note", + "event_id": uuid.uuid4(), + "station_id": uuid.uuid4(), + } + ) + + def test_db_constraint_rejects_two_parents(self, patched_session: Session) -> None: + """DB check constraint rejects a row with two FK fields set.""" + + ev_id = uuid.uuid4() + sta_id = uuid.uuid4() + + # Bypass the model_validator by constructing via __init__ (SQLModel skips + # Pydantic validation on __init__ for table models) to confirm the DB + # constraint fires independently. 
+ note = AimbatNote(content="bypass note", event_id=ev_id, station_id=sta_id) + patched_session.add(note) + with pytest.raises(IntegrityError): + patched_session.flush() diff --git a/uv.lock b/uv.lock index dbfe033..cc4fc2e 100644 --- a/uv.lock +++ b/uv.lock @@ -27,6 +27,7 @@ dependencies = [ { name = "sqlmodel" }, { name = "textual" }, { name = "textual-fspicker" }, + { name = "textual-plotext" }, { name = "typing-extensions" }, ] @@ -67,6 +68,7 @@ requires-dist = [ { name = "sqlmodel", specifier = ">=0.0.24" }, { name = "textual", specifier = ">=8.0.0" }, { name = "textual-fspicker", specifier = ">=1.0.0" }, + { name = "textual-plotext", specifier = ">=1.0.1" }, { name = "typing-extensions", specifier = ">=4.15.0" }, ] @@ -236,11 +238,11 @@ wheels = [ [[package]] name = "attrs" -version = "25.4.0" +version = "26.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, + { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 
67548, upload-time = "2026-03-19T14:22:23.645Z" }, ] [[package]] @@ -1669,6 +1671,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, ] +[[package]] +name = "plotext" +version = "5.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/d7/f75f397af966fe252d0d34ffd3cae765317fce2134f925f95e7d6725d1ce/plotext-5.3.2.tar.gz", hash = "sha256:52d1e932e67c177bf357a3f0fe6ce14d1a96f7f7d5679d7b455b929df517068e", size = 61967, upload-time = "2024-09-24T15:13:37.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/1e/12fe7c40cd2099a1f454518754ed229b01beaf3bbb343127f0cc13ce6c22/plotext-5.3.2-py3-none-any.whl", hash = "sha256:394362349c1ddbf319548cfac17ca65e6d5dfc03200c40dfdc0503b3e95a2283", size = 64047, upload-time = "2024-09-24T15:13:36.296Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -2169,27 +2180,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" }, - { url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" }, - { url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" }, - { url = "https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" }, - { url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" }, - { url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" }, - { url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" }, - { url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" }, - { url = "https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" }, - { url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" }, - { url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" }, - { url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" }, - { url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" }, - { url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" }, +version = "0.15.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/22/9e4f66ee588588dc6c9af6a994e12d26e19efbe874d1a909d09a6dac7a59/ruff-0.15.7.tar.gz", hash = "sha256:04f1ae61fc20fe0b148617c324d9d009b5f63412c0b16474f3d5f1a1a665f7ac", size = 4601277, upload-time = "2026-03-19T16:26:22.605Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/2f/0b08ced94412af091807b6119ca03755d651d3d93a242682bf020189db94/ruff-0.15.7-py3-none-linux_armv6l.whl", hash = "sha256:a81cc5b6910fb7dfc7c32d20652e50fa05963f6e13ead3c5915c41ac5d16668e", size = 10489037, upload-time = "2026-03-19T16:26:32.47Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/4a/82e0fa632e5c8b1eba5ee86ecd929e8ff327bbdbfb3c6ac5d81631bef605/ruff-0.15.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:722d165bd52403f3bdabc0ce9e41fc47070ac56d7a91b4e0d097b516a53a3477", size = 10955433, upload-time = "2026-03-19T16:27:00.205Z" }, + { url = "https://files.pythonhosted.org/packages/ab/10/12586735d0ff42526ad78c049bf51d7428618c8b5c467e72508c694119df/ruff-0.15.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7fbc2448094262552146cbe1b9643a92f66559d3761f1ad0656d4991491af49e", size = 10269302, upload-time = "2026-03-19T16:26:26.183Z" }, + { url = "https://files.pythonhosted.org/packages/eb/5d/32b5c44ccf149a26623671df49cbfbd0a0ae511ff3df9d9d2426966a8d57/ruff-0.15.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b39329b60eba44156d138275323cc726bbfbddcec3063da57caa8a8b1d50adf", size = 10607625, upload-time = "2026-03-19T16:27:03.263Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f1/f0001cabe86173aaacb6eb9bb734aa0605f9a6aa6fa7d43cb49cbc4af9c9/ruff-0.15.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87768c151808505f2bfc93ae44e5f9e7c8518943e5074f76ac21558ef5627c85", size = 10324743, upload-time = "2026-03-19T16:27:09.791Z" }, + { url = "https://files.pythonhosted.org/packages/7a/87/b8a8f3d56b8d848008559e7c9d8bf367934d5367f6d932ba779456e2f73b/ruff-0.15.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb0511670002c6c529ec66c0e30641c976c8963de26a113f3a30456b702468b0", size = 11138536, upload-time = "2026-03-19T16:27:06.101Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f2/4fd0d05aab0c5934b2e1464784f85ba2eab9d54bffc53fb5430d1ed8b829/ruff-0.15.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0d19644f801849229db8345180a71bee5407b429dd217f853ec515e968a6912", size = 11994292, upload-time = "2026-03-19T16:26:48.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/22/fc4483871e767e5e95d1622ad83dad5ebb830f762ed0420fde7dfa9d9b08/ruff-0.15.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4806d8e09ef5e84eb19ba833d0442f7e300b23fe3f0981cae159a248a10f0036", size = 11398981, upload-time = "2026-03-19T16:26:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/66f0343176d5eab02c3f7fcd2de7a8e0dd7a41f0d982bee56cd1c24db62b/ruff-0.15.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dce0896488562f09a27b9c91b1f58a097457143931f3c4d519690dea54e624c5", size = 11242422, upload-time = "2026-03-19T16:26:29.277Z" }, + { url = "https://files.pythonhosted.org/packages/5d/3a/a7060f145bfdcce4c987ea27788b30c60e2c81d6e9a65157ca8afe646328/ruff-0.15.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:1852ce241d2bc89e5dc823e03cff4ce73d816b5c6cdadd27dbfe7b03217d2a12", size = 11232158, upload-time = "2026-03-19T16:26:42.321Z" }, + { url = "https://files.pythonhosted.org/packages/a7/53/90fbb9e08b29c048c403558d3cdd0adf2668b02ce9d50602452e187cd4af/ruff-0.15.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5f3e4b221fb4bd293f79912fc5e93a9063ebd6d0dcbd528f91b89172a9b8436c", size = 10577861, upload-time = "2026-03-19T16:26:57.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/aa/5f486226538fe4d0f0439e2da1716e1acf895e2a232b26f2459c55f8ddad/ruff-0.15.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b15e48602c9c1d9bdc504b472e90b90c97dc7d46c7028011ae67f3861ceba7b4", size = 10327310, upload-time = "2026-03-19T16:26:35.909Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/271afdffb81fe7bfc8c43ba079e9d96238f674380099457a74ccb3863857/ruff-0.15.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b4705e0e85cedc74b0a23cf6a179dbb3df184cb227761979cc76c0440b5ab0d", size = 10840752, upload-time = "2026-03-19T16:26:45.723Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/29/a4ae78394f76c7759953c47884eb44de271b03a66634148d9f7d11e721bd/ruff-0.15.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:112c1fa316a558bb34319282c1200a8bf0495f1b735aeb78bfcb2991e6087580", size = 11336961, upload-time = "2026-03-19T16:26:39.076Z" }, + { url = "https://files.pythonhosted.org/packages/26/6b/8786ba5736562220d588a2f6653e6c17e90c59ced34a2d7b512ef8956103/ruff-0.15.7-py3-none-win32.whl", hash = "sha256:6d39e2d3505b082323352f733599f28169d12e891f7dd407f2d4f54b4c2886de", size = 10582538, upload-time = "2026-03-19T16:26:15.992Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e9/346d4d3fffc6871125e877dae8d9a1966b254fbd92a50f8561078b88b099/ruff-0.15.7-py3-none-win_amd64.whl", hash = "sha256:4d53d712ddebcd7dace1bc395367aec12c057aacfe9adbb6d832302575f4d3a1", size = 11755839, upload-time = "2026-03-19T16:26:19.897Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e8/726643a3ea68c727da31570bde48c7a10f1aa60eddd628d94078fec586ff/ruff-0.15.7-py3-none-win_arm64.whl", hash = "sha256:18e8d73f1c3fdf27931497972250340f92e8c861722161a9caeb89a58ead6ed2", size = 11023304, upload-time = "2026-03-19T16:26:51.669Z" }, ] [[package]] @@ -2397,6 +2408,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/92/2f/bf13ae8a1f17186a122968c7ba0f18716d63d2fa67a9e6f905a20067c022/textual_fspicker-1.0.0-py3-none-any.whl", hash = "sha256:71c80258f8885a67abf85d68b42ac8defc8226281481717af722346cbc3e0305", size = 24932, upload-time = "2026-02-18T20:02:15.492Z" }, ] +[[package]] +name = "textual-plotext" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "plotext" }, + { name = "textual" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9a/b0/e4e0f38df057db778252db0dd2c08522d7222b8537b6a0181d797b9044bd/textual_plotext-1.0.1.tar.gz", hash = "sha256:836f53a3316756609e194129a35c2875638e7958c261f541e0a794f7c98011be", size = 16489, upload-time = 
"2024-11-30T19:25:56.625Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/53/fba7da208f9d3f59254413660fa0aa6599f2aca806f3ae356670455fd4ea/textual_plotext-1.0.1-py3-none-any.whl", hash = "sha256:6b6bfd00b29f121ddf216eaaf9bdac9d688ed72f40028484d279a10cbbb169ed", size = 16558, upload-time = "2024-11-30T19:25:32.208Z" }, +] + [[package]] name = "textual-serve" version = "1.1.3" @@ -2609,7 +2633,7 @@ wheels = [ [[package]] name = "zensical" -version = "0.0.27" +version = "0.0.28" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -2619,18 +2643,18 @@ dependencies = [ { name = "pymdown-extensions" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8f/83/969152d927b522a0fed1f20b1730575d86b920ce51530b669d9fad4537de/zensical-0.0.27.tar.gz", hash = "sha256:6d8d74aba4a9f9505e6ba1c43d4c828ba4ff7bb1ff9b005e5174c5b92cf23419", size = 3841776, upload-time = "2026-03-13T17:56:14.494Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/fe/0335f1a521eb6c0ab96028bf67148390eb1d5c742c23e6a4b0f8381508bd/zensical-0.0.27-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d51ebf4b038f3eea99fd337119b99d92ad92bbe674372d5262e6dbbabbe4e9b5", size = 12262017, upload-time = "2026-03-13T17:55:36.403Z" }, - { url = "https://files.pythonhosted.org/packages/02/cb/ac24334fc7959b49496c97cb9d2bed82a8db8b84eafaf68189048e7fe69a/zensical-0.0.27-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:a627cd4599cf2c5a5a5205f0510667227d1fe4579b6f7445adba2d84bab9fbc8", size = 12147361, upload-time = "2026-03-13T17:55:39.736Z" }, - { url = "https://files.pythonhosted.org/packages/a2/0f/31c981f61006fdaf0460d15bde1248a045178d67307bad61a4588414855d/zensical-0.0.27-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99cbc493022f8749504ef10c71772d360b705b4e2fd1511421393157d07bdccf", size = 12505771, upload-time = "2026-03-13T17:55:42.993Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/1e/f6842c94ec89e5e9184f407dbbab2a497b444b28d4fb5b8df631894be896/zensical-0.0.27-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ecc20a85e8a23ad9ab809b2f268111321be7b2e214021b3b00f138936a87a434", size = 12455689, upload-time = "2026-03-13T17:55:46.055Z" }, - { url = "https://files.pythonhosted.org/packages/4c/ad/866c3336381cca7528e792469958fbe2e65b9206a2657bef3dd8ed4ac88b/zensical-0.0.27-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da11e0f0861dbd7d3b5e6fe1e3a53b361b2181c53f3abe9fb4cdf2ed0cea47bf", size = 12791263, upload-time = "2026-03-13T17:55:49.193Z" }, - { url = "https://files.pythonhosted.org/packages/e5/df/fca5ed6bebdb61aa656dfa65cce4b4d03324a79c75857728230872fbdf7c/zensical-0.0.27-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e11d220181477040a4b22bf2b8678d5b0c878e7aae194fad4133561cb976d69", size = 12549796, upload-time = "2026-03-13T17:55:52.55Z" }, - { url = "https://files.pythonhosted.org/packages/4a/e2/43398b5ec64ed78204a5a5929a3990769fc0f6a3094a30395882bda1399a/zensical-0.0.27-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06b9e308aec8c5db1cd623e2e98e1b25c3f5cab6b25fcc9bac1e16c0c2b93837", size = 12683568, upload-time = "2026-03-13T17:55:56.151Z" }, - { url = "https://files.pythonhosted.org/packages/b3/3c/5c98f9964c7e30735aacd22a389dacec12bcc5bc8162c58e76b76d20db6e/zensical-0.0.27-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:682085155126965b091cb9f915cd2e4297383ac500122fd4b632cf4511733eb2", size = 12725214, upload-time = "2026-03-13T17:55:59.286Z" }, - { url = "https://files.pythonhosted.org/packages/50/0f/ebaa159cac6d64b53bf7134420c2b43399acc7096cb79795be4fb10768fc/zensical-0.0.27-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:b367c285157c8e1099ae9e2b36564e07d3124bf891e96194a093bc836f3058d2", size = 12860416, upload-time = "2026-03-13T17:56:02.456Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/06/d82bfccbf5a1f43256dbc4d1984e398035a65f84f7c1e48b69ba15ea7281/zensical-0.0.27-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:847c881209e65e1db1291c59a9db77966ac50f7c66bf9a733c3c7832144dbfca", size = 12819533, upload-time = "2026-03-13T17:56:05.487Z" }, - { url = "https://files.pythonhosted.org/packages/4d/1f/d25e421d91f063a9404c59dd032f65a67c7c700e9f5f40436ab98e533482/zensical-0.0.27-cp310-abi3-win32.whl", hash = "sha256:f31ec13c700794be3f9c0b7d90f09a7d23575a3a27c464994b9bb441a22d880b", size = 11862822, upload-time = "2026-03-13T17:56:08.933Z" }, - { url = "https://files.pythonhosted.org/packages/5a/b5/5b86d126fcc42b96c5dbecde5074d6ea766a1a884e3b25b3524843c5e6a5/zensical-0.0.27-cp310-abi3-win_amd64.whl", hash = "sha256:9d3b1fca7ea99a7b2a8db272dd7f7839587c4ebf4f56b84ff01c97b3893ec9f8", size = 12059658, upload-time = "2026-03-13T17:56:11.859Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/14/0a/ed78749cd30c8b72f6b3f85de7f4da45ddcbbd006222aa63f7d6e27d68db/zensical-0.0.28.tar.gz", hash = "sha256:af7d75a1b297721dfc9b897f729b601e56b3e566990a989e9e3e373a8cd04c40", size = 3842655, upload-time = "2026-03-19T14:28:09.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/c5/05e6a8b8ecfc255ff59414c71e1904b1ceaf3ccbc26f14b90ce82aaab16e/zensical-0.0.28-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:2db2997dd124dc9361b9d3228925df9e51281af9529c26187a865407588f8abb", size = 12302942, upload-time = "2026-03-19T14:27:32.009Z" }, + { url = "https://files.pythonhosted.org/packages/10/aa/c10fcbee69bcca8a545b1a868e3fec2560b984f68e91cbbce3eaee0814ff/zensical-0.0.28-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:5c6e5ea5c057492a1473a68f0e71359d663057d7d864b32a8fd429c8ea390346", size = 12186436, upload-time = "2026-03-19T14:27:34.866Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/ea/d0aaa0f0ed1b7a69aeec5f25ce2ff2ea7b13e581c9115d51a4a50bc7bf57/zensical-0.0.28-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2ee8a1d29b61de61e6b0f9123fa395c06c24c94e509170c7f7f9ccddaeaaad4", size = 12545239, upload-time = "2026-03-19T14:27:37.613Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b1/508ea4de8b5c93a2ceb4d536314041a19a520866a5ce61c55d64417afaa9/zensical-0.0.28-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cef68b363c0d3598d37a1090bfc5c6267e36a87a55e9fb6a6f9d7f2768f1dfd", size = 12488943, upload-time = "2026-03-19T14:27:40.663Z" }, + { url = "https://files.pythonhosted.org/packages/1d/35/9c1878845dfcec655f538ef523c606e585d38b84415d65009b83ebc356b2/zensical-0.0.28-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3175440fd526cf0273859d0de355e769ba43e082e09deb04b6f6afd77af6c91", size = 12840468, upload-time = "2026-03-19T14:27:43.758Z" }, + { url = "https://files.pythonhosted.org/packages/d0/1f/50f0ca6db76dc7888f9e0f0103c8faaaa6ee25a2c1e3664f2db5cc7bf24b/zensical-0.0.28-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0887436c5fd8fe7008c0d93407876695db67bcf55c8aec9fb36c339d82bb7fce", size = 12591152, upload-time = "2026-03-19T14:27:46.629Z" }, + { url = "https://files.pythonhosted.org/packages/f1/6b/621b7031c24c9fb0d38c2c488d79d73fcc2e645330c27fbab4ecccc06528/zensical-0.0.28-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b8a0ca92e04687f71aa20c9ae80fe8b840125545657e6b7c0f83adecd04d512e", size = 12723744, upload-time = "2026-03-19T14:27:50.101Z" }, + { url = "https://files.pythonhosted.org/packages/8d/89/a8bdd6a8423e0bb4f8792793681cbe101cdfbb1e0c1128b3226afe53af5f/zensical-0.0.28-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:acb31723ca82c367d1c41a6a7b0f52ce1ed87f0ee437de2ee2fc2e284e120e44", size = 12760416, upload-time = "2026-03-19T14:27:52.667Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/07/af4ec58b63a14c0fb6b21c8c875f34effa71d4258530a3e3d301b1c518b9/zensical-0.0.28-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:3680b3a75560881e7fa32b450cf6de09895680b84d0dd2b611cb5fa552fdfc49", size = 12907390, upload-time = "2026-03-19T14:27:56.71Z" }, + { url = "https://files.pythonhosted.org/packages/61/70/1b3f319ac2c05bdcd27ae73ae315a893683eb286a42a746e7e572e2675f6/zensical-0.0.28-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93e1bc47981b50bcd9c4098edc66fb86fd881c5b52b355db92dcef626cc0b468", size = 12864434, upload-time = "2026-03-19T14:28:00.443Z" }, + { url = "https://files.pythonhosted.org/packages/8b/21/be7c94b25e0f4281a6b5fbd471236e33c44b832a830fedad40a6c119f290/zensical-0.0.28-cp310-abi3-win32.whl", hash = "sha256:eee014ca1290463cf8471e3e1b05b7c627ac7afa0881635024d23d4794675980", size = 11888008, upload-time = "2026-03-19T14:28:03.565Z" }, + { url = "https://files.pythonhosted.org/packages/de/88/5ce79445489edae6c1a3ff9e06b4885bea5d8e8bb8e26e1aa1b24395c337/zensical-0.0.28-cp310-abi3-win_amd64.whl", hash = "sha256:6077a85ee1f0154dbfe542db36789322fe8625d716235a000d4e0a8969b14175", size = 12094496, upload-time = "2026-03-19T14:28:06.311Z" }, ]