diff --git a/.vscode/launch.json b/.vscode/launch.json
index 2ebdba1..77832fe 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -491,11 +491,11 @@
// tethys CANON September 2012 (classic test case)
//"args": ["-v", "1", "--dlist", "tethys/missionlogs/2012/20120908_20120920.dlist"]
// ahi planktivore deployment April 2025 add --update_ssds_provenance
- "args": ["-v", "1", "--dlist", "ahi/missionlogs/2025/20250414_20250418.dlist", "--update_ssds_provenance", "--force"]
+ //"args": ["-v", "1", "--dlist", "ahi/missionlogs/2025/20250414_20250418.dlist", "--update_ssds_provenance", "--force"]
// Test time range of DeploymentPlots with ahi planktivore deployment April 2025
//"args": ["-v", "1", "--auv_name", "ahi", "--start", "20250401", "--end", "20250501", "--update_ssds_provenance", "--force"]
// Test web page building with a short deployment
- //"args": ["-v", "1", "--dlist", "ahi/missionlogs/2025/20251022_20251024.dlist", "--update_ssds_provenance", "--force", "--notify", "mccann@mbari.org"]
+ "args": ["-v", "1", "--dlist", "ahi/missionlogs/2025/20251022_20251024.dlist", "--update_ssds_provenance", "--force", "--notify", "mccann@mbari.org"]
// Test --force option for rebuilding web pages with a short deployment
//"args": ["-v", "1", "--last_n_days", "10", "--update_ssds_provenance", "--force"]
// Test --notify option
diff --git a/src/data/lrauv_deployment_plots.py b/src/data/lrauv_deployment_plots.py
index cede223..dfb3d9a 100755
--- a/src/data/lrauv_deployment_plots.py
+++ b/src/data/lrauv_deployment_plots.py
@@ -715,6 +715,23 @@ def _per_log_stoqs_url(
return url
return fallback
+ def _duration_min_from_nc_url(self, nc_url: str) -> int | None:
+ """Parse start/end timestamps from nc filename and return duration in minutes.
+
+ Expects filenames like ``202506092228_202506101928_1S.nc`` where each
+ timestamp is ``YYYYMMDDHHNN`` (12 digits).
+ """
+ fname = nc_url.rsplit("/", 1)[-1]
+ m = re.match(r"(\d{12})_(\d{12})", fname)
+ if not m:
+ return None
+ try:
+ t0 = datetime.strptime(m.group(1), "%Y%m%d%H%M").replace(tzinfo=UTC)
+ t1 = datetime.strptime(m.group(2), "%Y%m%d%H%M").replace(tzinfo=UTC)
+ return int((t1 - t0).total_seconds() / 60)
+ except ValueError:
+ return None
+
def _per_log_png_links(self, nc_urls: list[str]) -> list[tuple[str, str]]:
"""Return (url, label) pairs for each existing per-log PNG."""
parts: list[tuple[str, str]] = []
@@ -756,14 +773,38 @@ def _write_per_png_html( # noqa: C901, PLR0913
nc_urls = grouped[log_dir]
log_stoqs_url = self._per_log_stoqs_url(nc_urls, auv_name, stoqs_url)
png_links = self._per_log_png_links(nc_urls)
+
+ dap_links = []
+ for nc_url in nc_urls:
+ nc4_url = re.sub(rf"_{FREQ}\.nc$", ".nc4", nc_url)
+            dap_links.append(
+                f'<a href="{nc4_url}">Original .nc4</a>'
+                f"<br>"
+                f'<a href="{nc_url}">Resampled .nc</a>'
+            )
+
+ stoqs_label = "STOQS"
+ if log_stoqs_url:
+ dur_min = next(
+ (
+ d
+ for nc_url in nc_urls
+ if (d := self._duration_min_from_nc_url(nc_url)) is not None
+ ),
+ None,
+ )
+ after_scheme = log_stoqs_url.split("//", 1)[-1]
+ db_label = after_scheme.split("/")[1] if "/" in after_scheme else after_scheme
+ stoqs_label = f"{dur_min} min from {db_label}" if dur_min is not None else db_label
+
rows.append(
{
"dir": log_dir,
                "plots": "<br>".join(f'<a href="{u}">{lbl}</a>' for u, lbl in png_links),
- "dap": "".join(
-                    f'<a href="{nc_url}.html">OPeNDAP</a>' for nc_url in nc_urls
+                "dap": "<br>".join(dap_links),
+ "stoqs": (
+                    f'<a href="{log_stoqs_url}">{stoqs_label}</a>' if log_stoqs_url else ""
),
-                "stoqs": f'<a href="{log_stoqs_url}">STOQS</a>' if log_stoqs_url else "",
}
)
@@ -780,7 +821,7 @@ def _write_per_png_html( # noqa: C901, PLR0913
log_rows += f"