Skip to content

Commit

Permalink
Merge pull request #202 from akrherz/none
Browse files Browse the repository at this point in the history
obhistory issues
  • Loading branch information
akrherz committed Jun 3, 2024
2 parents 477c54f + 766cfff commit bcd09e7
Show file tree
Hide file tree
Showing 5 changed files with 60 additions and 42 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ ci:
autoupdate_schedule: quarterly
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.4.6"
rev: "v0.4.7"
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
Expand Down
80 changes: 42 additions & 38 deletions src/iemws/services/obhistory.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@
from zoneinfo import ZoneInfo

import numpy as np
import pandas as pd
import pytz
from fastapi import APIRouter, HTTPException, Query
from metpy.calc import dewpoint_from_relative_humidity
from metpy.units import masked_array, units
from pandas.io.sql import read_sql
from pyiem.network import Table as NetworkTable

from ..models import SupportedFormatsNoGeoJSON
Expand All @@ -37,7 +37,7 @@ def get_df(network, station, date):
if date == datetime.date.today() and network not in ["ISUSM", "SCAN"]:
# Use IEM Access
with get_sqlalchemy_conn("iem") as pgconn:
df = read_sql(
df = pd.read_sql(
"""
SELECT distinct valid at time zone 'UTC' as utc_valid,
valid at time zone t.tzname as local_valid, tmpf, dwpf, sknt,
Expand Down Expand Up @@ -66,7 +66,7 @@ def get_df(network, station, date):
if network.find("_ASOS") > 0:
# Use ASOS
with get_sqlalchemy_conn("asos") as pgconn:
df = read_sql(
df = pd.read_sql(
"""
SELECT valid at time zone 'UTC' as utc_valid,
valid at time zone %s as local_valid, tmpf, dwpf, sknt, drct,
Expand All @@ -85,7 +85,7 @@ def get_df(network, station, date):
if network.find("_RWIS") > 0:
# Use RWIS
with get_sqlalchemy_conn("rwis") as pgconn:
df = read_sql(
df = pd.read_sql(
"""
SELECT valid at time zone 'UTC' as utc_valid,
valid at time zone %s as local_valid, tmpf, dwpf, sknt, drct,
Expand All @@ -100,7 +100,7 @@ def get_df(network, station, date):
if network in ["ISUSM", "ISUAG"]:
# Use ISUAG
with get_sqlalchemy_conn("isuag") as pgconn:
df = read_sql(
df = pd.read_sql(
"SELECT valid at time zone 'UTC' as utc_valid, phour, "
"valid at time zone %s as local_valid, tmpf, relh, sknt, drct "
"from alldata WHERE station = %s and "
Expand All @@ -125,7 +125,7 @@ def get_df(network, station, date):
return df
if network == "SCAN":
with get_sqlalchemy_conn("scan") as pgconn:
df = read_sql(
df = pd.read_sql(
"""
SELECT valid at time zone 'UTC' as utc_valid,
valid at time zone %s as local_valid, tmpf, dwpf, sknt, drct,
Expand All @@ -146,7 +146,7 @@ def get_df(network, station, date):
# lazy
providers = {"OT": "other", "WMO_BUFR_SRF": "other"}
with get_sqlalchemy_conn(providers.get(network, "snet")) as pgconn:
df = read_sql(
df = pd.read_sql(
"SELECT valid at time zone 'UTC' as utc_valid, "
"valid at time zone %s as local_valid, tmpf, dwpf, sknt, drct "
"from alldata WHERE station = %s and "
Expand All @@ -158,7 +158,7 @@ def get_df(network, station, date):
return df
if network == "USCRN":
with get_sqlalchemy_conn("uscrn") as pgconn:
df = read_sql(
df = pd.read_sql(
"SELECT valid at time zone 'UTC' as utc_valid, "
"valid at time zone %s as local_valid, tmpc, rh, "
"wind_mps from alldata WHERE station = %s and "
Expand All @@ -170,18 +170,19 @@ def get_df(network, station, date):
if df.empty:
return df
# Do some unit work
tmpc = masked_array(df["tmpc"].values, units("degC"))
df["tmpf"] = tmpc.to(units("degF")).m
if df["rh"].isna().all():
df["dwpf"] = np.nan
else:
df["dwpf"] = (
dewpoint_from_relative_humidity(
tmpc, masked_array(df["rh"].values, units("percent"))
if not df["tmpc"].isna().all():
tmpc = masked_array(df["tmpc"].values, units("degC"))
df["tmpf"] = tmpc.to(units("degF")).m
if df["rh"].isna().all():
df["dwpf"] = np.nan
else:
df["dwpf"] = (
dewpoint_from_relative_humidity(
tmpc, masked_array(df["rh"].values, units("percent"))
)
.to(units("degF"))
.m
)
.to(units("degF"))
.m
)
if df["wind_mps"].isna().all():
df["sknt"] = np.nan
else:
Expand All @@ -194,7 +195,7 @@ def get_df(network, station, date):
if network.find("_COOP") > 0 or network.find("_DCP") > 0:
# Use HADS
with get_sqlalchemy_conn("hads") as pgconn:
df = read_sql(
df = pd.read_sql(
"SELECT distinct valid at time zone 'UTC' as utc_valid, "
"key, value "
f"from raw{date.strftime('%Y')} WHERE station = %s and "
Expand All @@ -203,24 +204,27 @@ def get_df(network, station, date):
params=(station, sts, ets),
index_col=None,
)
if df.empty:
return df
df = df.pivot_table(
index="utc_valid", columns="key", values="value", aggfunc="first"
)
df = df.reset_index()
# Query alldata too as it has the variable conversions done
df2 = read_sql(
"SELECT valid at time zone 'UTC' as utc_valid, "
"tmpf, dwpf, sknt, drct "
"from alldata WHERE station = %s and "
"valid >= %s and valid < %s ORDER by utc_valid ASC",
pgconn,
params=(station, sts, ets),
index_col=None,
)
if not df2.empty:
df = df.merge(df2, on="utc_valid")
if df.empty:
return df
df = df.pivot_table(
index="utc_valid",
columns="key",
values="value",
aggfunc="first",
)
df = df.reset_index()
# Query alldata too as it has the variable conversions done
df2 = pd.read_sql(
"SELECT valid at time zone 'UTC' as utc_valid, "
"tmpf, dwpf, sknt, drct "
"from alldata WHERE station = %s and "
"valid >= %s and valid < %s ORDER by utc_valid ASC",
pgconn,
params=(station, sts, ets),
index_col=None,
)
if not df2.empty:
df = df.merge(df2, on="utc_valid")

# Generate the local_valid column
df["local_valid"] = (
Expand Down
8 changes: 6 additions & 2 deletions testdata/hads_snowfall.sql
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
-- 6 hour snowfall
insert into raw2023_11(station, valid, key, value) values
('DMX', '2023-11-10 12:00+00', 'SFQRZZZ', 10);
('DNKI4', '2023-11-10 12:00+00', 'SFQRZZZ', 10);

-- Needed for the 6-hour snowfall join to work.
insert into stations(iemid, id, name, network, geom) values
(-1, 'DMX', 'Des Moines', 'IA_DCP', ST_Point(-93.648, 41.533, 4326));
(-1, 'DNKI4', 'Des Moines', 'IA_DCP', ST_Point(-93.648, 41.533, 4326));

insert into t2023(station, valid, tmpf) values
('DNKI4', '2023-11-10 12:00+00', 32.0);
2 changes: 1 addition & 1 deletion tests/nws/test_snowfall_6hour.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@ def test_basic():
"""Test basic calls."""
service = "/nws/snowfall_6hour.geojson?valid=2023-11-10T12:00:00Z"
res = client.get(service).json()
assert res["features"][0]["properties"]["station"] == "DMX"
assert res["features"][0]["properties"]["station"] == "DNKI4"
10 changes: 10 additions & 0 deletions tests/test_services_obhistory.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,16 @@ def test_basic():
assert res is not None


def test_dcp_alldata():
    """Verify a DCP obhistory request also merges in the alldata table."""
    resp = client.get(
        "/obhistory.json",
        params={
            "network": "IA_DCP",
            "station": "DNKI4",
            "date": "2023-11-10",
        },
    )
    payload = resp.json()
    # The merged result should contain at least one observation row.
    assert payload["data"]


def test_dcp():
"""Test a DCP station request."""
req = client.get(
Expand Down

0 comments on commit bcd09e7

Please sign in to comment.