From 3dda9781b3cf58c8c1927f760a3b079e54690bda Mon Sep 17 00:00:00 2001
From: Dmitry Shemetov
Date: Tue, 13 Apr 2021 14:47:20 -0700
Subject: [PATCH] NANS for HHS:

* add missing columns
---
 hhs_hosp/delphi_hhs/run.py | 25 +++++++++++++-------
 hhs_hosp/tests/test_run.py | 47 +++++++++++++++++++++++++-------------
 2 files changed, 48 insertions(+), 24 deletions(-)

diff --git a/hhs_hosp/delphi_hhs/run.py b/hhs_hosp/delphi_hhs/run.py
index 5fc5c20d02..6a478ff0a4 100644
--- a/hhs_hosp/delphi_hhs/run.py
+++ b/hhs_hosp/delphi_hhs/run.py
@@ -9,9 +9,7 @@
 import time
 
 from delphi_epidata import Epidata
-from delphi_utils.export import create_export_csv
-from delphi_utils.geomap import GeoMapper
-from delphi_utils import get_structured_logger
+from delphi_utils import create_export_csv, get_structured_logger, Nans, GeoMapper
 import numpy as np
 import pandas as pd
 
@@ -63,6 +61,17 @@ def generate_date_ranges(start, end):
         output.append(Epidata.range(_date_to_int(start), _date_to_int(end)))
     return output
 
+def add_nancodes(df):
+    """Add nancodes to a signal dataframe."""
+    # Default missingness codes
+    df["missing_val"] = Nans.NOT_MISSING
+    df["missing_se"] = Nans.NOT_APPLICABLE
+    df["missing_sample_size"] = Nans.NOT_APPLICABLE
+
+    # Mark any remaining nans with unknown
+    remaining_nans_mask = df["val"].isnull()
+    df.loc[remaining_nans_mask, "missing_val"] = Nans.UNKNOWN
+    return df
 
 def run_module(params):
     """
@@ -114,9 +123,12 @@ def run_module(params):
                                         "state_code",
                                         from_col="state")
             if sensor.endswith("_prop"):
-                df=pop_proportion(df, geo_mapper)
+                df = pop_proportion(df, geo_mapper)
             df = make_geo(df, geo, geo_mapper)
+            df["se"] = np.nan
+            df["sample_size"] = np.nan
             df = smooth_values(df, smoother[0])
+            df = add_nancodes(df)
             if df.empty:
                 continue
             sensor_name = sensor + smoother[1]
@@ -162,12 +174,9 @@ def make_geo(state, geo, geo_mapper):
     if geo == "state":
         exported = state.rename(columns={"state": "geo_id"})
     else:
-        exported = geo_mapper.replace_geocode(state, "state_code", geo, new_col="geo_id")
-    exported["se"] = np.nan
-    exported["sample_size"] = np.nan
+        exported = geo_mapper.replace_geocode(state, "state_code", geo, new_col="geo_id", date_col="timestamp")
     return exported
 
-
 def make_signal(all_columns, sig):
     """Generate column sums according to signal name."""
     assert sig in SIGNALS, f"Unexpected signal name '{sig}';" + \
diff --git a/hhs_hosp/tests/test_run.py b/hhs_hosp/tests/test_run.py
index fc393e3365..073371511a 100644
--- a/hhs_hosp/tests/test_run.py
+++ b/hhs_hosp/tests/test_run.py
@@ -4,11 +4,11 @@
 import tempfile
 import os
 
-from delphi_hhs.run import _date_to_int, int_date_to_previous_day_datetime, generate_date_ranges, \
+from delphi_hhs.run import _date_to_int, add_nancodes, int_date_to_previous_day_datetime, generate_date_ranges, \
     make_signal, make_geo, run_module, pop_proportion
 from delphi_hhs.constants import SMOOTHERS, GEOS, SIGNALS, \
     CONFIRMED, SUM_CONF_SUSP, CONFIRMED_FLU, CONFIRMED_PROP, SUM_CONF_SUSP_PROP, CONFIRMED_FLU_PROP
-from delphi_utils.geomap import GeoMapper
+from delphi_utils import GeoMapper, Nans
 from freezegun import freeze_time
 import numpy as np
 import pandas as pd
@@ -85,7 +85,7 @@ def test_make_signal():
     })
     pd.testing.assert_frame_equal(expected_flu, make_signal(data, CONFIRMED_FLU))
     pd.testing.assert_frame_equal(expected_flu, make_signal(data, CONFIRMED_FLU_PROP))
-    
+
     with pytest.raises(Exception):
         make_signal(data, "zig")
 
@@ -93,7 +93,7 @@ def test_pop_proportion():
     geo_mapper = GeoMapper()
     state_pop = geo_mapper.get_crosswalk("state_code", "pop")
 
-    test_df = pd.DataFrame({
+    test_df = pd.DataFrame({
         'state': ['PA'],
         'state_code': [42],
         'timestamp': [datetime(year=2020, month=1, day=1)],
@@ -109,7 +109,7 @@ def test_pop_proportion():
             'val': [15/pa_pop*100000],})
     )
 
-    test_df= pd.DataFrame({
+    test_df = pd.DataFrame({
         'state': ['WV'],
         'state_code': [54],
         'timestamp': [datetime(year=2020, month=1, day=1)],
@@ -137,30 +137,23 @@ def test_make_geo():
         'val': [1., 2., 4.],
     })
 
-    template = {
-        'se': np.nan,
-        'sample_size': np.nan,
-    }
     expecteds = {
         "state": pd.DataFrame(
-            dict(template,
-                 geo_id=data.state,
+            dict(geo_id=data.state,
                  timestamp=data.timestamp,
                  val=data.val)),
         "hhs": pd.DataFrame(
-            dict(template,
-                 geo_id=['3', '5'],
+            dict(geo_id=['3', '5'],
                  timestamp=[test_timestamp] * 2,
                  val=[3., 4.])),
         "nation": pd.DataFrame(
-            dict(template,
-                 geo_id=['us'],
+            dict(geo_id=['us'],
                  timestamp=[test_timestamp],
                  val=[7.]))
     }
     for geo, expected in expecteds.items():
         result = make_geo(data, geo, geo_mapper)
-        for series in ["geo_id", "timestamp", "val", "se", "sample_size"]:
+        for series in ["geo_id", "timestamp", "val"]:
             pd.testing.assert_series_equal(expected[series], result[series], obj=f"{geo}:{series}")
 
@@ -207,3 +200,25 @@ def test_ignore_last_range_no_results(mock_covid_hosp, mock_export):
         }
     }
     assert not run_module(params)  # function should not raise value error and has no return value
+
+def test_add_nancode():
+    data = pd.DataFrame({
+        'state': ['PA','WV','OH'],
+        'state_code': [42, 54, 39],
+        'timestamp': [pd.to_datetime("20200601")]*3,
+        'val': [1, 2, np.nan],
+        'se': [np.nan] * 3,
+        'sample_size': [np.nan] * 3,
+    })
+    expected = pd.DataFrame({
+        'state': ['PA','WV','OH'],
+        'state_code': [42, 54, 39],
+        'timestamp': [pd.to_datetime("20200601")]*3,
+        'val': [1, 2, np.nan],
+        'se': [np.nan] * 3,
+        'sample_size': [np.nan] * 3,
+        'missing_val': [Nans.NOT_MISSING] * 2 + [Nans.UNKNOWN],
+        'missing_se': [Nans.NOT_APPLICABLE] * 3,
+        'missing_sample_size': [Nans.NOT_APPLICABLE] * 3,
+    })
+    pd.testing.assert_frame_equal(expected, add_nancodes(data))
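
For reference, a minimal, self-contained sketch of the nancode pattern this patch
introduces, runnable outside the indicator package. The IllustrativeNans enum is a
hypothetical stand-in for delphi_utils.Nans: it mirrors only the three members the
patch uses, and its numeric values are illustrative rather than taken from the library.

    from enum import IntEnum

    import numpy as np
    import pandas as pd


    class IllustrativeNans(IntEnum):
        """Stand-in for delphi_utils.Nans; member values are illustrative only."""
        NOT_MISSING = 0
        NOT_APPLICABLE = 1
        UNKNOWN = 5


    def add_nancodes_sketch(df):
        """Mirror of the patched add_nancodes, using the stand-in enum."""
        # Default missingness codes: val is reported, se and sample_size are not.
        df["missing_val"] = IllustrativeNans.NOT_MISSING
        df["missing_se"] = IllustrativeNans.NOT_APPLICABLE
        df["missing_sample_size"] = IllustrativeNans.NOT_APPLICABLE

        # Any value still NaN (e.g. after smoothing) gets an "unknown" code.
        df.loc[df["val"].isnull(), "missing_val"] = IllustrativeNans.UNKNOWN
        return df


    if __name__ == "__main__":
        frame = pd.DataFrame({
            "geo_id": ["pa", "wv", "oh"],
            "val": [1.0, 2.0, np.nan],
            "se": [np.nan] * 3,
            "sample_size": [np.nan] * 3,
        })
        print(add_nancodes_sketch(frame)[["geo_id", "val", "missing_val"]])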