Skip to content

Commit

Permalink
Add original and SI units
Browse files Browse the repository at this point in the history
  • Loading branch information
gutzbenj committed Apr 22, 2021
1 parent e320090 commit 16c300c
Show file tree
Hide file tree
Showing 20 changed files with 1,929 additions and 259 deletions.
418 changes: 253 additions & 165 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,8 @@ xarray = {version = "^0.17.0", optional = true}
h5netcdf = {version = "^0.10.0", markers = "sys_platform != 'darwin' or (sys_platform == 'darwin' and platform_machine != 'arm64')"}
measurement = "^3.2.0"
rapidfuzz = "^1.4.1"
Pint = "^0.17"
aenum = "^3.0.0"


[tool.poetry.dev-dependencies]
Expand Down
2 changes: 2 additions & 0 deletions tests/provider/dwd/observation/test_api_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,6 +322,7 @@ def test_dwd_observation_data_result_tabular():
end_date="1934-01-01", # few days after official start,
tidy=False,
humanize=False,
metric=False
).filter_by_station_id(
station_id=[1048],
)
Expand Down Expand Up @@ -389,6 +390,7 @@ def test_dwd_observation_data_result_tidy():
end_date="1934-01-01", # few days after official start,
tidy=True,
humanize=False,
metric=False
).filter_by_station_id(
station_id=(1048,),
)
Expand Down
14 changes: 7 additions & 7 deletions tests/provider/dwd/observation/test_api_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,17 @@


def test_dwd_observation_metadata_discover_parameters():
parameters = DwdObservationRequest.discover(filter_="minute_1", flatten=True)
parameters = DwdObservationRequest.discover(filter_="minute_1", flatten=True, metric=True)

assert (
json.dumps(
{
Resolution.MINUTE_1.name.lower(): [
DwdObservationParameter.MINUTE_1.PRECIPITATION_HEIGHT.name.lower(),
DwdObservationParameter.MINUTE_1.PRECIPITATION_HEIGHT_DROPLET.name.lower(),
DwdObservationParameter.MINUTE_1.PRECIPITATION_HEIGHT_ROCKER.name.lower(),
DwdObservationParameter.MINUTE_1.PRECIPITATION_FORM.name.lower(),
]
"minute_1": {
"precipitation_height": "kg / m ** 2",
"precipitation_height_droplet": "kg / m ** 2",
"precipitation_height_rocker": "kg / m ** 2",
"precipitation_form": "-"
}
},
indent=4,
)
Expand Down
2 changes: 2 additions & 0 deletions tests/provider/dwd/observation/test_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ def test_export_spreadsheet(tmpdir_factory):
start_date="2019",
end_date="2020",
tidy=False,
metric=False
).filter_by_station_id(
station_id=[1048],
)
Expand Down Expand Up @@ -527,6 +528,7 @@ def test_export_sqlite(tmpdir_factory):
start_date="2019",
end_date="2020",
tidy=False,
metric=False
).filter_by_station_id(
station_id=[1048],
)
Expand Down
7 changes: 4 additions & 3 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,16 +21,17 @@
("eccc", "observation", {"parameter": "daily", "resolution": "daily"}),
],
)
def test_api(provider, kind, kwargs):
@pytest.mark.parametrize("metric", (False, True))
def test_api(provider, kind, kwargs, metric):
""" Test main wetterdienst API """
# Build API
api = Wetterdienst(provider, kind)

# Discover parameters
assert api.discover()
assert api.discover(metric=metric)

# All stations
request = api(**kwargs).all()
request = api(**kwargs, metric=metric).all()

stations = request.df

Expand Down
103 changes: 87 additions & 16 deletions wetterdienst/core/scalar/request.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,24 @@ def _parameter_to_dataset_mapping(self) -> dict:
)
return dict()

@property
@abstractmethod
def _origin_unit_tree(self):
    """Unit tree with the source (original) unit of every parameter.

    Used by ``discover(metric=False)``; entries are looked up as
    ``tree[resolution][dataset][parameter].value`` (or without the
    dataset level for unique-dataset providers) and may carry a pint
    unit object — presumably defined per provider; must be supplied
    by each concrete subclass.
    """
    pass

@property
@abstractmethod
def _metric_unit_tree(self):
    """Unit tree with the metric (SI) unit of every parameter.

    Used by ``discover(metric=True)``; mirrors the structure of
    ``_origin_unit_tree`` (indexed by resolution/dataset/parameter,
    values exposing ``.value``) and must be supplied by each concrete
    subclass.
    """
    pass

@property
def datasets(self):
    """Return the names of all datasets available for the active resolution.

    Reads the dataset tree node selected by ``self._dataset_accessor``
    and lists its public attribute names.

    :return: list of dataset names (attribute order preserved)
    """
    tree_node = self._dataset_tree[self._dataset_accessor]
    # Filter every private/dunder name, not just __module__/__doc__:
    # a class __dict__ also carries __qualname__, __dict__ and
    # __weakref__, which must not leak into the dataset listing.
    # (Same check as the dataset loop in discover().)
    return [name for name in tree_node.__dict__ if not name.startswith("_")]

@property
@abstractmethod
def _values(self):
Expand Down Expand Up @@ -276,10 +294,18 @@ def __init__(
end_date: Optional[Union[str, datetime, pd.Timestamp]] = None,
humanize: bool = True,
tidy: bool = True,
metric: bool = True,
) -> None:
"""
:param parameter:
:param resolution:
:param period:
:param start_date: Start date for filtering stations for their available data
:param end_date: End date for filtering stations for their available data
:param humanize:
:param tidy:
:param metric:
"""

super().__init__()
Expand All @@ -301,6 +327,7 @@ def __init__(
)

self.tidy = tidy
self.metric = metric

log.info(
f"Processing request for "
Expand All @@ -311,7 +338,8 @@ def __init__(
f"start_date={self.start_date}, "
f"end_date={self.end_date}, "
f"humanize={self.humanize}, "
f"tidy={self.tidy}"
f"tidy={self.tidy}, "
f"metric={self.metric}"
)

@staticmethod
Expand Down Expand Up @@ -359,14 +387,19 @@ def convert_timestamps(
return pd.Timestamp(start_date), pd.Timestamp(end_date)

@classmethod
def discover(cls, filter_=None, dataset=None, flatten: bool = True) -> str:
def discover(
cls, filter_=None, dataset=None, metric: bool = True, flatten: bool = True
) -> str:
""" Function to print/discover available parameters """
# TODO: Refactor this!
# TODO: Refactor this!!!
flatten = cls._unique_dataset or flatten

filter_ = cls._setup_discover_filter(filter_)

filter_ = [f.name for f in filter_]
if metric:
unit_tree = cls._metric_unit_tree
else:
unit_tree = cls._origin_unit_tree

if flatten:
if dataset:
Expand All @@ -375,9 +408,27 @@ def discover(cls, filter_=None, dataset=None, flatten: bool = True) -> str:
parameters = {}

for f in filter_:
parameters[f.lower()] = []
for parameter in cls._parameter_base[f]:
parameters[f.lower()].append(parameter.name.lower())
parameters[f.name.lower()] = {}
for parameter in cls._parameter_base[f.name]:

if cls._unique_dataset:
unit = unit_tree[f.name][parameter.name].value
else:
dataset = cls._parameter_to_dataset_mapping[f][parameter]

unit = unit_tree[f.name][dataset.name][parameter.name].value

try:
unit = unit.units
except AttributeError:
pass

unit_string = format(unit, "~")

if unit_string == "":
unit_string = "-"

parameters[f.name.lower()][parameter.name.lower()] = unit_string

return json.dumps(parameters, indent=4)

Expand All @@ -393,31 +444,51 @@ def discover(cls, filter_=None, dataset=None, flatten: bool = True) -> str:
parameters = {}

for f in filter_:
parameters[f.lower()] = {}
parameters[f.name.lower()] = {}

for dataset in cls._dataset_tree[f].__dict__:
for dataset in cls._dataset_tree[f.name].__dict__:
if dataset.startswith("_") or dataset not in datasets_filter:
continue

parameters[f.lower()][dataset.lower()] = []
parameters[f.name.lower()][dataset.lower()] = {}

for parameter in cls._dataset_tree[f.name][dataset]:
unit = unit_tree[f.name][dataset][parameter.name].value

for parameter in cls._dataset_tree[f][dataset]:
parameters[f.lower()][dataset.lower()].append(
try:
unit = unit.units
except AttributeError:
pass

unit_string = format(unit, "~")

if unit_string == "":
unit_string = "-"

parameters[f.name.lower()][dataset.lower()][
parameter.name.lower()
)
] = unit_string

return json.dumps(parameters, indent=4)

@classmethod
def _setup_discover_filter(cls, filter_):
    """Helper method to create the filter for the discover method.

    Can be overwritten by subclasses to use something other than the
    resolution for filtering.

    :param filter_: raw filter value(s) (string/enum or iterable thereof);
        falsy means "all resolutions"
    :return: list of parsed resolution enum members
    """
    if cls._resolution_type == ResolutionType.FIXED:
        # A fixed-resolution provider has exactly one resolution, so any
        # user-supplied filter is meaningless.
        log.warning("resolution filter will be ignored due to fixed resolution")

        filter_ = [cls.resolution]
    elif not filter_:
        # No filter given -> discover everything.
        filter_ = [*cls._resolution_base]

    # Parse each entry against the provider's resolution enumeration.
    # NOTE(review): the scraped diff contained both the old and the new
    # version of this assignment; only the post-change form is kept.
    filter_ = (
        pd.Series(filter_)
        .apply(
            parse_enumeration_from_template, args=(cls._resolution_base, Resolution)
        )
        .tolist()
    )

    return filter_

Expand Down

0 comments on commit 16c300c

Please sign in to comment.