diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ed582373b..d571f5d3f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -48,7 +48,7 @@ jobs: uses: actions/cache@v2 env: # Increase this value to reset cache if `poetry.lock` has not changed. - CACHE_NUMBER: 3 + CACHE_NUMBER: 1 with: path: ${{ steps.poetry-cache-dir.outputs.dir }} key: poetry-${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }}-${{ env.CACHE_NUMBER }} diff --git a/README.rst b/README.rst index 5d9575d62..8a337b1f8 100644 --- a/README.rst +++ b/README.rst @@ -109,6 +109,12 @@ ECCC (Environnement et Changement Climatique Canada / Environment and Climate Ch - Hourly, daily, monthly, (annual) resolution - Time series of stations in Canada +NOAA (National Oceanic And Atmospheric Administration / United States Of America) + - Global Historical Climatology Network + - Historical, daily weather observations from around the globe + - more than 100k stations + - data for weather services that do not publish data themselves + To get better insight on which data we have currently made available and under which license those are published take a look at the data_ section. diff --git a/THIRD_PARTY_NOTICES b/THIRD_PARTY_NOTICES index c9ef339b2..08f8b50cb 100644 --- a/THIRD_PARTY_NOTICES +++ b/THIRD_PARTY_NOTICES @@ -1701,7 +1701,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. aenum -3.1.6 +3.1.7 BSD License Ethan Furman https://github.com/ethanfurman/aenum @@ -4273,6 +4273,32 @@ immunities granted to it by virtue of its status as an Intergovernmental Organization or submit itself to any jurisdiction. +docformatter +1.4 +MIT License +Steven Myint +https://github.com/myint/docformatter +Copyright (C) 2012-2018 Steven Myint + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + docutils 0.16 BSD License; GNU General Public License (GPL); Public Domain; Python Software Foundation License @@ -4894,6 +4920,13 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +eradicate +2.0.0 +MIT License +Steven Myint +https://github.com/myint/eradicate +UNKNOWN + et-xmlfile 1.1.0 MIT License @@ -5225,6 +5258,32 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
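The README hunk above only announces the NOAA GHCN dataset; this patch does not show the corresponding request code. A minimal usage sketch, assuming a hypothetical NoaaGhcnRequest class whose import path, parameter name, and station id are modeled on the DwdObservationRequest pattern used in the examples further down:

    from datetime import datetime

    # Hypothetical import path and class name -- the diff only documents
    # NOAA GHCN support in the README, so the real module layout may differ.
    from wetterdienst.provider.noaa.ghcn import NoaaGhcnRequest

    request = NoaaGhcnRequest(
        parameter="precipitation_height",  # assumed parameter name
        start_date=datetime(2020, 1, 1),
        end_date=datetime(2020, 1, 31),
    )

    # GHCN station id for New York Central Park, purely as an illustration.
    stations = request.filter_by_station_id(station_id=("USW00094728",))

    for result in stations.values.query():
        print(result.df)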
+flake8-2020 +1.6.1 +MIT License +Anthony Sottile +https://github.com/asottile/flake8-2020 +Copyright (c) 2019 Anthony Sottile + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + flake8-bandit 2.1.2 MIT License @@ -5679,6 +5738,67 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +flake8-copyright +0.2.2 +UNKNOWN +Virgil Dupras +https://github.com/savoirfairelinux/flake8-copyright +UNKNOWN + +flake8-docstrings +1.6.0 +MIT License +Simon ANDRÉ +https://gitlab.com/pycqa/flake8-docstrings +Copyright (c) 2013 Simon Andre, Ian Cordasco + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +flake8-eradicate +1.2.0 +MIT License +Nikita Sobolev +https://github.com/wemake-services/flake8-eradicate +MIT License + +Copyright (c) 2018 Nikita Sobolev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + flake8-isort 4.1.1 GNU General Public License v2 (GPLv2) @@ -6025,6 +6145,34 @@ library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. +flake8-plugin-utils +1.3.2 +MIT License +Afonasev Evgeniy +https://pypi.org/project/flake8-plugin-utils +MIT License + +Copyright (c) 2019 Afonasev Evgeniy + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + flake8-polyfill 1.0.2 MIT License @@ -6039,19 +6187,14 @@ Joseph Kahn https://github.com/jbkahn/flake8-print UNKNOWN -flakeheaven -0.11.0 +flake8-return +1.1.3 MIT License -Gram (@orsinium) -https://github.com/flakeheaven/flakeheaven -UNKNOWN - -flakehell -0.8.0 +Afonasev Evgeniy +https://pypi.org/project/flake8-return MIT License -Gram (@orsinium) -https://github.com/life4/flakehell -MIT License 2019 Gram (@orsinium) + +Copyright (c) 2019 Afonasev Evgeniy Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -6060,8 +6203,8 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, @@ -6072,6 +6215,13 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
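The lint-stack part of this change replaces the unmaintained flakehell with its fork flakeheaven and pulls in several new flake8 plugins (flake8-2020, flake8-copyright, flake8-docstrings, flake8-eradicate, flake8-plugin-utils, flake8-return) plus supporting tools (docformatter, eradicate, pydocstyle). flakeheaven keeps flakehell's pyproject.toml-based configuration; a rough sketch of how the new plugins might be enabled there, illustrative only and not this repository's actual settings:

    [tool.flakeheaven]
    format = "grouped"
    max_line_length = 120

    [tool.flakeheaven.plugins]
    pycodestyle = ["+*"]
    pyflakes = ["+*"]
    flake8-docstrings = ["+*"]
    flake8-eradicate = ["+*"]
    flake8-return = ["+*"]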
+flakeheaven +0.11.0 +MIT License +Gram (@orsinium) +https://github.com/flakeheaven/flakeheaven +UNKNOWN + freezegun 1.1.0 Apache Software License @@ -7191,7 +7341,7 @@ http://jupyter.org UNKNOWN jupyter-client -7.1.1 +7.1.2 BSD License Jupyter Development Team https://jupyter.org @@ -7393,7 +7543,7 @@ to indicate the copyright and license terms: jupyter-server -1.13.3 +1.13.4 BSD License Jupyter Development Team https://jupyter.org @@ -8167,7 +8317,7 @@ Copyright (C) 2008-2011 INADA Naoki multidict -5.2.0 +6.0.0 Apache Software License Andrew Svetlov https://github.com/aio-libs/multidict @@ -11380,6 +11530,36 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +pydocstyle +6.1.1 +MIT License +Amir Rachum +https://github.com/PyCQA/pydocstyle/ +Copyright (c) 2012 GreenSteam, + +Copyright (c) 2014-2020 Amir Rachum, + +Copyright (c) 2020 Sambhav Kothari, + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + pyflakes 2.4.0 MIT License @@ -15208,6 +15388,13 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +untokenize +0.1.1 +MIT License +Steven Myint +https://github.com/myint/untokenize +UNKNOWN + urllib3 1.26.8 MIT License diff --git a/example/dwd_describe_fields.py b/example/dwd_describe_fields.py index 5be410147..8ef68a083 100644 --- a/example/dwd_describe_fields.py +++ b/example/dwd_describe_fields.py @@ -7,7 +7,7 @@ ===== Acquire information about the data fields from DWD. -""" +""" # Noqa:D205,D400 import logging from pprint import pprint @@ -22,11 +22,12 @@ def fields_example(): + """Print DWD field examples for one specification.""" # Output in JSON format. - # import json; print(json.dumps(metadata.describe_fields(), indent=4)) + # import json; print(json.dumps(metadata.describe_fields(), indent=4)) # Noqa:E800 # Output in YAML format. - # import yaml; print(yaml.dump(dict(metadata.describe_fields()), default_style="|")) + # import yaml; print(yaml.dump(dict(metadata.describe_fields()), default_style="|")) # Noqa:E800 # Output in pretty-print format. pprint( @@ -49,6 +50,7 @@ def fields_example(): def main(): + """Run example.""" logging.basicConfig(level=logging.INFO) fields_example() diff --git a/example/mosmix_forecasts.py b/example/mosmix_forecasts.py index 9e0fbe021..805aa9544 100644 --- a/example/mosmix_forecasts.py +++ b/example/mosmix_forecasts.py @@ -12,7 +12,7 @@ Other MOSMIX variants are also listed and can be enabled on demand. 
-""" +""" # Noqa:D205,D400 from wetterdienst.provider.dwd.forecast import ( DwdForecastDate, DwdMosmixRequest, @@ -22,7 +22,7 @@ def mosmix_example(): - + """Retrieve Mosmix forecast data by DWD.""" # A. MOSMIX-L -- Specific stations - each station with own file request = DwdMosmixRequest( parameter=["DD", "ww"], @@ -71,6 +71,7 @@ def output_section(title, data): # pragma: no cover def main(): + """Run example.""" setup_logging() mosmix_example() diff --git a/example/observations_sql.py b/example/observations_sql.py index fad5a98b7..6f3f54543 100644 --- a/example/observations_sql.py +++ b/example/observations_sql.py @@ -15,7 +15,7 @@ pip install wetterdienst[sql] -""" +""" # Noqa:D205,D400 import logging from wetterdienst.provider.dwd.observation import ( @@ -28,7 +28,7 @@ def sql_example(): - + """Retrieve temperature data by DWD and filter by sql statement.""" request = DwdObservationRequest( parameter=[DwdObservationDataset.TEMPERATURE_AIR], resolution=DwdObservationResolution.HOURLY, @@ -41,10 +41,7 @@ def sql_example(): stations = request.filter_by_station_id(station_id=(1048,)) - sql = ( - "SELECT * FROM data WHERE " - "parameter='temperature_air_mean_200' AND value < -7.0;" - ) + sql = "SELECT * FROM data WHERE " "parameter='temperature_air_mean_200' AND value < -7.0;" log.info(f"Invoking SQL query '{sql}'") # Acquire observation values and filter with SQL. @@ -55,6 +52,7 @@ def sql_example(): def main(): + """Run example.""" logging.basicConfig(level=logging.INFO) sql_example() diff --git a/example/observations_stations.py b/example/observations_stations.py index 05972da4a..5582fe046 100644 --- a/example/observations_stations.py +++ b/example/observations_stations.py @@ -7,7 +7,7 @@ ===== Acquire station information from DWD. -""" +""" # Noqa:D205,D400 import logging from datetime import datetime @@ -22,6 +22,7 @@ def station_example(): + """Retrieve stations of DWD that measure temperature.""" stations = DwdObservationRequest( parameter=DwdObservationDataset.TEMPERATURE_AIR, resolution=DwdObservationResolution.HOURLY, @@ -38,6 +39,7 @@ def station_example(): def main(): + """Run example.""" logging.basicConfig(level=logging.INFO) station_example() diff --git a/example/radar/radar_composite_rw.py b/example/radar/radar_composite_rw.py index 5a864ae91..6579a3146 100644 --- a/example/radar/radar_composite_rw.py +++ b/example/radar/radar_composite_rw.py @@ -22,7 +22,7 @@ brew install gdal pip install wradlib -""" +""" # Noqa:D205,D400 import logging import os @@ -38,10 +38,7 @@ def plot(data: np.ndarray, attributes: dict, label: str = None): - """ - Convenience function for plotting RADOLAN data. - """ - + """Plot RADOLAN data with prefixed settings.""" # Get coordinates. radolan_grid_xy = wrl.georef.get_radolan_grid(900, 900) @@ -53,8 +50,7 @@ def plot(data: np.ndarray, attributes: dict, label: str = None): def plot_radolan(data: np.ndarray, attrs: dict, grid: np.dstack, clabel: str = None): - """ - Plotting function for RADOLAN data. + """Plot RADOLAN data. Shamelessly stolen from the wradlib RADOLAN Product Showcase documentation. 
https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html @@ -70,18 +66,14 @@ def plot_radolan(data: np.ndarray, attrs: dict, grid: np.dstack, clabel: str = N cb.set_label(clabel) plt.xlabel("x [km]") plt.ylabel("y [km]") - plt.title( - "{0} Product\n{1}".format(attrs["producttype"], attrs["datetime"].isoformat()) - ) + plt.title("{0} Product\n{1}".format(attrs["producttype"], attrs["datetime"].isoformat())) plt.xlim((x[0, 0], x[-1, -1])) plt.ylim((y[0, 0], y[-1, -1])) plt.grid(color="r") def radar_info(data: np.ndarray, attributes: dict): - """ - Display metadata from RADOLAN request. - """ + """Display metadata from RADOLAN request.""" log.info("Data shape: %s", data.shape) log.info("Attributes") @@ -90,7 +82,7 @@ def radar_rw_example(): - + """Retrieve radar RW reflectivity data from DWD.""" log.info("Acquiring radar RW composite data") radolan = DwdRadarValues( parameter=DwdRadarParameter.RW_REFLECTIVITY, @@ -112,6 +104,7 @@ def main(): + """Run example.""" radar_rw_example() diff --git a/example/radar/radar_radolan_cdc.py b/example/radar/radar_radolan_cdc.py index ed3302804..9337a1a6f 100644 --- a/example/radar/radar_radolan_cdc.py +++ b/example/radar/radar_radolan_cdc.py @@ -43,9 +43,10 @@ - https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html#RADOLAN-Composite # noqa - Hourly: https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html#RADOLAN-RW-Product # noqa - Daily: https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html#RADOLAN-SF-Product # noqa -""" +""" # Noqa:D205,D400 import logging import os +from typing import Optional import matplotlib.pyplot as plt import numpy as np @@ -63,10 +64,7 @@ def plot(data: np.ndarray, attributes: dict, label: str): - """ - Convenience function for plotting RADOLAN data. - """ - + """Plot RADOLAN data with predefined settings.""" # Get coordinates. radolan_grid_xy = wrl.georef.get_radolan_grid(900, 900) @@ -78,8 +76,7 @@ def plot_radolan(data: np.ndarray, attrs: dict, grid: np.dstack, clabel: str = None): - """ - Plotting function for RADOLAN data. + """Plot RADOLAN data. Shamelessly stolen from the wradlib RADOLAN Product Showcase documentation. https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html @@ -95,18 +92,14 @@ cb.set_label(clabel) plt.xlabel("x [km]") plt.ylabel("y [km]") - plt.title( - "{0} Product\n{1}".format(attrs["producttype"], attrs["datetime"].isoformat()) - ) + plt.title("{0} Product\n{1}".format(attrs["producttype"], attrs["datetime"].isoformat())) plt.xlim((x[0, 0], x[-1, -1])) plt.ylim((y[0, 0], y[-1, -1])) plt.grid(color="r") def radolan_info(data: np.ndarray, attributes: dict): - """ - Display metadata from RADOLAN request. - """ + """Display metadata from RADOLAN request.""" log.info("Data shape: %s", data.shape) log.info("Attributes:") @@ -114,24 +107,22 @@ print(f"- {key}: {value}") -def label_by_producttype(producttype: str) -> str: - """ - Compute label for RW/SF product. +def label_by_producttype(producttype: str) -> Optional[str]: + """Compute label for RW/SF product. :param producttype: Either RW or SF. :return: Label for plot.
""" if producttype == "RW": - label = "mm * h-1" + return "mm * h-1" elif producttype == "SF": - label = "mm * 24h-1" + return "mm * 24h-1" else: - label = None - return label + return None def radolan_grid_example(): - + """Retrieve radolan cdc gridded data by DWD.""" log.info("Acquiring RADOLAN_CDC data") radolan = DwdRadarValues( parameter=DwdRadarParameter.RADOLAN_CDC, @@ -157,6 +148,7 @@ def radolan_grid_example(): def main(): + """Run example.""" radolan_grid_example() diff --git a/example/radar/radar_radolan_rw.py b/example/radar/radar_radolan_rw.py index 27069f590..f331ecdd8 100644 --- a/example/radar/radar_radolan_rw.py +++ b/example/radar/radar_radolan_rw.py @@ -43,9 +43,10 @@ - https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html#RADOLAN-Composite # noqa - Hourly: https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html#RADOLAN-RW-Product # noqa - Daily: https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html#RADOLAN-SF-Product # noqa -""" +""" # Noqa:D205,D400 import logging import os +from typing import Optional import matplotlib.pyplot as plt import numpy as np @@ -62,10 +63,7 @@ def plot(data: np.ndarray, attributes: dict, label: str): - """ - Convenience function for plotting RADOLAN data. - """ - + """Plot RADOLAN data with prefixed settings.""" # Get coordinates. radolan_grid_xy = wrl.georef.get_radolan_grid(900, 900) @@ -77,8 +75,7 @@ def plot(data: np.ndarray, attributes: dict, label: str): def plot_radolan(data: np.ndarray, attrs: dict, grid: np.dstack, clabel: str = None): - """ - Plotting function for RADOLAN data. + """Plot RADOLAN data. Shamelessly stolen from the wradlib RADOLAN Product Showcase documentation. https://docs.wradlib.org/en/stable/notebooks/radolan/radolan_showcase.html @@ -94,42 +91,36 @@ def plot_radolan(data: np.ndarray, attrs: dict, grid: np.dstack, clabel: str = N cb.set_label(clabel) plt.xlabel("x [km]") plt.ylabel("y [km]") - plt.title( - "{0} Product\n{1}".format(attrs["producttype"], attrs["datetime"].isoformat()) - ) + plt.title("{0} Product\n{1}".format(attrs["producttype"], attrs["datetime"].isoformat())) plt.xlim((x[0, 0], x[-1, -1])) plt.ylim((y[0, 0], y[-1, -1])) plt.grid(color="r") def radolan_info(data: np.ndarray, attributes: dict): - """ - Display metadata from RADOLAN request. - """ + """Display metadata from RADOLAN request.""" log.info("Data shape: %s", data.shape) log.info("Attributes:") for key, value in attributes.items(): print(f"- {key}: {value}") -def label_by_producttype(producttype: str) -> str: - """ - Compute label for RW/SF product. +def label_by_producttype(producttype: str) -> Optional[str]: + """Compute label for RW/SF product. :param producttype: Either RW or SF. :return: Label for plot. 
""" if producttype == "RW": - label = "mm * h-1" + return "mm * h-1" elif producttype == "SF": - label = "mm * 24h-1" + return "mm * 24h-1" else: - label = None - return label + return None def radolan_rw_example(): - + """Retrieve RADOLAN rw reflectivity data by DWD.""" log.info("Acquiring RADOLAN RW composite data") radolan = DwdRadarValues( parameter=DwdRadarParameter.RW_REFLECTIVITY, @@ -152,6 +143,7 @@ def radolan_rw_example(): def main(): + """Run example.""" radolan_rw_example() diff --git a/example/radar/radar_scan_precip.py b/example/radar/radar_scan_precip.py index 36e25a7e5..26f32666b 100644 --- a/example/radar/radar_scan_precip.py +++ b/example/radar/radar_scan_precip.py @@ -23,12 +23,13 @@ brew install gdal pip install wradlib -""" +""" # Noqa:D205,D400 import logging import os from itertools import chain import matplotlib.pyplot as plt +import pytest import wradlib as wrl from wetterdienst.provider.dwd.radar import ( @@ -45,10 +46,7 @@ def plot(data: wrl.io.XRadVolume): - """ - Convenience function for plotting radar data. - """ - + """Plot radar data with prefixed settings.""" # Get first sweep in volume. swp0 = data[0].data @@ -64,9 +62,7 @@ def plot(data: wrl.io.XRadVolume): def radar_info(data: dict): - """ - Display data from radar request. - """ + """Display data from radar request.""" print(data) return @@ -77,8 +73,9 @@ def radar_info(data: dict): print(f"- {key}: {value}") +@pytest.mark.remote def radar_scan_precip(): - + """Retrieve radar sweep scan of precipitation provided by DWD.""" request_velocity = DwdRadarValues( parameter=DwdRadarParameter.SWEEP_PCP_VELOCITY_H, start_date=DwdRadarDate.MOST_RECENT, @@ -94,10 +91,7 @@ def radar_scan_precip(): subset=DwdRadarDataSubset.POLARIMETRIC, ) - log.info( - f"Acquiring radar SWEEP_PCP data for {DwdRadarSite.ESS} at " - f"{request_velocity.start_date}" - ) + log.info(f"Acquiring radar SWEEP_PCP data for {DwdRadarSite.ESS} at " f"{request_velocity.start_date}") # Submit requests. results = chain(request_velocity.query(), request_reflectivity.query()) @@ -118,6 +112,7 @@ def radar_scan_precip(): def main(): + """Run example.""" radar_scan_precip() diff --git a/example/radar/radar_scan_volume.py b/example/radar/radar_scan_volume.py index b28860783..33ffdef9f 100644 --- a/example/radar/radar_scan_volume.py +++ b/example/radar/radar_scan_volume.py @@ -23,7 +23,7 @@ brew install gdal pip install wradlib -""" +""" # Noqa:D205,D400 import logging import os from itertools import chain @@ -45,10 +45,7 @@ def plot(data: wrl.io.XRadVolume): - """ - Convenience function for plotting radar data. - """ - + """Plot radar data with prefixed settings.""" # Get first sweep in volume. swp0 = data[0].data @@ -64,9 +61,7 @@ def plot(data: wrl.io.XRadVolume): def radar_info(data: dict): - """ - Display data from radar request. - """ + """Display data from radar request.""" print(data) return @@ -78,7 +73,7 @@ def radar_info(data: dict): def radar_scan_volume(): - + """Retrieve radar sweep volume velocity h from site ESS in format HDF5 as subset polarimetric.""" request_velocity = DwdRadarValues( parameter=DwdRadarParameter.SWEEP_VOL_VELOCITY_H, start_date=DwdRadarDate.MOST_RECENT, @@ -94,10 +89,7 @@ def radar_scan_volume(): subset=DwdRadarDataSubset.POLARIMETRIC, ) - log.info( - f"Acquiring radar SWEEP_VOL data for {DwdRadarSite.ESS} at " - f"{request_velocity.start_date}" - ) + log.info(f"Acquiring radar SWEEP_VOL data for {DwdRadarSite.ESS} at " f"{request_velocity.start_date}") # Submit requests. 
results = chain(request_velocity.query(), request_reflectivity.query()) @@ -122,6 +114,7 @@ def main(): + """Run example.""" radar_scan_volume() diff --git a/example/radar/radar_site_dx.py b/example/radar/radar_site_dx.py index 4a4aa98ff..ffffddb86 100644 --- a/example/radar/radar_site_dx.py +++ b/example/radar/radar_site_dx.py @@ -26,7 +26,7 @@ brew install gdal pip install wradlib -""" +""" # Noqa:D205,D400 import logging import os @@ -46,20 +46,14 @@ def plot(data: np.ndarray): - """ - Convenience function for plotting radar data. - """ - + """Plot radar data with predefined settings.""" fig = plt.figure(figsize=(10, 8)) wrl.vis.plot_ppi(data, fig=fig, proj="cg") def radar_info(data: np.ndarray, metadata: dict): - """ - Display metadata from radara request. - """ + """Display metadata from radar request.""" log.info("Data shape: %s", data.shape) - # log.info("Metadata: %s", metadata) log.info("Metadata") for key, value in metadata.items(): @@ -67,7 +61,7 @@ def radar_dx_example(): - + """Retrieve radar DX reflectivity data from DWD.""" log.info("Acquiring radar DX data") request = DwdRadarValues( parameter=DwdRadarParameter.DX_REFLECTIVITY, @@ -91,6 +85,7 @@ def main(): + """Run example.""" radar_dx_example() diff --git a/example/radar/radar_sweep_hdf5.py b/example/radar/radar_sweep_hdf5.py index 9a4c37981..1dd5ed9f2 100644 --- a/example/radar/radar_sweep_hdf5.py +++ b/example/radar/radar_sweep_hdf5.py @@ -22,7 +22,7 @@ brew install gdal pip install wradlib -""" +""" # Noqa:D205,D400 import logging import os @@ -44,18 +44,13 @@ def plot(data: np.ndarray): - """ - Convenience function for plotting radar data. - """ - + """Plot radar data with predefined settings.""" fig = plt.figure(figsize=(10, 8)) wrl.vis.plot_ppi(data["dataset1/data1/data"], fig=fig, proj="cg") def radar_info(data: dict): - """ - Display data from radar request.
- """ + """Display data from radar request.""" print("Keys:", data.keys()) log.info("Data") @@ -64,7 +59,7 @@ def radar_info(data: dict): def radar_hdf5_example(): - + """Retrieve HDF5 data by DWD as example.""" log.info("Acquiring radar sweep data in HDF5") request = DwdRadarValues( parameter=DwdRadarParameter.SWEEP_PCP_VELOCITY_H, @@ -90,6 +85,7 @@ def radar_hdf5_example(): def main(): + """Run example.""" radar_hdf5_example() diff --git a/poetry.lock b/poetry.lock index 680ea23c7..5a215d8f6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,6 +1,6 @@ [[package]] name = "aenum" -version = "3.1.6" +version = "3.1.7" description = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" category = "main" optional = false @@ -309,14 +309,14 @@ pycparser = "*" [[package]] name = "cftime" -version = "1.5.1.1" +version = "1.5.2" description = "Time-handling functionality from netcdf4-python" category = "main" optional = true python-versions = "*" [package.dependencies] -numpy = "*" +numpy = ">1.13.3" [[package]] name = "charset-normalizer" @@ -566,6 +566,17 @@ docs = ["Sphinx (>=3)", "sphinx-rtd-theme (>=0.2)"] numpy = ["numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)"] tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "sphinx (>=3)", "tox (>=3.7.0)", "pytest (==5.4.3)", "pytest-pycodestyle (>=2)", "pytest-pydocstyle (>=2)", "pytest (>=6)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2.2.0)"] +[[package]] +name = "docformatter" +version = "1.4" +description = "Formats docstrings to follow PEP 257." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +untokenize = "*" + [[package]] name = "docutils" version = "0.16" @@ -605,6 +616,14 @@ category = "dev" optional = false python-versions = ">=2.7" +[[package]] +name = "eradicate" +version = "2.0.0" +description = "Removes commented-out code." 
+category = "dev" +optional = false +python-versions = "*" + [[package]] name = "et-xmlfile" version = "1.1.0" @@ -664,6 +683,18 @@ mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.8.0,<2.9.0" pyflakes = ">=2.4.0,<2.5.0" +[[package]] +name = "flake8-2020" +version = "1.6.1" +description = "flake8 plugin which checks for misuse of `sys.version` or `sys.version_info`" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +flake8 = ">=3.7" +importlib-metadata = {version = ">=0.9", markers = "python_version < \"3.8\""} + [[package]] name = "flake8-bandit" version = "2.1.2" @@ -732,6 +763,39 @@ python-versions = ">=3.7" flake8 = ">=3.0,<3.2.0 || >3.2.0" importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +[[package]] +name = "flake8-copyright" +version = "0.2.2" +description = "Adds copyright checks to flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "flake8-docstrings" +version = "1.6.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-eradicate" +version = "1.2.0" +description = "Flake8 plugin to find commented out code" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +attrs = "*" +eradicate = ">=2.0,<3.0" +flake8 = ">=3.5,<5" + [[package]] name = "flake8-isort" version = "4.1.1" @@ -748,6 +812,14 @@ testfixtures = ">=6.8.0,<7" [package.extras] test = ["pytest-cov"] +[[package]] +name = "flake8-plugin-utils" +version = "1.3.2" +description = "The package provides base classes and utils for flake8 plugin writing" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + [[package]] name = "flake8-polyfill" version = "1.0.2" @@ -773,32 +845,36 @@ pycodestyle = "*" six = "*" [[package]] -name = "flakehell" -version = "0.8.0" +name = "flake8-return" +version = "1.1.3" +description = "Flake8 plugin that checks return values" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +flake8-plugin-utils = ">=1.0,<2.0" + +[[package]] +name = "flakeheaven" +version = "0.11.0" description = "Flake8 wrapper to make it nice and configurable" category = "dev" optional = false python-versions = ">=3.5" -develop = false [package.dependencies] colorama = "*" entrypoints = "*" -flake8 = ">=3.8.0" +flake8 = ">=4.0.1" pygments = "*" toml = "*" urllib3 = "*" [package.extras] -dev = ["dlint", "flake8-2020", "flake8-aaa", "flake8-absolute-import", "flake8-alfred", "flake8-annotations-complexity", "flake8-bandit", "flake8-black", "flake8-broken-line", "flake8-bugbear", "flake8-builtins", "flake8-coding", "flake8-cognitive-complexity", "flake8-commas", "flake8-comprehensions", "flake8-debugger", "flake8-django", "flake8-docstrings", "flake8-eradicate", "flake8-executable", "flake8-expression-complexity", "flake8-fixme", "flake8-functions", "flake8-future-import", "flake8-import-order", "flake8-isort", "flake8-logging-format", "flake8-mock", "flake8-mutable", "flake8-mypy", "flake8-pep3101", "flake8-pie", "flake8-print", "flake8-printf-formatting", "flake8-pyi", "flake8-pytest-style", "flake8-pytest", "flake8-quotes", "flake8-requirements", "flake8-rst-docstrings", "flake8-scrapy", "flake8-spellcheck", "flake8-sql", "flake8-strict", "flake8-string-format", "flake8-tidy-imports", "flake8-todo", "flake8-use-fstring", "flake8-variables-names", 
"mccabe", "pandas-vet", "pep8-naming", "pylint", "typing-extensions", "wemake-python-styleguide", "pytest", "isort"] +dev = ["dlint", "flake8-2020", "flake8-aaa", "flake8-absolute-import", "flake8-alfred", "flake8-annotations-complexity", "flake8-bandit", "flake8-black", "flake8-broken-line", "flake8-bugbear", "flake8-builtins", "flake8-coding", "flake8-cognitive-complexity", "flake8-commas", "flake8-comprehensions", "flake8-debugger", "flake8-django", "flake8-docstrings", "flake8-eradicate", "flake8-executable", "flake8-expression-complexity", "flake8-fixme", "flake8-functions", "flake8-future-import", "flake8-import-order", "flake8-isort", "flake8-logging-format", "flake8-mock", "flake8-mutable", "flake8-mypy", "flake8-pep3101", "flake8-pie", "flake8-print", "flake8-printf-formatting", "flake8-pyi", "flake8-pytest-style", "flake8-pytest", "flake8-quotes", "flake8-requirements", "flake8-rst-docstrings", "flake8-scrapy", "flake8-spellcheck", "flake8-sql", "flake8-strict", "black (==21.10b0)", "flake8-string-format", "flake8-tidy-imports", "flake8-todo", "flake8-use-fstring", "flake8-variables-names", "mccabe", "pandas-vet", "pep8-naming", "pylint", "typing-extensions", "wemake-python-styleguide", "mypy", "pytest", "isort"] docs = ["alabaster", "pygments-github-lexers", "recommonmark", "sphinx"] -[package.source] -type = "git" -url = "https://github.com/mcarans/flakehell.git" -reference = "master" -resolved_reference = "1b84f4dd6c16232b5c0c6206511427676ab55f5b" - [[package]] name = "flask" version = "2.0.2" @@ -1183,7 +1259,7 @@ format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jupyter-client" -version = "7.1.1" +version = "7.1.2" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false @@ -1216,7 +1292,7 @@ traitlets = "*" [[package]] name = "jupyter-server" -version = "1.13.3" +version = "1.13.4" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." category = "dev" optional = false @@ -1399,11 +1475,11 @@ python-versions = "*" [[package]] name = "multidict" -version = "5.2.0" +version = "6.0.0" description = "multidict implementation" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "mypy-extensions" @@ -1850,6 +1926,20 @@ typing-extensions = ">=3.7.4.3" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + [[package]] name = "pyflakes" version = "2.4.0" @@ -2650,6 +2740,14 @@ category = "main" optional = true python-versions = ">=3.5" +[[package]] +name = "untokenize" +version = "0.1.1" +description = "Transforms tokens into original source code (while preserving whitespace)." 
+category = "dev" +optional = false +python-versions = "*" + [[package]] name = "urllib3" version = "1.26.8" @@ -2848,13 +2946,13 @@ sql = ["duckdb"] [metadata] lock-version = "1.1" python-versions = "^3.7.1" -content-hash = "e073da8b224a91579fea249bdf234656e26134ef38d460c5e94354aaf8b330af" +content-hash = "b1e6fe0eed4f9ca8092517e4002f55ef115cb6b627b8a75e548bd338e7c95295" [metadata.files] aenum = [ - {file = "aenum-3.1.6-py2-none-any.whl", hash = "sha256:76250a4e66610e9f10dff75bd883793417c5beb76a752e98204d369abab32987"}, - {file = "aenum-3.1.6-py3-none-any.whl", hash = "sha256:8b85ed27c45b6fe68fe1e058862f99c38ba286ce9d94c52d27f02f47eeb1c897"}, - {file = "aenum-3.1.6.tar.gz", hash = "sha256:3ba2c25dd03fbf3992353595be18152e2fb6042f47b526ea66cd5838bb9f1fb6"}, + {file = "aenum-3.1.7-py2-none-any.whl", hash = "sha256:9d4f960c045bfe049016b919c1ee4a55bd2d5ce47c338005a2f65a3c73c92699"}, + {file = "aenum-3.1.7-py3-none-any.whl", hash = "sha256:e04dcf6c733a20d6be5faa4c23759aa9adc57bb4ce7d55f305761b449ae68ca7"}, + {file = "aenum-3.1.7.tar.gz", hash = "sha256:78614f5c1e3064db7cf1f49a8bfdd9c3b7a9514cb3336c219e3f05501978d3e4"}, ] aiohttp = [ {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, @@ -3156,31 +3254,37 @@ cffi = [ {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] cftime = [ - {file = "cftime-1.5.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:06033c132beb5e560f1957c048fdfbb3c876b3cc2f3e5e0f664927d98b5fae44"}, - {file = "cftime-1.5.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdfc3f3da4efc22ae22096c2b20ac9d5da01b3038c4ed4407fcd501efab26099"}, - {file = "cftime-1.5.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4260ca466a445af7b243d84a7c96833b9261bdfd15ba86028d9710b7d700f490"}, - {file = "cftime-1.5.1.1-cp310-none-win_amd64.whl", hash = "sha256:cd33cd2e220c1c283f6fe3273352d7754795f86835ad04520f26a441a5b45193"}, - {file = "cftime-1.5.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:72eff06667df45cc87c7b50c87ee67471a32b73cdfaac903bb65945823a4bdf3"}, - {file = "cftime-1.5.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:940fcaec3159e8ba6ad93bd939108c665c35650fcf6cd026687b40b48bca8110"}, - {file = "cftime-1.5.1.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcca217713f6468ea0fe18637ff2a113bcb5cf9c7328ba535a0323033110e419"}, - {file = "cftime-1.5.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f549ad17102403984a9c865313f758c346cbb04dd4550946cab81a5fea132ff3"}, - {file = "cftime-1.5.1.1-cp36-none-win_amd64.whl", hash = "sha256:40517a5c2352c847f919b7f29963015c37c071ebc2e89ecc4f62aa4b80d9a72e"}, - {file = "cftime-1.5.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a220f9652f34038c0d9ca275bbfb50efc52dbf867733746cf8a7dcd3b5b84cf1"}, - {file = "cftime-1.5.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75dc17b031e85feece5a5770f00342dedeb4349dc530bcd1cd25deb7064cab32"}, - {file = "cftime-1.5.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff292fcd4d2322fe28bcc0c9e70a37e048858aa68a9aaa43710a73100fa1e77d"}, - {file = "cftime-1.5.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:01882a6f540f61b87db7e6c1a389d278e97b63b8c40457df7d5b98a25413c1a7"}, - {file = "cftime-1.5.1.1-cp37-none-win_amd64.whl", hash = "sha256:35cb5cbcbd9f8083265a3ce4283ebc62114e28d589d6bf62d98532ada6cfc890"}, - {file = "cftime-1.5.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd2311da209bbcff4221d344cb6ee217ce6b465832b013106e1ae21e947818f8"}, - {file = "cftime-1.5.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5850ed2ae6c218d57249cf75fa27accd4e3436617a8ea9cd2bea6cb031f449fd"}, - {file = "cftime-1.5.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3196615252312762ce2196664b2614d254d5e4d2b0cf5c7866c5916e45cec66"}, - {file = "cftime-1.5.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fae15273aafd0c9382637b03ef6fa51274c28c84dc3b459a969be2121f41c648"}, - {file = "cftime-1.5.1.1-cp38-none-win_amd64.whl", hash = "sha256:2ebdc5b20fb113ad0aa41c61fa7f9d52cb3a22b4b847ed2bf653d7e1f81ae8e2"}, - {file = "cftime-1.5.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5d0985d99a4c749d90016e993dedd6c37c5b6d459b5747eb3a14e9ad63dc669"}, - {file = "cftime-1.5.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db62b4383815596a92acb6cfa3779b208292aa17f90c10669ee50806b2795cee"}, - {file = "cftime-1.5.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae7151d4c0759f7263d3baf4ca7283f715f99a02cf3bd4cf2403ae0e5312b64e"}, - {file = "cftime-1.5.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcdf2a3d52a36e81e30d07ae93afdeb057cca8eb0a28c21cfef1f523ade1fe74"}, - {file = "cftime-1.5.1.1-cp39-none-win_amd64.whl", hash = "sha256:6db9129b4f26202c1ba4610b667693e686922011bd7166e7573dea668bb70c88"}, - {file = "cftime-1.5.1.1.tar.gz", hash = "sha256:6dc4d76ec7fe5a2d3c00dbe6604c757f1319613b75ef157554ef3648bf102a50"}, + {file = "cftime-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1eac1695ffcd7cc02e0fc608c5ba7373f1bc39b56a8e76ba62136f1d9ca7069b"}, + {file = "cftime-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:291d7d5f19525d2b667fc4ba7d18bcd95ece8384224f8e8ee22c8e72ebddf4de"}, + {file = "cftime-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:740d11d0b567390e2892d1dd81387cc2e547b1856ed3969dd51c4a089d9501b2"}, + {file = "cftime-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ff10b4ac0be73ee319c1dded6814e509d52ccb993da8a6496f153f5bb0e6efb"}, + {file = "cftime-1.5.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e8f183ed9143308d098b149539ae2a29b367ca825126efaa835d9fda7c7c3f"}, + {file = "cftime-1.5.2-cp310-none-win_amd64.whl", hash = "sha256:e7fe234c7bbc04af7d1d8a2ee5649f9a2f2de2aa424996453ed2173302bd9638"}, + {file = "cftime-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3fb31f26828c909c0234834a8882c137428d75638734ba8d2ef9e66a57e81fec"}, + {file = "cftime-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670bc389b7d7a7e61cf42e7842a820c059efb2adce50eacef51e25389daab01f"}, + {file = "cftime-1.5.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8716983468ebfa9b6fba0040881f431334ea56ad9d1df075ae15cd37ae7985"}, + {file = "cftime-1.5.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:832c7674f6fec8bdacc09928bd976600d886035ad39d2e781897a0e180552fce"}, + {file = "cftime-1.5.2-cp36-none-win_amd64.whl", hash = "sha256:695f45898a00acceed4795f76b2ac4f1455945fe9e3a174b0ea9fff9dc2c9a17"}, + {file = "cftime-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f24330721024500b5dbe87f164aa30b82a0e785f62d3d09fb9efeac5e5623385"}, + {file = "cftime-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4a94376989a6cf69d4a59e9e5871d19ded6de9956e4fc6b5d2775d509c49e"}, + {file = "cftime-1.5.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a818f59878ed029066748980ecfd435600be76b62f9269ddc06da91668ed506a"}, + {file = "cftime-1.5.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bfb44c72e277c12f60c4c4b6186963191743bf80a438246d37c7320a95fa54a"}, + {file = "cftime-1.5.2-cp37-none-win_amd64.whl", hash = "sha256:a9db712b1092771d7e79c3e44838f2ddd751fad874d90d05b37c209c9dd160ea"}, + {file = "cftime-1.5.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2b774870f21045c7b0728ac3cdea9c57a2121c01eb95bf17e0ed4aaf58a939ef"}, + {file = "cftime-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be258adaba9298d00cd420e27f5ba4a3e379aaf4d7009d8826f607ca58c9b813"}, + {file = "cftime-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bf7021e635126323ca84622838fef634f9dd5334c1c5658b7a9181bb4d819d27"}, + {file = "cftime-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c49599320537db1c067f1d77e00264a3d3c53ebde13a0e62269c1c2cf9828d72"}, + {file = "cftime-1.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f904e3efa04adc727e787b1306ef50c181e9c448a71656938d7483cc104c90c8"}, + {file = "cftime-1.5.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f3d8623b1f6c35a4b8c702bf1f4ec5340713b8e2944fd549f8f8a25abd2f0ff"}, + {file = "cftime-1.5.2-cp38-none-win_amd64.whl", hash = "sha256:cfebcf6e9e5afe015d848908cb7d56009aa98b3a63a28bcd6faee18b8d7b75de"}, + {file = "cftime-1.5.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f336afb2841ae769b33a720237ab2a4ccd8c14160f1332d2813a39ac9d1c045"}, + {file = "cftime-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7410c1435d52758c8cf2d46b8021c5bfcabf2ad85e360a466bc2b6d224316a82"}, + {file = "cftime-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:66af87cdde654d0cde0a0b936d11e0675b57697b369a142de8d5e5efae1e53d8"}, + {file = "cftime-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae8313bce817bf4a8de0569bc97332101fc580e457bc9ff7afe42ac090c7c7b"}, + {file = "cftime-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8ad56b7185a68fa4b0b27fee949cb7af2de1ca37207e177efc9231a0785562b"}, + {file = "cftime-1.5.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf9871732414fa2841452fd0f9233e52f4e1bf49ba47005ea81bff0f9c4f40a1"}, + {file = "cftime-1.5.2-cp39-none-win_amd64.whl", hash = "sha256:dfc6a15acce854571d82565aac3d47df40317f2290e8e304a89dcb9351de19eb"}, + {file = "cftime-1.5.2.tar.gz", hash = "sha256:375d37d9ab8bf501c048e44efce2276296e3d67bb276e891e0e93b0a8bbb988a"}, ] charset-normalizer = [ {file = "charset-normalizer-2.0.10.tar.gz", hash = "sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"}, @@ -3313,6 +3417,9 @@ dictdiffer = [ {file = 
"dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595"}, {file = "dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578"}, ] +docformatter = [ + {file = "docformatter-1.4.tar.gz", hash = "sha256:064e6d81f04ac96bc0d176cbaae953a0332482b22d3ad70d47c8a7f2732eef6f"}, +] docutils = [ {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, @@ -3352,6 +3459,9 @@ entrypoints = [ {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, ] +eradicate = [ + {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, +] et-xmlfile = [ {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, @@ -3372,6 +3482,10 @@ flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] +flake8-2020 = [ + {file = "flake8_2020-1.6.1-py2.py3-none-any.whl", hash = "sha256:efcc056fb723e1ea5307e3b663c7c328f1c23a5ff0a0fd3be695a918d8245c3a"}, + {file = "flake8_2020-1.6.1.tar.gz", hash = "sha256:db523e3383befc17c895219551ff6c9b2f6e0a5cae4c7739ea65a2238bdc6f74"}, +] flake8-bandit = [ {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, ] @@ -3391,10 +3505,27 @@ flake8-comprehensions = [ {file = "flake8-comprehensions-3.8.0.tar.gz", hash = "sha256:8e108707637b1d13734f38e03435984f6b7854fa6b5a4e34f93e69534be8e521"}, {file = "flake8_comprehensions-3.8.0-py3-none-any.whl", hash = "sha256:9406314803abe1193c064544ab14fdc43c58424c0882f6ff8a581eb73fc9bb58"}, ] +flake8-copyright = [ + {file = "flake8-copyright-0.2.2.tar.gz", hash = "sha256:5c3632dd8c586547b25fff4272282005fdbcba56eeb77b7487564aa636b6e533"}, + {file = "flake8_copyright-0.2.2-py2.py3-none-any.whl", hash = "sha256:616a960c9602ad2d0136bf3f12564e253caffe82f151d2982f78a12a42e1faa0"}, + {file = "flake8_copyright-0.2.2-py3-none-any.whl", hash = "sha256:dbad92ee5f51398722cd571b6e36cc3651914bf1b286b0e638bba1f4af0b6f5b"}, +] +flake8-docstrings = [ + {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, + {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, +] +flake8-eradicate = [ + {file = "flake8-eradicate-1.2.0.tar.gz", hash = "sha256:acaa1b6839ff00d284b805c432fdfa6047262bd15a5504ec945797e87b4de1fa"}, + {file = "flake8_eradicate-1.2.0-py3-none-any.whl", hash = "sha256:51dc660d0c1c1ed93af0f813540bbbf72ab2d3466c14e3f3bac371c618b6042f"}, +] flake8-isort = [ {file = "flake8-isort-4.1.1.tar.gz", hash = "sha256:d814304ab70e6e58859bc5c3e221e2e6e71c958e7005239202fee19c24f82717"}, {file = "flake8_isort-4.1.1-py3-none-any.whl", hash = 
"sha256:c4e8b6dcb7be9b71a02e6e5d4196cefcef0f3447be51e82730fb336fff164949"}, ] +flake8-plugin-utils = [ + {file = "flake8-plugin-utils-1.3.2.tar.gz", hash = "sha256:20fa2a8ca2decac50116edb42e6af0a1253ef639ad79941249b840531889c65a"}, + {file = "flake8_plugin_utils-1.3.2-py3-none-any.whl", hash = "sha256:1fe43e3e9acf3a7c0f6b88f5338cad37044d2f156c43cb6b080b5f9da8a76f06"}, +] flake8-polyfill = [ {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, @@ -3403,7 +3534,14 @@ flake8-print = [ {file = "flake8-print-4.0.0.tar.gz", hash = "sha256:5afac374b7dc49aac2c36d04b5eb1d746d72e6f5df75a6ecaecd99e9f79c6516"}, {file = "flake8_print-4.0.0-py3-none-any.whl", hash = "sha256:6c0efce658513169f96d7a24cf136c434dc711eb00ebd0a985eb1120103fe584"}, ] -flakehell = [] +flake8-return = [ + {file = "flake8-return-1.1.3.tar.gz", hash = "sha256:13a31edeb0c6157dd4f77166cdaa6141703d2b8b24def5558ae659852a003cb4"}, + {file = "flake8_return-1.1.3-py3-none-any.whl", hash = "sha256:4a266191f7ed69aa26b835ec90c5a5522fa8f79f5cf6363a877ac499f8eb418b"}, +] +flakeheaven = [ + {file = "flakeheaven-0.11.0-py3-none-any.whl", hash = "sha256:7c13bce95cfa496c47e46532f4ed0a020a6532d88f037e79bb9e342d9446992d"}, + {file = "flakeheaven-0.11.0.tar.gz", hash = "sha256:002f2de2bbc2ae72c2920cfc91cf068c89b7a561c61f63efe4a06c4af146c2dd"}, +] flask = [ {file = "Flask-2.0.2-py3-none-any.whl", hash = "sha256:cb90f62f1d8e4dc4621f52106613488b5ba826b2e1e10a33eac92f723093ab6a"}, {file = "Flask-2.0.2.tar.gz", hash = "sha256:7b2fb8e934ddd50731893bdcdb00fc8c0315916f9fcd50d22c7cc1a95ab634e2"}, @@ -3583,16 +3721,16 @@ jsonschema = [ {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, ] jupyter-client = [ - {file = "jupyter_client-7.1.1-py3-none-any.whl", hash = "sha256:f0c576cce235c727e30b0a0da88c2755d0947d0070fa1bc45f195079ffd64e66"}, - {file = "jupyter_client-7.1.1.tar.gz", hash = "sha256:540ca35e57e83c5ece81abd9b781a57cba39a37c60a2a30c8c1b2f6663544343"}, + {file = "jupyter_client-7.1.2-py3-none-any.whl", hash = "sha256:d56f1c57bef42ff31e61b1185d3348a5b2bcde7c9a05523ae4dbe5ee0871797c"}, + {file = "jupyter_client-7.1.2.tar.gz", hash = "sha256:4ea61033726c8e579edb55626d8ee2e6bf0a83158ddf3751b8dd46b2c5cd1e96"}, ] jupyter-core = [ {file = "jupyter_core-4.9.1-py3-none-any.whl", hash = "sha256:1c091f3bbefd6f2a8782f2c1db662ca8478ac240e962ae2c66f0b87c818154ea"}, {file = "jupyter_core-4.9.1.tar.gz", hash = "sha256:dce8a7499da5a53ae3afd5a9f4b02e5df1d57250cf48f3ad79da23b4778cd6fa"}, ] jupyter-server = [ - {file = "jupyter_server-1.13.3-py3-none-any.whl", hash = "sha256:3608129b90cfdcfb7dd275f15a1113d119b7c19e8356303b14312ac5c216c42a"}, - {file = "jupyter_server-1.13.3.tar.gz", hash = "sha256:4d622161f4d378ff28548b49cc180024ce102d25ba5805821fcc17ab1bc5c754"}, + {file = "jupyter_server-1.13.4-py3-none-any.whl", hash = "sha256:3a1df2e27a322e84c028e52272e6ff72fd875f9a74c84409263c5c2f1afbf6fa"}, + {file = "jupyter_server-1.13.4.tar.gz", hash = "sha256:5fb5a219385338b1d13a013a68f54688b6a69ecff4e757fd230e27ecacdbf212"}, ] jupyter-server-mathjax = [ {file = "jupyter_server_mathjax-0.2.3-py3-none-any.whl", hash = "sha256:740de2ed0d370f1856faddfaf8c09a6d7435d09d3672f24826451467b268969d"}, @@ -3866,78 +4004,65 @@ msgpack = [ {file = "msgpack-1.0.3.tar.gz", hash = 
"sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e"}, ] multidict = [ - {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3822c5894c72e3b35aae9909bef66ec83e44522faf767c0ad39e0e2de11d3b55"}, - {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:28e6d883acd8674887d7edc896b91751dc2d8e87fbdca8359591a13872799e4e"}, - {file = "multidict-5.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b61f85101ef08cbbc37846ac0e43f027f7844f3fade9b7f6dd087178caedeee7"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9b668c065968c5979fe6b6fa6760bb6ab9aeb94b75b73c0a9c1acf6393ac3bf"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517d75522b7b18a3385726b54a081afd425d4f41144a5399e5abd97ccafdf36b"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b4ac3ba7a97b35a5ccf34f41b5a8642a01d1e55454b699e5e8e7a99b5a3acf5"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:df23c83398715b26ab09574217ca21e14694917a0c857e356fd39e1c64f8283f"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e58a9b5cc96e014ddf93c2227cbdeca94b56a7eb77300205d6e4001805391747"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f76440e480c3b2ca7f843ff8a48dc82446b86ed4930552d736c0bac507498a52"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cfde464ca4af42a629648c0b0d79b8f295cf5b695412451716531d6916461628"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0fed465af2e0eb6357ba95795d003ac0bdb546305cc2366b1fc8f0ad67cc3fda"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b70913cbf2e14275013be98a06ef4b412329fe7b4f83d64eb70dce8269ed1e1a"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5635bcf1b75f0f6ef3c8a1ad07b500104a971e38d3683167b9454cb6465ac86"}, - {file = "multidict-5.2.0-cp310-cp310-win32.whl", hash = "sha256:77f0fb7200cc7dedda7a60912f2059086e29ff67cefbc58d2506638c1a9132d7"}, - {file = "multidict-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:9416cf11bcd73c861267e88aea71e9fcc35302b3943e45e1dbb4317f91a4b34f"}, - {file = "multidict-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd77c8f3cba815aa69cb97ee2b2ef385c7c12ada9c734b0f3b32e26bb88bbf1d"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ec9aea6223adf46999f22e2c0ab6cf33f5914be604a404f658386a8f1fba37"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5283c0a00f48e8cafcecadebfa0ed1dac8b39e295c7248c44c665c16dc1138b"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f79c19c6420962eb17c7e48878a03053b7ccd7b69f389d5831c0a4a7f1ac0a1"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e4a67f1080123de76e4e97a18d10350df6a7182e243312426d508712e99988d4"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:94b117e27efd8e08b4046c57461d5a114d26b40824995a2eb58372b94f9fca02"}, - {file 
= "multidict-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2e77282fd1d677c313ffcaddfec236bf23f273c4fba7cdf198108f5940ae10f5"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:116347c63ba049c1ea56e157fa8aa6edaf5e92925c9b64f3da7769bdfa012858"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:dc3a866cf6c13d59a01878cd806f219340f3e82eed514485e094321f24900677"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac42181292099d91217a82e3fa3ce0e0ddf3a74fd891b7c2b347a7f5aa0edded"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f0bb0973f42ffcb5e3537548e0767079420aefd94ba990b61cf7bb8d47f4916d"}, - {file = "multidict-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:ea21d4d5104b4f840b91d9dc8cbc832aba9612121eaba503e54eaab1ad140eb9"}, - {file = "multidict-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e6453f3cbeb78440747096f239d282cc57a2997a16b5197c9bc839099e1633d0"}, - {file = "multidict-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3def943bfd5f1c47d51fd324df1e806d8da1f8e105cc7f1c76a1daf0f7e17b0"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35591729668a303a02b06e8dba0eb8140c4a1bfd4c4b3209a436a02a5ac1de11"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8cacda0b679ebc25624d5de66c705bc53dcc7c6f02a7fb0f3ca5e227d80422"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:baf1856fab8212bf35230c019cde7c641887e3fc08cadd39d32a421a30151ea3"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a43616aec0f0d53c411582c451f5d3e1123a68cc7b3475d6f7d97a626f8ff90d"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25cbd39a9029b409167aa0a20d8a17f502d43f2efebfe9e3ac019fe6796c59ac"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a2cbcfbea6dc776782a444db819c8b78afe4db597211298dd8b2222f73e9cd0"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d2d7d1fff8e09d99354c04c3fd5b560fb04639fd45926b34e27cfdec678a704"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a37e9a68349f6abe24130846e2f1d2e38f7ddab30b81b754e5a1fde32f782b23"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:637c1896497ff19e1ee27c1c2c2ddaa9f2d134bbb5e0c52254361ea20486418d"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9815765f9dcda04921ba467957be543423e5ec6a1136135d84f2ae092c50d87b"}, - {file = "multidict-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:8b911d74acdc1fe2941e59b4f1a278a330e9c34c6c8ca1ee21264c51ec9b67ef"}, - {file = "multidict-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:380b868f55f63d048a25931a1632818f90e4be71d2081c2338fcf656d299949a"}, - {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e7d81ce5744757d2f05fc41896e3b2ae0458464b14b5a2c1e87a6a9d69aefaa8"}, - {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d1d55cdf706ddc62822d394d1df53573d32a7a07d4f099470d3cb9323b721b6"}, - {file = "multidict-5.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4771d0d0ac9d9fe9e24e33bed482a13dfc1256d008d101485fe460359476065"}, - {file = 
"multidict-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da7d57ea65744d249427793c042094c4016789eb2562576fb831870f9c878d9e"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdd68778f96216596218b4e8882944d24a634d984ee1a5a049b300377878fa7c"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecc99bce8ee42dcad15848c7885197d26841cb24fa2ee6e89d23b8993c871c64"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:067150fad08e6f2dd91a650c7a49ba65085303fcc3decbd64a57dc13a2733031"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:78c106b2b506b4d895ddc801ff509f941119394b89c9115580014127414e6c2d"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6c4fa1ec16e01e292315ba76eb1d012c025b99d22896bd14a66628b245e3e01"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b227345e4186809d31f22087d0265655114af7cda442ecaf72246275865bebe4"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:06560fbdcf22c9387100979e65b26fba0816c162b888cb65b845d3def7a54c9b"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7878b61c867fb2df7a95e44b316f88d5a3742390c99dfba6c557a21b30180cac"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:246145bff76cc4b19310f0ad28bd0769b940c2a49fc601b86bfd150cbd72bb22"}, - {file = "multidict-5.2.0-cp38-cp38-win32.whl", hash = "sha256:c30ac9f562106cd9e8071c23949a067b10211917fdcb75b4718cf5775356a940"}, - {file = "multidict-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:f19001e790013ed580abfde2a4465388950728861b52f0da73e8e8a9418533c0"}, - {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c1ff762e2ee126e6f1258650ac641e2b8e1f3d927a925aafcfde943b77a36d24"}, - {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd6c9c50bf2ad3f0448edaa1a3b55b2e6866ef8feca5d8dbec10ec7c94371d21"}, - {file = "multidict-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc66d4016f6e50ed36fb39cd287a3878ffcebfa90008535c62e0e90a7ab713ae"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9acb76d5f3dd9421874923da2ed1e76041cb51b9337fd7f507edde1d86535d6"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfc924a7e946dd3c6360e50e8f750d51e3ef5395c95dc054bc9eab0f70df4f9c"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32fdba7333eb2351fee2596b756d730d62b5827d5e1ab2f84e6cbb287cc67fe0"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b9aad49466b8d828b96b9e3630006234879c8d3e2b0a9d99219b3121bc5cdb17"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93de39267c4c676c9ebb2057e98a8138bade0d806aad4d864322eee0803140a0"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9bef5cff994ca3026fcc90680e326d1a19df9841c5e3d224076407cc21471a1"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:5f841c4f14331fd1e36cbf3336ed7be2cb2a8f110ce40ea253e5573387db7621"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:38ba256ee9b310da6a1a0f013ef4e422fca30a685bcbec86a969bd520504e341"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3bc3b1621b979621cee9f7b09f024ec76ec03cc365e638126a056317470bde1b"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ee908c070020d682e9b42c8f621e8bb10c767d04416e2ebe44e37d0f44d9ad5"}, - {file = "multidict-5.2.0-cp39-cp39-win32.whl", hash = "sha256:1c7976cd1c157fa7ba5456ae5d31ccdf1479680dc9b8d8aa28afabc370df42b8"}, - {file = "multidict-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:c9631c642e08b9fff1c6255487e62971d8b8e821808ddd013d8ac058087591ac"}, - {file = "multidict-5.2.0.tar.gz", hash = "sha256:0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"}, + {file = "multidict-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:26085160f0f7d495d25a2ca6218180bda34fd71b2282b67fe5f86742e1571e23"}, + {file = "multidict-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc077dbcb5ed6c7e905bb461d95aaa3484e7d87b7a0acf19f6fbe4006c85069a"}, + {file = "multidict-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:11458ac6d54477f120aab6a5298f1c1ada15ef7464425f340f01c5099841e8d9"}, + {file = "multidict-6.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2738c572c208da2530f224cf8df6bdaaea2007e911a6e34cf565c96b798e979c"}, + {file = "multidict-6.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d6e4814e27733eec32d3f4ef0e6ff26b0bbeb82e0c3dcf4464267c643607ab3"}, + {file = "multidict-6.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eca49c108b150d04868b2985402f992aab43851108a53d15aebe0db02bfd7dce"}, + {file = "multidict-6.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b79957ed1484ce21a8092a6f497e840a1562c339364c3c4601a30168dfe37f7"}, + {file = "multidict-6.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caead879750449e14d284c9191519322f6bfcc53cc4226f3f65076e2ed0f39b7"}, + {file = "multidict-6.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76a85573b9cc2d444614f498dac21af47a9eadade8b0efb81101a197f44825b3"}, + {file = "multidict-6.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9850f7092cd075949f72432592bc90c8f90f9a3f740b33ac6cef22e65ec94844"}, + {file = "multidict-6.0.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d41ea7fe6dcab6cde44123c53d5fa98ab7c20b2100dd93ba407c26fceb0198ea"}, + {file = "multidict-6.0.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:15344d052efd3804c3543d28971a6468fe96da02370390c10805189ec2c5ccfa"}, + {file = "multidict-6.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4dabfec9e7300a307f8c3f8dc6d58f372f35f0e1658be7ad6852b78ccb51b091"}, + {file = "multidict-6.0.0-cp310-cp310-win32.whl", hash = "sha256:80d5eb932d59719b5e80664e01d36874d45ed880baf400f04beee529d196d13e"}, + {file = "multidict-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:06783c9323152d33ddf5faa9fe6aca032bcb5b62808cd3bf60de29e3b20ed3bd"}, + {file = "multidict-6.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4228dab0981bec49390071671d77156de9644897323501965a11d4e0c68fb001"}, + {file = "multidict-6.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:13ac8cfcac4e251ea19f1e730031c4698258e389ccb292f8764018505a7894de"}, + {file = "multidict-6.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b0676c21a31290d276ead3aaf216d5bd0721a126be8f6e3d46b8259f01300699"}, + {file = "multidict-6.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6331d8e62161633daef698cf0dd04d9617cb1fe957647caaf6fc947948e771"}, + {file = "multidict-6.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49befe4287176ba535f13b40fff5581f4a07e124cb43853695a47e154f39cab5"}, + {file = "multidict-6.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a37600049ec482fce72d8f5b4e3737222bdc86f2239fd4ad9c1354155cfaaff9"}, + {file = "multidict-6.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:498e9bb39bfec122f0abaf3d6499f305c609f7ca5e6eaad97dd3e43a73dd94b4"}, + {file = "multidict-6.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1988d59390897c92f8b012267ba1d1c389677a73151c9962ce32ec09039122c6"}, + {file = "multidict-6.0.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0e7b7cfb3304b1defda145ddb491b5788c8efd5a5003df406b4c124e703af725"}, + {file = "multidict-6.0.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7967e613a1ca8502f94fd6e9e7c4154955a72322290700a5680c5bd5cdc1b9e8"}, + {file = "multidict-6.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f6ee62540e5c8d9938e8aafa23de5a05a9e8ff1c19cbd0fa0bf9b9061d66751a"}, + {file = "multidict-6.0.0-cp37-cp37m-win32.whl", hash = "sha256:d91bb418d38e804d0c8deb3bdefe74ce135396d1f1dea526e81b5c153f2a2731"}, + {file = "multidict-6.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:55878da557e8fa5d51d4747c4d750288cdfd0d1a6c0248809890fd31e87ba31d"}, + {file = "multidict-6.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2275c097c2e8de0b6b9f368e36ada9160d68536f65734f9366bf9019021e73a9"}, + {file = "multidict-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:32b6288b1e444c0055116a084d22ec28a212b241d09855644ed96d377c7b4e96"}, + {file = "multidict-6.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:13570d7c4b274b78db3826cdbeccc7e7bef229b7f933e162aea40ab2657dd55f"}, + {file = "multidict-6.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9722dc28f088a25de4aa17199b51d80d895d07417bbe7579393a6338ebbe1091"}, + {file = "multidict-6.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf5cccb38dd142e035d312ebe6ab785d7cdd6d074ce423d1cc30b22476cc9f45"}, + {file = "multidict-6.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c4c42c3da96de49a3488354520cc06bcc5bf3d585a4a46fcb2800840a008fa"}, + {file = "multidict-6.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6eee3d4ffea677176239b8834d7747d1d3211fd6bba1226eef68a26acbac699"}, + {file = "multidict-6.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cea1af57c624ef968887b960e1d8c4118f0983b406ed0f3d2e3cdc6417629f6f"}, + {file = "multidict-6.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2ea6ca1c6aa6680415b96cb4934e40ca1ed329b94c9ffc2a8f5c6658106b74f"}, + {file = "multidict-6.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:818baca084516eddeb5c6949a6decb6b89bf5aa2fd72cf6631a2e562d221d44f"}, + {file = "multidict-6.0.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8d553ea63140e5e904f789f504dfdcc406b0d08d8f1a94389e6ab9f2ae81c92d"}, + 
{file = "multidict-6.0.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:32285d67f5fd7d49aedd9faeccf28cca5d9e15b87f47dd770d1fbadfbca33c0c"}, + {file = "multidict-6.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:72ad371f0c523cf6a23862b71725a8bfce9f808cbb36b1e54feec61b3cf51cf0"}, + {file = "multidict-6.0.0-cp38-cp38-win32.whl", hash = "sha256:a94f35d0de60986d4793e3670dbe91f275bedad2be6162f8f410e5396dddd4cc"}, + {file = "multidict-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:a9e1f103e80d9691e57d5be813f23a00bdc548e859c655a3cd4c25f8fd13f56d"}, + {file = "multidict-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:93335cd685c71bae6a990a1db555c4d87c011a290682a229f56dcc9e88134256"}, + {file = "multidict-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7446d2a416882207ae97ede18a2cc5a5fa49d752e8efab12f0dc8f15dfc6ec74"}, + {file = "multidict-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:86ccb63482adbb05832c758edae8b687bd0ce933c7ef9076c89f51d2435ed8be"}, + {file = "multidict-6.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d4be1b5525f7220b18d4e89065373b8190dd6bca506e7706af6a5cc38596ca3"}, + {file = "multidict-6.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa958a183461dff01e8e686b6731e5bd6972ca550d2248cf0bc093c549bdd251"}, + {file = "multidict-6.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e962c57e8d49fed6f767fd7e3dc7ea8055203f4598abe75ab7743dc1723ecb6f"}, + {file = "multidict-6.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e7a915c3dc65f1ab278416fd533d02181e1faf5e04cd7cd92fd905e6b1116d4"}, + {file = "multidict-6.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e554c96bf7ffae520d9a1f8f0dc2feae3797de5b754b71b2f63cec7e593e02b2"}, + {file = "multidict-6.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f803054364391b8f91bc58fcba4c14bcdb26cdd3df5db7a1b48cca24c60ef422"}, + {file = "multidict-6.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f869d32ae08355f13ad3d28c9964b99b9a2b1dc2623ac289bac12a51891ff18f"}, + {file = "multidict-6.0.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:644691b1cdf59e690b2d9861bcb155657eea2c18b41e1c30ff2b00bd94eabd90"}, + {file = "multidict-6.0.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:088ed9cc3b373b18379dc6dcd68032445597a6508734c46d1ae1013fb5965ab8"}, + {file = "multidict-6.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b773add5af4212cf3f9dcb051b86fa0f13c0227cff6b6156be0bb1d18123ec1a"}, + {file = "multidict-6.0.0-cp39-cp39-win32.whl", hash = "sha256:051edc5bb1325535b50808103433a7294a7ef57d33dd261267e726b688a1a1b0"}, + {file = "multidict-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:38129a7b037e9ae57f50b5fafeb2964dfcfbddaca6ffcb5178dc79e8d8392e72"}, + {file = "multidict-6.0.0.tar.gz", hash = "sha256:84eae1eb73e64a34925eda18961e09e7441b2864d302fd726fa684d14151b3ae"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -4322,6 +4447,10 @@ pydantic = [ {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, ] +pydocstyle = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = 
"sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, @@ -4916,6 +5045,9 @@ unidecode = [ {file = "Unidecode-1.3.2-py3-none-any.whl", hash = "sha256:215fe33c9d1c889fa823ccb66df91b02524eb8cc8c9c80f9c5b8129754d27829"}, {file = "Unidecode-1.3.2.tar.gz", hash = "sha256:669898c1528912bcf07f9819dc60df18d057f7528271e31f8ec28cc88ef27504"}, ] +untokenize = [ + {file = "untokenize-0.1.1.tar.gz", hash = "sha256:3865dbbbb8efb4bb5eaa72f1be7f3e0be00ea8b7f125c69cbd1f5fda926f37a2"}, +] urllib3 = [ {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, diff --git a/pyproject.toml b/pyproject.toml index ec26e8fe5..3eb776b72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -141,33 +141,53 @@ timezonefinder = "^5.2.0" [tool.poetry.dev-dependencies] +poethepoet = "^0.9.0" + +pip-licenses = "^3.3.0" + +# Formatting/Linting black = "^20.8b1" -flakehell = { git = "https://github.com/mcarans/flakehell.git" }# "^0.7.1" + isort = "^5.7.0" + +flakeheaven = "^0.11.0" +flake8-bandit = "^2.1.2" flake8-black = "^0.2.1" flake8-bugbear = "^20.1.4" -flake8-bandit = "^2.1.2" +flake8-builtins = "^1.5.3" +flake8-comprehensions = "^3.7.0" +flake8-copyright = "^0.2.2" +flake8-docstrings = "^1.6.0" +flake8-eradicate = "^1.2.0" flake8-isort = "^4.0.0" -coverage = { version = "^5.3", extras = ["toml"] } +flake8-print = "^4.0.0" +flake8-return = "^1.1.3" +flake8-2020 = "^1.6.1" + +# Testing pytest = "^6.0.2" pytest-cov = "^2.10.1" -pytest-notebook = "^0.6.1" pytest-dictsdiff = "^0.5.8" +pytest-notebook = "^0.6.1" +pytest-xdist = "^2.2.1" + mock = "^4.0.2" +freezegun = "^1.1.0" + +# Test required libraries surrogate = "^0.1" pybufrkit = "^0.2.17" -freezegun = "^1.1.0" -poethepoet = "^0.9.0" -pip-licenses = "^3.3.0" -sphinx-autobuild = "^2020.9.1" selenium = "^3.141.0" percy = "^2.0.2" h5py = "^3.1.0" h5netcdf = "^0.11.0" -pytest-xdist = "^2.2.1" -flake8-print = "^4.0.0" -flake8-builtins = "^1.5.3" -flake8-comprehensions = "^3.7.0" + +# Coverage +coverage = { version = "^5.3", extras = ["toml"] } + +# Docs +sphinx-autobuild = "^2020.9.1" +docformatter = "^1.4" [tool.poetry.extras] mpl = ["matplotlib"] @@ -201,62 +221,51 @@ wddump = 'wetterdienst.provider.dwd.radar.cli:wddump' from = {format = "poetry", path = "pyproject.toml"} to = {format = "pip", path = "requirements.txt"} +[tool.black] +line-length = 120 + [tool.isort] profile = "black" multi_line_output = 3 -[tool.flakehell] +[tool.flakeheaven] exclude = [ "example/climate_observations.ipynb" ] -max-line-length = 88 +format = "grouped" +max-line-length = 120 extended_default_ignore = [] -[tool.flakehell.plugins] -pycodestyle = ["+*", "-E203", "-W503"] +[tool.flakeheaven.plugins] +pycodestyle = ["+*", "-E203", "-W503", "-E501", "-E231"] pyflakes = ["+*"] flake8-bandit = ["+*"] flake8-black = ["+*"] flake8-bugbear = ["+*"] -flake8-isort = ["+*"] -flake8-print = ["+*"] flake8-builtins = ["+*", "-A003"] flake8-comprehensions = ["+*"] +flake8-copyright = ["+*"] +flake8-eradicate = ["+*"] 
+flake8-isort = ["+*"] +flake8-print = ["+*"] +flake8-return = ["+*"] +flake8-2020 = ["+*"] -[tool.flakehell.exceptions."**/__init__.py"] +[tool.flakeheaven.exceptions."**/__init__.py"] pyflakes = ["-F401"] - -[tool.flakehell.exceptions."example/"] +[tool.flakeheaven.exceptions."example/"] flake8-print = ["-*"] - -[tool.flakehell.exceptions."tests/"] +[tool.flakeheaven.exceptions."tests/"] flake8-bandit = ["-S101", "-S106"] -[tool.flakehell.exceptions."tests/provider/dwd/radar/test_index.py"] -pycodestyle = ["-E501", "-B950"] -[tool.flakehell.exceptions."tests/provider/dwd/observation/util/test_parameter.py"] -pycodestyle = ["-E501", "-B950"] - -[tool.flakehell.exceptions."wetterdienst/ui/cli.py"] -pycodestyle = ["-E501", "-B950"] -[tool.flakehell.exceptions."wetterdienst/ui/restapi.py"] +[tool.flakeheaven.exceptions."wetterdienst/ui/restapi.py"] flake8-bugbear = ["-B008"] -[tool.flakehell.exceptions."wetterdienst/core/scalar/export.py"] -pycodestyle = ["-E501", "-B950"] -[tool.flakehell.exceptions."wetterdienst/provider/dwd/observation/metadata/field_types.py"] -pycodestyle = ["-E501", "-B950"] -[tool.flakehell.exceptions."wetterdienst/provider/dwd/observation/metadata/parameter.py"] -pycodestyle = ["-E501", "-B950"] -[tool.flakehell.exceptions."wetterdienst/provider/dwd/forecast/metadata/field_types.py"] -pycodestyle = ["-E501", "-B950"] -[tool.flakehell.exceptions."tests/provider/dwd/observation/test_api_metadata.py"] -pycodestyle = ["-E501", "-B950"] [tool.poe.tasks] install_dev = "poetry install -E mpl -E ipython -E docs -E sql -E export -E duckdb -E influxdb -E cratedb -E mysql -E postgresql -E radar -E bufr -E restapi -E explorer" black = "black wetterdienst example tests" isort = "isort wetterdienst example tests" format = ["black", "isort"] -lint = "flakehell lint wetterdienst example tests" +lint = "flakeheaven lint wetterdienst example tests" docs = { shell = "cd docs && poetry run make html" } test = "pytest -vvv tests" # TODO: When parallel testing works on CI, use this: @@ -268,6 +277,7 @@ coverage-parallel = "pytest --cov=wetterdienst --numprocesses=4 -m 'not (explore export_requirements = "poetry export --without-hashes --dev --output requirements.txt" export_licenses = "pip-licenses --from=mixed --format=plain-vertical --with-authors --with-urls --with-license-file --no-license-path --ignore-packages wetterdienst --output-file=THIRD_PARTY_NOTICES" export = ["export_requirements", "export_licenses"] +update = "poetry update" [tool.pytest.ini_options] markers = [ diff --git a/requirements.txt b/requirements.txt index da2966db2..0cd311da6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -aenum==3.1.6 +aenum==3.1.7 aiohttp==3.8.1; python_version >= "3.6" aiosignal==1.2.0; python_version >= "3.6" alabaster==0.7.12; python_version >= "3.6" @@ -9,7 +9,7 @@ argon2-cffi==21.3.0; python_version >= "3.7" async-timeout==4.0.2; python_version >= "3.6" asynctest==0.13.0; python_version < "3.8" and python_version >= "3.6" atomicwrites==1.4.0; python_version >= "3.6" and python_full_version < "3.0.0" and sys_platform == "win32" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") or sys_platform == "win32" and python_version >= "3.6" and python_full_version >= "3.4.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") -attrs==20.3.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" 
and python_version >= "3.7" +attrs==20.3.0; python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "4.0" or python_full_version >= "3.5.0" and python_version >= "3.7" and python_version < "4.0" babel==2.9.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" backports.zoneinfo==0.2.1; python_version < "3.9" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") bandit==1.7.1; python_version >= "3.5" @@ -32,20 +32,28 @@ decorator==5.1.1; python_version >= "3.6" and python_version < "4.0" defusedxml==0.7.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" deprecation==2.1.0 dictdiffer==0.9.0 +docformatter==1.4 docutils==0.16; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" dogpile.cache==1.1.5; python_version >= "3.6" entrypoints==0.3; python_full_version >= "3.6.1" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") +eradicate==2.0.0; python_version >= "3.6" and python_version < "4.0" execnet==1.9.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +flake8-2020==1.6.1; python_full_version >= "3.6.1" flake8-bandit==2.1.2 flake8-black==0.2.3 flake8-bugbear==20.11.1; python_version >= "3.6" flake8-builtins==1.5.3 flake8-comprehensions==3.8.0; python_version >= "3.7" +flake8-copyright==0.2.2 +flake8-docstrings==1.6.0 +flake8-eradicate==1.2.0; python_version >= "3.6" and python_version < "4.0" flake8-isort==4.1.1 +flake8-plugin-utils==1.3.2; python_version >= "3.6" and python_version < "4.0" flake8-polyfill==1.0.2 flake8-print==4.0.0; python_version >= "3.6" -flake8==4.0.1; python_version >= "3.7" -flakehell @ git+https://github.com/mcarans/flakehell.git@master ; python_version >= "3.5" +flake8-return==1.1.3; python_version >= "3.6" and python_version < "4.0" +flake8==4.0.1; python_version >= "3.7" and python_version < "4.0" and python_full_version >= "3.6.1" +flakeheaven==0.11.0; python_version >= "3.5" freezegun==1.1.0; python_version >= "3.5" frozenlist==1.3.0; python_version >= "3.7" fsspec==2021.7.0; python_version >= "3.6" @@ -55,26 +63,26 @@ h5netcdf==0.11.0; python_version >= "3.6" h5py==3.6.0; python_version >= "3.7" idna==3.3; python_full_version >= "3.6.2" and python_version >= "3.7" imagesize==1.3.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" -importlib-metadata==1.7.0; python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "3.8" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") or python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.5.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") or python_version < "3.8" +importlib-metadata==1.7.0; python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.6.1" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" or python_version < "3.8" and python_version >= "3.6" and python_full_version >= "3.5.0") and 
(python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") or python_version < "3.8" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") and python_full_version >= "3.5.0") and (python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "3.8" or python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.5.0") or python_version < "3.8" importlib-resources==5.4.0; python_version < "3.9" and python_version >= "3.7" iniconfig==1.1.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" ipython-genutils==0.2.0 isort==5.10.1; python_full_version >= "3.6.1" and python_version < "4.0" jinja2==3.0.3; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" jsonschema==4.4.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" -jupyter-client==7.1.1; python_full_version >= "3.6.1" and python_version >= "3.7" +jupyter-client==7.1.2; python_full_version >= "3.6.1" and python_version >= "3.7" jupyter-core==4.9.1; python_full_version >= "3.6.1" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") jupyter-server-mathjax==0.2.3; python_version >= "3.6" -jupyter-server==1.13.3; python_version >= "3.7" +jupyter-server==1.13.4; python_version >= "3.7" livereload==2.6.3; python_version >= "3.6" lxml==4.7.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") markupsafe==2.0.1; python_version >= "3.6" -mccabe==0.6.1; python_version >= "3.7" +mccabe==0.6.1; python_version >= "3.7" and python_version < "4.0" and python_full_version >= "3.6.1" measurement==3.2.0 mistune==0.8.4; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" mock==4.0.3; python_version >= "3.6" mpmath==1.2.1; python_version >= "3.6" -multidict==5.2.0; python_version >= "3.6" +multidict==6.0.0; python_version >= "3.7" mypy-extensions==0.4.3; python_version >= "3.6" nbconvert==5.6.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" nbdime==3.1.1; python_version >= "3.6" @@ -97,9 +105,10 @@ ptable==0.9.2; python_version >= "3.6" and python_version < "4.0" ptyprocess==0.7.0; os_name != "nt" and python_version >= "3.7" py==1.11.0; python_full_version >= "3.6.1" and python_version >= "3.6" and implementation_name == "pypy" pybufrkit==0.2.19 -pycodestyle==2.8.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +pycodestyle==2.8.0; python_full_version >= "3.6.1" and python_version >= "3.7" and python_version < "4.0" pycparser==2.21; python_full_version >= "3.6.1" and python_version >= "3.6" and implementation_name == "pypy" -pyflakes==2.4.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.7" +pydocstyle==6.1.1; python_version >= "3.6" +pyflakes==2.4.0; python_version >= "3.7" and python_version < "4.0" and python_full_version >= "3.6.1" 
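The semicolon-delimited expressions attached to these pins are PEP 508 environment markers: pip evaluates each marker against the running interpreter and installs the pin only when it is true. A minimal sketch of that evaluation, assuming the third-party `packaging` library (used here purely for illustration; it is not itself one of the pins above):

    from packaging.markers import Marker

    # Marker text copied from the multidict pin above.
    marker = Marker('python_version >= "3.7"')

    # Evaluates against the current interpreter by default ...
    print(marker.evaluate())  # True on Python 3.7+

    # ... or against an explicitly supplied environment.
    print(marker.evaluate({"python_version": "3.6"}))  # False

The longer marker strings on pins such as attrs, six and zipp are likely an artifact of poetry re-exporting requirements.txt after the dependency bumps rather than deliberate edits.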
pygments==2.11.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" pyparsing==3.0.7; python_version >= "3.6" pypdf2==1.26.0 @@ -124,7 +133,7 @@ requests==2.27.1; (python_version >= "2.7" and python_full_version < "3.0.0") or scipy==1.6.1; python_version >= "3.7" selenium==3.141.0 send2trash==1.8.0; python_version >= "3.7" -six==1.16.0; python_full_version >= "3.7.1" and python_version >= "3.5" and (python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.3.0") and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.6") and (python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.5") and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "4.0" or python_version >= "3.6" and python_version < "4.0" and python_full_version >= "3.3.0") and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") +six==1.16.0; python_full_version >= "3.7.1" and python_version >= "3.5" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.6") and (python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.3.0") and (python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.5") and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "4.0" or python_version >= "3.6" and python_version < "4.0" and python_full_version >= "3.3.0") and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") smmap==5.0.0; python_version >= "3.7" sniffio==1.2.0; python_full_version >= "3.6.2" and python_version >= "3.7" snowballstemmer==2.2.0; python_version >= "3.6" @@ -154,9 +163,10 @@ typed-ast==1.5.1; python_version >= "3.6" typing-extensions==4.0.1; python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.6.2" tzdata==2021.5; platform_system == "Windows" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") tzlocal==4.1; python_version >= "3.6" +untokenize==0.1.1 urllib3==1.26.8; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version < "4" and python_version >= "3.5" validators==0.18.2; python_version >= "3.6" and python_version < "4.0" webencodings==0.5.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" websocket-client==1.2.3; python_version >= "3.7" yarl==1.7.2; python_version >= "3.6" -zipp==3.7.0; python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "3.8" or python_full_version >= "3.5.0" and python_version < "3.8" and python_version >= "3.7" +zipp==3.7.0; python_full_version >= "3.6.1" and python_version < "3.8" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" or python_version < "3.8" and python_version >= "3.6" and python_full_version >= "3.5.0") diff --git a/tests/example/test_notebook_examples.py b/tests/example/test_notebook_examples.py index bb1f40f91..f1b1e32d5 100644 --- a/tests/example/test_notebook_examples.py +++ 
b/tests/example/test_notebook_examples.py
@@ -10,10 +10,7 @@
 @pytest.mark.slow
-@pytest.mark.xfail(
-    reason="nbconvert stack has problems, see "
-    "https://github.com/jupyter/jupyter_client/issues/637"
-)
+@pytest.mark.xfail(reason="nbconvert stack has problems, see " "https://github.com/jupyter/jupyter_client/issues/637")
 def test_jupyter_example():
     """ Test for climate_observations jupyter notebook """
     fixture = NBRegressionFixture(
diff --git a/tests/provider/dwd/forecast/test_api_data.py b/tests/provider/dwd/forecast/test_api_data.py
index d4f65d5a8..315e86261 100644
--- a/tests/provider/dwd/forecast/test_api_data.py
+++ b/tests/provider/dwd/forecast/test_api_data.py
@@ -12,18 +12,11 @@ def test_dwd_mosmix_l():
     Test some details of a typical MOSMIX-L response.
     """
-    request = DwdMosmixRequest(
-        parameter="large", mosmix_type=DwdMosmixType.LARGE, humanize=False
-    ).filter_by_station_id(
+    request = DwdMosmixRequest(parameter="large", mosmix_type=DwdMosmixType.LARGE, humanize=False).filter_by_station_id(
         station_id=["01001"],
     )
     response = next(request.values.query())
-    # Verify metadata.
-    # TODO: add to metadata
-    # assert response.stations.df.loc[0, "ISSUER"] == "Deutscher Wetterdienst"
-    # assert response.stations.df.loc[0, "PRODUCT_ID"] == "MOSMIX"
-
     # Verify list of stations.
     station_names = response.stations.df["name"].unique().tolist()
     assert station_names == ["JAN MAYEN"]
@@ -180,11 +173,6 @@ def test_dwd_mosmix_s():
     )
     response = next(request.values.query())
-    # Verify metadata.
-    # TODO: add to metadata
-    # assert response.stations.df.loc[0, "ISSUER"] == "Deutscher Wetterdienst"
-    # assert response.stations.df.loc[0, "PRODUCT_ID"] == "MOSMIX"
-
     # Verify list of stations.
     station_names = list(response.stations.df["name"].unique())
     assert station_names == ["BJORNOYA"]
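The call chain exercised in the MOSMIX hunks above is the library's general request pattern: build a request, filter it down to stations, then pull values. A minimal usage sketch, with the import path assumed from the test layout and the station id ("01001", Jan Mayen) taken from the test itself:

    from wetterdienst.provider.dwd.forecast import DwdMosmixRequest, DwdMosmixType

    # Build a MOSMIX-L request; humanize=False keeps the raw DWD parameter names.
    request = DwdMosmixRequest(
        parameter="large", mosmix_type=DwdMosmixType.LARGE, humanize=False
    ).filter_by_station_id(station_id=["01001"])

    # values.query() is a generator; each result bundles a stations and a values DataFrame.
    response = next(request.values.query())
    print(response.stations.df["name"].unique())  # e.g. ["JAN MAYEN"]

Joining the chain onto a single source line, as the hunk above does, is what the new 120-character line length in pyproject.toml permits.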
diff --git a/tests/provider/dwd/observation/test_api_data.py b/tests/provider/dwd/observation/test_api_data.py
index 2d40b1aeb..f4db360ef 100644
--- a/tests/provider/dwd/observation/test_api_data.py
+++ b/tests/provider/dwd/observation/test_api_data.py
@@ -42,7 +42,7 @@ def test_dwd_observation_data_api():
     assert request.parameter == [
         (
-            DwdObservationDatasetTree.DAILY.CLIMATE_SUMMARY.PRECIPITATION_HEIGHT,  # Noqa: E501, B950
+            DwdObservationDatasetTree.DAILY.CLIMATE_SUMMARY.PRECIPITATION_HEIGHT,
             DwdObservationDataset.CLIMATE_SUMMARY,
         )
     ]
@@ -118,9 +118,7 @@ def test_dwd_observation_data_parameter():
         period=["recent", "historical"],
     )

-    assert request.parameter == [
-        (DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationDataset.CLIMATE_SUMMARY)
-    ]
+    assert request.parameter == [(DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationDataset.CLIMATE_SUMMARY)]

 def test_dwd_observation_data_parameter_dataset_pairs():
@@ -131,9 +129,7 @@
         period=["recent", "historical"],
     )

-    assert request.parameter == [
-        (DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationDataset.CLIMATE_SUMMARY)
-    ]
+    assert request.parameter == [(DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationDataset.CLIMATE_SUMMARY)]

     request = DwdObservationRequest(
         parameter=[("precipitation_height", "precipitation_more")],
@@ -172,15 +168,6 @@ def test_dwd_observation_data_fails():
         end_date="1951-01-01",
     )

-    # TODO: check first if parameters are found
-    # with pytest.raises(NoParametersFound):
-    #     DWDObservationStations(
-    #         parameter=["abc"],
-    #         resolution=DWDObservationResolution.DAILY,
-    #         start_date="1951-01-01",
-    #         end_date="1961-01-01",
-    #     )
-

 def test_dwd_observation_data_dates():
     # time input
@@ -700,9 +687,7 @@ def test_dwd_observation_data_result_tidy_metric():

 def test_dwd_observation_data_10_minutes_result_tidy():
     """ Test for actual values (tidy) in metric units """
     request = DwdObservationRequest(
-        parameter=[
-            DwdObservationDatasetTree.MINUTE_10.TEMPERATURE_AIR.PRESSURE_AIR_SITE
-        ],
+        parameter=[DwdObservationDatasetTree.MINUTE_10.TEMPERATURE_AIR.PRESSURE_AIR_SITE],
         resolution=DwdObservationResolution.MINUTE_10,
         start_date="1999-12-31 22:00",
         end_date="1999-12-31 23:00",
@@ -743,9 +728,7 @@ def test_dwd_observation_data_10_minutes_result_tidy():
                 ],
                 errors="coerce",
             ).astype(float),
-            "quality": pd.to_numeric(
-                [1, 1, 1, 1, 1, 1, pd.NA], errors="coerce"
-            ).astype(float),
+            "quality": pd.to_numeric([1, 1, 1, 1, 1, 1, pd.NA], errors="coerce").astype(float),
         },
     ),
     # Needed since pandas 1.2?
@@ -756,10 +739,8 @@
 def test_create_humanized_column_names_mapping():
     """ Test for function to create a mapping to humanized column names """
     kl_daily_hcnm = {
-        # "qn_3": "quality_wind",
         "fx": "wind_gust_max",
         "fm": "wind_speed",
-        # "qn_4": "quality_general",
         "rsk": "precipitation_height",
         "rskf": "precipitation_form",
         "sdk": "sunshine_duration",
@@ -862,9 +843,7 @@ def test_tidy_up_data():
             -7.9,
             -11.4,
         ],
-        "quality": pd.Series(
-            [10, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3], dtype=float
-        ),
+        "quality": pd.Series([10, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3], dtype=float),
     }
 )
diff --git a/tests/provider/dwd/observation/test_api_stations_geo.py b/tests/provider/dwd/observation/test_api_stations_geo.py
index 2417a894f..191b2a665 100644
--- a/tests/provider/dwd/observation/test_api_stations_geo.py
+++ b/tests/provider/dwd/observation/test_api_stations_geo.py
@@ -162,9 +162,7 @@ def test_dwd_observation_stations_bbox():
         datetime(2020, 1, 1),
         datetime(2020, 1, 20),
     )
-    nearby_station = request.filter_by_bbox(
-        left=8.7862, bottom=49.9195, right=8.993, top=50.0899
-    )
+    nearby_station = request.filter_by_bbox(left=8.7862, bottom=49.9195, right=8.993, top=50.0899)
     nearby_station = nearby_station.df.drop("to_date", axis="columns")
     pd.testing.assert_frame_equal(
diff --git a/tests/provider/dwd/observation/test_io.py b/tests/provider/dwd/observation/test_io.py
index 54d8ef658..6cec1ffa2 100644
--- a/tests/provider/dwd/observation/test_io.py
+++ b/tests/provider/dwd/observation/test_io.py
@@ -51,7 +51,7 @@
 def test_to_dict():
-
+    """ Test export of DataFrame to dictionary """
     data = ExportMixin(df=df_data).to_dict()
     assert data == [
@@ -67,7 +67,7 @@ def test_to_dict():

 def test_filter_by_date():
-
+    """ Test filter by date """
     df = filter_by_date_and_resolution(df_data, "2019-12-28", Resolution.HOURLY)
     assert not df.empty
@@ -76,10 +76,8 @@

 def test_filter_by_date_interval():
-
-    df = filter_by_date_and_resolution(
-        df_data, "2019-12-27/2019-12-29", Resolution.HOURLY
-    )
+    """ Test filter by date interval """
+    df = filter_by_date_and_resolution(df_data, "2019-12-27/2019-12-29", Resolution.HOURLY)
     assert not df.empty

     df = filter_by_date_and_resolution(df_data, "2020/2022", Resolution.HOURLY)
@@ -87,7 +85,7 @@

 def test_filter_by_date_monthly():
-
+    """ Test filter by date in monthly scope """
     result = pd.DataFrame.from_dict(
         {
             "station_id": ["01048"],
@@ -111,7 +109,7 @@

 def test_filter_by_date_annual():
-
+    """ Test filter by date in annual scope """
     df = pd.DataFrame.from_dict(
         {
             "station_id": ["01048"],
@@ -124,14 +122,10 @@
def test_filter_by_date_annual(): } ) - df = filter_by_date_and_resolution( - df, date="2019-05/2019-09", resolution=Resolution.ANNUAL - ) + df = filter_by_date_and_resolution(df, date="2019-05/2019-09", resolution=Resolution.ANNUAL) assert not df.empty - df = filter_by_date_and_resolution( - df, date="2020/2022", resolution=Resolution.ANNUAL - ) + df = filter_by_date_and_resolution(df, date="2020/2022", resolution=Resolution.ANNUAL) assert df.empty df = filter_by_date_and_resolution(df, date="2020", resolution=Resolution.ANNUAL) @@ -140,7 +134,7 @@ def test_filter_by_date_annual(): @pytest.mark.sql def test_filter_by_sql(): - # TODO: change this to a test of historical data + """ Test filter by sql statement """ df = ExportMixin(df=df_data).filter_by_sql( "SELECT * FROM data WHERE parameter='temperature_air_max_200' AND value < 1.5" ) @@ -153,7 +147,7 @@ def test_filter_by_sql(): def test_format_json(): - + """ Test export of DataFrame to json """ output = ExportMixin(df=df_data).to_json() response = json.loads(output) @@ -163,7 +157,7 @@ def test_format_json(): def test_format_geojson(): - + """ Test export of DataFrame to geojson """ output = StationsResult(df=df_station, stations=None).to_geojson() response = json.loads(output) @@ -174,18 +168,16 @@ def test_format_geojson(): def test_format_csv(): - + """ Test export of DataFrame to csv """ output = ExportMixin(df=df_data).to_csv().strip() assert "station_id,dataset,parameter,date,value,quality" in output - assert ( - "01048,climate_summary,temperature_air_max_200,2019-12-28T00-00-00,1.3," - in output - ) + assert "01048,climate_summary,temperature_air_max_200,2019-12-28T00-00-00,1.3," in output +@pytest.mark.remote def test_request(): - + """ Test general data request """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -198,7 +190,7 @@ def test_request(): def test_export_unknown(): - + """ Test export of DataFrame to unknown format """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -216,7 +208,7 @@ def test_export_unknown(): def test_export_spreadsheet(tmpdir_factory): - + """ Test export of DataFrame to spreadsheet """ import openpyxl # 1. Request data and save to .xlsx file. @@ -291,11 +283,7 @@ def test_export_spreadsheet(tmpdir_factory): (1.5,), ] - last_record = list( - worksheet.iter_cols( - min_row=worksheet.max_row, max_row=worksheet.max_row, values_only=True - ) - ) + last_record = list(worksheet.iter_cols(min_row=worksheet.max_row, max_row=worksheet.max_row, values_only=True)) assert last_record == [ ("01048",), ("climate_summary",), @@ -322,8 +310,9 @@ def test_export_spreadsheet(tmpdir_factory): @mac_arm64_unsupported +@pytest.mark.remote def test_export_parquet(tmpdir_factory): - + """ Test export of DataFrame to parquet """ import pyarrow.parquet as pq # Request data. @@ -377,21 +366,18 @@ def test_export_parquet(tmpdir_factory): # Validate content. 
data = table.to_pydict() - assert data["date"][0] == datetime.datetime( - 2019, 1, 1, 0, 0, tzinfo=datetime.timezone.utc - ) + assert data["date"][0] == datetime.datetime(2019, 1, 1, 0, 0, tzinfo=datetime.timezone.utc) assert data["temperature_air_min_005"][0] == 1.5 - assert data["date"][-1] == datetime.datetime( - 2020, 1, 1, 0, 0, tzinfo=datetime.timezone.utc - ) + assert data["date"][-1] == datetime.datetime(2020, 1, 1, 0, 0, tzinfo=datetime.timezone.utc) assert data["temperature_air_min_005"][-1] == -4.6 os.unlink(filename) @mac_arm64_unsupported +@pytest.mark.remote def test_export_zarr(tmpdir_factory): - + """ Test export of DataFrame to zarr """ import numpy as np import zarr @@ -447,21 +433,18 @@ def test_export_zarr(tmpdir_factory): # Validate content. data = group - assert data["date"][0] == np.datetime64( - datetime.datetime(2019, 1, 1, 0, 0, tzinfo=datetime.timezone.utc) - ) + assert data["date"][0] == np.datetime64(datetime.datetime(2019, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)) assert data["temperature_air_min_005"][0] == 1.5 - assert data["date"][-1] == np.datetime64( - datetime.datetime(2020, 1, 1, 0, 0, tzinfo=datetime.timezone.utc) - ) + assert data["date"][-1] == np.datetime64(datetime.datetime(2020, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)) assert data["temperature_air_min_005"][-1] == -4.6 shutil.rmtree(filename) @mac_arm64_unsupported +@pytest.mark.remote def test_export_feather(tmpdir_factory): - + """ Test export of DataFrame to feather """ import pyarrow.feather as feather # Request data @@ -515,20 +498,17 @@ def test_export_feather(tmpdir_factory): # Validate content. data = table.to_pydict() - assert data["date"][0] == datetime.datetime( - 2019, 1, 1, 0, 0, tzinfo=datetime.timezone.utc - ) + assert data["date"][0] == datetime.datetime(2019, 1, 1, 0, 0, tzinfo=datetime.timezone.utc) assert data["temperature_air_min_005"][0] == 1.5 - assert data["date"][-1] == datetime.datetime( - 2020, 1, 1, 0, 0, tzinfo=datetime.timezone.utc - ) + assert data["date"][-1] == datetime.datetime(2020, 1, 1, 0, 0, tzinfo=datetime.timezone.utc) assert data["temperature_air_min_005"][-1] == -4.6 os.unlink(filename) +@pytest.mark.remote def test_export_sqlite(tmpdir_factory): - + """ Test export of DataFrame to sqlite db """ import sqlite3 request = DwdObservationRequest( @@ -599,8 +579,9 @@ def test_export_sqlite(tmpdir_factory): ) +@pytest.mark.remote def test_export_cratedb(): - + """ Test export of DataFrame to cratedb """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -623,14 +604,14 @@ def test_export_cratedb(): schema="test", if_exists="replace", index=False, - # method="multi", chunksize=5000, ) @surrogate("duckdb.connect") +@pytest.mark.remote def test_export_duckdb(): - + """ Test export of DataFrame to duckdb """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -639,9 +620,7 @@ def test_export_duckdb(): ).filter_by_station_id(station_id=[1048]) mock_connection = mock.MagicMock() - with mock.patch( - "duckdb.connect", side_effect=[mock_connection], create=True - ) as mock_connect: + with mock.patch("duckdb.connect", side_effect=[mock_connection], create=True) as mock_connect: df = request.values.all().df ExportMixin(df=df).to_target("duckdb:///test.duckdb?table=testdrive") @@ -650,13 +629,13 @@ def test_export_duckdb(): mock_connection.register.assert_called_once() mock_connection.execute.assert_called() 
mock_connection.table.assert_called_once_with("testdrive") - # a.table.to_df.assert_called() mock_connection.close.assert_called_once() @surrogate("influxdb.InfluxDBClient") +@pytest.mark.remote def test_export_influxdb1_tabular(): - + """ Test export of DataFrame to influxdb v1 """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -718,8 +697,9 @@ def test_export_influxdb1_tabular(): @surrogate("influxdb.InfluxDBClient") +@pytest.mark.remote def test_export_influxdb1_tidy(): - + """ Test export of DataFrame to influxdb v1 """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -770,8 +750,9 @@ def test_export_influxdb1_tidy(): @surrogate("influxdb_client.InfluxDBClient") @surrogate("influxdb_client.Point") @surrogate("influxdb_client.client.write_api.SYNCHRONOUS") +@pytest.mark.remote def test_export_influxdb2_tabular(): - + """ Test export of DataFrame to influxdb v2 """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -793,20 +774,17 @@ def test_export_influxdb2_tabular(): ): df = request.values.all().df - ExportMixin(df=df).to_target( - "influxdb2://orga:token@localhost/?database=dwd&table=weather" - ) + ExportMixin(df=df).to_target("influxdb2://orga:token@localhost/?database=dwd&table=weather") - mock_connect.assert_called_once_with( - url="http://localhost:8086", org="orga", token="token" - ) + mock_connect.assert_called_once_with(url="http://localhost:8086", org="orga", token="token") @surrogate("influxdb_client.InfluxDBClient") @surrogate("influxdb_client.Point") @surrogate("influxdb_client.client.write_api.SYNCHRONOUS") +@pytest.mark.remote def test_export_influxdb2_tidy(): - + """ Test export of DataFrame to influxdb v2 """ request = DwdObservationRequest( parameter=DwdObservationDataset.CLIMATE_SUMMARY, resolution=DwdObservationResolution.DAILY, @@ -828,10 +806,6 @@ def test_export_influxdb2_tidy(): ): df = request.values.all().df - ExportMixin(df=df).to_target( - "influxdb2://orga:token@localhost/?database=dwd&table=weather" - ) + ExportMixin(df=df).to_target("influxdb2://orga:token@localhost/?database=dwd&table=weather") - mock_connect.assert_called_once_with( - url="http://localhost:8086", org="orga", token="token" - ) + mock_connect.assert_called_once_with(url="http://localhost:8086", org="orga", token="token") diff --git a/tests/provider/dwd/observation/test_metaindex.py b/tests/provider/dwd/observation/test_metaindex.py index b3d8d05a7..5533fa26f 100644 --- a/tests/provider/dwd/observation/test_metaindex.py +++ b/tests/provider/dwd/observation/test_metaindex.py @@ -46,9 +46,7 @@ def test_meta_index_1mph_creation(): Period.HISTORICAL, ) - assert meta_index_1mph.loc[ - meta_index_1mph[Columns.STATION_ID.value] == "00003", : - ].values.tolist() == [ + assert meta_index_1mph.loc[meta_index_1mph[Columns.STATION_ID.value] == "00003", :].values.tolist() == [ [ "00003", "18910101", diff --git a/tests/provider/dwd/observation/util/__init__.py b/tests/provider/dwd/observation/util/__init__.py index 51424f995..91805216a 100644 --- a/tests/provider/dwd/observation/util/__init__.py +++ b/tests/provider/dwd/observation/util/__init__.py @@ -3,12 +3,10 @@ # Distributed under the MIT License. See LICENSE for more info. 
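The station_reference_pattern_* constants defined just below (and again in the radar test package that follows) are ordinary regular expressions in which radar sites that may be offline or decommissioned are wrapped in optional (xxx,)? groups; the radar tests further down interpolate them into expected RADOLAN header regexes (the MS ..<...> trailer). A small sketch of how the optional groups behave, with the two implicitly concatenated string literals of the unsorted variant already joined:

    import re

    # Station list copied from the pattern defined below (unsorted variant).
    pattern = "(asb,)?(boo,)?ros,hnr,umd,pro,ess,fld,drs,neu,(nhb,)?oft,eis,(tur,)?(isn,)?fbg,mem"

    # The reference matches with or without the optional sites present:
    assert re.fullmatch(pattern, "asb,boo,ros,hnr,umd,pro,ess,fld,drs,neu,nhb,oft,eis,tur,isn,fbg,mem")
    assert re.fullmatch(pattern, "boo,ros,hnr,umd,pro,ess,fld,drs,neu,oft,eis,tur,isn,fbg,mem")

This tolerance is why the radar tests can assert on header contents even though the set of reporting radar sites changes over time.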
station_reference_pattern_unsorted = ( - "(asb,)?(boo,)?ros,hnr,umd,pro,ess,fld,drs," - "neu,(nhb,)?oft,eis,(tur,)?(isn,)?fbg,mem" + "(asb,)?(boo,)?ros,hnr,umd,pro,ess,fld,drs," "neu,(nhb,)?oft,eis,(tur,)?(isn,)?fbg,mem" ) station_reference_pattern_sorted = ( - "(asb,)?(boo,)?drs,eis,ess,fbg,fld,hnr,(isn,)?" - "mem,neu,(nhb,)?oft,pro,ros,(tur,)?umd" + "(asb,)?(boo,)?drs,eis,ess,fbg,fld,hnr,(isn,)?" "mem,neu,(nhb,)?oft,pro,ros,(tur,)?umd" ) station_reference_pattern_de = ( "(deasb,)?(deboo,)?dedrs,deeis,deess,(defbg,)?defld,dehnr," diff --git a/tests/provider/dwd/radar/__init__.py b/tests/provider/dwd/radar/__init__.py index da230756a..b6831f854 100644 --- a/tests/provider/dwd/radar/__init__.py +++ b/tests/provider/dwd/radar/__init__.py @@ -3,12 +3,10 @@ # Distributed under the MIT License. See LICENSE for more info. station_reference_pattern_unsorted = ( - "(asb,)?(boo,)?(ros,)?(hnr,)?umd,pro,ess,fld,(drs,)?" - "(neu,)?(nhb,)?(oft,)?eis,(tur,)?(isn,)?(fbg,)?(mem)?" + "(asb,)?(boo,)?(ros,)?(hnr,)?umd,pro,ess,fld,(drs,)?" "(neu,)?(nhb,)?(oft,)?eis,(tur,)?(isn,)?(fbg,)?(mem)?" ) station_reference_pattern_sorted = ( - "(asb,)?(boo,)?(drs,)?eis,ess,(fbg,)?fld,(hnr,)?(isn,)?" - "(mem,)?(neu,)?(nhb,)?oft,pro,(ros,)?(tur,)?umd" + "(asb,)?(boo,)?(drs,)?eis,ess,(fbg,)?fld,(hnr,)?(isn,)?" "(mem,)?(neu,)?(nhb,)?oft,pro,(ros,)?(tur,)?umd" ) station_reference_pattern_de = ( "(deasb,)?(deboo,)?(dedrs,)?deeis,deess,(defbg,)?defld,(dehnr,)?" diff --git a/tests/provider/dwd/radar/test_api_current.py b/tests/provider/dwd/radar/test_api_current.py index 2fe627536..0317f8813 100644 --- a/tests/provider/dwd/radar/test_api_current.py +++ b/tests/provider/dwd/radar/test_api_current.py @@ -43,7 +43,7 @@ def test_radar_request_site_current_sweep_pcp_v_hdf5(): assert payload.startswith(b"\x89HDF\r\n") # Verify more details. - # wddump ras07-stqual-pcpng01_sweeph5onem_vradh_00-2020093000403400-boo-10132-hd5 # noqa:E501,B950 + # wddump ras07-stqual-pcpng01_sweeph5onem_vradh_00-2020093000403400-boo-10132-hd5 hdf = h5py.File(buffer, "r") @@ -86,7 +86,7 @@ def test_radar_request_site_current_sweep_vol_v_hdf5_full(): assert payload.startswith(b"\x89HDF\r\n") # Verify more details. 
-    # wddump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5  # noqa:E501,B950
+    # wddump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5

     hdf = h5py.File(buffer, "r")

diff --git a/tests/provider/dwd/radar/test_api_historic.py b/tests/provider/dwd/radar/test_api_historic.py
index b121d37e9..1fb189c95 100644
--- a/tests/provider/dwd/radar/test_api_historic.py
+++ b/tests/provider/dwd/radar/test_api_historic.py
@@ -47,9 +47,7 @@ def test_radar_request_radolan_cdc_hourly_alignment_1():
         start_date="2019-08-08 00:53:53",
     )

-    assert request.start_date == datetime(
-        year=2019, month=8, day=8, hour=0, minute=50, second=0
-    )
+    assert request.start_date == datetime(year=2019, month=8, day=8, hour=0, minute=50, second=0)


 def test_radar_request_radolan_cdc_hourly_alignment_2():
@@ -68,9 +66,7 @@ def test_radar_request_radolan_cdc_hourly_alignment_2():
         start_date="2019-08-08 00:42:42",
     )

-    assert request.start_date == datetime(
-        year=2019, month=8, day=7, hour=23, minute=50, second=0
-    )
+    assert request.start_date == datetime(year=2019, month=8, day=7, hour=23, minute=50, second=0)


 @pytest.mark.remote
@@ -173,7 +169,7 @@ def test_radar_request_composite_historic_fx_yesterday():
     date_time = request.start_date.strftime("%d%H%M")
     month_year = request.start_date.strftime("%m%y")
     header = (
-        f"FX{date_time}10000{month_year}BY.......VS 3SW 2.12.0PR E-01INT 5GP 900x 900VV 000MF 00000002MS "  # noqa:E501,B950
+        f"FX{date_time}10000{month_year}BY.......VS 3SW 2.12.0PR E-01INT 5GP 900x 900VV 000MF 00000002MS "
         f"..<{station_reference_pattern_unsorted}>"
     )

@@ -205,9 +201,7 @@ def test_radar_request_composite_historic_fx_timerange():

     # Verify all timestamps are properly propagated from the tarfile.
     assert all(
-        request.start_date == result.timestamp
-        or request.start_date + timedelta(minutes=5)
-        for result in results
+        request.start_date == result.timestamp or request.start_date + timedelta(minutes=5) for result in results
     )

@@ -269,10 +263,7 @@ def test_radar_request_composite_historic_radolan_rw_yesterday():
     # radar locations can change over time -> check if at least 10 radar locations
     # were found and at least 5 of them match with the provided one
     assert len(requested_attrs["radarlocations"]) >= 10
-    assert (
-        len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"])))
-        >= 5
-    )
+    assert len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"]))) >= 5

     skip_attrs = ["radarid", "datasize", "maxrange", "radarlocations", "radolanversion"]
     for attr in skip_attrs:
@@ -308,12 +299,8 @@ def test_radar_request_composite_historic_radolan_rw_timerange():
     requested_header = wrl.io.read_radolan_header(buffer)
     requested_attrs = wrl.io.parse_dwd_composite_header(requested_header)

-    assert request.start_date.strftime("m%y") == requested_attrs["datetime"].strftime(
-        "m%y"
-    )
-    assert request.start_date.strftime("%d%H%M") == requested_attrs[
-        "datetime"
-    ].strftime("%d%H%M")
+    assert request.start_date.strftime("m%y") == requested_attrs["datetime"].strftime("m%y")
+    assert request.start_date.strftime("%d%H%M") == requested_attrs["datetime"].strftime("%d%H%M")

     attrs = {
         "producttype": "RW",
@@ -346,10 +333,7 @@ def test_radar_request_composite_historic_radolan_rw_timerange():
     # radar locations can change over time -> check if at least 10 radar locations
     # were found and at least 5 of them match with the provided one
     assert len(requested_attrs["radarlocations"]) >= 10
-    assert (
-        len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"])))
-        >= 5
-    )
+    assert len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"]))) >= 5

     skip_attrs = [
         "datetime",
@@ -394,12 +378,8 @@ def test_radar_request_site_historic_dx_yesterday():
     requested_attrs = wrl.io.radolan.parse_dx_header(requested_header)

     timestamp_aligned = round_minutes(timestamp, 5)
-    assert timestamp_aligned.strftime("%m%y") == requested_attrs["datetime"].strftime(
-        "%m%y"
-    )
-    assert timestamp_aligned.strftime("%d%H%M") == requested_attrs["datetime"].strftime(
-        "%d%H%M"
-    )
+    assert timestamp_aligned.strftime("%m%y") == requested_attrs["datetime"].strftime("%m%y")
+    assert timestamp_aligned.strftime("%d%H%M") == requested_attrs["datetime"].strftime("%d%H%M")

     attrs = {
         "producttype": "DX",
@@ -450,12 +430,8 @@ def test_radar_request_site_historic_dx_timerange():
     requested_attrs = wrl.io.radolan.parse_dx_header(requested_header)

     timestamp_aligned = round_minutes(timestamp, 5)
-    assert timestamp_aligned.strftime("%m%y") == requested_attrs["datetime"].strftime(
-        "%m%y"
-    )
-    assert timestamp_aligned.strftime("%d%H%M") == requested_attrs["datetime"].strftime(
-        "%d%H%M"
-    )
+    assert timestamp_aligned.strftime("%m%y") == requested_attrs["datetime"].strftime("%m%y")
+    assert timestamp_aligned.strftime("%d%H%M") == requested_attrs["datetime"].strftime("%d%H%M")

     attrs = {
         "producttype": "DX",
@@ -506,7 +482,7 @@ def test_radar_request_site_historic_pe_binary_yesterday():
     date_time = request.start_date.strftime("%d%H")
     month_year = request.start_date.strftime("%m%y")
     header = (
-        f"PE{date_time}..10132{month_year}BY ....?VS 1LV12 1.0 2.0 3.0 4.0 5.0 "  # noqa:E501,B950
+        f"PE{date_time}..10132{month_year}BY ....?VS 1LV12 1.0 2.0 3.0 4.0 5.0 "
         f"6.0 7.0 8.0 9.0 10.0 11.0 12.0CO0CD0CS0ET 5.0FL....MS"
     )

@@ -845,14 +821,8 @@ def test_radar_request_site_historic_sweep_pcp_v_hdf5_yesterday():
     assert hdf["/dataset1/data1/data"].shape == (360, 600)

     timestamp = round_minutes(request.start_date, 5)
-    assert hdf["/what"].attrs.get("date") == bytes(
-        timestamp.strftime("%Y%m%d"), encoding="ascii"
-    )
-    assert (
-        hdf["/what"]
-        .attrs.get("time")
-        .startswith(bytes(timestamp.strftime("%H%M"), encoding="ascii"))
-    )
+    assert hdf["/what"].attrs.get("date") == bytes(timestamp.strftime("%Y%m%d"), encoding="ascii")
+    assert hdf["/what"].attrs.get("time").startswith(bytes(timestamp.strftime("%H%M"), encoding="ascii"))


 @pytest.mark.remote
@@ -916,7 +886,7 @@ def test_radar_request_site_historic_sweep_vol_v_hdf5_yesterday():
     assert payload.startswith(b"\x89HDF\r\n")

     # Verify more details.
-    # h5dump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5  # noqa:E501,B950
+    # h5dump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5

     hdf = h5py.File(buffer, "r")

@@ -930,14 +900,8 @@ def test_radar_request_site_historic_sweep_vol_v_hdf5_yesterday():
     assert hdf["/dataset1/data1/data"].shape in ((360, 180), (360, 720), (361, 720))

     timestamp = round_minutes(request.start_date, 5)
-    assert hdf["/what"].attrs.get("date") == bytes(
-        timestamp.strftime("%Y%m%d"), encoding="ascii"
-    )
-    assert (
-        hdf["/what"]
-        .attrs.get("time")
-        .startswith(bytes(timestamp.strftime("%H%M"), encoding="ascii"))
-    )
+    assert hdf["/what"].attrs.get("date") == bytes(timestamp.strftime("%Y%m%d"), encoding="ascii")
+    assert hdf["/what"].attrs.get("time").startswith(bytes(timestamp.strftime("%H%M"), encoding="ascii"))

     # Verify that the second file is the second scan / elevation level.
     buffer = results[1].data
@@ -946,14 +910,8 @@ def test_radar_request_site_historic_sweep_vol_v_hdf5_yesterday():
     hdf = h5py.File(buffer, "r")

     assert hdf["/dataset1/how"].attrs.get("scan_index") == 2

     timestamp = round_minutes(request.start_date, 5)
-    assert hdf["/what"].attrs.get("date") == bytes(
-        timestamp.strftime("%Y%m%d"), encoding="ascii"
-    )
-    assert (
-        hdf["/what"]
-        .attrs.get("time")
-        .startswith(bytes(timestamp.strftime("%H%M"), encoding="ascii"))
-    )
+    assert hdf["/what"].attrs.get("date") == bytes(timestamp.strftime("%Y%m%d"), encoding="ascii")
+    assert hdf["/what"].attrs.get("time").startswith(bytes(timestamp.strftime("%H%M"), encoding="ascii"))


 @pytest.mark.remote
@@ -1050,10 +1008,7 @@ def test_radar_request_radvor_re_yesterday():
     # radar locations can change over time -> check if at least 10 radar locations
     # were found and at least 5 of them match with the provided one
     assert len(requested_attrs["radarlocations"]) >= 10
-    assert (
-        len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"])))
-        >= 5
-    )
+    assert len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"]))) >= 5

     skip_attrs = [
         "radarid",
@@ -1164,10 +1119,7 @@ def test_radar_request_radvor_rq_yesterday():
     # radar locations can change over time -> check if at least 10 radar locations
     # were found and at least 5 of them match with the provided one
     assert len(requested_attrs["radarlocations"]) >= 10
-    assert (
-        len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"])))
-        >= 5
-    )
+    assert len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"]))) >= 5

     skip_attrs = [
         "datasize",
diff --git a/tests/provider/dwd/radar/test_api_invalid.py b/tests/provider/dwd/radar/test_api_invalid.py
index 8a3c2a3a0..80945a478 100644
--- a/tests/provider/dwd/radar/test_api_invalid.py
+++ b/tests/provider/dwd/radar/test_api_invalid.py
@@ -168,23 +168,23 @@ def test_radar_request_radolan_cdc_invalid_time_resolution():
     )


-# def test_radar_request_radolan_cdc_future(caplog):
-#     """
-#     Verify that ``DWDRadarRequest`` will properly emit
-#     log messages when hitting empty results.
-#
-#     This time for RADOLAN_CDC data.
-# """ -# # with pytest.raises(ValueError): -# request = DWDRadarData( -# parameter=RadarParameter.RADOLAN_CDC, -# time_resolution=DWDRadarResolution.DAILY, -# period_type=DWDRadarPeriod.RECENT, -# start_date="2099-01-01 00:50:00", -# ) -# -# # results = list(request.collect_data()) -# # assert results == [] -# # -# # assert "WARNING" in caplog.text -# # assert "No radar file found" in caplog.text +@pytest.mark.remote +def test_radar_request_radolan_cdc_future(caplog): + """ + Verify that ``DWDRadarRequest`` will properly emit + log messages when hitting empty results. + + This time for RADOLAN_CDC data. + """ + request = DwdRadarValues( + parameter=DwdRadarParameter.RADOLAN_CDC, + resolution="daily", + period=DwdRadarPeriod.RECENT, + start_date="2099-01-01 00:50:00", + ) + + results = list(request.query()) + assert results == [] + + assert "WARNING" in caplog.text + assert "No radar file found" in caplog.text diff --git a/tests/provider/dwd/radar/test_api_latest.py b/tests/provider/dwd/radar/test_api_latest.py index 2ef1d4643..bcf915ba0 100644 --- a/tests/provider/dwd/radar/test_api_latest.py +++ b/tests/provider/dwd/radar/test_api_latest.py @@ -35,8 +35,8 @@ def test_radar_request_composite_latest_rx_reflectivity(): month_year = datetime.utcnow().strftime("%m%y") header = ( - f"RX......10000{month_year}BY 8101..VS 3SW ......PR E\\+00INT 5GP 900x 900MS " # noqa:E501,B950 - f"..<{station_reference_pattern_unsorted}>" # noqa:E501,B950 + f"RX......10000{month_year}BY 8101..VS 3SW ......PR E\\+00INT 5GP 900x 900MS " + f"..<{station_reference_pattern_unsorted}>" ) assert re.match(bytes(header, encoding="ascii"), payload[:160]) @@ -64,9 +64,7 @@ def test_radar_request_composite_latest_rw_reflectivity(): requested_attrs = wrl.io.parse_dwd_composite_header(requested_header) # Verify data. - assert datetime.utcnow().strftime("%m%y") == requested_attrs["datetime"].strftime( - "%m%y" - ) + assert datetime.utcnow().strftime("%m%y") == requested_attrs["datetime"].strftime("%m%y") attrs = { "producttype": "RW", @@ -99,10 +97,7 @@ def test_radar_request_composite_latest_rw_reflectivity(): # radar locations can change over time -> check if at least 10 radar locations # were found and at least 5 of them match with the provided one assert len(requested_attrs["radarlocations"]) >= 10 - assert ( - len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"]))) - >= 5 - ) + assert len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"]))) >= 5 skip_attrs = [ "radarid", @@ -138,9 +133,7 @@ def test_radar_request_site_latest_dx_reflectivity(): # Verify data. timestamp_aligned = round_minutes(datetime.utcnow(), 5) - assert timestamp_aligned.strftime("%m%y") == requested_attrs["datetime"].strftime( - "%m%y" - ) + assert timestamp_aligned.strftime("%m%y") == requested_attrs["datetime"].strftime("%m%y") attrs = { "producttype": "DX", diff --git a/tests/provider/dwd/radar/test_api_most_recent.py b/tests/provider/dwd/radar/test_api_most_recent.py index eddd1f18e..ba519c478 100644 --- a/tests/provider/dwd/radar/test_api_most_recent.py +++ b/tests/provider/dwd/radar/test_api_most_recent.py @@ -90,7 +90,7 @@ def test_radar_request_site_most_recent_sweep_vol_v_hdf5(): raise pytest.skip("Data currently not available") # Verify number of results. 
-    # assert 8 <= len(results) <= 10
+    assert 8 <= len(results) <= 10

     buffer = results[0].data
     payload = buffer.getvalue()

@@ -99,7 +99,7 @@ def test_radar_request_site_most_recent_sweep_vol_v_hdf5():
     assert payload.startswith(b"\x89HDF\r\n")

     # Verify more details.
-    # wddump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5  # noqa:E501,B950
+    # wddump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5

     hdf = h5py.File(buffer, "r")

@@ -175,10 +175,7 @@ def test_radar_request_radolan_cdc_most_recent():
     # radar locations can change over time -> check if at least 10 radar locations
     # were found and at least 5 of them match with the provided one
     assert len(requested_attrs["radarlocations"]) >= 10
-    assert (
-        len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"])))
-        >= 5
-    )
+    assert len(list(set(requested_attrs["radarlocations"]) & set(attrs["radarlocations"]))) >= 5

     skip_attrs = [
         "radolanversion",
diff --git a/tests/provider/dwd/radar/test_api_recent.py b/tests/provider/dwd/radar/test_api_recent.py
index 77eb63fb7..528619e7e 100644
--- a/tests/provider/dwd/radar/test_api_recent.py
+++ b/tests/provider/dwd/radar/test_api_recent.py
@@ -89,7 +89,7 @@ def test_radar_request_site_recent_sweep_vol_v_hdf5():
     assert payload.startswith(b"\x89HDF\r\n")

     # Verify more details.
-    # wddump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5  # noqa:E501,B950
+    # wddump ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092917055800-boo-10132-hd5

     hdf = h5py.File(buffer, "r")

diff --git a/tests/provider/dwd/radar/test_index.py b/tests/provider/dwd/radar/test_index.py
index 0d16cadc2..dda723672 100644
--- a/tests/provider/dwd/radar/test_index.py
+++ b/tests/provider/dwd/radar/test_index.py
@@ -24,9 +24,7 @@ def test_radar_fileindex_composite_pg_reflectivity_bin():
     )

     urls = file_index[DwdColumns.FILENAME.value].tolist()
-    assert all(
-        PurePath(url).match("*/weather/radar/composit/pg/*---bin") for url in urls
-    )
+    assert all(PurePath(url).match("*/weather/radar/composit/pg/*---bin") for url in urls)


 def test_radar_fileindex_composite_pg_reflectivity_bufr():
@@ -37,9 +35,7 @@ def test_radar_fileindex_composite_pg_reflectivity_bufr():
     )

     urls = file_index[DwdColumns.FILENAME.value].tolist()
-    assert all(
-        PurePath(url).match("*/weather/radar/composit/pg/*---bufr") for url in urls
-    )
+    assert all(PurePath(url).match("*/weather/radar/composit/pg/*---bufr") for url in urls)


 @pytest.mark.xfail(reason="Out of service", strict=True)
@@ -50,9 +46,7 @@ def test_radar_fileindex_composite_rx_reflectivity_bin():
     )

     urls = file_index[DwdColumns.FILENAME.value].tolist()
-    assert all(
-        PurePath(url).match("*/weather/radar/composit/rx/*---bin") for url in urls
-    )
+    assert all(PurePath(url).match("*/weather/radar/composit/rx/*---bin") for url in urls)


 @pytest.mark.parametrize(
@@ -70,10 +64,7 @@ def test_radar_fileindex_radolan_reflectivity_bin(parameter):
     )

     urls = file_index[DwdColumns.FILENAME.value].tolist()
-    assert all(
-        PurePath(url).match(f"*/weather/radar/radolan/{parameter.value}/*---bin")
-        for url in urls
-    )
+    assert all(PurePath(url).match(f"*/weather/radar/radolan/{parameter.value}/*---bin") for url in urls)


 def test_radar_fileindex_sites_px_reflectivity_bin():
@@ -85,9 +76,7 @@ def test_radar_fileindex_sites_px_reflectivity_bin():
     )

     urls = file_index[DwdColumns.FILENAME.value].tolist()
-    assert all(
-        PurePath(url).match("*/weather/radar/sites/px/boo/*---bin") for url in urls
-    )
+    assert all(PurePath(url).match("*/weather/radar/sites/px/boo/*---bin") for url in urls)


 def test_radar_fileindex_sites_px_reflectivity_bufr():
@@ -99,9 +88,7 @@ def test_radar_fileindex_sites_px_reflectivity_bufr():
     )

     urls = file_index[DwdColumns.FILENAME.value].tolist()
-    assert all(
-        PurePath(url).match("*/weather/radar/sites/px/boo/*---buf") for url in urls
-    )
+    assert all(PurePath(url).match("*/weather/radar/sites/px/boo/*---buf") for url in urls)


 def test_radar_fileindex_sites_px250_reflectivity_bufr():
@@ -124,10 +111,7 @@ def test_radar_fileindex_sites_sweep_bufr():
     )

     urls = file_index[DwdColumns.FILENAME.value].tolist()
-    assert all(
-        PurePath(url).match("*/weather/radar/sites/sweep_vol_v/boo/*--buf.bz2")
-        for url in urls
-    )
+    assert all(PurePath(url).match("*/weather/radar/sites/sweep_vol_v/boo/*--buf.bz2") for url in urls)


 def test_radar_fileindex_sites_sweep_vol_v_hdf5_simple():
@@ -141,9 +125,7 @@ def test_radar_fileindex_sites_sweep_vol_v_hdf5_simple():

     urls = file_index[DwdColumns.FILENAME.value].tolist()

-    assert all(
-        "/weather/radar/sites/sweep_vol_v/boo/hdf5/filter_simple" in url for url in urls
-    )
+    assert all("/weather/radar/sites/sweep_vol_v/boo/hdf5/filter_simple" in url for url in urls)


 def test_radar_fileindex_sites_sweep_vol_v_hdf5_polarimetric():
@@ -157,10 +139,7 @@ def test_radar_fileindex_sites_sweep_vol_v_hdf5_polarimetric():

     urls = file_index[DwdColumns.FILENAME.value].tolist()

-    assert all(
-        "/weather/radar/sites/sweep_vol_v/boo/hdf5/filter_polarimetric" in url
-        for url in urls
-    )
+    assert all("/weather/radar/sites/sweep_vol_v/boo/hdf5/filter_polarimetric" in url for url in urls)


 def test_radar_fileindex_radolan_cdc_daily_recent():
@@ -173,9 +152,7 @@ def test_radar_fileindex_radolan_cdc_daily_recent():

     urls = file_index[DwdColumns.FILENAME.value].tolist()

     assert all(
-        PurePath(url).match(
-            "*/climate_environment/CDC/grids_germany/daily/radolan/recent/bin/*---bin.gz"
-        )
+        PurePath(url).match("*/climate_environment/CDC/grids_germany/daily/radolan/recent/bin/*---bin.gz")
         for url in urls
         if not url.endswith(".pdf")
     )
@@ -191,9 +168,7 @@ def test_radar_fileindex_radolan_cdc_daily_historical():

     urls = file_index[DwdColumns.FILENAME.value].tolist()

     assert all(
-        PurePath(url).match(
-            "*/climate_environment/CDC/grids_germany/daily/radolan/historical/bin/*/SF*.tar.gz"
-        )
+        PurePath(url).match("*/climate_environment/CDC/grids_germany/daily/radolan/historical/bin/*/SF*.tar.gz")
         for url in urls
         if not url.endswith(".pdf")
     )
@@ -209,9 +184,7 @@ def test_radar_fileindex_radolan_cdc_hourly_recent():

     urls = file_index[DwdColumns.FILENAME.value].tolist()

     assert all(
-        PurePath(url).match(
-            "*/climate_environment/CDC/grids_germany/hourly/radolan/recent/bin/*---bin.gz"
-        )
+        PurePath(url).match("*/climate_environment/CDC/grids_germany/hourly/radolan/recent/bin/*---bin.gz")
         for url in urls
         if not url.endswith(".pdf")
     )
@@ -227,9 +200,7 @@ def test_radar_fileindex_radolan_cdc_hourly_historical():

     urls = file_index[DwdColumns.FILENAME.value].tolist()

     assert all(
-        PurePath(url).match(
-            "*/climate_environment/CDC/grids_germany/hourly/radolan/historical/bin/*/RW*.tar.gz"
-        )
+        PurePath(url).match("*/climate_environment/CDC/grids_germany/hourly/radolan/historical/bin/*/RW*.tar.gz")
         for url in urls
         if not url.endswith(".pdf")
     )
diff --git a/tests/provider/dwd/radar/test_util.py b/tests/provider/dwd/radar/test_util.py
index 5bad74892..eacfd2cf3 100644
--- a/tests/provider/dwd/radar/test_util.py
+++ b/tests/provider/dwd/radar/test_util.py
@@ -12,14 +12,10 @@ def test_radar_get_date_from_filename():
     date = get_date_from_filename("sweep_pcp_v_0-20200926143033_10132--buf.bz2")
     assert date == datetime.datetime(2020, 9, 26, 14, 30)

-    date = get_date_from_filename(
-        "ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092614305700-boo-10132-hd5"
-    )
+    date = get_date_from_filename("ras07-stqual-vol5minng01_sweeph5onem_vradh_00-2020092614305700-boo-10132-hd5")
     assert date == datetime.datetime(2020, 9, 26, 14, 30)

-    date = get_date_from_filename(
-        "ras07-vol5minng01_sweeph5onem_vradh_00-2020092614305700-boo-10132-hd5"
-    )
+    date = get_date_from_filename("ras07-vol5minng01_sweeph5onem_vradh_00-2020092614305700-boo-10132-hd5")
     assert date == datetime.datetime(2020, 9, 26, 14, 30)

     date = get_date_from_filename("rab02-tt_10132-20200926161533-boo---buf")
diff --git a/tests/provider/dwd/test_date.py b/tests/provider/dwd/test_date.py
index c277fdbac..86dfff485 100644
--- a/tests/provider/dwd/test_date.py
+++ b/tests/provider/dwd/test_date.py
@@ -10,9 +10,7 @@


 def test_mktimerange_annual():
-    assert mktimerange(
-        Resolution.ANNUAL, dateutil.parser.isoparse("2019").replace(tzinfo=pytz.UTC)
-    ) == (
+    assert mktimerange(Resolution.ANNUAL, dateutil.parser.isoparse("2019").replace(tzinfo=pytz.UTC)) == (
         dateutil.parser.isoparse("2019-01-01 00:00:00Z"),
         dateutil.parser.isoparse("2019-12-31 00:00:00Z"),
     )
@@ -27,9 +25,7 @@ def test_mktimerange_annual():


 def test_mktimerange_monthly():
-    assert mktimerange(
-        Resolution.MONTHLY, dateutil.parser.isoparse("2020-05").replace(tzinfo=pytz.UTC)
-    ) == (
+    assert mktimerange(Resolution.MONTHLY, dateutil.parser.isoparse("2020-05").replace(tzinfo=pytz.UTC)) == (
         dateutil.parser.isoparse("2020-05-01 00:00:00Z"),
         dateutil.parser.isoparse("2020-05-31 00:00:00Z"),
     )
diff --git a/tests/provider/dwd/test_index.py b/tests/provider/dwd/test_index.py
index 9f3c5a700..cf978f4fe 100644
--- a/tests/provider/dwd/test_index.py
+++ b/tests/provider/dwd/test_index.py
@@ -29,8 +29,7 @@ def test_build_index_path():
 @pytest.mark.remote
 def test_list_files_of_climate_observations():
     files_server = list_remote_files_fsspec(
-        "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/"
-        "annual/kl/recent",
+        "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/" "annual/kl/recent",
         recursive=False,
     )

diff --git a/tests/provider/dwd/test_util.py b/tests/provider/dwd/test_util.py
index ae6c3c238..cde97dfda 100644
--- a/tests/provider/dwd/test_util.py
+++ b/tests/provider/dwd/test_util.py
@@ -26,10 +26,7 @@ def test_parse_enumeration_from_template():
         parse_enumeration_from_template("CLIMATE_SUMMARY", DwdObservationDataset)
         == DwdObservationDataset.CLIMATE_SUMMARY
     )
-    assert (
-        parse_enumeration_from_template("kl", DwdObservationDataset)
-        == DwdObservationDataset.CLIMATE_SUMMARY
-    )
+    assert parse_enumeration_from_template("kl", DwdObservationDataset) == DwdObservationDataset.CLIMATE_SUMMARY

     with pytest.raises(InvalidEnumeration):
         parse_enumeration_from_template("climate", DwdObservationDataset)
diff --git a/tests/provider/eccc/test_api.py b/tests/provider/eccc/test_api.py
index cf849f147..1ca8e1afb 100644
--- a/tests/provider/eccc/test_api.py
+++ b/tests/provider/eccc/test_api.py
@@ -112,6 +112,4 @@ def test_eccc_api_values():
         }
     )

-    assert_frame_equal(
-        values.reset_index(drop=True), expected_df, check_categorical=False
-    )
+    assert_frame_equal(values.reset_index(drop=True), expected_df, check_categorical=False)
diff --git a/tests/test_api.py b/tests/test_api.py
index 17ec6483a..7bfd3939b 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -60,8 +60,6 @@ def test_api(provider, kind, kwargs, si_units):
     values = next(request.values.query()).df

     # TODO: DWD Forecast has no quality
-    assert set(values.columns).issuperset(
-        {"station_id", "parameter", "date", "value", "quality"}
-    )
+    assert set(values.columns).issuperset({"station_id", "parameter", "date", "value", "quality"})

     assert not values.empty
diff --git a/tests/ui/explorer/conftest.py b/tests/ui/explorer/conftest.py
index 532921e0f..8cb61af28 100644
--- a/tests/ui/explorer/conftest.py
+++ b/tests/ui/explorer/conftest.py
@@ -38,9 +38,7 @@ def wait_for_element_by_id_clickable(self, element_id, timeout=None):
         EC.element_to_be_clickable,
         ((By.ID, element_id),),
         timeout,
-        "timeout {}s => waiting for element id {}".format(
-            timeout if timeout else self._wait_timeout, element_id
-        ),
+        "timeout {}s => waiting for element id {}".format(timeout if timeout else self._wait_timeout, element_id),
     )
diff --git a/tests/ui/test_cli.py b/tests/ui/test_cli.py
index 5bb8a3d37..43521de8a 100644
--- a/tests/ui/test_cli.py
+++ b/tests/ui/test_cli.py
@@ -2,9 +2,9 @@
 # Copyright (c) 2018-2021, earthobservations developers.
 # Distributed under the MIT License. See LICENSE for more info.
 import json
-import zipfile
 from datetime import datetime, timedelta

+import pandas as pd
 import pytest
 from click.testing import CliRunner

@@ -48,6 +48,7 @@ def test_cli_help():
+    """ Test cli help """
     runner = CliRunner()
     result = runner.invoke(cli, [])

@@ -59,41 +60,26 @@ def test_cli_help():
     )


-# def test_cli_about_parameters(capsys):
-#     runner = CliRunner()
-#
-#     result = runner.invoke(cli, "about coverage --provider=dwd --kind=observation")
-#
-#     assert "precipitation" in result.output
-#     assert "temperature_air" in result.output
-#     assert "weather_phenomena" in result.output
-
-
-# def test_cli_about_resolutions(capsys):
-#     runner = CliRunner()
-#
-#     result = runner.invoke(cli, "about coverage --provider=dwd --kind=observation")
-#
-#     sys.argv = ["wetterdienst", "dwd", "about", "resolutions"]
-#     cli.cli()
-#     stdout, stderr = capsys.readouterr()
-#
-#     response = stdout
-#     assert "1_minute" in response
-#     assert "hourly" in response
-#     assert "annual" in response
-
-
-# def test_cli_about_periods(capsys):
-#
-#     sys.argv = ["wetterdienst", "dwd", "about", "periods"]
-#     cli.cli()
-#     stdout, stderr = capsys.readouterr()
-#
-#     response = stdout
-#     assert "historical" in response
-#     assert "recent" in response
-#     assert "now" in response
+def test_cli_about_parameters():
+    """ Test cli coverage of dwd parameters """
+    runner = CliRunner()
+
+    result = runner.invoke(cli, "about coverage --provider=dwd --kind=observation")
+
+    assert "precipitation" in result.output
+    assert "temperature_air" in result.output
+    assert "weather_phenomena" in result.output
+
+
+def test_cli_about_resolutions():
+    """ Test cli coverage of dwd resolutions """
+    runner = CliRunner()
+
+    result = runner.invoke(cli, "about coverage --provider=dwd --kind=observation")
+
+    assert "minute_1" in result.output
+    assert "hourly" in result.output
+    assert "annual" in result.output


 def test_cli_about_coverage(capsys):
@@ -108,110 +94,84 @@ def invoke_wetterdienst_stations_empty(provider, kind, setting, fmt="json"):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
-        f"stations --provider={provider} --kind={kind} "
-        f"{setting} --station=123456 --format={fmt}",
+        f"stations --provider={provider} --kind={kind} " f"{setting} --station=123456 --format={fmt}",
     )

-    return result
-

 def invoke_wetterdienst_stations_static(provider, kind, setting, station, fmt="json"):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
-        f"stations --provider={provider} --kind={kind} "
-        f"{setting} --station={station} --format={fmt}",
+        f"stations --provider={provider} --kind={kind} " f"{setting} --station={station} --format={fmt}",
     )

-    return result
-

 def invoke_wetterdienst_stations_export(provider, kind, setting, station, target):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
-        f"stations --provider={provider} --kind={kind} "
-        f"{setting} --station={station} --target={target}",
+        f"stations --provider={provider} --kind={kind} " f"{setting} --station={station} --target={target}",
     )

-    return result
-

 def invoke_wetterdienst_stations_geo(provider, kind, setting, fmt="json"):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
         f"stations --provider={provider} --kind={kind} "
         f"{setting} --coordinates=51.1280,13.7543 --rank=5 "
         f"--format={fmt}",
     )

-    return result
-

 def invoke_wetterdienst_values_static(provider, kind, setting, station, fmt="json"):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
-        f"values --provider={provider} --kind={kind} "
-        f"{setting} --station={station} --format={fmt}",
+        f"values --provider={provider} --kind={kind} " f"{setting} --station={station} --format={fmt}",
     )

-    return result
-

 def invoke_wetterdienst_values_export(provider, kind, setting, station, target):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
-        f"values --provider={provider} --kind={kind} "
-        f"{setting} --station={station} --target={target}",
+        f"values --provider={provider} --kind={kind} " f"{setting} --station={station} --target={target}",
     )

-    return result
-

-def invoke_wetterdienst_values_static_tidy(
-    provider, kind, setting, station, fmt="json"
-):
+def invoke_wetterdienst_values_static_tidy(provider, kind, setting, station, fmt="json"):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
-        f"values --provider={provider} --kind={kind} "
-        f"{setting} --station={station} --format={fmt} --tidy",
+        f"values --provider={provider} --kind={kind} " f"{setting} --station={station} --format={fmt} --tidy",
     )

-    return result
-

 def invoke_wetterdienst_values_geo(provider, kind, setting, fmt="json"):
     runner = CliRunner()

-    result = runner.invoke(
+    return runner.invoke(
         cli,
         f"values --provider={provider} --kind={kind} {setting} "
         f"--coordinates=51.1280,13.7543 --rank=5 --format={fmt}",
     )

-    return result
-

 def test_no_provider():
     runner = CliRunner()

     result = runner.invoke(cli, "stations --provider=abc --kind=abc")

-    assert (
-        "Error: Invalid value for '--provider': invalid choice: abc." in result.output
-    )
+    assert "Error: Invalid value for '--provider': invalid choice: abc." in result.output


 def test_no_kind():
@@ -222,7 +182,7 @@
     assert "Invalid value for '--kind': invalid choice: abc." in result.output


-def test_data_range(capsys):
+def test_data_range():
     runner = CliRunner()

     result = runner.invoke(
@@ -232,10 +192,7 @@
     )

     assert isinstance(result.exception, TypeError)
-    assert (
-        "Combination of provider ECCC and kind OBSERVATION requires start and end date"
-        in str(result.exception)
-    )
+    assert "Combination of provider ECCC and kind OBSERVATION requires start and end date" in str(result.exception)


 @pytest.mark.parametrize(
@@ -254,14 +211,10 @@ def test_cli_stations_json(provider, kind, setting, station_id, station_name):

     assert station_name in station_names


-@pytest.mark.parametrize(
-    "provider,kind,setting,station_id,station_name", SETTINGS_STATIONS
-)
+@pytest.mark.parametrize("provider,kind,setting,station_id,station_name", SETTINGS_STATIONS)
 def test_cli_stations_empty(provider, kind, setting, station_id, station_name, caplog):

-    result = invoke_wetterdienst_stations_empty(
-        provider=provider, kind=kind, setting=setting, fmt="json"
-    )
+    result = invoke_wetterdienst_stations_empty(provider=provider, kind=kind, setting=setting, fmt="json")

     assert isinstance(result.exception, SystemExit)
     assert "ERROR" in caplog.text
@@ -304,11 +257,9 @@ def test_cli_stations_csv(provider, kind, setting, station_id, station_name):
     "provider,kind,setting,station_id,station_name",
     SETTINGS_STATIONS,
 )
-def test_cli_stations_excel(
-    provider, kind, setting, station_id, station_name, tmpdir_factory
-):
+def test_cli_stations_excel(provider, kind, setting, station_id, station_name, tmpdir_factory):

-    # filename = tmpdir_factory.mktemp("data").join("stations.xlsx")
+    # filename = tmpdir_factory.mktemp("data").join("stations.xlsx")  # noqa:E800
     filename = "stations.xlsx"

     _ = invoke_wetterdienst_stations_export(
@@ -319,10 +270,10 @@
         target=f"file://{filename}",
     )

-    with zipfile.ZipFile(filename, "r") as zip_file:
-        payload = zip_file.read("xl/worksheets/sheet1.xml")
+    df = pd.read_excel(filename, sheet_name="Sheet1", dtype=str)

-    assert bytes(station_name, encoding="utf8") in payload
+    assert "name" in df.columns
+    assert station_name in df["name"].values


 @pytest.mark.parametrize(
@@ -351,9 +302,7 @@ def test_cli_values_json(setting, expected_columns, capsys, caplog):

     assert set(first.keys()).issuperset(expected_columns)


-@pytest.mark.parametrize(
-    "provider,kind,setting,station_id,station_name", SETTINGS_VALUES
-)
+@pytest.mark.parametrize("provider,kind,setting,station_id,station_name", SETTINGS_VALUES)
 def test_cli_values_json_tidy(provider, kind, setting, station_id, station_name):

     result = invoke_wetterdienst_values_static_tidy(
@@ -388,10 +337,7 @@ def test_cli_values_geojson(provider, kind, setting, station_id, station_name, c
         provider=provider, kind=kind, setting=setting, station=station_id, fmt="geojson"
     )

-    assert (
-        "Error: Invalid value for '--format': invalid choice: "
-        "geojson. (choose from json, csv)" in result.output
-    )
+    assert "Error: Invalid value for '--format': invalid choice: " "geojson. (choose from json, csv)" in result.output


 @pytest.mark.parametrize(
@@ -410,11 +356,9 @@
     "provider,kind,setting,station_id,station_name",
     SETTINGS_VALUES,
 )
-def test_cli_values_excel(
-    provider, kind, setting, station_id, station_name, tmpdir_factory
-):
+def test_cli_values_excel(provider, kind, setting, station_id, station_name, tmpdir_factory):

-    # filename = tmpdir_factory.mktemp("data").join("values.xlsx")
+    # filename = tmpdir_factory.mktemp("data").join("values.xlsx")  # noqa:E800
     filename = "values.xlsx"

     _ = invoke_wetterdienst_values_export(
@@ -425,12 +369,10 @@
         target=f"file://{filename}",
     )

-    # pd.read_excel("values.xlsx")
+    df = pd.read_excel(filename, sheet_name="Sheet1", dtype=str)

-    with zipfile.ZipFile(filename, "r") as zip_file:
-        payload = zip_file.read("xl/worksheets/sheet1.xml")
-
-    assert bytes(station_id, encoding="utf8") in payload
+    assert "station_id" in df.columns
+    assert station_id in df["station_id"].values


 @pytest.mark.parametrize(
@@ -442,10 +384,7 @@ def test_cli_values_format_unknown(provider, kind, setting, station_id, station_
         provider=provider, kind=kind, setting=setting, station=station_id, fmt="foobar"
     )

-    assert (
-        "Error: Invalid value for '--format': "
-        "invalid choice: foobar. (choose from json, csv)" in result.output
-    )
+    assert "Error: Invalid value for '--format': " "invalid choice: foobar. (choose from json, csv)" in result.output


 @pytest.mark.parametrize(
@@ -454,9 +393,7 @@
 )
 def test_cli_stations_geospatial(provider, kind, setting, station_id, station_name):

-    result = invoke_wetterdienst_stations_geo(
-        provider=provider, kind=kind, setting=setting, fmt="json"
-    )
+    result = invoke_wetterdienst_stations_geo(provider=provider, kind=kind, setting=setting, fmt="json")

     response = json.loads(result.output)

@@ -471,9 +408,7 @@ def test_cli_values_geospatial(provider, kind, setting, station_id, station_na
 )
 def test_cli_values_geospatial(provider, kind, setting, station_id, station_name):

-    result = invoke_wetterdienst_values_geo(
-        provider=provider, kind=kind, setting=setting, fmt="json"
-    )
+    result = invoke_wetterdienst_values_geo(provider=provider, kind=kind, setting=setting, fmt="json")

     response = json.loads(result.output)
diff --git a/tests/ui/test_restapi.py b/tests/ui/test_restapi.py
index 6dc00b015..aee047cd1 100644
--- a/tests/ui/test_restapi.py
+++ b/tests/ui/test_restapi.py
@@ -65,10 +65,7 @@ def test_data_range(capsys):
         },
     )

-    assert (
-        "Combination of provider ECCC and kind OBSERVATION requires start and end date"
-        in response.text
-    )
+    assert "Combination of provider ECCC and kind OBSERVATION requires start and end date" in response.text


 def test_dwd_stations_basic():
@@ -194,9 +191,7 @@ def test_dwd_values_no_parameter():
     )

     assert response.status_code == 400
-    assert response.json() == {
-        "detail": "Query arguments 'parameter', 'resolution' and 'date' are required"
-    }
+    assert response.json() == {"detail": "Query arguments 'parameter', 'resolution' and 'date' are required"}


 def test_dwd_values_no_resolution():
@@ -213,9 +208,7 @@
     )

     assert response.status_code == 400
-    assert response.json() == {
-        "detail": "Query arguments 'parameter', 'resolution' and 'date' are required"
-    }
+    assert response.json() == {"detail": "Query arguments 'parameter', 'resolution' and 'date' are required"}


 @pytest.mark.sql
@@ -280,8 +273,7 @@ def test_dwd_values_sql_tidy(dicts_are_same):
         "parameter": "kl",
         "resolution": "daily",
         "date": "2019-12-01/2019-12-31",
-        "sql-values": "SELECT * FROM data "
-        "WHERE parameter='temperature_air_max_200' AND value < 1.5",
+        "sql-values": "SELECT * FROM data " "WHERE parameter='temperature_air_max_200' AND value < 1.5",
         "si-units": False,
     },
 )
diff --git a/tests/util/test_geo.py b/tests/util/test_geo.py
index 8970c0c16..70e51e207 100644
--- a/tests/util/test_geo.py
+++ b/tests/util/test_geo.py
@@ -8,9 +8,7 @@
 def test_get_coordinates():
     coordinates = Coordinates(np.array([1, 2, 3, 4]), np.array([5, 6, 7, 8]))

-    np.testing.assert_equal(
-        coordinates.get_coordinates(), np.array([[1, 5], [2, 6], [3, 7], [4, 8]])
-    )
+    np.testing.assert_equal(coordinates.get_coordinates(), np.array([[1, 5], [2, 6], [3, 7], [4, 8]]))


 def test_get_coordinates_in_radians():
diff --git a/wetterdienst/__init__.py b/wetterdienst/__init__.py
index bb0bef98d..c8c6f0da8 100644
--- a/wetterdienst/__init__.py
+++ b/wetterdienst/__init__.py
@@ -29,17 +29,13 @@ def info() -> None:
     """ Function that prints some basic information about the wetterdienst instance """
     wd_info = {
         "version": __version__,
-        "authors": "Benjamin Gutzmann , "
-        "Andreas Motl ",
+        "authors": "Benjamin Gutzmann , " "Andreas Motl ",
         "documentation": "https://wetterdienst.readthedocs.io/",
         "repository": "https://github.com/earthobservations/wetterdienst",
         "cache_dir": cache_dir,
     }

-    text = (
-        "Wetterdienst - Open weather data for humans\n"
-        "-------------------------------------------"
-    )
+    text = "Wetterdienst - Open weather data for humans\n" "-------------------------------------------"

     for key, value in wd_info.items():
         text += f"\n{key}:\t {value}"
diff --git a/wetterdienst/api.py b/wetterdienst/api.py
index c6844fd08..49ed3acd1 100644
--- a/wetterdienst/api.py
+++ b/wetterdienst/api.py
@@ -48,9 +48,7 @@ def __new__(cls, provider: Union[Provider, str], kind: Union[Kind, str]):
                 raise KeyError

         except (InvalidEnumeration, KeyError):
-            raise ProviderError(
-                f"No API available for provider {provider} and kind {kind}"
-            )
+            raise ProviderError(f"No API available for provider {provider} and kind {kind}")

         return api
diff --git a/wetterdienst/core/process.py b/wetterdienst/core/process.py
index f6a1289c4..279732a26 100644
--- a/wetterdienst/core/process.py
+++ b/wetterdienst/core/process.py
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright (c) 2018-2021, earthobservations developers.
 # Distributed under the MIT License. See LICENSE for more info.
-# import datetime
 from datetime import datetime
 from typing import Optional, Tuple

@@ -15,9 +14,7 @@
 from wetterdienst.util.datetime import mktimerange


-def create_date_range(
-    date: str, resolution: Resolution
-) -> Tuple[Optional[datetime], Optional[datetime]]:
+def create_date_range(date: str, resolution: Resolution) -> Tuple[Optional[datetime], Optional[datetime]]:
     date_from, date_to = None, None

     if "/" in date:
@@ -54,9 +51,7 @@
     return date_from, date_to


-def filter_by_date_and_resolution(
-    df: pd.DataFrame, date: str, resolution: Resolution
-) -> pd.DataFrame:
+def filter_by_date_and_resolution(df: pd.DataFrame, date: str, resolution: Resolution) -> pd.DataFrame:
     """
     Filter Pandas DataFrame by date or date interval.
@@ -98,14 +93,10 @@ def filter_by_date_and_resolution(
             Resolution.MONTHLY,
         ):
             date_from, date_to = mktimerange(resolution, date_from, date_to)
-            expression = (date_from <= df[Columns.FROM_DATE.value]) & (
-                df[Columns.TO_DATE.value] <= date_to
-            )
+            expression = (date_from <= df[Columns.FROM_DATE.value]) & (df[Columns.TO_DATE.value] <= date_to)
         else:
-            expression = (date_from <= df[Columns.DATE.value]) & (
-                df[Columns.DATE.value] <= date_to
-            )
-        df = df[expression]
+            expression = (date_from <= df[Columns.DATE.value]) & (df[Columns.DATE.value] <= date_to)
+        return df[expression]

     # Filter by specific date.
     else:
@@ -118,11 +109,7 @@
             date_from, date_to = mktimerange(resolution, date)
-            expression = (date_from <= df[Columns.FROM_DATE.value]) & (
-                df[Columns.TO_DATE.value] <= date_to
-            )
+            expression = (date_from <= df[Columns.FROM_DATE.value]) & (df[Columns.TO_DATE.value] <= date_to)
         else:
             expression = date == df[Columns.DATE.value]
-        df = df[expression]
-
-    return df
+        return df[expression]
diff --git a/wetterdienst/core/scalar/export.py b/wetterdienst/core/scalar/export.py
index 62b1a2f5e..73cfd66a6 100644
--- a/wetterdienst/core/scalar/export.py
+++ b/wetterdienst/core/scalar/export.py
@@ -45,14 +45,10 @@ def to_dict(self) -> dict:
         return df.to_dict(orient="records")

     def to_json(self, indent: int = 4):
-        output = self.df.to_json(
-            orient="records", date_format="iso", indent=indent, force_ascii=False
-        )
-        return output
+        return self.df.to_json(orient="records", date_format="iso", indent=indent, force_ascii=False)

     def to_csv(self):
-        output = self.df.to_csv(index=False, date_format="%Y-%m-%dT%H-%M-%S")
-        return output
+        return self.df.to_csv(index=False, date_format="%Y-%m-%dT%H-%M-%S")

     def to_geojson(self, indent: int = 4) -> str:
         """
@@ -61,9 +57,7 @@ def to_geojson(self, indent: int = 4) -> str:

         Return:
             JSON string in GeoJSON FeatureCollection format.
""" - return json.dumps( - self.to_ogc_feature_collection(), indent=indent, ensure_ascii=False - ) + return json.dumps(self.to_ogc_feature_collection(), indent=indent, ensure_ascii=False) def to_format(self, fmt: str, **kwargs) -> str: """ @@ -75,16 +69,14 @@ def to_format(self, fmt: str, **kwargs) -> str: fmt = fmt.lower() if fmt == "json": - output = self.to_json(indent=kwargs.get("indent")) + return self.to_json(indent=kwargs.get("indent")) elif fmt == "csv": - output = self.to_csv() + return self.to_csv() elif fmt == "geojson": - output = self.to_geojson(indent=kwargs.get("indent")) + return self.to_geojson(indent=kwargs.get("indent")) else: raise KeyError("Unknown output format") - return output - @staticmethod def _filter_by_sql(df: pd.DataFrame, sql: str) -> pd.DataFrame: """ @@ -233,9 +225,7 @@ def to_target(self, target: str): dimensions = store.get_dimensions() variables = list(store.get_variables().keys()) - log.info( - f"Wrote Zarr file with dimensions={dimensions} and variables={variables}" - ) + log.info(f"Wrote Zarr file with dimensions={dimensions} and variables={variables}") log.info(f"Zarr Dataset Group info:\n{store.ds.info}") else: @@ -269,9 +259,7 @@ def to_target(self, target: str): connection = duckdb.connect(database=database, read_only=False) connection.register("origin", self.df) connection.execute(f"DROP TABLE IF EXISTS {tablename};") - connection.execute( - f"CREATE TABLE {tablename} AS SELECT * FROM origin;" # noqa:S608 - ) + connection.execute(f"CREATE TABLE {tablename} AS SELECT * FROM origin;") # noqa:S608 weather_table = connection.table(tablename) print(weather_table) # noqa: T001 @@ -338,9 +326,7 @@ def to_target(self, target: str): else: raise KeyError(f"Unknown protocol variant '{protocol}' for InfluxDB") - log.info( - f"Writing to InfluxDB version {version}. database={database}, table={tablename}" - ) + log.info(f"Writing to InfluxDB version {version}. database={database}, table={tablename}") # 1. Mungle the data frame. # Use the "date" column as appropriate timestamp index. @@ -381,9 +367,7 @@ def to_target(self, target: str): ssl = protocol.endswith("s") url = f"http{ssl and 's' or ''}://{connspec.url.hostname}:{connspec.url.port or 8086}" - client = InfluxDBClient( - url=url, org=connspec.url.username, token=connspec.url.password - ) + client = InfluxDBClient(url=url, org=connspec.url.username, token=connspec.url.password) write_api = client.write_api(write_options=SYNCHRONOUS) points = [] @@ -391,9 +375,7 @@ def to_target(self, target: str): for date, record in items.iterrows(): time = date.isoformat() - tags = { - tag: record.pop(tag) for tag in tag_columns if tag in record - } + tags = {tag: record.pop(tag) for tag in tag_columns if tag in record} fields = record.dropna().to_dict() if not fields: @@ -463,7 +445,6 @@ def to_target(self, target: str): # Convert timezone-aware datetime fields to naive ones. # FIXME: Omit this as soon as the CrateDB driver is capable of supporting timezone-qualified timestamps. 
             self.df.date = self.df.date.dt.tz_localize(None)
-            # self.df.date = self.df.date.dt.tz_convert(None)

             self.df.to_sql(
                 name=tablename,
@@ -471,7 +452,6 @@ def to_target(self, target: str):
                 schema=database,
                 if_exists="replace",
                 index=False,
-                # method="multi",
                 chunksize=5000,
             )
             log.info("Writing to CrateDB finished")
diff --git a/wetterdienst/core/scalar/request.py b/wetterdienst/core/scalar/request.py
index 10cbcc633..9677faa53 100644
--- a/wetterdienst/core/scalar/request.py
+++ b/wetterdienst/core/scalar/request.py
@@ -107,20 +107,15 @@ def _has_datasets(self) -> bool:
     def _dataset_base(self) -> Optional[Enum]:
         """ Dataset base that is used to differ between different datasets """
         if self._has_datasets:
-            raise NotImplementedError(
-                "implement _dataset_base enumeration that contains available datasets"
-            )
-        return
+            raise NotImplementedError("implement _dataset_base enumeration that contains available datasets")

     @property
     def _dataset_tree(self) -> Optional[object]:
         """ Detailed dataset tree with all parameters per dataset """
         if self._has_datasets:
             raise NotImplementedError(
-                "implement _dataset_tree class that contains available datasets "
-                "and their parameters"
+                "implement _dataset_tree class that contains available datasets " "and their parameters"
             )
-        return None

     @property
     def _unique_dataset(self) -> bool:
@@ -148,9 +143,7 @@ def _has_tidy_data(self) -> bool:
     def _parameter_to_dataset_mapping(self) -> dict:
         """ Mapping to go from a (flat) parameter to dataset """
         if not self._unique_dataset:
-            raise NotImplementedError(
-                "for non unique datasets implement a mapping from parameter to dataset"
-            )
+            raise NotImplementedError("for non unique datasets implement a mapping from parameter to dataset")
         return {}

     @property
@@ -162,9 +155,7 @@ def _unit_tree(self):
     def datasets(self):
         datasets = self._dataset_tree[self._dataset_accessor].__dict__.keys()

-        datasets = list(filter(lambda x: x not in ("__module__", "__doc__"), datasets))
-
-        return datasets
+        return list(filter(lambda x: x not in ("__module__", "__doc__"), datasets))

     @property
     @abstractmethod
@@ -209,16 +200,12 @@ def _parse_period(self, period: Period) -> Optional[List[Period]]:
         else:
             return (
                 pd.Series(period)
-                .apply(
-                    parse_enumeration_from_template, args=(self._period_base, Period)
-                )
+                .apply(parse_enumeration_from_template, args=(self._period_base, Period))
                 .sort_values()
                 .tolist()
             )

-    def _parse_parameter(
-        self, parameter: List[Union[str, Enum]]
-    ) -> List[Tuple[Enum, Enum]]:
+    def _parse_parameter(self, parameter: List[Union[str, Enum]]) -> List[Tuple[Enum, Enum]]:
         """
         Method to parse parameters, either from string or enum. Case independent for
         strings.
@@ -264,9 +251,7 @@ def _parse_parameter(
             try:
                 # First parse parameter
-                parameter_ = parse_enumeration_from_template(
-                    parameter, self._parameter_base[self._dataset_accessor]
-                )
+                parameter_ = parse_enumeration_from_template(parameter, self._parameter_base[self._dataset_accessor])
             except (InvalidEnumeration, TypeError):
                 pass
             else:
@@ -277,15 +262,11 @@ def _parse_parameter(
                 elif not dataset_:
                     # If there's multiple datasets the mapping defines which one
                     # is taken for the given parameter
-                    dataset_ = self._parameter_to_dataset_mapping[self.resolution][
-                        parameter_
-                    ]
+                    dataset_ = self._parameter_to_dataset_mapping[self.resolution][parameter_]

                 if not self._unique_dataset:
                     # Parameter then has to be taken from the datasets definition
-                    parameter_ = self._dataset_tree[self._dataset_accessor][
-                        dataset_.name
-                    ][parameter_.name]
+                    parameter_ = self._dataset_tree[self._dataset_accessor][dataset_.name][parameter_.name]

                 parameters.append((parameter_, dataset_))

@@ -342,9 +323,7 @@ def __init__(

         super().__init__()

-        self.resolution = parse_enumeration_from_template(
-            resolution, self._resolution_base, Resolution
-        )
+        self.resolution = parse_enumeration_from_template(resolution, self._resolution_base, Resolution)

         self.period = self._parse_period(period)

         self.start_date, self.end_date = self.convert_timestamps(start_date, end_date)
@@ -353,12 +332,7 @@ def __init__(
         tidy = tidy
         if self._has_datasets:
-            tidy = tidy or any(
-                [
-                    parameter not in self._dataset_base
-                    for parameter, dataset in self.parameter
-                ]
-            )
+            tidy = tidy or any([parameter not in self._dataset_base for parameter, dataset in self.parameter])

         self.tidy = tidy
         self.si_units = si_units
@@ -414,9 +388,7 @@ def convert_timestamps(

         # TODO: replace this with a response + logging
         if not start_date <= end_date:
-            raise StartDateEndDateError(
-                "Error: 'start_date' must be smaller or equal to 'end_date'."
-            )
+            raise StartDateEndDateError("Error: 'start_date' must be smaller or equal to 'end_date'.")

         return pd.Timestamp(start_date), pd.Timestamp(end_date)

@@ -435,7 +407,7 @@ def _format_unit(unit) -> str:
         unit_string = format(unit, "~")

         if unit_string == "":
-            unit_string = "-"
+            return "-"

         return unit_string

@@ -466,30 +438,20 @@ def discover(cls, filter_=None, dataset=None, flatten: bool = True) -> dict:
                     parameters[f.name.lower()][parameter.name.lower()] = {}

                     if cls._unique_dataset:
-                        origin_unit, si_unit = cls._unit_tree[f.name][
-                            parameter.name
-                        ].value
+                        origin_unit, si_unit = cls._unit_tree[f.name][parameter.name].value
                     else:
                         dataset = cls._parameter_to_dataset_mapping[f][parameter]

-                        origin_unit, si_unit = cls._unit_tree[f.name][dataset.name][
-                            parameter.name
-                        ].value
+                        origin_unit, si_unit = cls._unit_tree[f.name][dataset.name][parameter.name].value

-                    parameters[f.name.lower()][parameter.name.lower()][
-                        "origin"
-                    ] = cls._format_unit(origin_unit)
+                    parameters[f.name.lower()][parameter.name.lower()]["origin"] = cls._format_unit(origin_unit)

-                    parameters[f.name.lower()][parameter.name.lower()][
-                        "si"
-                    ] = cls._format_unit(si_unit)
+                    parameters[f.name.lower()][parameter.name.lower()]["si"] = cls._format_unit(si_unit)

             return parameters

         datasets_filter = (
-            pd.Series(dataset, dtype=str)
-            .apply(parse_enumeration_from_template, args=(cls._dataset_base,))
-            .tolist()
+            pd.Series(dataset, dtype=str).apply(parse_enumeration_from_template, args=(cls._dataset_base,)).tolist()
             or cls._dataset_base
         )

@@ -508,21 +470,17 @@ def discover(cls, filter_=None, dataset=None, flatten: bool = True) -> dict:

                 for parameter in cls._dataset_tree[f.name][dataset]:

-                    parameters[f.name.lower()][dataset.lower()][
-                        parameter.name.lower()
-                    ] = {}
+                    parameters[f.name.lower()][dataset.lower()][parameter.name.lower()] = {}

-                    origin_unit, si_unit = cls._unit_tree[f.name][dataset][
-                        parameter.name
-                    ].value
+                    origin_unit, si_unit = cls._unit_tree[f.name][dataset][parameter.name].value

-                    parameters[f.name.lower()][dataset.lower()][parameter.name.lower()][
-                        "origin"
-                    ] = cls._format_unit(origin_unit)
+                    parameters[f.name.lower()][dataset.lower()][parameter.name.lower()]["origin"] = cls._format_unit(
+                        origin_unit
+                    )

-                    parameters[f.name.lower()][dataset.lower()][parameter.name.lower()][
-                        "si"
-                    ] = cls._format_unit(si_unit)
+                    parameters[f.name.lower()][dataset.lower()][parameter.name.lower()]["si"] = cls._format_unit(
+                        si_unit
+                    )

         return parameters

@@ -536,28 +494,19 @@ def _setup_discover_filter(cls, filter_) -> list:
         towards something else
         :return:
         """
-        # if cls._resolution_type == ResolutionType.FIXED:
-        #     log.warning("resolution filter will be ignored due to fixed resolution")
-        #     filter_ = [cls.resolution]
         if not filter_:
             filter_ = [*cls._resolution_base]

-        filter_ = (
-            pd.Series(filter_)
-            .apply(
-                parse_enumeration_from_template, args=(cls._resolution_base, Resolution)
-            )
-            .tolist()
+        return (
+            pd.Series(filter_).apply(parse_enumeration_from_template, args=(cls._resolution_base, Resolution)).tolist()
         )

-        return filter_
-
     def _coerce_meta_fields(self, df) -> pd.DataFrame:
         """
-        Method for filed coercion.
+        Method for metadata column coercion.

-        :param df:
-        :return:
+        :param df: DataFrame with columns as strings
+        :return: DataFrame with columns coerced to date etc.
""" df = df.astype(self._dtype_mapping) @@ -592,20 +541,7 @@ def all(self) -> StationsResult: df = self._coerce_meta_fields(df) - # TODO: exchange with more foreceful filtering if user wants - # if self.start_date: - # df = df[ - # df[Columns.FROM_DATE.value] <= self.start_date - # ] - # - # if self.end_date: - # df = df[ - # df[Columns.TO_DATE.value] >= self.end_date - # ] - - result = StationsResult(self, df.copy().reset_index(drop=True)) - - return result + return StationsResult(self, df.copy().reset_index(drop=True)) def filter_by_station_id(self, station_id: Tuple[str, ...]) -> StationsResult: """ @@ -622,13 +558,9 @@ def filter_by_station_id(self, station_id: Tuple[str, ...]) -> StationsResult: df = df[df[Columns.STATION_ID.value].isin(station_id)] - result = StationsResult(self, df) + return StationsResult(self, df) - return result - - def filter_by_name( - self, name: str, first: bool = True, threshold: int = 90 - ) -> StationsResult: + def filter_by_name(self, name: str, first: bool = True, threshold: int = 90) -> StationsResult: """ Method to filter stations for station name using string comparison. @@ -667,9 +599,7 @@ def filter_by_name( else: df = pd.DataFrame().reindex(columns=df.columns) - result = StationsResult(stations=self, df=df) - - return result + return StationsResult(stations=self, df=df) def filter_by_rank( self, @@ -708,9 +638,7 @@ def filter_by_rank( # If num_stations_nearby is higher then the actual amount of stations # further indices and distances are added which have to be filtered out distances = distances[: min(df.shape[0], rank)] - indices_nearest_neighbours = indices_nearest_neighbours[ - : min(df.shape[0], rank) - ] + indices_nearest_neighbours = indices_nearest_neighbours[: min(df.shape[0], rank)] distances_km = np.array(distances * EARTH_RADIUS_KM) @@ -720,13 +648,10 @@ def filter_by_rank( if df.empty: log.warning( - f"No weather stations were found for coordinate " - f"{latitude}°N and {longitude}°E and number {rank}" + f"No weather stations were found for coordinate " f"{latitude}°N and {longitude}°E and number {rank}" ) - result = StationsResult(self, df.reset_index(drop=True)) - - return result + return StationsResult(self, df.reset_index(drop=True)) def filter_by_distance( self, latitude: float, longitude: float, distance: float, unit: str = "km" @@ -752,13 +677,9 @@ def filter_by_distance( distance_in_km = guess(distance, unit, [Distance]).km # TODO: replace the repeating call to self.all() - all_nearby_stations = self.filter_by_rank( - latitude, longitude, self.all().df.shape[0] - ).df + all_nearby_stations = self.filter_by_rank(latitude, longitude, self.all().df.shape[0]).df - df = all_nearby_stations[ - all_nearby_stations[Columns.DISTANCE.value] <= distance_in_km - ] + df = all_nearby_stations[all_nearby_stations[Columns.DISTANCE.value] <= distance_in_km] if df.empty: log.warning( @@ -766,13 +687,9 @@ def filter_by_distance( f"{latitude}°N and {longitude}°E and distance {distance_in_km}km" ) - result = StationsResult(stations=self, df=df.reset_index(drop=True)) + return StationsResult(stations=self, df=df.reset_index(drop=True)) - return result - - def filter_by_bbox( - self, left: float, bottom: float, right: float, top: float - ) -> StationsResult: + def filter_by_bbox(self, left: float, bottom: float, right: float, top: float) -> StationsResult: """ Method to filter stations by bounding box. 
@@ -800,9 +717,7 @@ def filter_by_bbox(
             & df[Columns.LONGITUDE.value].apply(lambda x: x in lon_interval)
         ]

-        result = StationsResult(stations=self, df=df.reset_index(drop=True))
-
-        return result
+        return StationsResult(stations=self, df=df.reset_index(drop=True))

     def filter_by_sql(self, sql: str) -> pd.DataFrame:
         """
@@ -816,11 +731,7 @@ def filter_by_sql(self, sql: str) -> pd.DataFrame:

         df = duckdb.query_df(df, "data", sql).df()

-        df[Columns.FROM_DATE.value] = df[Columns.FROM_DATE.value].dt.tz_localize(
-            self.tz
-        )
+        df[Columns.FROM_DATE.value] = df[Columns.FROM_DATE.value].dt.tz_localize(self.tz)
         df[Columns.TO_DATE.value] = df[Columns.TO_DATE.value].dt.tz_localize(self.tz)

-        result = StationsResult(stations=self, df=df.reset_index(drop=True))
-
-        return result
+        return StationsResult(stations=self, df=df.reset_index(drop=True))
diff --git a/wetterdienst/core/scalar/result.py b/wetterdienst/core/scalar/result.py
index f2564318b..e4c74ba0a 100644
--- a/wetterdienst/core/scalar/result.py
+++ b/wetterdienst/core/scalar/result.py
@@ -20,12 +20,7 @@


 class StationsResult(ExportMixin):
-    def __init__(
-        self,
-        stations: Union["ScalarRequestCore", "DwdMosmixRequest"],
-        df: pd.DataFrame,
-        **kwargs
-    ) -> None:
+    def __init__(self, stations: Union["ScalarRequestCore", "DwdMosmixRequest"], df: pd.DataFrame, **kwargs) -> None:
         # TODO: add more attributes from ScalarStations class
         self.stations = stations
         self.df = df
@@ -146,7 +141,5 @@ def to_ogc_feature_collection(self):
         raise NotImplementedError()

     def filter_by_date(self, date: str) -> pd.DataFrame:
-        self.df = filter_by_date_and_resolution(
-            self.df, date=date, resolution=self.stations.resolution
-        )
+        self.df = filter_by_date_and_resolution(self.df, date=date, resolution=self.stations.resolution)
         return self.df
diff --git a/wetterdienst/core/scalar/values.py b/wetterdienst/core/scalar/values.py
index 0d43b813b..974bf777e 100644
--- a/wetterdienst/core/scalar/values.py
+++ b/wetterdienst/core/scalar/values.py
@@ -41,13 +41,13 @@ def _meta_fields(self) -> List[str]:
         :return: list of strings representing the metadata fields/columns
         """
         if not self.stations.stations.tidy:
-            fields = [
+            return [
                 Columns.STATION_ID.value,
                 Columns.DATASET.value,
                 Columns.DATE.value,
             ]
         else:
-            fields = [
+            return [
                 Columns.STATION_ID.value,
                 Columns.DATASET.value,
                 Columns.PARAMETER.value,
@@ -56,8 +56,6 @@ def _meta_fields(self) -> List[str]:
                 Columns.QUALITY.value,
             ]

-        return fields
-
     # Fields for date coercion
     _date_fields = [Columns.DATE.value, Columns.FROM_DATE.value, Columns.TO_DATE.value]

@@ -123,15 +121,13 @@ def _get_complete_dates(self, station_id) -> pd.DatetimeIndex:
             tz_delta = self._get_timedelta_from_timezones(timezone_, pytz.UTC)

             # TODO: better manage start and end date and its timezone
-            date_range = pd.date_range(
+            return pd.date_range(
                 start_date + datetime.timedelta(hours=tz_delta),
                 end_date + datetime.timedelta(hours=tz_delta),
                 freq=self.stations.frequency.value,
                 tz=pytz.UTC,
             )

-        return date_range
-
     def _get_timezone_from_station(self, station_id: str) -> timezone:
         """
         Get timezone information for explicit station that is used to set the
@@ -145,9 +141,7 @@ def _get_timezone_from_station(self, station_id: str) -> timezone:

         station = stations[stations[Columns.STATION_ID.value] == station_id]

-        longitude, latitude = station.loc[
-            :, [Columns.LONGITUDE.value, Columns.LATITUDE.value]
-        ].T.values
+        longitude, latitude = station.loc[:, [Columns.LONGITUDE.value, Columns.LATITUDE.value]].T.values

         tz_string = self._tf.timezone_at(lng=longitude, lat=latitude)

@@ -211,19 +205,15 @@ def _convert_values_to_si(series, parameter: Optional[str] = None) -> pd.Series:
         if self.stations.stations._has_tidy_data:
             df_si = pd.DataFrame()
             for par, group in df.groupby(Columns.PARAMETER.value):
-                group[Columns.VALUE.value] = _convert_values_to_si(
-                    group[Columns.VALUE.value], par
-                )
+                group[Columns.VALUE.value] = _convert_values_to_si(group[Columns.VALUE.value], par)
                 df_si = df_si.append(group)
-        else:
-            df_si = df.apply(_convert_values_to_si, axis=0)

-        return df_si
+            return df_si
+
+        return df.apply(_convert_values_to_si, axis=0)

-    def _create_conversion_factors(
-        self, dataset
-    ) -> Dict[str, Tuple[Union[operator.add, operator.mul], float]]:
+    def _create_conversion_factors(self, dataset) -> Dict[str, Tuple[Union[operator.add, operator.mul], float]]:
         """
         Function to create conversion factors based on a given dataset

@@ -249,13 +239,9 @@ def _create_conversion_factors(
             parameter = parameter.name

             if self.stations.stations._unique_dataset:
-                parameter_value = self.stations.stations._dataset_tree[
-                    dataset_accessor
-                ][parameter].value
+                parameter_value = self.stations.stations._dataset_tree[dataset_accessor][parameter].value
             else:
-                parameter_value = self.stations.stations._dataset_tree[
-                    dataset_accessor
-                ][dataset][parameter].value
+                parameter_value = self.stations.stations._dataset_tree[dataset_accessor][dataset][parameter].value

             if si_unit == SIUnit.KILOGRAM_PER_SQUARE_METER.value:
                 # Fixed conversion factors to kg / m², as it only applies
                 if origin_unit == OriginUnit.MILLIMETER.value:
                     conversion_factors[parameter_value] = (operator.mul, 1)
                 else:
-                    raise ValueError(
-                        "manually set conversion factor for precipitation unit"
-                    )
+                    raise ValueError("manually set conversion factor for precipitation unit")
             elif si_unit == SIUnit.DEGREE_KELVIN.value:
                 # Apply offset addition to temperature measurements
                 # Take 0 as this is appropriate for adding on other numbers
@@ -302,21 +286,15 @@ def __eq__(self, other):
             and self.stations.start_date == other.stations.start_date
             and self.stations.end_date == other.stations.end_date
         )
-        pass

     def __str__(self):
         """ Str representation of request object """
         # TODO: include source
         # TODO: include data type
-        station_ids_joined = "& ".join(
-            [str(station_id) for station_id in self.stations.station_id]
-        )
+        station_ids_joined = "& ".join([str(station_id) for station_id in self.stations.station_id])

         parameters_joined = "& ".join(
-            [
-                parameter.value
-                for parameter, parameter_set in self.stations.stations.parameter
-            ]
+            [parameter.value for parameter, parameter_set in self.stations.stations.parameter]
         )

         return ", ".join(
@@ -327,11 +305,8 @@ def __str__(self):
                 str(self.stations.end_date),
             ]
         )
-        pass

-    def _create_empty_station_parameter_df(
-        self, station_id: str, parameter: Enum, dataset: Enum
-    ) -> pd.DataFrame:
+    def _create_empty_station_parameter_df(self, station_id: str, parameter: Enum, dataset: Enum) -> pd.DataFrame:
         """
         Function to create an empty DataFrame
         :param station_id:
         :param parameter:
@@ -388,15 +363,15 @@ def _create_empty_station_parameter_df(

         return df

-    def _build_complete_df(
-        self, df: pd.DataFrame, station_id: str, parameter: Enum, dataset: Enum
-    ) -> pd.DataFrame:
-        # For cases where requests are not defined by start and end date but rather by
-        # periods, use the returned df without modifications
-        # We may put a standard date range here if no data is found
-        if not self.stations.start_date:
-            return df
+    def _build_complete_df(self, df: pd.DataFrame, station_id: str, parameter: Enum, dataset: Enum) -> pd.DataFrame:
+        """Method to build a complete df with all dates from start to end date included.
+
+        :param df:
+        :param station_id:
+        :param parameter:
+        :param dataset:
+        :return:
+        """
         if parameter != dataset or not self.stations.stations.tidy:
             base_df = self._get_base_df(station_id)

@@ -412,15 +387,11 @@ def _build_complete_df(
             if self.stations.tidy:
                 df[Columns.PARAMETER.value] = parameter.value
-                df[Columns.PARAMETER.value] = pd.Categorical(
-                    df[Columns.PARAMETER.value]
-                )
+                df[Columns.PARAMETER.value] = pd.Categorical(df[Columns.PARAMETER.value])

                 if dataset:
                     df[Columns.DATASET.value] = dataset.name.lower()
-                    df[Columns.DATASET.value] = pd.Categorical(
-                        df[Columns.DATASET.value]
-                    )
+                    df[Columns.DATASET.value] = pd.Categorical(df[Columns.DATASET.value])

             return df
         else:
@@ -429,16 +400,12 @@ def _build_complete_df(
             if self.stations.stations._unique_dataset:
                 parameter_ = parse_enumeration_from_template(
                     parameter,
-                    self.stations.stations._parameter_base[
-                        self.stations.resolution.name
-                    ],
+                    self.stations.stations._parameter_base[self.stations.resolution.name],
                 )
             else:
                 parameter_ = parse_enumeration_from_template(
                     parameter,
-                    self.stations.stations._dataset_tree[
-                        self.stations.resolution.name
-                    ][dataset.name],
+                    self.stations.stations._dataset_tree[self.stations.resolution.name][dataset.name],
                 )

             df = pd.merge(
@@ -471,9 +438,7 @@ def _organize_df_columns(self, df: pd.DataFrame) -> pd.DataFrame:

         columns.extend(df.columns.difference(columns, sort=False))

-        df = df.reindex(columns=columns)
-
-        return df
+        return df.reindex(columns=columns)

     def query(self) -> Generator[ValuesResult, None, None]:
         """
@@ -493,10 +458,7 @@ def query(self) -> Generator[ValuesResult, None, None]:
                 # TODO: For now skip date based parameters as we didn't
                 #  yet decide how to convert those values to floats to
                 #  make them conform with tidy shape
-                single_tidy_date_parameters = (
-                    self.stations.stations.tidy
-                    and parameter.value in self._date_parameters
-                )
+                single_tidy_date_parameters = self.stations.stations.tidy and parameter.value in self._date_parameters
                 if single_tidy_date_parameters:
                     log.warning(
                         f"parameter {parameter.value} is skipped in tidy format "
@@ -504,9 +466,7 @@ def query(self) -> Generator[ValuesResult, None, None]:
                     )
                     continue

-                parameter_df = self._collect_station_parameter(
-                    station_id, parameter, dataset
-                )
+                parameter_df = self._collect_station_parameter(station_id, parameter, dataset)

                 if parameter_df.empty:
                     continue
@@ -525,21 +485,14 @@ def query(self) -> Generator[ValuesResult, None, None]:
                     parameter_df = self.tidy_up_df(parameter_df, dataset)

                     if parameter != dataset:
-                        parameter_df = parameter_df[
-                            parameter_df[Columns.PARAMETER.value]
-                            == parameter.value.lower()
-                        ]
+                        parameter_df = parameter_df[parameter_df[Columns.PARAMETER.value] == parameter.value.lower()]
                 elif self.stations.stations._has_tidy_data:
                     parameter_df = self.tabulate_df(parameter_df)

                 # Skip date fields in tidy format, no further check required as still
                 # "normal" parameters should be available
                 if self.stations.stations.tidy and self._date_parameters:
-                    parameter_df = parameter_df[
-                        ~parameter_df[Columns.PARAMETER.value].isin(
-                            self._date_parameters
-                        )
-                    ]
+                    parameter_df = parameter_df[~parameter_df[Columns.PARAMETER.value].isin(self._date_parameters)]
                     log.warning(
                         f"parameters {self._date_parameters} are skipped in tidy format "
                         f"as the date parameters are currently not converted to floats"
                     )
@@ -549,9 +502,11 @@ def query(self) -> Generator[ValuesResult, None, None]:

                 # Merge on full date range if values are found to ensure result
                 # even if no actual values exist
-                parameter_df = self._build_complete_df(
-                    parameter_df, station_id, parameter, dataset
-                )
+                # For cases where requests are not defined by start and end date but rather by
+                # periods, use the returned df without modifications
+                # We may put a standard date range here if no data is found
+                if self.stations.stations.start_date:
+                    parameter_df = self._build_complete_df(parameter_df, station_id, parameter, dataset)

                 parameter_df[Columns.DATASET.value] = dataset.name.lower()

@@ -562,9 +517,7 @@ def query(self) -> Generator[ValuesResult, None, None]:
             try:
                 station_df = pd.concat(station_data, ignore_index=True)
             except ValueError:
-                station_df = self._create_empty_station_parameter_df(
-                    station_id, parameter, dataset
-                )
+                station_df = self._create_empty_station_parameter_df(station_id, parameter, dataset)

             station_df = self._coerce_meta_fields(station_df)

@@ -589,9 +542,7 @@ def query(self) -> Generator[ValuesResult, None, None]:

             yield ValuesResult(stations=self.stations, df=station_df)

     @abstractmethod
-    def _collect_station_parameter(
-        self, station_id: str, parameter: Enum, dataset: Enum
-    ) -> pd.DataFrame:
+    def _collect_station_parameter(self, station_id: str, parameter: Enum, dataset: Enum) -> pd.DataFrame:
         """
         Implementation of data collection for a station id plus parameter from the
         specified weather service. Takes care of the gathering of the data and putting
@@ -621,9 +572,7 @@ def tidy_up_df(self, df: pd.DataFrame, dataset: Enum) -> pd.DataFrame:

         if Columns.QUALITY.value not in df:
             df[Columns.QUALITY.value] = np.nan

-        df[Columns.QUALITY.value] = pd.to_numeric(df[Columns.QUALITY.value]).astype(
-            float
-        )
+        df[Columns.QUALITY.value] = pd.to_numeric(df[Columns.QUALITY.value]).astype(float)

         # Set quality of NaN values to NaN as well
         df.loc[df[Columns.VALUE.value].isna(), Columns.QUALITY.value] = np.NaN
@@ -712,31 +661,19 @@ def _coerce_meta_fields(self, df: pd.DataFrame) -> pd.DataFrame:
         :param df: pandas.DataFrame with the "fresh" data
         :return: pandas.DataFrame with meta fields being coerced
         """
-        df[Columns.STATION_ID.value] = self._parse_station_id(
-            df[Columns.STATION_ID.value]
-        ).astype("category")
+        df[Columns.STATION_ID.value] = self._parse_station_id(df[Columns.STATION_ID.value]).astype("category")

         # TODO: why do we need this (again)?
- df[Columns.DATE.value] = pd.to_datetime( - df[Columns.DATE.value], infer_datetime_format=True - ) + df[Columns.DATE.value] = pd.to_datetime(df[Columns.DATE.value], infer_datetime_format=True) - df[Columns.DATASET.value] = self._coerce_strings( - df[Columns.DATASET.value] - ).astype("category") + df[Columns.DATASET.value] = self._coerce_strings(df[Columns.DATASET.value]).astype("category") if self.stations.stations.tidy: - df[Columns.PARAMETER.value] = self._coerce_strings( - df[Columns.PARAMETER.value] - ).astype("category") - df[Columns.VALUE.value] = pd.to_numeric(df[Columns.VALUE.value]).astype( - float - ) + df[Columns.PARAMETER.value] = self._coerce_strings(df[Columns.PARAMETER.value]).astype("category") + df[Columns.VALUE.value] = pd.to_numeric(df[Columns.VALUE.value]).astype(float) # TODO: may coerce more carefully quality codes or replace them by numbers - df[Columns.QUALITY.value] = pd.to_numeric( - df[Columns.QUALITY.value], errors="coerce" - ).astype(float) + df[Columns.QUALITY.value] = pd.to_numeric(df[Columns.QUALITY.value], errors="coerce").astype(float) return df @@ -761,12 +698,10 @@ def _coerce_dates(self, series: pd.Series, timezone_: timezone) -> pd.Series: series = pd.to_datetime(series, infer_datetime_format=True) try: - series = series.dt.tz_localize(timezone_) + return series.dt.tz_localize(timezone_) except TypeError: - pass - - return series + return series @staticmethod def _coerce_integers(series: pd.Series) -> pd.Series: """ @@ -776,11 +711,7 @@ def _coerce_integers(series: pd.Series) -> pd.Series: :param series: :return: """ - return ( - pd.to_numeric(series, errors="coerce") - .astype(pd.Float64Dtype()) - .astype(pd.Int64Dtype()) - ) + return pd.to_numeric(series, errors="coerce").astype(pd.Float64Dtype()).astype(pd.Int64Dtype()) @staticmethod def _coerce_strings(series: pd.Series) -> pd.Series: @@ -812,10 +743,7 @@ def _coerce_irregular_parameter(self, series: pd.Series) -> pd.Series: :return: """ if self._irregular_parameters: - raise NotImplementedError( - "implement _parse_irregular_parameter " - "method to parse irregular parameters" - ) + raise NotImplementedError("implement _parse_irregular_parameter method to parse irregular parameters") return pd.Series(series) @@ -831,9 +759,7 @@ def _coerce_parameter_types(self, df: pd.DataFrame) -> pd.DataFrame: continue if column in self._irregular_parameters: df[column] = self._coerce_irregular_parameter(df[column]) - elif column in self._integer_parameters or column.startswith( - Columns.QUALITY_PREFIX.value - ): + elif column in self._integer_parameters or column.startswith(Columns.QUALITY_PREFIX.value): df[column] = self._coerce_integers(df[column]) elif column in self._string_parameters: df[column] = self._coerce_strings(df[column]) @@ -852,9 +778,7 @@ def all(self) -> ValuesResult: data = [] tqdm_out = TqdmToLogger(log, level=logging.INFO) - for result in tqdm( - self.query(), total=len(self.stations.station_id), file=tqdm_out - ): + for result in tqdm(self.query(), total=len(self.stations.station_id), file=tqdm_out): data.append(result.df) try: @@ -880,9 +804,7 @@ def _humanize(self, df: pd.DataFrame) -> pd.DataFrame: if not self.stations.tidy: df = df.rename(columns=hcnm) else: - df[Columns.PARAMETER.value] = df[ - Columns.PARAMETER.value - ].cat.rename_categories(hcnm) + df[Columns.PARAMETER.value] = df[Columns.PARAMETER.value].cat.rename_categories(hcnm) return df @@ -894,11 +816,7 @@ def _create_humanized_parameters_mapping(self) -> Dict[str, str]: :return: """ - hcnm = { + return { parameter.value: parameter.name.lower() -
for parameter in self.stations.stations._parameter_base[ - self.stations.stations.resolution.name - ] + for parameter in self.stations.stations._parameter_base[self.stations.stations.resolution.name] } - - return hcnm diff --git a/wetterdienst/metadata/columns.py b/wetterdienst/metadata/columns.py index c126da335..c911e23f2 100644 --- a/wetterdienst/metadata/columns.py +++ b/wetterdienst/metadata/columns.py @@ -31,7 +31,6 @@ class Columns(Enum): VALUE = "value" # Columns for quality QUALITY = "quality" - # QUALITY_TERTIARY = "quality_tertiary" # for later # Columns used for RADOLAN PERIOD_TYPE = "period_type" DATETIME = "datetime" diff --git a/wetterdienst/metadata/parameter.py b/wetterdienst/metadata/parameter.py index c2aa07a77..20baeef7b 100644 --- a/wetterdienst/metadata/parameter.py +++ b/wetterdienst/metadata/parameter.py @@ -5,8 +5,8 @@ class Parameter(Enum): - """Parameter enum with fixed names of parameters being used in the - entire library. + """Parameter enum with fixed names of parameters being used in the entire library. + Groups are: - Clouds - Evapotranspiration / Evaporation @@ -47,13 +47,9 @@ class Parameter(Enum): # total CLOUD_COVER_TOTAL = "CLOUD_COVER_TOTAL" CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT = "CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT" - CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL = ( - "CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL" - ) + CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL = "CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL" CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET = "CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET" - CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET_MANUAL = ( - "CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET_MANUAL" - ) + CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET_MANUAL = "CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET_MANUAL" CLOUD_COVER_EFFECTIVE = "CLOUD_COVER_EFFECTIVE" # layers CLOUD_COVER_LAYER1 = "CLOUD_COVER_LAYER1" @@ -127,24 +123,12 @@ class Parameter(Enum): PRECIPITATION_HEIGHT_LAST_24H = "PRECIPITATION_HEIGHT_LAST_24H" PRECIPITATION_HEIGHT_MULTIDAY = "PRECIPITATION_HEIGHT_MULTIDAY" # precipitation height consistent with significant weather - PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_1H = ( - "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_1H" - ) - PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_3H = ( - "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_3H" - ) - PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_6H = ( - "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_6H" - ) - PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_12H = ( - "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_12H" - ) - PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_24H = ( - "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_24H" - ) - PRECIPITATION_HEIGHT_LIQUID_SIGNIFICANT_WEATHER_LAST_1H = ( - "PRECIPITATION_HEIGHT_LIQUID_SIGNIFICANT_WEATHER_LAST_1H" - ) + PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_1H = "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_1H" + PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_3H = "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_3H" + PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_6H = "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_6H" + PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_12H = "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_12H" + PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_24H = "PRECIPITATION_HEIGHT_SIGNIFICANT_WEATHER_LAST_24H" + PRECIPITATION_HEIGHT_LIQUID_SIGNIFICANT_WEATHER_LAST_1H = "PRECIPITATION_HEIGHT_LIQUID_SIGNIFICANT_WEATHER_LAST_1H" # ---- extremes ---- PRECIPITATION_HEIGHT_MAX = "PRECIPITATION_HEIGHT_MAX" PRECIPITATION_HEIGHT_LIQUID_MAX = 
"PRECIPITATION_HEIGHT_LIQUID_MAX" @@ -156,89 +140,41 @@ class Parameter(Enum): PRECIPITATION_DURATION = "PRECIPITATION_DURATION" # ---- probability ---- # greater 0 - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_6H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_6H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_12H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_12H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_24H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_24H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_6H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_6H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_12H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_12H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_24H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_0_MM_LAST_24H" # greater 0.1 - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_1_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_1_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_1_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_1_MM_LAST_1H" # greater 0.2 - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_1H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_6H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_6H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_12H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_12H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_24H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_24H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_1H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_6H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_6H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_12H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_12H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_24H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_2_MM_LAST_24H" # greater 0.3 - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_3_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_3_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_3_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_3_MM_LAST_1H" # greater 0.5 - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_5_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_5_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_5_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_5_MM_LAST_1H" # greater 0.7 - PROBABILITY_PRECIPITATION_HEIGHT_GT_0_7_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_7_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_0_7_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_0_7_MM_LAST_1H" # greater 1.0 - PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_1H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_6H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_6H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_12H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_12H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_24H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_24H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_1H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_6H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_6H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_12H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_12H" + 
PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_24H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_1_0_MM_LAST_24H" # greater 2.0 - PROBABILITY_PRECIPITATION_HEIGHT_GT_2_0_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_2_0_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_2_0_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_2_0_MM_LAST_1H" # greater 3.0 - PROBABILITY_PRECIPITATION_HEIGHT_GT_3_0_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_3_0_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_3_0_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_3_0_MM_LAST_1H" # greater 5.0 - PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_1H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_6H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_6H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_12H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_12H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_24H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_24H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_1H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_6H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_6H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_12H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_12H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_24H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_5_0_MM_LAST_24H" # greater 10.0 - PROBABILITY_PRECIPITATION_HEIGHT_GT_10_0_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_10_0_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_10_0_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_10_0_MM_LAST_1H" # greater 15.0 - PROBABILITY_PRECIPITATION_HEIGHT_GT_15_0_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_15_0_MM_LAST_1H" - ) - PROBABILITY_PRECIPITATION_HEIGHT_GT_25_0_MM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_HEIGHT_GT_25_0_MM_LAST_1H" - ) + PROBABILITY_PRECIPITATION_HEIGHT_GT_15_0_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_15_0_MM_LAST_1H" + PROBABILITY_PRECIPITATION_HEIGHT_GT_25_0_MM_LAST_1H = "PROBABILITY_PRECIPITATION_HEIGHT_GT_25_0_MM_LAST_1H" PROBABILITY_PRECIPITATION_LAST_1H = "PROBABILITY_PRECIPITATION_LAST_1H" PROBABILITY_PRECIPITATION_LAST_6H = "PROBABILITY_PRECIPITATION_LAST_6H" @@ -249,60 +185,32 @@ class Parameter(Enum): PROBABILITY_DRIZZLE_LAST_6H = "PROBABILITY_DRIZZLE_LAST_6H" PROBABILITY_DRIZZLE_LAST_12H = "PROBABILITY_DRIZZLE_LAST_12H" - PROBABILITY_PRECIPITATION_STRATIFORM_LAST_1H = ( - "PROBABILITY_PRECIPITATION_STRATIFORM_LAST_1H" - ) - PROBABILITY_PRECIPITATION_STRATIFORM_LAST_6H = ( - "PROBABILITY_PRECIPITATION_STRATIFORM_LAST_6H" - ) - PROBABILITY_PRECIPITATION_STRATIFORM_LAST_12H = ( - "PROBABILITY_PRECIPITATION_STRATIFORM_LAST_12H" - ) + PROBABILITY_PRECIPITATION_STRATIFORM_LAST_1H = "PROBABILITY_PRECIPITATION_STRATIFORM_LAST_1H" + PROBABILITY_PRECIPITATION_STRATIFORM_LAST_6H = "PROBABILITY_PRECIPITATION_STRATIFORM_LAST_6H" + PROBABILITY_PRECIPITATION_STRATIFORM_LAST_12H = "PROBABILITY_PRECIPITATION_STRATIFORM_LAST_12H" - PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_1H = ( - "PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_1H" - ) - PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_6H = ( - "PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_6H" - ) - PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_12H = ( - "PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_12H" - ) + PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_1H = "PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_1H" + 
PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_6H = "PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_6H" + PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_12H = "PROBABILITY_PRECIPITATION_CONVECTIVE_LAST_12H" - PROBABILITY_PRECIPITATION_LIQUID_LAST_1H = ( - "PROBABILITY_PRECIPITATION_LIQUID_LAST_1H" - ) - PROBABILITY_PRECIPITATION_LIQUID_LAST_6H = ( - "PROBABILITY_PRECIPITATION_LIQUID_LAST_6H" - ) - PROBABILITY_PRECIPITATION_LIQUID_LAST_12H = ( - "PROBABILITY_PRECIPITATION_LIQUID_LAST_12H" - ) + PROBABILITY_PRECIPITATION_LIQUID_LAST_1H = "PROBABILITY_PRECIPITATION_LIQUID_LAST_1H" + PROBABILITY_PRECIPITATION_LIQUID_LAST_6H = "PROBABILITY_PRECIPITATION_LIQUID_LAST_6H" + PROBABILITY_PRECIPITATION_LIQUID_LAST_12H = "PROBABILITY_PRECIPITATION_LIQUID_LAST_12H" PROBABILITY_PRECIPITATION_SOLID_LAST_1H = "PROBABILITY_PRECIPITATION_SOLID_LAST_1H" PROBABILITY_PRECIPITATION_SOLID_LAST_6H = "PROBABILITY_PRECIPITATION_SOLID_LAST_6H" - PROBABILITY_PRECIPITATION_SOLID_LAST_12H = ( - "PROBABILITY_PRECIPITATION_SOLID_LAST_12H" - ) + PROBABILITY_PRECIPITATION_SOLID_LAST_12H = "PROBABILITY_PRECIPITATION_SOLID_LAST_12H" - PROBABILITY_PRECIPITATION_FREEZING_LAST_1H = ( - "PROBABILITY_PRECIPITATION_FREEZING_LAST_1H" - ) - PROBABILITY_PRECIPITATION_FREEZING_LAST_6H = ( - "PROBABILITY_PRECIPITATION_FREEZING_LAST_6H" - ) - PROBABILITY_PRECIPITATION_FREEZING_LAST_12H = ( - "PROBABILITY_PRECIPITATION_FREEZING_LAST_12H" - ) + PROBABILITY_PRECIPITATION_FREEZING_LAST_1H = "PROBABILITY_PRECIPITATION_FREEZING_LAST_1H" + PROBABILITY_PRECIPITATION_FREEZING_LAST_6H = "PROBABILITY_PRECIPITATION_FREEZING_LAST_6H" + PROBABILITY_PRECIPITATION_FREEZING_LAST_12H = "PROBABILITY_PRECIPITATION_FREEZING_LAST_12H" # ---- frequency ---- PRECIPITATION_FREQUENCY = "PRECIPITATION_FREQUENCY" # ---- count ---- # Number of days included in the multiday precipitation total COUNT_DAYS_MULTIDAY_PRECIPITATION = "COUNT_DAYS_MULTIDAY_PRECIPITATION" # Number of days with non-zero precipitation included in multiday precipitation total - COUNT_DAYS_MULTIDAY_PRECIPITATION_HEIGHT_GT_0 = ( - "COUNT_DAYS_MULTIDAY_PRECIPITATION_HEIGHT_GT_0" - ) + COUNT_DAYS_MULTIDAY_PRECIPITATION_HEIGHT_GT_0 = "COUNT_DAYS_MULTIDAY_PRECIPITATION_HEIGHT_GT_0" # PRESSURE # ---- averaged ---- @@ -562,18 +470,12 @@ class Parameter(Enum): COUNT_DAYS_HEATING_DEGREE = "COUNT_DAYS_HEATING_DEGREE" COUNT_DAYS_COOLING_DEGREE = "COUNT_DAYS_COOLING_DEGREE" # Number of days included in the multiday minimum temperature - COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MIN_200 = ( - "COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MIN_200" - ) + COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MIN_200 = "COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MIN_200" # Number of days included in the multiday maximum temperature - COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MAX_200 = ( - "COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MAX_200" - ) + COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MAX_200 = "COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MAX_200" # ---- error ---- ERROR_ABSOLUTE_TEMPERATURE_AIR_MEAN_200 = "ERROR_ABSOLUTE_TEMPERATURE_AIR_MEAN_200" - ERROR_ABSOLUTE_TEMPERATURE_DEW_POINT_MEAN_200 = ( - "ERROR_ABSOLUTE_TEMPERATURE_DEW_POINT_MEAN_200" - ) + ERROR_ABSOLUTE_TEMPERATURE_DEW_POINT_MEAN_200 = "ERROR_ABSOLUTE_TEMPERATURE_DEW_POINT_MEAN_200" # VISIBILITY # ---- distance ---- @@ -622,12 +524,8 @@ class Parameter(Enum): WEATHER_TYPE_FREEZING_DRIZZLE = "WEATHER_TYPE_FREEZING_DRIZZLE" WEATHER_TYPE_RAIN = "WEATHER_TYPE_RAIN" WEATHER_TYPE_FREEZING_RAIN = "WEATHER_TYPE_FREEZING_RAIN" - WEATHER_TYPE_SNOW_PELLETS_SNOW_GRAINS_ICE_CRYSTALS = ( - 
"WEATHER_TYPE_SNOW_PELLETS_SNOW_GRAINS_ICE_CRYSTALS" - ) - WEATHER_TYPE_PRECIPITATION_UNKNOWN_SOURCE = ( - "WEATHER_TYPE_PRECIPITATION_UNKNOWN_SOURCE" - ) + WEATHER_TYPE_SNOW_PELLETS_SNOW_GRAINS_ICE_CRYSTALS = "WEATHER_TYPE_SNOW_PELLETS_SNOW_GRAINS_ICE_CRYSTALS" + WEATHER_TYPE_PRECIPITATION_UNKNOWN_SOURCE = "WEATHER_TYPE_PRECIPITATION_UNKNOWN_SOURCE" WEATHER_TYPE_GROUND_FOG = "WEATHER_TYPE_GROUND_FOG" WEATHER_TYPE_ICE_FOG_FREEZING_FOG = "WEATHER_TYPE_ICE_FOG_FREEZING_FOG" diff --git a/wetterdienst/metadata/period.py b/wetterdienst/metadata/period.py index d927ed0f9..8e1ee4095 100644 --- a/wetterdienst/metadata/period.py +++ b/wetterdienst/metadata/period.py @@ -28,18 +28,12 @@ def _period_type_order_mapping(self): def __lt__(self, other): if self.__class__ is other.__class__: - return ( - self._period_type_order_mapping[self.name] - < self._period_type_order_mapping[other.name] - ) + return self._period_type_order_mapping[self.name] < self._period_type_order_mapping[other.name] return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: - return ( - self._period_type_order_mapping[self.name] - > self._period_type_order_mapping[other.name] - ) + return self._period_type_order_mapping[self.name] > self._period_type_order_mapping[other.name] return NotImplemented def __ge__(self, other): diff --git a/wetterdienst/metadata/provider.py b/wetterdienst/metadata/provider.py index ed0a664e0..b0bb7159d 100644 --- a/wetterdienst/metadata/provider.py +++ b/wetterdienst/metadata/provider.py @@ -31,7 +31,6 @@ class Provider(Enum): "National Oceanic And Atmospheric Administration", "National Oceanic And Atmospheric Administration", "United States Of America", - "© National Oceanic And Atmospheric Administration (NOAA), " - "Global Historical Climatology Network", + "© National Oceanic And Atmospheric Administration (NOAA), " "Global Historical Climatology Network", "ftp://ftp.ncdc.noaa.gov/pub/data/ghcn/daily/", ) diff --git a/wetterdienst/metadata/unit.py b/wetterdienst/metadata/unit.py index 566978ce5..20aad7f15 100644 --- a/wetterdienst/metadata/unit.py +++ b/wetterdienst/metadata/unit.py @@ -40,17 +40,13 @@ class OriginUnit(Enum): KILOGRAM_PER_SQUARE_METER = REGISTRY.kilogram / (REGISTRY.meter ** 2) # Temperature - DEGREE_CELSIUS = ( - 1 * REGISTRY.degree_Celsius - ) # without the "1 *" we get an offset error + DEGREE_CELSIUS = 1 * REGISTRY.degree_Celsius # without the "1 *" we get an offset error DEGREE_KELVIN = 1 * REGISTRY.degree_Kelvin # Speed METER_PER_SECOND = REGISTRY.meter / REGISTRY.second KILOMETER_PER_HOUR = REGISTRY.kilometer / REGISTRY.hour - BEAUFORT = ( - REGISTRY.beaufort - ) # beaufort should always stay beaufort! Calculations to m/s are empirical + BEAUFORT = REGISTRY.beaufort # beaufort should always stay beaufort! Calculations to m/s are empirical # Pressure PASCAL = REGISTRY.pascal diff --git a/wetterdienst/provider/dwd/forecast/access.py b/wetterdienst/provider/dwd/forecast/access.py index 3ccdd91b7..d44e00b9d 100644 --- a/wetterdienst/provider/dwd/forecast/access.py +++ b/wetterdienst/provider/dwd/forecast/access.py @@ -38,12 +38,18 @@ def __init__( self.dwdfs = NetworkFilesystemManager.get(ttl=CacheExpiry.FIVE_MINUTES) - def download(self, url: str): - # https://stackoverflow.com/questions/37573483/progress-bar-while-download-file-over-http-with-requests # noqa:E501,B950 + def download(self, url: str) -> BytesIO: + """Download kml file as bytes. 
+ https://stackoverflow.com/questions/37573483/progress-bar-while-download-file-over-http-with-requests + + block_size: int or None + Bytes to download in one request; use instance value if None. If + zero, will return a streaming Requests file-like instance. + + :param url: url string to kml file + :return: kml file as bytes + """ - # block_size: int or None - # Bytes to download in one request; use instance value if None. If - # zero, will return a streaming Requests file-like instance. response = self.dwdfs.open(url, block_size=0) total = self.dwdfs.size(url) @@ -71,8 +77,7 @@ def fetch(self, url) -> bytes: """ buffer = self.download(url) kmz = ZipFile(buffer, "r") - kml = kmz.open(kmz.namelist()[0], "r").read() - return kml + return kmz.open(kmz.namelist()[0], "r").read() def read(self, url: str): """ @@ -95,14 +100,9 @@ def read(self, url: str): } # Get Basic Metadata - prod_definition = root.findall( - "kml:Document/kml:ExtendedData/dwd:ProductDefinition", root.nsmap - )[0] + prod_definition = root.findall("kml:Document/kml:ExtendedData/dwd:ProductDefinition", root.nsmap)[0] - self.metadata = { - k: prod_definition.find(f"{{{root.nsmap['dwd']}}}{v}").text - for k, v in prod_items.items() - } + self.metadata = {k: prod_definition.find(f"{{{root.nsmap['dwd']}}}{v}").text for k, v in prod_items.items()} self.metadata["issue_time"] = pd.Timestamp(self.metadata["issue_time"]) # Get time steps. @@ -110,9 +110,7 @@ def read(self, url: str): "kml:Document/kml:ExtendedData/dwd:ProductDefinition/dwd:ForecastTimeSteps", root.nsmap, )[0] - self.timesteps = DatetimeIndex( - [pd.Timestamp(i.text) for i in timesteps.getchildren()] - ) + self.timesteps = DatetimeIndex([pd.Timestamp(i.text) for i in timesteps.getchildren()]) # Find all kml:Placemark items. self.items = root.findall("kml:Document/kml:Placemark", root.nsmap) @@ -131,25 +129,19 @@ def get_forecasts(self): for station_forecast in self.iter_items(): station_ids = station_forecast.find("kml:name", self.root.nsmap).text - measurement_list = station_forecast.findall( - "kml:ExtendedData/dwd:Forecast", self.root.nsmap - ) + measurement_list = station_forecast.findall("kml:ExtendedData/dwd:Forecast", self.root.nsmap) data_dict = {"station_id": station_ids, "datetime": self.timesteps} for measurement_item in measurement_list: - measurement_parameter = measurement_item.get( - f"{{{self.root.nsmap['dwd']}}}elementName" - ) + measurement_parameter = measurement_item.get(f"{{{self.root.nsmap['dwd']}}}elementName") if measurement_parameter.lower() in self.parameters: measurement_string = measurement_item.getchildren()[0].text measurement_values = " ".join(measurement_string.split()).split(" ") - measurement_values = [ - np.nan if i == "-" else float(i) for i in measurement_values - ] + measurement_values = [np.nan if i == "-" else float(i) for i in measurement_values] assert len(measurement_values) == len( # noqa:S101 self.timesteps diff --git a/wetterdienst/provider/dwd/forecast/api.py b/wetterdienst/provider/dwd/forecast/api.py index 61b009564..3d8eea0d6 100644 --- a/wetterdienst/provider/dwd/forecast/api.py +++ b/wetterdienst/provider/dwd/forecast/api.py @@ -8,7 +8,6 @@ from typing import Dict, Generator, Optional, Tuple, Union from urllib.parse import urljoin -import numpy as np import pandas as pd import requests from requests import HTTPError @@ -65,7 +64,7 @@ class DwdMosmixValues(ScalarValuesCore): parameter: List - If None, data for all parameters is returned. 
- If not None, list of parameters, per MOSMIX definition, see - https://www.dwd.de/DE/leistungen/opendata/help/schluessel_datenformate/kml/mosmix_elemente_pdf.pdf?__blob=publicationFile&v=2 # noqa:E501,B950 + https://www.dwd.de/DE/leistungen/opendata/help/schluessel_datenformate/kml/mosmix_elemente_pdf.pdf?__blob=publicationFile&v=2 # noqa:B950 """ _tz = Timezone.GERMANY @@ -84,15 +83,11 @@ def _create_humanized_parameters_mapping(self) -> Dict[str, str]: :return: """ - hcnm = { + return { parameter.value: parameter.name.lower() - for parameter in self.stations.stations._parameter_base[ - self.stations.stations.mosmix_type.name - ] + for parameter in self.stations.stations._parameter_base[self.stations.stations.mosmix_type.name] } - return hcnm - def __init__(self, stations: StationsResult) -> None: """ @@ -191,15 +186,11 @@ def _tidy_up_df(self, df: pd.DataFrame, dataset: Enum) -> pd.DataFrame: value_name=Columns.VALUE.value, ) - df[Columns.QUALITY.value] = np.nan - - df[Columns.QUALITY.value] = df[Columns.QUALITY.value].astype(float) + df_tidy[Columns.QUALITY.value] = pd.Series(dtype=float) return df_tidy - def read_mosmix( - self, date: Union[datetime, DwdForecastDate] - ) -> Generator[pd.DataFrame, None, None]: + def read_mosmix(self, date: Union[datetime, DwdForecastDate]) -> Generator[pd.DataFrame, None, None]: """ Manage data acquisition for a given date that is used to filter the found files on the MOSMIX path of the DWD server. @@ -217,9 +208,7 @@ def read_mosmix( yield df_forecast - def _read_mosmix( - self, date: Union[DwdForecastDate, datetime] - ) -> Generator[pd.DataFrame, None, None]: + def _read_mosmix(self, date: Union[DwdForecastDate, datetime]) -> Generator[pd.DataFrame, None, None]: """ Wrapper that either calls read_mosmix_s or read_mosmix_l depending on defined period type @@ -232,9 +221,7 @@ def _read_mosmix( else: yield from self.read_mosmix_large(date) - def read_mosmix_small( - self, date: Union[DwdForecastDate, datetime] - ) -> Generator[pd.DataFrame, None, None]: + def read_mosmix_small(self, date: Union[DwdForecastDate, datetime]) -> Generator[pd.DataFrame, None, None]: """ Reads single MOSMIX-S file with all stations and returns every forecast that matches with one of the defined station ids. 
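Taken together, the MOSMIX pieces above (the values layer with its read_mosmix generators) and the DwdMosmixRequest class below form one small pipeline. A minimal usage sketch, not part of the patch, assuming DwdMosmixRequest, DwdMosmixType and DwdForecastDate are importable from wetterdienst.provider.dwd.forecast as the file paths in this diff suggest; the station id and parameter selection are hypothetical:

    from wetterdienst.provider.dwd.forecast import (
        DwdForecastDate,
        DwdMosmixRequest,
        DwdMosmixType,
    )

    # start_issue defaults to DwdForecastDate.LATEST, matching the
    # __init__ signature shown in the hunk below.
    request = DwdMosmixRequest(
        parameter=["TTT"],  # hypothetical parameter selection
        mosmix_type=DwdMosmixType.LARGE,
    )

    stations = request.filter_by_station_id(station_id=["01001"])  # hypothetical id

    # query() yields one ValuesResult per station; underneath,
    # read_mosmix()/read_mosmix_small()/read_mosmix_large() yield one
    # DataFrame per downloaded KML file.
    for result in stations.values.query():
        print(result.df.head())

The generator design keeps memory flat: each yielded result carries a single station's DataFrame instead of accumulating every station before returning.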
@@ -338,10 +325,7 @@ class DwdMosmixRequest(ScalarRequestCore): _dataset_base = DwdMosmixDataset _unit_tree = DwdMosmixUnit - _url = ( - "https://www.dwd.de/DE/leistungen/met_verfahren_mosmix/" - "mosmix_stationskatalog.cfg?view=nasPublication" - ) + _url = "https://www.dwd.de/DE/leistungen/met_verfahren_mosmix/" "mosmix_stationskatalog.cfg?view=nasPublication" _colspecs = [ (0, 5), @@ -373,12 +357,10 @@ def _setup_discover_filter(cls, filter_): :param filter_: :return: """ - filter_ = pd.Series(filter_, dtype=object).apply( + return pd.Series(filter_, dtype=object).apply( parse_enumeration_from_template, args=(cls._dataset_base,) ).tolist() or [*cls._dataset_base] - return filter_ - _base_columns = [ Columns.STATION_ID.value, Columns.ICAO_ID.value, @@ -408,17 +390,13 @@ def adjust_datetime(datetime_: datetime) -> datetime: delta_hours = (datetime_.hour - regular_date.hour) % 6 - datetime_adjusted = datetime_ - pd.Timedelta(hours=delta_hours) - - return datetime_adjusted + return datetime_ - pd.Timedelta(hours=delta_hours) def __init__( self, parameter: Optional[Tuple[Union[str, DwdMosmixParameter], ...]], mosmix_type: Union[str, DwdMosmixType], - start_issue: Optional[ - Union[str, datetime, DwdForecastDate] - ] = DwdForecastDate.LATEST, + start_issue: Optional[Union[str, datetime, DwdForecastDate]] = DwdForecastDate.LATEST, end_issue: Optional[Union[str, datetime]] = None, start_date: Optional[Union[str, datetime]] = None, end_date: Optional[Union[str, datetime]] = None, @@ -460,9 +438,7 @@ def __init__( # Parse issue date if not set to fixed "latest" string if start_issue is DwdForecastDate.LATEST and end_issue: - log.info( - "end_issue will be ignored as 'latest' was selected for issue date" - ) + log.info("end_issue will be ignored as 'latest' was selected for issue date") if start_issue is not DwdForecastDate.LATEST: if not start_issue and not end_issue: @@ -472,12 +448,8 @@ def __init__( elif not start_issue: start_issue = end_issue - start_issue = pd.to_datetime(start_issue, infer_datetime_format=True).floor( - "1H" - ) - end_issue = pd.to_datetime(end_issue, infer_datetime_format=True).floor( - "1H" - ) + start_issue = pd.to_datetime(start_issue, infer_datetime_format=True).floor("1H") + end_issue = pd.to_datetime(end_issue, infer_datetime_format=True).floor("1H") # Shift start date and end date to 3, 9, 15, 21 hour format if mosmix_type == DwdMosmixType.LARGE: @@ -525,11 +497,7 @@ def _all(self) -> pd.DataFrame: dtype="str", ) - df = df[ - (df.iloc[:, 0] != "=====") - & (df.iloc[:, 0] != "TABLE") - & (df.iloc[:, 0] != "clu") - ] + df = df[(df.iloc[:, 0] != "=====") & (df.iloc[:, 0] != "TABLE") & (df.iloc[:, 0] != "clu")] df = df.iloc[:, [2, 3, 4, 5, 6, 7]] @@ -543,14 +511,8 @@ def _all(self) -> pd.DataFrame: ] # Convert coordinates from degree minutes to decimal degrees - df[Columns.LATITUDE.value] = ( - df[Columns.LATITUDE.value].astype(float).apply(convert_dm_to_dd) - ) - - df[Columns.LONGITUDE.value] = ( - df[Columns.LONGITUDE.value].astype(float).apply(convert_dm_to_dd) - ) + df[Columns.LATITUDE.value] = df[Columns.LATITUDE.value].astype(float).apply(convert_dm_to_dd) - df = df.reindex(columns=self._base_columns) + df[Columns.LONGITUDE.value] = df[Columns.LONGITUDE.value].astype(float).apply(convert_dm_to_dd) - return df + return df.reindex(columns=self._base_columns) diff --git a/wetterdienst/provider/dwd/index.py b/wetterdienst/provider/dwd/index.py index 79ea2c2c1..3bcf32bfc 100644 --- a/wetterdienst/provider/dwd/index.py +++ b/wetterdienst/provider/dwd/index.py @@ -46,11 
+46,7 @@ def _create_file_index_for_dwd_server( recursive = False files_server = list_remote_files_fsspec(url, recursive=recursive) - files_server = pd.DataFrame( - files_server, columns=[DwdColumns.FILENAME.value], dtype="str" - ) - - return files_server + return pd.DataFrame(files_server, columns=[DwdColumns.FILENAME.value], dtype=str) def reset_file_index_cache() -> None: diff --git a/wetterdienst/provider/dwd/observation/api.py b/wetterdienst/provider/dwd/observation/api.py index 758b3cf8d..ec36e58bc 100644 --- a/wetterdienst/provider/dwd/observation/api.py +++ b/wetterdienst/provider/dwd/observation/api.py @@ -88,9 +88,7 @@ def _datetime_format(self): :return: """ - return RESOLUTION_TO_DATETIME_FORMAT_MAPPING.get( - self.stations.stations.resolution - ) + return RESOLUTION_TO_DATETIME_FORMAT_MAPPING.get(self.stations.stations.resolution) def __eq__(self, other): """ @@ -99,8 +97,7 @@ def __eq__(self, other): :return: """ return super(DwdObservationValues, self).__eq__(other) and ( - self.stations.resolution == other.stations.resolution - and self.stations.period == other.stations.period + self.stations.resolution == other.stations.resolution and self.stations.period == other.stations.period ) def __str__(self): @@ -108,9 +105,7 @@ def __str__(self): :return: """ - periods_joined = "& ".join( - [period_type.value for period_type in self.stations.period] - ) + periods_joined = "& ".join([period_type.value for period_type in self.stations.period]) return ", ".join( [ @@ -139,10 +134,7 @@ def _collect_station_parameter( periods_and_date_ranges = [] for period in self.stations.period: - if ( - self.stations.resolution in HIGH_RESOLUTIONS - and period == Period.HISTORICAL - ): + if self.stations.resolution in HIGH_RESOLUTIONS and period == Period.HISTORICAL: date_ranges = self._get_historical_date_ranges(station_id, dataset) for date_range in date_ranges: periods_and_date_ranges.append((period, date_range)) @@ -158,12 +150,9 @@ def _collect_station_parameter( log.info(f"Acquiring observations data for {parameter_identifier}.") - if not check_dwd_observations_dataset( - dataset, self.stations.resolution, period - ): + if not check_dwd_observations_dataset(dataset, self.stations.resolution, period): log.info( - f"Invalid combination {dataset.value}/" - f"{self.stations.resolution.value}/{period} is skipped." + f"Invalid combination {dataset.value}/" f"{self.stations.resolution.value}/{period} is skipped." ) continue @@ -180,26 +169,16 @@ def _collect_station_parameter( station_id, date_range, ) - log.info( - f"No files found for {parameter_identifier}. Station will be skipped." - ) + log.info(f"No files found for {parameter_identifier}. 
Station will be skipped.") continue - filenames_and_files = download_climate_observations_data_parallel( - remote_files - ) + filenames_and_files = download_climate_observations_data_parallel(remote_files) - period_df = parse_climate_observations_data( - filenames_and_files, dataset, self.stations.resolution, period - ) + period_df = parse_climate_observations_data(filenames_and_files, dataset, self.stations.resolution, period) # Filter out values which already are in the DataFrame try: - period_df = period_df[ - ~period_df[DwdColumns.DATE.value].isin( - parameter_df[DwdColumns.DATE.value] - ) - ] + period_df = period_df[~period_df[DwdColumns.DATE.value].isin(parameter_df[DwdColumns.DATE.value])] except KeyError: pass @@ -214,7 +193,7 @@ def _collect_station_parameter( parameter_df[Columns.DATE.value] = pd.to_datetime( parameter_df[Columns.DATE.value], format=self._datetime_format ) - parameter_df = self._fix_timestamps(parameter_df) + return self._fix_timestamps(parameter_df) return parameter_df @@ -238,20 +217,20 @@ def _tidy_up_df(self, df: pd.DataFrame, dataset) -> pd.DataFrame: """ Implementation of _tidy_up_df for DWD Observations - :param df: - :param dataset: - :return: + :param df: untidy DataFrame + :param dataset: dataset enumeration + :return: tidied DataFrame """ droppable_columns = [ # Hourly # Cloud type - DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER1_ABBREVIATION.value, # noqa:E501 - DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER2_ABBREVIATION.value, # noqa:E501 - DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER3_ABBREVIATION.value, # noqa:E501 - DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER4_ABBREVIATION.value, # noqa:E501 + DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER1_ABBREVIATION.value, + DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER2_ABBREVIATION.value, + DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER3_ABBREVIATION.value, + DwdObservationDatasetTree.HOURLY.CLOUD_TYPE.CLOUD_TYPE_LAYER4_ABBREVIATION.value, # Cloudiness - DwdObservationDatasetTree.HOURLY.CLOUDINESS.CLOUD_COVER_TOTAL_INDICATOR.value, # noqa:E501 + DwdObservationDatasetTree.HOURLY.CLOUDINESS.CLOUD_COVER_TOTAL_INDICATOR.value, # Solar DwdObservationDatasetTree.HOURLY.SOLAR.END_OF_INTERVAL.value, DwdObservationDatasetTree.HOURLY.SOLAR.TRUE_LOCAL_TIME.value, @@ -267,41 +246,29 @@ def _tidy_up_df(self, df: pd.DataFrame, dataset) -> pd.DataFrame: resolution = self.stations.stations.resolution - if ( - resolution == Resolution.DAILY - and dataset == DwdObservationDataset.CLIMATE_SUMMARY - ): - quality_wind = df.pop( - DwdObservationDatasetTree.DAILY.CLIMATE_SUMMARY.QUALITY_WIND.value - ) - quality_general = df.pop( - DwdObservationDatasetTree.DAILY.CLIMATE_SUMMARY.QUALITY_GENERAL.value - ) - - quality = pd.concat( - [ - pd.Series(repeat(quality_wind.tolist(), 2)).explode(), - pd.Series(repeat(quality_general.tolist(), 12)).explode(), - ] - ) + if dataset == DwdObservationDataset.CLIMATE_SUMMARY: + if resolution == Resolution.DAILY: + quality_wind = df.pop(DwdObservationDatasetTree.DAILY.CLIMATE_SUMMARY.QUALITY_WIND.value) + quality_general = df.pop(DwdObservationDatasetTree.DAILY.CLIMATE_SUMMARY.QUALITY_GENERAL.value) - elif ( - resolution in (Resolution.MONTHLY, Resolution.ANNUAL) - and dataset == DwdObservationDataset.CLIMATE_SUMMARY - ): - quality_general = df.pop( - DwdObservationDatasetTree.MONTHLY.CLIMATE_SUMMARY.QUALITY_GENERAL.value - ) - quality_precipitation = df.pop( - 
DwdObservationDatasetTree.MONTHLY.CLIMATE_SUMMARY.QUALITY_PRECIPITATION.value # noqa: E501 - ) - quality = pd.concat( - [ - pd.Series(repeat(quality_general, 9)).explode(), - pd.Series(repeat(quality_precipitation, 2)).explode(), - ] - ) + quality = pd.concat( + [ + pd.Series(repeat(quality_wind.tolist(), 2)).explode(), + pd.Series(repeat(quality_general.tolist(), 12)).explode(), + ] + ) + elif resolution in (Resolution.MONTHLY, Resolution.ANNUAL): + quality_general = df.pop(DwdObservationDatasetTree.MONTHLY.CLIMATE_SUMMARY.QUALITY_GENERAL.value) + quality_precipitation = df.pop( + DwdObservationDatasetTree.MONTHLY.CLIMATE_SUMMARY.QUALITY_PRECIPITATION.value + ) + quality = pd.concat( + [ + pd.Series(repeat(quality_general, 9)).explode(), + pd.Series(repeat(quality_precipitation, 2)).explode(), + ] + ) else: quality = df.pop(df.columns[2]) quality = pd.Series(repeat(quality, df.shape[1])).explode() @@ -333,9 +300,7 @@ def _coerce_dates(self, series: pd.Series, timezone_: timezone) -> pd.Series: :param series: :return: """ - return pd.to_datetime(series, format=self._datetime_format).dt.tz_localize( - self.data_tz - ) + return pd.to_datetime(series, format=self._datetime_format).dt.tz_localize(self.data_tz) def _coerce_irregular_parameter(self, series: pd.Series) -> pd.Series: """ @@ -354,16 +319,12 @@ def _create_humanized_parameters_mapping(self) -> Dict[str, str]: :return: """ - hcnm = { + return { parameter.value: parameter.name.lower() for parameter in DwdObservationParameter[self.stations.resolution.name] } - return hcnm - - def _get_historical_date_ranges( - self, station_id: str, dataset: DwdObservationDataset - ) -> List[str]: + def _get_historical_date_ranges(self, station_id: str, dataset: DwdObservationDataset) -> List[str]: """ Get particular files for historical data which for high resolution is released in data chunks e.g. 
decades or monthly chunks @@ -372,9 +333,7 @@ def _get_historical_date_ranges( :param dataset: :return: """ - file_index = create_file_index_for_climate_observations( - dataset, self.stations.resolution, Period.HISTORICAL - ) + file_index = create_file_index_for_climate_observations(dataset, self.stations.resolution, Period.HISTORICAL) file_index = file_index[(file_index[Columns.STATION_ID.value] == station_id)] @@ -450,22 +409,10 @@ def _historical_interval(self) -> pd.Interval: """ historical_end = self._now_local.replace(month=1, day=1) - # if self.start_date < historical_end: - # historical_begin = self.start_date.tz_convert(historical_end.tz) - # else: - # historical_begin = historical_end + pd.tseries.offsets.DateOffset(years=-1) - # a year that is way before any data is collected - historical_begin = pd.Timestamp( - datetime(year=1678, month=1, day=1) - ).tz_localize(historical_end.tz) + historical_begin = pd.Timestamp(datetime(year=1678, month=1, day=1)).tz_localize(historical_end.tz) - # TODO: Use date - historical_interval = pd.Interval( - left=historical_begin, right=historical_end, closed="both" - ) - - return historical_interval + return pd.Interval(left=historical_begin, right=historical_end, closed="both") @property def _recent_interval(self) -> pd.Interval: @@ -478,12 +425,7 @@ def _recent_interval(self) -> pd.Interval: recent_end = self._now_local.replace(hour=0, minute=0, second=0) recent_begin = recent_end - pd.Timedelta(days=500) - # TODO: use date - recent_interval = pd.Interval( - left=recent_begin, right=recent_end, closed="both" - ) - - return recent_interval + return pd.Interval(left=recent_begin, right=recent_end, closed="both") @property def _now_interval(self) -> pd.Interval: @@ -496,9 +438,7 @@ def _now_interval(self) -> pd.Interval: now_end = self._now_local now_begin = now_end.replace(hour=0, minute=0, second=0) - pd.Timedelta(days=1) - now_interval = pd.Interval(left=now_begin, right=now_end, closed="both") - - return now_interval + return pd.Interval(left=now_begin, right=now_end, closed="both") def _get_periods(self) -> List[Period]: """ @@ -532,9 +472,7 @@ def _parse_station_id(self, series: pd.Series) -> pd.Series: """ series = super(DwdObservationRequest, self)._parse_station_id(series) - series = series.str.pad(5, "left", "0") - - return series + return series.str.pad(5, "left", "0") def __init__( self, @@ -567,10 +505,7 @@ def __init__( ) if self.start_date and self.period: - log.warning( - f"start_date and end_date filtering limited to defined " - f"periods {self.period}" - ) + log.warning(f"start_date and end_date filtering limited to defined " f"periods {self.period}") # Has to follow the super call as start date and end date are required for getting # automated periods from overlapping intervals @@ -590,10 +525,10 @@ def describe_fields(cls, dataset, resolution, period, language: str = "en") -> d :param language: :return: """ + from wetterdienst.provider.dwd.observation.fields import read_description + dataset = parse_enumeration_from_template(dataset, DwdObservationDataset) - resolution = parse_enumeration_from_template( - resolution, cls._resolution_base, Resolution - ) + resolution = parse_enumeration_from_template(resolution, cls._resolution_base, Resolution) period = parse_enumeration_from_template(period, cls._period_base, Period) file_index = _create_file_index_for_dwd_server( @@ -610,18 +545,12 @@ def describe_fields(cls, dataset, resolution, period, language: str = "en") -> d else: raise ValueError("Only language 'en' or 'de' supported") 
- file_index = file_index[ - file_index[DwdColumns.FILENAME.value].str.contains(file_prefix) - ] + file_index = file_index[file_index[DwdColumns.FILENAME.value].str.contains(file_prefix)] description_file_url = str(file_index[DwdColumns.FILENAME.value].tolist()[0]) log.info(f"Acquiring field information from {description_file_url}") - from wetterdienst.provider.dwd.observation.fields import read_description - - document = read_description(description_file_url, language=language) - - return document + return read_description(description_file_url, language=language) def _all(self) -> pd.DataFrame: """ @@ -638,38 +567,23 @@ def _all(self) -> pd.DataFrame: for period in reversed(self.period): if not check_dwd_observations_dataset(dataset, self.resolution, period): log.warning( - f"The combination of {dataset.value}, " - f"{self.resolution.value}, {period.value} is invalid." + f"The combination of {dataset.value}, " f"{self.resolution.value}, {period.value} is invalid." ) continue - df = create_meta_index_for_climate_observations( - dataset, self.resolution, period - ) + df = create_meta_index_for_climate_observations(dataset, self.resolution, period) - file_index = create_file_index_for_climate_observations( - dataset, self.resolution, period - ) + file_index = create_file_index_for_climate_observations(dataset, self.resolution, period) - df = df[ - df.loc[:, Columns.STATION_ID.value].isin( - file_index[Columns.STATION_ID.value] - ) - ] + df = df[df.loc[:, Columns.STATION_ID.value].isin(file_index[Columns.STATION_ID.value])] if not stations_df.empty: - df = df[ - ~df[Columns.STATION_ID.value].isin( - stations_df[Columns.STATION_ID.value] - ) - ] + df = df[~df[Columns.STATION_ID.value].isin(stations_df[Columns.STATION_ID.value])] stations_df = stations_df.append(df) if not stations_df.empty: - stations_df = stations_df.sort_values( - [Columns.STATION_ID.value], key=lambda x: x.astype(int) - ) + return stations_df.sort_values([Columns.STATION_ID.value], key=lambda x: x.astype(int)) return stations_df diff --git a/wetterdienst/provider/dwd/observation/download.py b/wetterdienst/provider/dwd/observation/download.py index 644690bdd..5092bf219 100644 --- a/wetterdienst/provider/dwd/observation/download.py +++ b/wetterdienst/provider/dwd/observation/download.py @@ -53,9 +53,7 @@ def __download_climate_observations_data(remote_file: str) -> bytes: try: zip_file = download_file(remote_file, ttl=CacheExpiry.FIVE_MINUTES) except InvalidURL as e: - raise InvalidURL( - f"Error: the station data {remote_file} could not be reached." - ) from e + raise InvalidURL(f"Error: the station data {remote_file} could not be reached.") from e except Exception: raise FailedDownload(f"Download failed for {remote_file}") @@ -75,9 +73,7 @@ def __download_climate_observations_data(remote_file: str) -> bytes: return file_in_bytes # If whatsoever no file was found and returned already throw exception - raise ProductFileNotFound( - f"The archive of {remote_file} does not hold a 'produkt' file." 
- ) + raise ProductFileNotFound(f"The archive of {remote_file} does not hold a 'produkt' file.") except BadZipFile as e: raise BadZipFile(f"The archive of {remote_file} seems to be corrupted.") from e diff --git a/wetterdienst/provider/dwd/observation/fields.py b/wetterdienst/provider/dwd/observation/fields.py index c42434592..93dcc8a30 100644 --- a/wetterdienst/provider/dwd/observation/fields.py +++ b/wetterdienst/provider/dwd/observation/fields.py @@ -33,8 +33,7 @@ def parse_section(text, headline): if capture: buffer.write(line) buffer.write("\n") - payload = buffer.getvalue() - return payload + return buffer.getvalue() def parse_parameters(text): @@ -53,9 +52,7 @@ def parse_parameters(text): # Remove some anomaly. more = more.replace("0\n1\n", "1\n") # Replace newlines after digits with "-". - more = re.sub( - r"^(\d+)\n(.*)", r"\g<1>- \g<2>", more, flags=re.MULTILINE - ) + more = re.sub(r"^(\d+)\n(.*)", r"\g<1>- \g<2>", more, flags=re.MULTILINE) # Remove all newlines _within_ text descriptions, per item. more = re.sub(r"\n(?!\d+)", " ", more, flags=re.DOTALL) else: @@ -98,9 +95,7 @@ def read_description(url, language: str = "en") -> dict: data["parameters"] = parse_parameters(parameters_text) # Read "Quality information" section. - data["quality_information"] = parse_section( - document, sections["quality_information"] - ) + data["quality_information"] = parse_section(document, sections["quality_information"]) return data @@ -110,16 +105,25 @@ def process(url) -> None: # pragma: no cover parameters = read_description(url) # Output as JSON. - # import json; print(json.dumps(parameters, indent=4)) + # import json; print(json.dumps(parameters, indent=4)) # noqa: E800 # Output as ASCII table. print(tabulate(list(parameters.items()), tablefmt="psql")) # noqa: T001 if __name__ == "__main__": # pragma: no cover - ten_minutes_air = "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/10_minutes/air_temperature/recent/DESCRIPTION_obsgermany_climate_10min_tu_recent_en.pdf" # noqa:E501,B950 - hourly_solar = "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/hourly/solar/DESCRIPTION_obsgermany_climate_hourly_solar_en.pdf" # noqa:E501,B950 - daily_kl = "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/daily/kl/recent/DESCRIPTION_obsgermany_climate_daily_kl_recent_en.pdf" # noqa:E501,B950 + ten_minutes_air = ( + "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/10_minutes/" + "air_temperature/recent/DESCRIPTION_obsgermany_climate_10min_tu_recent_en.pdf" + ) + hourly_solar = ( + "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/hourly/" + "solar/DESCRIPTION_obsgermany_climate_hourly_solar_en.pdf" + ) + daily_kl = ( + "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/daily/" + "kl/recent/DESCRIPTION_obsgermany_climate_daily_kl_recent_en.pdf" + ) for item in ten_minutes_air, hourly_solar, daily_kl: print(item) # noqa: T001 diff --git a/wetterdienst/provider/dwd/observation/fileindex.py b/wetterdienst/provider/dwd/observation/fileindex.py index d6a019cdc..80a0026c2 100644 --- a/wetterdienst/provider/dwd/observation/fileindex.py +++ b/wetterdienst/provider/dwd/observation/fileindex.py @@ -68,13 +68,9 @@ def create_file_index_for_climate_observations( Returns: file index in a pandas.DataFrame with sets of parameters and station id """ - file_index = _create_file_index_for_dwd_server( - parameter_set, resolution, period, 
DWDCDCBase.CLIMATE_OBSERVATIONS - ) + file_index = _create_file_index_for_dwd_server(parameter_set, resolution, period, DWDCDCBase.CLIMATE_OBSERVATIONS) - file_index = file_index.loc[ - file_index[DwdColumns.FILENAME.value].str.endswith(Extension.ZIP.value), : - ] + file_index = file_index.loc[file_index[DwdColumns.FILENAME.value].str.endswith(Extension.ZIP.value), :] file_index.loc[:, DwdColumns.STATION_ID.value] = ( file_index[DwdColumns.FILENAME.value].str.findall(STATION_ID_REGEX).str[0] @@ -82,9 +78,7 @@ def create_file_index_for_climate_observations( file_index = file_index.dropna().reset_index(drop=True) - file_index.loc[:, DwdColumns.STATION_ID.value] = file_index[ - DwdColumns.STATION_ID.value - ].astype(str) + file_index.loc[:, DwdColumns.STATION_ID.value] = file_index[DwdColumns.STATION_ID.value].astype(str) if resolution in HIGH_RESOLUTIONS and period == Period.HISTORICAL: # Date range string for additional filtering of historical files @@ -111,17 +105,12 @@ def create_file_index_for_climate_observations( # Temporary fix for filenames with wrong ordered/faulty dates # Fill those cases with minimum/maximum date to ensure that they are loaded as # we don't know what exact date range the included data has - wrong_date_order_index = ( - file_index[DwdColumns.FROM_DATE.value] - > file_index[DwdColumns.TO_DATE.value] - ) + wrong_date_order_index = file_index[DwdColumns.FROM_DATE.value] > file_index[DwdColumns.TO_DATE.value] file_index.loc[wrong_date_order_index, DwdColumns.FROM_DATE.value] = file_index[ DwdColumns.FROM_DATE.value ].min() - file_index.loc[wrong_date_order_index, DwdColumns.TO_DATE.value] = file_index[ - DwdColumns.TO_DATE.value - ].max() + file_index.loc[wrong_date_order_index, DwdColumns.TO_DATE.value] = file_index[DwdColumns.TO_DATE.value].max() file_index.loc[:, DwdColumns.INTERVAL.value] = file_index.apply( lambda x: pd.Interval( @@ -132,8 +121,4 @@ def create_file_index_for_climate_observations( axis=1, ) - file_index = file_index.sort_values( - by=[DwdColumns.STATION_ID.value, DwdColumns.FILENAME.value] - ) - - return file_index + return file_index.sort_values(by=[DwdColumns.STATION_ID.value, DwdColumns.FILENAME.value]) diff --git a/wetterdienst/provider/dwd/observation/metadata/dataset.py b/wetterdienst/provider/dwd/observation/metadata/dataset.py index 9b4dd9663..06f691ed5 100644 --- a/wetterdienst/provider/dwd/observation/metadata/dataset.py +++ b/wetterdienst/provider/dwd/observation/metadata/dataset.py @@ -42,9 +42,7 @@ class DwdObservationDataset(Enum): WEATHER_PHENOMENA = "weather_phenomena" -RESOLUTION_DATASET_MAPPING: Dict[ - Resolution, Dict[DwdObservationDataset, List[Period]] -] = { +RESOLUTION_DATASET_MAPPING: Dict[Resolution, Dict[DwdObservationDataset, List[Period]]] = { Resolution.MINUTE_1: { DwdObservationDataset.PRECIPITATION: [ Period.HISTORICAL, diff --git a/wetterdienst/provider/dwd/observation/metadata/parameter.py b/wetterdienst/provider/dwd/observation/metadata/parameter.py index b344842ec..e2c9eeb93 100644 --- a/wetterdienst/provider/dwd/observation/metadata/parameter.py +++ b/wetterdienst/provider/dwd/observation/metadata/parameter.py @@ -75,12 +75,7 @@ class HOURLY(Enum): CLOUD_HEIGHT_LAYER4 = "v_s4_hhs" CLOUD_COVER_LAYER4 = "v_s4_ns" # int - # cloudiness - # CLOUD_COVER_TOTAL_INDICATOR = "v_n_i" # str - # CLOUD_COVER_TOTAL = "v_n" # int - # dew_point - # TEMPERATURE_AIR_200 = "tt" TEMPERATURE_DEW_POINT_MEAN_200 = "td" # precipitation @@ -101,16 +96,11 @@ class HOURLY(Enum): TEMPERATURE_SOIL_MEAN_100 = "v_te100" # solar - # 
END_OF_INTERVAL = "end_of_interval" # modified, does not exist in original RADIATION_SKY_LONG_WAVE = "atmo_lberg" RADIATION_SKY_SHORT_WAVE_DIFFUSE = "fd_lberg" RADIATION_GLOBAL = "fg_lberg" SUNSHINE_DURATION = "sd_lberg" SUN_ZENITH_ANGLE = "zenit" - # TRUE_LOCAL_TIME = "true_local_time" # original name was adjusted to this one - - # sun - # SUNSHINE_DURATION = "sd_so" # visibility VISIBILITY_INDICATOR = "v_vv_i" # str @@ -120,17 +110,9 @@ class HOURLY(Enum): WIND_SPEED = "f" WIND_DIRECTION = "d" # int - # wind_synop - # WIND_SPEED = "ff" - # WIND_DIRECTION = "dd" # int - HUMIDITY_ABSOLUTE = "absf_std" PRESSURE_VAPOR = "vp_std" TEMPERATURE_WET_MEAN_200 = "tf_std" - # PRESSURE_AIR_SH = "p_std" - # TEMPERATURE_AIR_200 = "tt_std" - # HUMIDITY = "rf_std" - # TEMPERATURE_DEW_POINT_200 = "td_std" # subdaily class SUBDAILY(Enum): # noqa @@ -145,8 +127,6 @@ class SUBDAILY(Enum): # noqa # moisture PRESSURE_VAPOR = "vp_ter" TEMPERATURE_AIR_MEAN_005 = "e_tf_ter" - # TEMPERATURE_AIR_200 = "tf_ter" - # HUMIDITY = "rf_ter" # pressure PRESSURE_AIR_SITE = "pp_ter" @@ -180,9 +160,6 @@ class DAILY(Enum): TEMPERATURE_AIR_MIN_005 = "tgk" # more_precip - # PRECIPITATION_HEIGHT = "rs" - # PRECIPITATION_FORM = "rsf" # int - # SNOW_DEPTH = "sh_tag" # int SNOW_DEPTH_NEW = "nsh_tag" # int # soil_temperature @@ -197,11 +174,9 @@ class DAILY(Enum): RADIATION_SKY_LONG_WAVE = "atmo_strahl" RADIATION_SKY_SHORT_WAVE_DIFFUSE = "fd_strahl" RADIATION_SKY_SHORT_WAVE_DIRECT = "fg_strahl" - # SUNSHINE_DURATION = "sd_strahl" # water_equiv SNOW_DEPTH_EXCELLED = "ash_6" # int - # SNOW_DEPTH = "sh_tag" # int WATER_EQUIVALENT_SNOW_DEPTH = "wash_6" WATER_EQUIVALENT_SNOW_DEPTH_EXCELLED = "waas_6" @@ -233,9 +208,7 @@ class MONTHLY(Enum): # more_precip SNOW_DEPTH_NEW = "mo_nsh" # int - # PRECIPITATION_HEIGHT = "mo_rr" SNOW_DEPTH = "mo_sh_s" # int - # PRECIPITATION_HEIGHT_MAX = "mx_rs" # weather_phenomena COUNT_WEATHER_TYPE_STORM_STRONG_WIND = "mo_sturm_6" # int @@ -264,9 +237,7 @@ class ANNUAL(Enum): # more_precip SNOW_DEPTH_NEW = "ja_nsh" # int - # PRECIPITATION_HEIGHT = "ja_rr" SNOW_DEPTH = "ja_sh_s" # int - # PRECIPITATION_HEIGHT_MAX = "ja_mx_rs" # weather_phenomena COUNT_WEATHER_TYPE_STORM_STRONG_WIND = "ja_sturm_6" # int @@ -420,9 +391,7 @@ class SOLAR(Enum): RADIATION_GLOBAL = "fg_lberg" SUNSHINE_DURATION = "sd_lberg" SUN_ZENITH_ANGLE = "zenit" - TRUE_LOCAL_TIME = ( - "true_local_time" # original name was adjusted to this one - ) + TRUE_LOCAL_TIME = "true_local_time" # original name was adjusted to this one # sun class SUNSHINE_DURATION(Enum): @@ -689,28 +658,20 @@ class WEATHER_PHENOMENA(Enum): # noqa DwdObservationParameter.HOURLY.HUMIDITY: DwdObservationDataset.TEMPERATURE_AIR, # cloudiness DwdObservationParameter.HOURLY.CLOUD_COVER_TOTAL: DwdObservationDataset.CLOUDINESS, - # DwdObservationParameter.HOURLY.CLOUD_COVER_TOTAL_INDICATOR: DwdObservationDataset.CLOUDINESS, # cloud_type - # DwdObservationParameter.HOURLY.CLOUD_COVER_TOTAL: DwdObservationParameterSet.CLOUD_TYPE, - # DwdObservationParameter.HOURLY.CLOUD_COVER_TOTAL_INDICATOR: DwdObservationParameterSet.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER1: DwdObservationDataset.CLOUD_TYPE, - # DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER1_ABBREVIATION: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_HEIGHT_LAYER1: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_COVER_LAYER1: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER2: DwdObservationDataset.CLOUD_TYPE, - # 
DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER2_ABBREVIATION: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_HEIGHT_LAYER2: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_COVER_LAYER2: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER3: DwdObservationDataset.CLOUD_TYPE, - # DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER3_ABBREVIATION: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_HEIGHT_LAYER3: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_COVER_LAYER3: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER4: DwdObservationDataset.CLOUD_TYPE, - # DwdObservationParameter.HOURLY.CLOUD_TYPE_LAYER4_ABBREVIATION: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_HEIGHT_LAYER4: DwdObservationDataset.CLOUD_TYPE, DwdObservationParameter.HOURLY.CLOUD_COVER_LAYER4: DwdObservationDataset.CLOUD_TYPE, # dew_point - # TEMPERATURE_AIR_200: "tt" DwdObservationParameter.HOURLY.TEMPERATURE_DEW_POINT_MEAN_200: DwdObservationDataset.DEW_POINT, # precipitation DwdObservationParameter.HOURLY.PRECIPITATION_HEIGHT: DwdObservationDataset.PRECIPITATION, @@ -729,22 +690,16 @@ class WEATHER_PHENOMENA(Enum): # noqa # sun DwdObservationParameter.HOURLY.SUNSHINE_DURATION: DwdObservationDataset.SUNSHINE_DURATION, # solar - # DwdObservationParameter.HOURLY.END_OF_INTERVAL: DwdObservationDataset.SOLAR, DwdObservationParameter.HOURLY.RADIATION_SKY_LONG_WAVE: DwdObservationDataset.SOLAR, DwdObservationParameter.HOURLY.RADIATION_SKY_SHORT_WAVE_DIFFUSE: DwdObservationDataset.SOLAR, DwdObservationParameter.HOURLY.RADIATION_GLOBAL: DwdObservationDataset.SOLAR, - # DwdObservationParameter.HOURLY.SUNSHINE_DURATION: DwdObservationParameterSetStructure.HOURLY.SOLAR.SUNSHINE_DURATION, DwdObservationParameter.HOURLY.SUN_ZENITH_ANGLE: DwdObservationDataset.SOLAR, - # DwdObservationParameter.HOURLY.TRUE_LOCAL_TIME: DwdObservationDataset.SOLAR, # visibility DwdObservationParameter.HOURLY.VISIBILITY_INDICATOR: DwdObservationDataset.VISIBILITY, DwdObservationParameter.HOURLY.VISIBILITY: DwdObservationDataset.VISIBILITY, # wind DwdObservationParameter.HOURLY.WIND_SPEED: DwdObservationDataset.WIND, DwdObservationParameter.HOURLY.WIND_DIRECTION: DwdObservationDataset.WIND, - # wind_synop - # DwdObservationParameter.HOURLY.WIND_SPEED: "ff" - # DwdObservationParameter.HOURLY.WIND_DIRECTION: "dd" # int # moisture DwdObservationParameter.HOURLY.HUMIDITY_ABSOLUTE: DwdObservationDataset.MOISTURE, DwdObservationParameter.HOURLY.PRESSURE_VAPOR: DwdObservationDataset.MOISTURE, @@ -761,8 +716,6 @@ class WEATHER_PHENOMENA(Enum): # noqa # moisture DwdObservationParameter.SUBDAILY.PRESSURE_VAPOR: DwdObservationDataset.MOISTURE, DwdObservationParameter.SUBDAILY.TEMPERATURE_AIR_MEAN_005: DwdObservationDataset.MOISTURE, - # TEMPERATURE_AIR_200: "TF_TER" - # HUMIDITY: "RF_TER" # pressure DwdObservationParameter.SUBDAILY.PRESSURE_AIR_SITE: DwdObservationDataset.PRESSURE, # soil @@ -775,9 +728,6 @@ class WEATHER_PHENOMENA(Enum): # noqa }, Resolution.DAILY: { # more_precip - # DwdObservationParameter.DAILY.PRECIPITATION_HEIGHT: DwdObservationDataset.PRECIPITATION_MORE, - # DwdObservationParameter.DAILY.PRECIPITATION_FORM: DwdObservationDataset.PRECIPITATION_MORE, - # DwdObservationParameter.DAILY.SNOW_DEPTH: DwdObservationDataset.PRECIPITATION_MORE, DwdObservationParameter.DAILY.SNOW_DEPTH_NEW: DwdObservationDataset.PRECIPITATION_MORE, # solar 
DwdObservationParameter.DAILY.RADIATION_SKY_LONG_WAVE: DwdObservationDataset.SOLAR, @@ -789,7 +739,6 @@ class WEATHER_PHENOMENA(Enum): # noqa DwdObservationParameter.DAILY.WIND_SPEED: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.DAILY.PRECIPITATION_HEIGHT: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.DAILY.PRECIPITATION_FORM: DwdObservationDataset.CLIMATE_SUMMARY, - # DwdObservationParameter.DAILY.SUNSHINE_DURATION: DwdObservationParameterSet.CLIMATE_SUMMARY, DwdObservationParameter.DAILY.SNOW_DEPTH: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.DAILY.CLOUD_COVER_TOTAL: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.DAILY.PRESSURE_VAPOR: DwdObservationDataset.CLIMATE_SUMMARY, @@ -807,7 +756,6 @@ class WEATHER_PHENOMENA(Enum): # noqa DwdObservationParameter.DAILY.TEMPERATURE_SOIL_MEAN_050: DwdObservationDataset.TEMPERATURE_SOIL, # water_equiv DwdObservationParameter.DAILY.SNOW_DEPTH_EXCELLED: DwdObservationDataset.WATER_EQUIVALENT, - # SNOW_DEPTH: "SH_TAG" # int DwdObservationParameter.DAILY.WATER_EQUIVALENT_SNOW_DEPTH: DwdObservationDataset.WATER_EQUIVALENT, DwdObservationParameter.DAILY.WATER_EQUIVALENT_SNOW_DEPTH_EXCELLED: DwdObservationDataset.WATER_EQUIVALENT, # weather_phenomena @@ -837,8 +785,6 @@ class WEATHER_PHENOMENA(Enum): # noqa DwdObservationParameter.MONTHLY.WIND_GUST_MAX: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.MONTHLY.TEMPERATURE_AIR_MIN_200: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.MONTHLY.SUNSHINE_DURATION: DwdObservationDataset.CLIMATE_SUMMARY, - # DwdObservationParameter.MONTHLY.PRECIPITATION_HEIGHT: DwdObservationParameterSet.CLIMATE_SUMMARY, - # DwdObservationParameter.MONTHLY.PRECIPITATION_HEIGHT_MAX: DwdObservationParameterSet.CLIMATE_SUMMARY, # weather_phenomena DwdObservationParameter.MONTHLY.COUNT_WEATHER_TYPE_STORM_STRONG_WIND: DwdObservationDataset.WEATHER_PHENOMENA, DwdObservationParameter.MONTHLY.COUNT_WEATHER_TYPE_STORM_STORMIER_WIND: DwdObservationDataset.WEATHER_PHENOMENA, @@ -865,8 +811,6 @@ class WEATHER_PHENOMENA(Enum): # noqa DwdObservationParameter.ANNUAL.WIND_GUST_MAX: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.ANNUAL.TEMPERATURE_AIR_MAX_200: DwdObservationDataset.CLIMATE_SUMMARY, DwdObservationParameter.ANNUAL.TEMPERATURE_AIR_MIN_200: DwdObservationDataset.CLIMATE_SUMMARY, - # DwdObservationParameter.ANNUAL.PRECIPITATION_HEIGHT: "JA_RR", - # DwdObservationParameter.ANNUAL.PRECIPITATION_HEIGHT_MAX: "JA_MX_RS", # weather_phenomena DwdObservationParameter.ANNUAL.COUNT_WEATHER_TYPE_STORM_STRONG_WIND: DwdObservationDataset.WEATHER_PHENOMENA, DwdObservationParameter.ANNUAL.COUNT_WEATHER_TYPE_STORM_STORMIER_WIND: DwdObservationDataset.WEATHER_PHENOMENA, diff --git a/wetterdienst/provider/dwd/observation/metaindex.py b/wetterdienst/provider/dwd/observation/metaindex.py index 750251a33..eb4e40c69 100644 --- a/wetterdienst/provider/dwd/observation/metaindex.py +++ b/wetterdienst/provider/dwd/observation/metaindex.py @@ -91,9 +91,7 @@ def create_meta_index_for_climate_observations( if cond: meta_index = _create_meta_index_for_1minute_historical_precipitation() else: - meta_index = _create_meta_index_for_climate_observations( - parameter_set, resolution, period - ) + meta_index = _create_meta_index_for_climate_observations(parameter_set, resolution, period) # If no state column available, take state information from daily historical # precipitation @@ -162,15 +160,11 @@ def _create_meta_index_for_climate_observations( ) # Fix 
column names, as header is not aligned to fixed column widths - meta_index.columns = "".join( - [column for column in meta_index.columns if "unnamed" not in column.lower()] - ).split(" ") - - meta_index = meta_index.rename(columns=str.lower) - - meta_index = meta_index.rename(columns=GERMAN_TO_ENGLISH_COLUMNS_MAPPING) + meta_index.columns = "".join([column for column in meta_index.columns if "unnamed" not in column.lower()]).split( + " " + ) - return meta_index + return meta_index.rename(columns=str.lower).rename(columns=GERMAN_TO_ENGLISH_COLUMNS_MAPPING) def _find_meta_file(files: List[str], url: str) -> str: @@ -205,9 +199,7 @@ def _create_meta_index_for_1minute_historical_precipitation() -> pd.DataFrame: """ - parameter_path = ( - f"{Resolution.MINUTE_1.value}/" f"{DwdObservationDataset.PRECIPITATION.value}/" - ) + parameter_path = f"{Resolution.MINUTE_1.value}/" f"{DwdObservationDataset.PRECIPITATION.value}/" url = reduce( urljoin, @@ -222,21 +214,15 @@ def _create_meta_index_for_1minute_historical_precipitation() -> pd.DataFrame: metadata_file_paths = list_remote_files_fsspec(url, recursive=False) - station_ids = [ - re.findall(STATION_ID_REGEX, file).pop(0) for file in metadata_file_paths - ] + station_ids = [re.findall(STATION_ID_REGEX, file).pop(0) for file in metadata_file_paths] meta_index_df = pd.DataFrame(columns=METADATA_COLUMNS) with ThreadPoolExecutor() as executor: - metadata_files = executor.map( - _download_metadata_file_for_1minute_precipitation, metadata_file_paths - ) + metadata_files = executor.map(_download_metadata_file_for_1minute_precipitation, metadata_file_paths) with ThreadPoolExecutor() as executor: - metadata_dfs = executor.map( - _parse_geo_metadata, zip(metadata_files, station_ids) - ) + metadata_dfs = executor.map(_parse_geo_metadata, zip(metadata_files, station_ids)) meta_index_df = meta_index_df.append(other=list(metadata_dfs), ignore_index=True) @@ -250,9 +236,7 @@ def _create_meta_index_for_1minute_historical_precipitation() -> pd.DataFrame: meta_index_df = meta_index_df.drop(labels=Columns.STATE.value, axis=1) # Make station id str - meta_index_df[Columns.STATION_ID.value] = meta_index_df[ - Columns.STATION_ID.value - ].str.pad(5, "left", "0") + meta_index_df[Columns.STATION_ID.value] = meta_index_df[Columns.STATION_ID.value].str.pad(5, "left", "0") return meta_index_df @@ -275,16 +259,12 @@ def _download_metadata_file_for_1minute_precipitation(metadata_file: str) -> Byt # Attention: Currently, a FSSPEC-based cache must not be used here as Windows # would croak when concurrently accessing those resources badly. # TODO: Revisit this place after completely getting rid of dogpile.cache. 
- file = download_file(metadata_file, ttl=CacheExpiry.NO_CACHE) + return download_file(metadata_file, ttl=CacheExpiry.NO_CACHE) except InvalidURL as e: raise InvalidURL(f"Reading metadata {metadata_file} file failed.") from e - return file - -def _parse_geo_metadata( - metadata_file_and_station_id: Tuple[BytesIO, str] -) -> pd.DataFrame: +def _parse_geo_metadata(metadata_file_and_station_id: Tuple[BytesIO, str]) -> pd.DataFrame: """A function that analyses the given file (bytes) and extracts geography of 1minute metadata zip and catches the relevant information and creates a similar file to those that can usually be found already prepared for other @@ -311,9 +291,7 @@ def _parse_geo_metadata( metadata_geo_df = metadata_geo_df.rename(columns=GERMAN_TO_ENGLISH_COLUMNS_MAPPING) - metadata_geo_df[Columns.FROM_DATE.value] = metadata_geo_df.loc[ - 0, Columns.FROM_DATE.value - ] + metadata_geo_df[Columns.FROM_DATE.value] = metadata_geo_df.loc[0, Columns.FROM_DATE.value] metadata_geo_df = metadata_geo_df.iloc[[-1], :] diff --git a/wetterdienst/provider/dwd/observation/parser.py b/wetterdienst/provider/dwd/observation/parser.py index 90c08f92d..13fd0fd25 100644 --- a/wetterdienst/provider/dwd/observation/parser.py +++ b/wetterdienst/provider/dwd/observation/parser.py @@ -25,13 +25,11 @@ # Parameter names used to create full 1 minute precipitation dataset wherever those # columns are missing (which is the case for non-historical data) PRECIPITATION_PARAMETERS = ( - DwdObservationDatasetTree.MINUTE_1.PRECIPITATION.PRECIPITATION_HEIGHT_DROPLET.value, # Noqa: E501, B950 - DwdObservationDatasetTree.MINUTE_1.PRECIPITATION.PRECIPITATION_HEIGHT_ROCKER.value, # Noqa: E501, B950 + DwdObservationDatasetTree.MINUTE_1.PRECIPITATION.PRECIPITATION_HEIGHT_DROPLET.value, + DwdObservationDatasetTree.MINUTE_1.PRECIPITATION.PRECIPITATION_HEIGHT_ROCKER.value, ) -PRECIPITATION_MINUTE_1_QUALITY = ( - DwdObservationDatasetTree.MINUTE_1.PRECIPITATION.QUALITY -) +PRECIPITATION_MINUTE_1_QUALITY = DwdObservationDatasetTree.MINUTE_1.PRECIPITATION.QUALITY def parse_climate_observations_data( @@ -60,9 +58,7 @@ for filename_and_file in filenames_and_files ] - df = pd.concat(data).reset_index(drop=True) - - return df + return pd.concat(data).reset_index(drop=True) def _parse_climate_observations_data( @@ -87,17 +83,13 @@ try: df = pd.read_csv( - filepath_or_buffer=BytesIO( - file.read().replace(b" ", b"") - ), # prevent leading/trailing whitespace + filepath_or_buffer=BytesIO(file.read().replace(b" ", b"")), # prevent leading/trailing whitespace sep=STATION_DATA_SEP, dtype="str", na_values=NA_STRING, ) except pd.errors.ParserError: - log.warning( - f"The file representing {filename} could not be parsed and is skipped." - ) + log.warning(f"The file representing {filename} could not be parsed and is skipped.") return pd.DataFrame() except ValueError: log.warning(f"The file representing {filename} is None and is skipped.") @@ -118,24 +110,17 @@ # information.
Also rename column with true local time to english one df = df.rename( columns={ - "mess_datum_woz": ( - DwdObservationDatasetTree.HOURLY.SOLAR.TRUE_LOCAL_TIME.value # Noqa: E501, B950 - ), + "mess_datum_woz": DwdObservationDatasetTree.HOURLY.SOLAR.TRUE_LOCAL_TIME.value, } ) # Duplicate the date column to end of interval column - df[DwdObservationDatasetTree.HOURLY.SOLAR.END_OF_INTERVAL.value] = df[ - DwdOrigColumns.DATE.value - ] + df[DwdObservationDatasetTree.HOURLY.SOLAR.END_OF_INTERVAL.value] = df[DwdOrigColumns.DATE.value] # Fix real date column by cutting of minutes df[DwdOrigColumns.DATE.value] = df[DwdOrigColumns.DATE.value].str[:-3] - if ( - resolution == Resolution.MINUTE_1 - and dataset == DwdObservationDataset.PRECIPITATION - ): + if resolution == Resolution.MINUTE_1 and dataset == DwdObservationDataset.PRECIPITATION: # Need to unfold historical data, as it is encoded in its run length e.g. # from time X to time Y precipitation is 0 if period == Period.HISTORICAL: @@ -177,6 +162,4 @@ def _parse_climate_observations_data( df[parameter] = pd.NA # Assign meaningful column names (baseline). - df = df.rename(columns=GERMAN_TO_ENGLISH_COLUMNS_MAPPING) - - return df + return df.rename(columns=GERMAN_TO_ENGLISH_COLUMNS_MAPPING) diff --git a/wetterdienst/provider/dwd/observation/util/parameter.py b/wetterdienst/provider/dwd/observation/util/parameter.py index 39c94fb69..b79168949 100644 --- a/wetterdienst/provider/dwd/observation/util/parameter.py +++ b/wetterdienst/provider/dwd/observation/util/parameter.py @@ -22,34 +22,22 @@ def create_parameter_to_dataset_combination( parameter: Union[DwdObservationParameter, DwdObservationDataset], resolution: Resolution, -) -> Tuple[ - Union[DwdObservationParameter, DwdObservationDataset], - DwdObservationDataset, -]: +) -> Tuple[Union[DwdObservationParameter, DwdObservationDataset], DwdObservationDataset,]: """Function to create a mapping from a requested parameter to a provided parameter set which has to be downloaded first to extract the parameter from it""" try: - parameter_ = parse_enumeration_from_template( - parameter, DwdObservationParameter[resolution.name] - ) + parameter_ = parse_enumeration_from_template(parameter, DwdObservationParameter[resolution.name]) parameter = PARAMETER_TO_DATASET_MAPPING[resolution][parameter_] - return parameter, parse_enumeration_from_template( - parameter.__class__.__name__, DwdObservationDataset - ) + return parameter, parse_enumeration_from_template(parameter.__class__.__name__, DwdObservationDataset) except (KeyError, InvalidEnumeration): try: - parameter_set = parse_enumeration_from_template( - parameter, DwdObservationDataset - ) + parameter_set = parse_enumeration_from_template(parameter, DwdObservationDataset) return parameter_set, parameter_set except InvalidEnumeration: - raise InvalidParameter( - f"parameter {parameter} could not be parsed for " - f"time resolution {resolution}" - ) + raise InvalidParameter(f"parameter {parameter} could not be parsed for " f"time resolution {resolution}") def check_dwd_observations_dataset( diff --git a/wetterdienst/provider/dwd/radar/access.py b/wetterdienst/provider/dwd/radar/access.py index 04036e8c9..e0eb44b09 100644 --- a/wetterdienst/provider/dwd/radar/access.py +++ b/wetterdienst/provider/dwd/radar/access.py @@ -129,9 +129,7 @@ def collect_radar_data( results = [] for period in period_types: - file_index = create_fileindex_radolan_cdc( - resolution=resolution, period=period - ) + file_index = create_fileindex_radolan_cdc(resolution=resolution, 
period=period) # Filter for date range if start_date and end_date are defined. if period == Period.RECENT: @@ -143,14 +141,8 @@ # This is for matching historical data, e.g. "RW-200509.tar.gz". else: file_index = file_index[ - ( - file_index[DwdColumns.DATETIME.value].dt.year - == start_date.year - ) - & ( - file_index[DwdColumns.DATETIME.value].dt.month - == start_date.month - ) + (file_index[DwdColumns.DATETIME.value].dt.year == start_date.year) + & (file_index[DwdColumns.DATETIME.value].dt.month == start_date.month) ] results.append(file_index) @@ -219,15 +211,14 @@ log.exception("Unable to read HDF5 file") -def should_cache_download(url) -> bool: # pragma: no cover +def should_cache_download(url: str) -> bool: # pragma: no cover """ Determine whether this specific result should be cached. Here, we don't want to cache any files containing "-latest-" in their filenames. - :param args: Arguments of decorated function. - :param kwargs: Keyword arguments of decorated function. - :return: When cache should be dimissed, return False. Otherwise, return True. + :param url: URL used to decide whether the result is cached + :return: When cache should be dismissed, return False. Otherwise, return True. """ if "-latest-" in url: return False @@ -263,7 +254,7 @@ def _download_generic_data(url: str) -> Generator[RadarResult, None, None]: filename=file.name, ) - # RadarParameter.WN_REFLECTIVITY, RADAR_PARAMETERS_SWEEPS (BUFR) + # RadarParameter.WN_REFLECTIVITY, RADAR_PARAMETERS_SWEEPS (BUFR) # noqa: E800 elif url.endswith(Extension.BZ2.value): with bz2.BZ2File(data, mode="rb") as archive: data = BytesIO(archive.read()) @@ -321,9 +312,7 @@ return download_file(remote_radolan_filepath, ttl=CacheExpiry.TWELVE_HOURS) -def _extract_radolan_data( - date_time: datetime, archive_in_bytes: BytesIO -) -> RadarResult: +def _extract_radolan_data(date_time: datetime, archive_in_bytes: BytesIO) -> RadarResult: """ Function used to extract RADOLAN_CDC file for the requested datetime from the downloaded archive. @@ -356,14 +345,10 @@ filename=file.name, ) - raise FileNotFoundError( - f"RADOLAN file for {date_time_string} not found."
- ) # pragma: no cover + raise FileNotFoundError(f"RADOLAN file for {date_time_string} not found.") # pragma: no cover except EOFError as ex: - raise FailedDownload( - f"RADOLAN file for {date_time_string} is invalid: {ex}" - ) # pragma: no cover + raise FailedDownload(f"RADOLAN file for {date_time_string} is invalid: {ex}") # pragma: no cover # Otherwise if there's an error the data is from recent time period and only has to # be unpacked once @@ -372,6 +357,4 @@ def _extract_radolan_data( archive_in_bytes.seek(0) with gzip.GzipFile(fileobj=archive_in_bytes, mode="rb") as gz_file: - return RadarResult( - data=BytesIO(gz_file.read()), timestamp=date_time, filename=gz_file.name - ) + return RadarResult(data=BytesIO(gz_file.read()), timestamp=date_time, filename=gz_file.name) diff --git a/wetterdienst/provider/dwd/radar/api.py b/wetterdienst/provider/dwd/radar/api.py index 2d05ae277..445c20f00 100644 --- a/wetterdienst/provider/dwd/radar/api.py +++ b/wetterdienst/provider/dwd/radar/api.py @@ -75,12 +75,8 @@ def __init__( self.format = parse_enumeration_from_template(fmt, DwdRadarDataFormat) self.subset = parse_enumeration_from_template(subset, DwdRadarDataSubset) self.elevation = elevation and int(elevation) - self.resolution: Resolution = parse_enumeration_from_template( - resolution, DwdRadarResolution, Resolution - ) - self.period: Period = parse_enumeration_from_template( - period, DwdRadarPeriod, Period - ) + self.resolution: Resolution = parse_enumeration_from_template(resolution, DwdRadarResolution, Resolution) + self.period: Period = parse_enumeration_from_template(period, DwdRadarPeriod, Period) # Sanity checks. if self.parameter == DwdRadarParameter.RADOLAN_CDC: @@ -89,18 +85,14 @@ def __init__( Resolution.HOURLY, Resolution.DAILY, ): - raise ValueError( - "RADOLAN_CDC only supports daily and hourly resolutions" - ) + raise ValueError("RADOLAN_CDC only supports daily and hourly resolutions") elevation_parameters = [ DwdRadarParameter.SWEEP_VOL_VELOCITY_H, DwdRadarParameter.SWEEP_VOL_REFLECTIVITY_H, ] if self.elevation is not None and self.parameter not in elevation_parameters: - raise ValueError( - f"Argument 'elevation' only valid for parameter={elevation_parameters}" - ) + raise ValueError(f"Argument 'elevation' only valid for parameter={elevation_parameters}") if start_date == DwdRadarDate.LATEST: @@ -134,10 +126,7 @@ def __init__( start_date = datetime.utcnow() - timedelta(minutes=5) end_date = None - if ( - start_date == DwdRadarDate.MOST_RECENT - and parameter == DwdRadarParameter.RADOLAN_CDC - ): + if start_date == DwdRadarDate.MOST_RECENT and parameter == DwdRadarParameter.RADOLAN_CDC: start_date = datetime.utcnow() - timedelta(minutes=50) end_date = None @@ -169,7 +158,7 @@ def adjust_datetimes(self): minute marks for respective RadarParameter. - RADOLAN_CDC is always published at HH:50. - https://opendata.dwd.de/climate_environment/CDC/grids_germany/daily/radolan/recent/bin/ # noqa:E501,B950 + https://opendata.dwd.de/climate_environment/CDC/grids_germany/daily/radolan/recent/bin/ - RQ_REFLECTIVITY is published each 15 minutes. https://opendata.dwd.de/weather/radar/radvor/rq/ @@ -180,10 +169,7 @@ def adjust_datetimes(self): """ - if ( - self.parameter == DwdRadarParameter.RADOLAN_CDC - or self.parameter in RADAR_PARAMETERS_RADOLAN - ): + if self.parameter == DwdRadarParameter.RADOLAN_CDC or self.parameter in RADAR_PARAMETERS_RADOLAN: # Align "start_date" to the most recent 50 minute mark available. 
self.start_date = raster_minutes(self.start_date, 50) diff --git a/wetterdienst/provider/dwd/radar/index.py b/wetterdienst/provider/dwd/radar/index.py index 6d692d902..a629f9f47 100644 --- a/wetterdienst/provider/dwd/radar/index.py +++ b/wetterdienst/provider/dwd/radar/index.py @@ -91,39 +91,28 @@ def create_fileindex_radar( files_server = list_remote_files_fsspec(url, recursive=True) - files_server = pd.DataFrame( - files_server, columns=[DwdColumns.FILENAME.value], dtype="str" - ) + files_server = pd.DataFrame(files_server, columns=[DwdColumns.FILENAME.value], dtype="str") # Some directories have both "---bin" and "---bufr" files within the same directory, # so we need to filter here by designated RadarDataFormat. Example: # https://opendata.dwd.de/weather/radar/sites/px/boo/ if fmt is not None: if fmt == DwdRadarDataFormat.BINARY: - files_server = files_server[ - files_server[DwdColumns.FILENAME.value].str.contains("--bin") - ] + files_server = files_server[files_server[DwdColumns.FILENAME.value].str.contains("--bin")] elif fmt == DwdRadarDataFormat.BUFR: - files_server = files_server[ - files_server[DwdColumns.FILENAME.value].str.contains("--buf") - ] + files_server = files_server[files_server[DwdColumns.FILENAME.value].str.contains("--buf")] # Decode datetime of file for filtering. if parse_datetime: + files_server[DwdColumns.DATETIME.value] = files_server[DwdColumns.FILENAME.value].apply(get_date_from_filename) - files_server[DwdColumns.DATETIME.value] = files_server[ - DwdColumns.FILENAME.value - ].apply(get_date_from_filename) - - files_server = files_server.dropna() + return files_server.dropna() return files_server @fileindex_cache_five_minutes.cache_on_arguments() -def create_fileindex_radolan_cdc( - resolution: Resolution, period: Period -) -> pd.DataFrame: +def create_fileindex_radolan_cdc(resolution: Resolution, period: Period) -> pd.DataFrame: """ Function used to create a file index for the RADOLAN_CDC product. The file index will include both recent as well as historical files. A datetime column is created @@ -144,9 +133,7 @@ def create_fileindex_radolan_cdc( file_index = file_index[ file_index[DwdColumns.FILENAME.value].str.contains("/bin/") - & file_index[DwdColumns.FILENAME.value].str.endswith( - (Extension.GZ.value, Extension.TAR_GZ.value) - ) + & file_index[DwdColumns.FILENAME.value].str.endswith((Extension.GZ.value, Extension.TAR_GZ.value)) ].copy() # Decode datetime of file for filtering. 
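# Reading aid, not part of the patch: a minimal sketch of the shape the
# file-index builders above converge on once parse_datetime derives a
# datetime per filename and rows without one are dropped via the early
# return. Column name, regex and timestamp format are illustrative
# assumptions; get_date_from_filename() is stubbed with a regex here.
import pandas as pd

def create_fileindex_sketch(filenames: list, parse_datetime: bool = False) -> pd.DataFrame:
    files_server = pd.DataFrame(filenames, columns=["filename"], dtype="str")
    if parse_datetime:
        # Filenames without a parseable timestamp become NaT and are removed
        # by the dropna() before returning, mirroring the refactored hunks.
        files_server["datetime"] = pd.to_datetime(
            files_server["filename"].str.extract(r"(\d{10})", expand=False),
            format="%y%m%d%H%M",
            errors="coerce",
        )
        return files_server.dropna()
    return files_server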
@@ -200,19 +187,20 @@ def build_path_to_parameter( if parameter == DwdRadarParameter.RADOLAN_CDC: if resolution == Resolution.MINUTE_5: # See also page 4 on - # https://opendata.dwd.de/climate_environment/CDC/help/RADOLAN/Unterstuetzungsdokumente/Unterstuetzungsdokumente-Verwendung_von_RADOLAN-Produkten_im_ASCII-GIS-Rasterformat_in_GIS.pdf # noqa:E501,B950 - parameter_path = f"{DWD_CDC_PATH}/grids_germany/{resolution.value}/radolan/reproc/2017_002/bin" # noqa:E501,B950 + # https://opendata.dwd.de/climate_environment/CDC/help/RADOLAN/Unterstuetzungsdokumente/ + # Unterstuetzungsdokumente-Verwendung_von_RADOLAN-Produkten_im_ASCII-GIS-Rasterformat_in_GIS.pdf + return f"{DWD_CDC_PATH}/grids_germany/{resolution.value}/radolan/reproc/2017_002/bin" else: - parameter_path = f"{DWD_CDC_PATH}/grids_germany/{resolution.value}/radolan/{period.value}/bin" # noqa:E501,B950 + return f"{DWD_CDC_PATH}/grids_germany/{resolution.value}/radolan/{period.value}/bin" elif parameter in RADAR_PARAMETERS_COMPOSITES: - parameter_path = f"weather/radar/composit/{parameter.value}" + return f"weather/radar/composit/{parameter.value}" elif parameter in RADAR_PARAMETERS_RADOLAN: - parameter_path = f"weather/radar/radolan/{parameter.value}" + return f"weather/radar/radolan/{parameter.value}" elif parameter in RADAR_PARAMETERS_RADVOR: - parameter_path = f"weather/radar/radvor/{parameter.value}" + return f"weather/radar/radvor/{parameter.value}" elif parameter in RADAR_PARAMETERS_SITES: @@ -237,9 +225,7 @@ def build_path_to_parameter( candidates = [DwdRadarDataFormat.BUFR, DwdRadarDataFormat.HDF5] if candidates: - raise ValueError( - f"Argument 'format' is missing, use one of {candidates}" - ) + raise ValueError(f"Argument 'format' is missing, use one of {candidates}") # Compute path to BINARY/BUFR vs. HDF5. 
parameter_path = f"weather/radar/sites/{parameter.value}/{site.value}" @@ -249,12 +235,10 @@ def build_path_to_parameter( DwdRadarDataSubset.SIMPLE, DwdRadarDataSubset.POLARIMETRIC, ] - raise ValueError( - f"Argument 'subset' is missing, use one of {candidates}" - ) - parameter_path = f"{parameter_path}/{fmt.value}/filter_{subset.value}/" + raise ValueError(f"Argument 'subset' is missing, use one of {candidates}") + return f"{parameter_path}/{fmt.value}/filter_{subset.value}/" + + return parameter_path else: # pragma: no cover raise NotImplementedError(f"Acquisition for {parameter} not implemented yet") - - return parameter_path diff --git a/wetterdienst/provider/dwd/radar/metadata/parameter.py b/wetterdienst/provider/dwd/radar/metadata/parameter.py index 9dcc8946c..d779d77bd 100644 --- a/wetterdienst/provider/dwd/radar/metadata/parameter.py +++ b/wetterdienst/provider/dwd/radar/metadata/parameter.py @@ -13,13 +13,11 @@ class DwdRadarParameter(Enum): # https://docs.wradlib.org/en/stable/notebooks/fileio/wradlib_radar_formats.html#German-Weather-Service:-RADOLAN-(quantitative)-composit # noqa:E501,B950 # https://opendata.dwd.de/weather/radar/composit/ - # FX_REFLECTIVITY = "fx" HG_REFLECTIVITY = "hg" PG_REFLECTIVITY = "pg" RV_REFLECTIVITY = "rv" WX_REFLECTIVITY = "wx" WN_REFLECTIVITY = "wn" - # RX_REFLECTIVITY = "rx" # /radolan # https://opendata.dwd.de/weather/radar/radolan/ @@ -34,7 +32,7 @@ class DwdRadarParameter(Enum): # /sites # https://opendata.dwd.de/weather/radar/sites/ - # https://docs.wradlib.org/en/stable/notebooks/fileio/wradlib_radar_formats.html#German-Weather-Service:-DX-format # noqa:E501,B950 + # https://docs.wradlib.org/en/stable/notebooks/fileio/wradlib_radar_formats.html#German-Weather-Service:-DX-format DX_REFLECTIVITY = "dx" LMAX_VOLUME_SCAN = "lmax" PE_ECHO_TOP = "pe" @@ -59,13 +57,11 @@ class DwdRadarParameter(Enum): RADAR_PARAMETERS_COMPOSITES = [ - # DwdRadarParameter.FX_REFLECTIVITY, DwdRadarParameter.HG_REFLECTIVITY, DwdRadarParameter.PG_REFLECTIVITY, DwdRadarParameter.RV_REFLECTIVITY, DwdRadarParameter.WX_REFLECTIVITY, DwdRadarParameter.WN_REFLECTIVITY, - # DwdRadarParameter.RX_REFLECTIVITY, ] RADAR_PARAMETERS_RADOLAN = [ DwdRadarParameter.RW_REFLECTIVITY, diff --git a/wetterdienst/provider/dwd/radar/sites.py b/wetterdienst/provider/dwd/radar/sites.py index 99d834521..27a146759 100644 --- a/wetterdienst/provider/dwd/radar/sites.py +++ b/wetterdienst/provider/dwd/radar/sites.py @@ -8,8 +8,8 @@ Sources ======= -- April, 2018: https://www.dwd.de/DE/derdwd/messnetz/atmosphaerenbeobachtung/_functions/HaeufigGesucht/koordinaten-radarverbund.pdf?__blob=publicationFile # noqa:E501,B950 -- October, 2020: https://www.dwd.de/DE/leistungen/radolan/radolan_info/radolan_radvor_op_komposit_format_pdf.pdf?__blob=publicationFile # noqa:E501,B950 +- April, 2018: https://www.dwd.de/DE/derdwd/messnetz/atmosphaerenbeobachtung/_functions/HaeufigGesucht/koordinaten-radarverbund.pdf?__blob=publicationFile # noqa:B950 +- October, 2020: https://www.dwd.de/DE/leistungen/radolan/radolan_info/radolan_radvor_op_komposit_format_pdf.pdf?__blob=publicationFile # noqa:B950 References ========== @@ -51,14 +51,14 @@ class DwdRadarSitesGenerator: # pragma: no cover """ Parse list of sites from PDF documents [1,2] and output as Python dictionary. 
- [1] https://www.dwd.de/DE/derdwd/messnetz/atmosphaerenbeobachtung/_functions/HaeufigGesucht/koordinaten-radarverbund.pdf?__blob=publicationFile # noqa:E501,B950 - [2] https://www.dwd.de/DE/leistungen/radolan/radolan_info/radolan_radvor_op_komposit_format_pdf.pdf?__blob=publicationFile # noqa:E501,B950 + [1] https://www.dwd.de/DE/derdwd/messnetz/atmosphaerenbeobachtung/_functions/HaeufigGesucht/koordinaten-radarverbund.pdf?__blob=publicationFile # noqa:B950 + [2] https://www.dwd.de/DE/leistungen/radolan/radolan_info/radolan_radvor_op_komposit_format_pdf.pdf?__blob=publicationFile # noqa:B950 """ url = ( "https://www.dwd.de/DE/derdwd/messnetz/atmosphaerenbeobachtung/_functions" "/HaeufigGesucht/koordinaten-radarverbund.pdf?__blob=publicationFile" - ) # noqa:E501,B950 + ) def all(self) -> Dict: # pragma: no cover """ @@ -111,19 +111,13 @@ def read_pdf(self) -> pd.DataFrame: # Munge into one coherent data frame. data = firsts - data = data.drop( - labels=["coordinates_wgs84_text", "coordinates_gauss"], axis="columns" - ) + data = data.drop(labels=["coordinates_wgs84_text", "coordinates_gauss"], axis="columns") data = data.rename(columns={"coordinates_wgs84": "latitude"}) data.insert(4, "longitude", seconds["coordinates_wgs84"].values) data = data.reset_index(drop=True) for column in ["latitude", "longitude"]: - data[column] = ( - data[column] - .apply(lambda x: x.strip("NE").replace(",", ".")) - .apply(float) # noqa: E501 - ) + data[column] = data[column].apply(lambda x: x.strip("NE").replace(",", ".")).apply(float) for column in ["wmo_id", "altitude"]: data[column] = data[column].apply(int) @@ -147,10 +141,6 @@ python wetterdienst/provider/dwd/radar/sites.py """ - # import pout - # sites = DwdRadarSitesGenerator().all() - # print(black.format_str(pout.ss(sites), mode=black.Mode())) - import pprint import black diff --git a/wetterdienst/provider/dwd/util.py b/wetterdienst/provider/dwd/util.py index 0c434d9cd..bab1460d0 100644 --- a/wetterdienst/provider/dwd/util.py +++ b/wetterdienst/provider/dwd/util.py @@ -19,6 +19,6 @@ def build_parameter_set_identifier( identifier = f"{dataset.value}/{resolution.value}/" f"{period.value}/{station_id}" if date_range_string: - identifier = f"{identifier}/{date_range_string}" + return f"{identifier}/{date_range_string}" return identifier diff --git a/wetterdienst/provider/eccc/observation/api.py b/wetterdienst/provider/eccc/observation/api.py index 52454a581..b6456a0c5 100644 --- a/wetterdienst/provider/eccc/observation/api.py +++ b/wetterdienst/provider/eccc/observation/api.py @@ -49,9 +49,7 @@ class EcccObservationValues(ScalarValuesCore): _has_quality = True _session = requests.Session() - _session.mount( - "https://", HTTPAdapter(max_retries=Retry(total=10, connect=5, read=5)) - ) + _session.mount("https://", HTTPAdapter(max_retries=Retry(total=10, connect=5, read=5))) _base_url = ( "https://climate.weather.gc.ca/climate_data/bulk_data_e.html?" @@ -85,21 +83,17 @@ def _time_step(self): def _create_humanized_parameters_mapping(self): # TODO: change to something general, depending on ._has_datasets - hcnm = { + return { parameter.value: parameter.name.lower() - for parameter in self.stations.stations._parameter_base[ - self.stations.stations.resolution.name - ] + for parameter in self.stations.stations._parameter_base[self.stations.stations.resolution.name] } - return hcnm - def _tidy_up_df(self, df: pd.DataFrame, dataset) -> pd.DataFrame: """ Tidy up dataframe pairwise by column 'DATE', 'Temp (°C)', 'Temp Flag', ...
- :param df: - :return: + :param df: DataFrame with loaded data + :return: tidied DataFrame """ data = [] @@ -117,11 +111,10 @@ def _tidy_up_df(self, df: pd.DataFrame, dataset) -> pd.DataFrame: data.append(df_parameter) try: - df_tidy = pd.concat(data, ignore_index=True) + return pd.concat(data, ignore_index=True) except ValueError: - df_tidy = pd.DataFrame() - - return df_tidy + # TODO: add logging + return pd.DataFrame() def _collect_station_parameter( self, station_id: str, parameter: EcccObservationParameter, dataset: Enum @@ -133,9 +126,7 @@ def _collect_station_parameter( :param dataset: dataset of query, can be skipped as ECCC has unique dataset :return: pandas.DataFrame with data """ - meta = self.stations.df[ - self.stations.df[Columns.STATION_ID.value] == station_id - ] + meta = self.stations.df[self.stations.df[Columns.STATION_ID.value] == station_id] name, from_date, to_date = ( meta[ @@ -157,9 +148,7 @@ def _collect_station_parameter( start_date = self.stations.stations.start_date end_date = self.stations.stations.end_date - start_year = start_year and max( - start_year, start_date and start_date.year or start_year - ) + start_year = start_year and max(start_year, start_date and start_date.year or start_year) end_year = end_year and min(end_year, end_date and end_date.year or end_year) # Following lines may partially be based on @Zeitsperre's canada-climate-python @@ -210,9 +199,7 @@ def _collect_station_parameter( return df - def _create_file_urls( - self, station_id: str, start_year: int, end_year: int - ) -> Generator[str, None, None]: + def _create_file_urls(self, station_id: str, start_year: int, end_year: int) -> Generator[str, None, None]: """ :param station_id: @@ -228,9 +215,7 @@ def _create_file_urls( # For hourly data request only necessary data to reduce amount of data being # downloaded and parsed - for date in pd.date_range( - f"{start_year}-01-01", f"{end_year + 1}-01-01", freq=freq, closed=None - ): + for date in pd.date_range(f"{start_year}-01-01", f"{end_year + 1}-01-01", freq=freq, closed=None): url = self._base_url.format(int(station_id), self._timeframe) url += f"&Year={date.year}" @@ -348,9 +333,7 @@ def _all(self) -> pd.DataFrame: df = df.drop(columns=["latitude", "longitude"]) - df = df.rename(columns=self._columns_mapping) - - return df + return df.rename(columns=self._columns_mapping) @staticmethod @payload_cache_twelve_hours.cache_on_arguments() @@ -362,8 +345,7 @@ def _download_stations() -> bytes: """ ftp_url = ( - "ftp://client_climate:foobar@ftp.tor.ec.gc.ca" - "/Pub/Get_More_Data_Plus_de_donnees/Station Inventory EN.csv" + "ftp://client_climate:foobar@ftp.tor.ec.gc.ca" "/Pub/Get_More_Data_Plus_de_donnees/Station Inventory EN.csv" ) http_url = ( diff --git a/wetterdienst/provider/eccc/observation/metadata/dataset.py b/wetterdienst/provider/eccc/observation/metadata/dataset.py index db427dc39..a99dba65e 100644 --- a/wetterdienst/provider/eccc/observation/metadata/dataset.py +++ b/wetterdienst/provider/eccc/observation/metadata/dataset.py @@ -10,7 +10,6 @@ class EcccObservationDataset(Enum): HOURLY = "hourly" DAILY = "daily" MONTHLY = "monthly" - # ANNUAL = "annual" class EcccObservationParameter(DatasetTreeCore): diff --git a/wetterdienst/provider/eccc/observation/metadata/resolution.py b/wetterdienst/provider/eccc/observation/metadata/resolution.py index 380bd5f76..d9b6a8b2c 100644 --- a/wetterdienst/provider/eccc/observation/metadata/resolution.py +++ b/wetterdienst/provider/eccc/observation/metadata/resolution.py @@ -10,4 +10,3 @@ class 
EccObservationResolution(Enum): DAILY = Resolution.DAILY.value HOURLY = Resolution.HOURLY.value MONTHLY = Resolution.MONTHLY.value - # ANNUAL = Resolution.ANNUAL.value diff --git a/wetterdienst/provider/eumetnet/opera/sites.py b/wetterdienst/provider/eumetnet/opera/sites.py index 89fef06d2..3564d375f 100644 --- a/wetterdienst/provider/eumetnet/opera/sites.py +++ b/wetterdienst/provider/eumetnet/opera/sites.py @@ -80,8 +80,7 @@ def by_countryname(self, name: str) -> List[Dict]: """ sites = list( filter( - lambda site: site["country"] - and site["country"].lower() == name.lower(), + lambda site: site["country"] and site["country"].lower() == name.lower(), self.sites, ) ) @@ -98,9 +97,9 @@ class OperaRadarSitesGenerator: """ url = ( - "https://www.eumetnet.eu/wp-content/themes/aeron-child/observations-programme" - "/current-activities/opera/database/OPERA_Database/OPERA_RADARS_DB.json" - ) # noqa: E501 + "https://www.eumetnet.eu/wp-content/themes/aeron-child/observations-programme/" + "current-activities/opera/database/OPERA_Database/OPERA_RADARS_DB.json" + ) def get_opera_radar_sites(self) -> List[Dict]: # pragma: no cover @@ -151,9 +150,7 @@ def convert_types(element): def filter_and_convert(elements): for element in elements: - if ( - element["location"] and element["latitude"] and element["longitude"] - ): # noqa: E501 + if element["location"] and element["latitude"] and element["longitude"]: yield convert_types(element) return list(filter_and_convert(data)) @@ -170,9 +167,7 @@ def export(self): Generate "sites.json.gz". """ sites = self.get_opera_radar_sites() - with gzip.open( - OperaRadarSites.data_file, mode="wt", compresslevel=9, encoding="utf-8" - ) as fp: + with gzip.open(OperaRadarSites.data_file, mode="wt", compresslevel=9, encoding="utf-8") as fp: json.dump(sites, fp, indent=4) @@ -186,4 +181,3 @@ def export(self): """ orsg = OperaRadarSitesGenerator() orsg.export() - # print(orsg.to_json()) diff --git a/wetterdienst/provider/noaa/ghcn/api.py b/wetterdienst/provider/noaa/ghcn/api.py index 0f50ecfc3..835284a78 100644 --- a/wetterdienst/provider/noaa/ghcn/api.py +++ b/wetterdienst/provider/noaa/ghcn/api.py @@ -46,9 +46,7 @@ class NoaaGhcnValues(ScalarValuesCore): _data_tz = Timezone.DYNAMIC - _base_url = ( - "https://www1.ncdc.noaa.gov/pub/data/ghcn/daily/by_station/{station_id}.csv.gz" - ) + _base_url = "https://www1.ncdc.noaa.gov/pub/data/ghcn/daily/by_station/{station_id}.csv.gz" # use to get timezones from stations _tf = TimezoneFinder() @@ -56,9 +54,7 @@ class NoaaGhcnValues(ScalarValuesCore): # multiplication factors _mp_factors = PARAMETER_MULTIPLICATION_FACTORS - def _collect_station_parameter( - self, station_id: str, parameter, dataset - ) -> pd.DataFrame: + def _collect_station_parameter(self, station_id: str, parameter, dataset) -> pd.DataFrame: """ Collection method for NOAA GHCN data. Parameter and dataset can be ignored as data is provided as a whole. 
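# Usage sketch, not part of the patch: the "by_station" layout means one
# gzipped CSV per station carrying every parameter at once, which is why
# 'parameter' and 'dataset' can be ignored in the method above. The URL
# template mirrors _base_url; the column names follow the GHCN-Daily readme
# and are an assumption of this sketch, not code from this repository.
import gzip
from io import BytesIO

import pandas as pd
import requests

GHCN_BY_STATION = "https://www1.ncdc.noaa.gov/pub/data/ghcn/daily/by_station/{station_id}.csv.gz"

def fetch_ghcn_station(station_id: str) -> pd.DataFrame:
    payload = requests.get(GHCN_BY_STATION.format(station_id=station_id), timeout=60).content
    with gzip.open(BytesIO(payload)) as fh:
        return pd.read_csv(
            fh,
            header=None,  # the file ships without a header row
            names=["station_id", "date", "parameter", "value",
                   "measurement_flag", "quality_flag", "source_flag", "time"],
            dtype="str",
        )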
@@ -88,9 +84,7 @@ def _collect_station_parameter( Columns.QUALITY.value, ) - df.loc[:, Columns.PARAMETER.value] = df.loc[ - :, Columns.PARAMETER.value - ].str.lower() + df.loc[:, Columns.PARAMETER.value] = df.loc[:, Columns.PARAMETER.value].str.lower() # get timezone from station timezone_ = self._get_timezone_from_station(station_id) @@ -132,9 +126,7 @@ def _apply_factors(self, df: pd.DataFrame) -> pd.DataFrame: return df_factors - def _add_derived_parameters( - self, df: pd.DataFrame, station_id: str - ) -> pd.DataFrame: + def _add_derived_parameters(self, df: pd.DataFrame, station_id: str) -> pd.DataFrame: """ Method to add derived parameters to DataFrame, specifically temperature_air_mean_200 from maximum and minimum daily temperature @@ -167,9 +159,7 @@ def _add_derived_parameters( .reset_index() ) - df_temperatures = df_temperatures.reindex( - columns=(Columns.DATE.value, tmax_key, tmin_key) - ) + df_temperatures = df_temperatures.reindex(columns=(Columns.DATE.value, tmax_key, tmin_key)) start_date = self.stations.start_date end_date = self.stations.end_date @@ -202,8 +192,7 @@ def _add_derived_parameters( df_tmean = df_tmean.merge(df_temperatures, how="left", on=Columns.DATE.value) df_tmean[Columns.VALUE.value] = ( - df_tmean[tmax_key].astype(float, errors="ignore") - + df_tmean[tmin_key].astype(float, errors="ignore") + df_tmean[tmax_key].astype(float, errors="ignore") + df_tmean[tmin_key].astype(float, errors="ignore") ) / 2 df_tmean = df_tmean.drop(columns=[tmax_key, tmin_key]) @@ -212,9 +201,7 @@ def _add_derived_parameters( df_tmean[Columns.PARAMETER.value] = tmean_key df_tmean[Columns.QUALITY.value] = pd.NA - df = df.append(df_tmean) - - return df + return df.append(df_tmean) class NoaaGhcnRequest(ScalarRequestCore): diff --git a/wetterdienst/provider/noaa/ghcn/parameter.py b/wetterdienst/provider/noaa/ghcn/parameter.py index c5c0b8c8b..c2d697702 100644 --- a/wetterdienst/provider/noaa/ghcn/parameter.py +++ b/wetterdienst/provider/noaa/ghcn/parameter.py @@ -34,94 +34,67 @@ class DAILY(Enum): # Additional parameters: - # ACMC = Average cloudiness midnight to midnight from 30-second - # ceilometer data (percent) + # Average cloudiness midnight to midnight from 30-second ceilometer data (percent) CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT = "acmc" # ceilometer - # ACMH = Average cloudiness midnight to midnight from manual - # observations (percent) + # Average cloudiness midnight to midnight from manual observations (percent) CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL = "acmh" # manual - # ACSC = Average cloudiness sunrise to sunset from 30-second - # ceilometer data (percent) + # Average cloudiness sunrise to sunset from 30-second ceilometer data (percent) CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET = "acsc" # ceilometer - # ACSH = Average cloudiness sunrise to sunset from manual - # observations (percent) + # Average cloudiness sunrise to sunset from manual observations (percent) CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET_MANUAL = "acsh" # manual # TODO: use one CLOUD_COVER_TOTAL parameter that builds one time series # from the multiple existing parameters - # cloud cover total is usually measured on a daily basis ending at midnight - # so this is a synonym for midnight-to-midnight - # CLOUD_COVER_TOTAL = CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL + # TODO: cloud cover total is usually measured on a daily basis ending at midnight + # so this is a synonym for midnight-to-midnight - # AWND = Average daily wind speed (meters per second or miles - # per hour as per user preference) + # Average 
daily wind speed (meters per second or miles per hour as per user preference) WIND_SPEED = "awnd" # m/s - # DAEV = Number of days included in the multiday evaporation - # total (MDEV) + # Number of days included in the multiday evaporation total (MDEV) COUNT_DAYS_MULTIDAY_EVAPORATION = "daev" - # DAPR = Number of days included in the multiday precipitation - # total (MDPR) + # Number of days included in the multiday precipitation total (MDPR) COUNT_DAYS_MULTIDAY_PRECIPITATION = "dapr" - # DASF = Number of days included in the multiday snowfall - # total (MDSF) + # Number of days included in the multiday snowfall total (MDSF) COUNT_DAYS_MULTIDAY_SNOW_DEPTH_NEW = "dasf" - # DATN = Number of days included in the multiday minimum - # temperature (MDTN) + # Number of days included in the multiday minimum temperature (MDTN) COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MIN_200 = "datn" - # DATX = Number of days included in the multiday maximum - # temperature (MDTX) + # Number of days included in the multiday maximum temperature (MDTX) COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MAX_200 = "datx" - # DAWM = Number of days included in the multiday wind - # movement (MDWM) + # Number of days included in the multiday wind movement (MDWM) COUNT_DAYS_MULTIDAY_WIND_MOVEMENT = "dawm" # TODO: what kind of parameter ? - # DWPR = Number of days with non-zero precipitation included - # in multiday precipitation total (MDPR) + # Number of days with non-zero precipitation included in multiday precipitation total (MDPR) COUNT_DAYS_MULTIDAY_PRECIPITATION_HEIGHT_GT_0 = "dwpr" - # EVAP = Evaporation of water from evaporation pan (mm or - # inches as per user preference, or hundredths of inches - # on Daily Form pdf file) + # Evaporation of water from evaporation pan (mm or inches as per user preference, or hundredths of inches + # on Daily Form pdf file) EVAPORATION_HEIGHT = "evap" # from evaporation pan, mm - # FMTM = Time of fastest mile or fastest 1-minute wind - # (hours and minutes, i.e., HHMM) + # Time of fastest mile or fastest 1-minute wind (hours and minutes, i.e., HHMM) TIME_WIND_GUST_MAX_1MILE_OR_1MIN = "fmtm" # HH:MM - # FRGB = Base of frozen ground layer (cm or inches as per - # user preference) + # Base of frozen ground layer (cm or inches as per user preference) FROZEN_GROUND_LAYER_BASE = "frgb" # cm - # FRGT = Top of frozen ground layer (cm or inches as per - # user preference) + # Top of frozen ground layer (cm or inches as per user preference) FROZEN_GROUND_LAYER_TOP = "frgt" - # FRTH = Thickness of frozen ground layer (cm or inches as - # per user preference) + # Thickness of frozen ground layer (cm or inches as per user preference) FROZEN_GROUND_LAYER_THICKNESS = "frth" - # GAHT = Difference between river and gauge height (cm or - # inches as per user preference) + # Difference between river and gauge height (cm or inches as per user preference) DISTANCE_RIVER_GAUGE_HEIGHT = "gaht" - # MDEV = Multiday evaporation total (mm or inches as per - # user preference; use with DAEV) + # Multiday evaporation total (mm or inches as per user preference; use with DAEV) EVAPORATION_HEIGHT_MULTIDAY = "mdev" - # MDPR = Multiday precipitation total (mm or inches as per - # user preference; use with DAPR and DWPR, if available) + # Multiday precipitation total (mm or inches as per user preference; use with DAPR and DWPR, if available) PRECIPITATION_HEIGHT_MULTIDAY = "mdpr" - # MDSF = Multiday snowfall total (mm or inches as per user - # preference) + # Multiday snowfall total (mm or inches as per user preference) 
SNOW_DEPTH_NEW_MULTIDAY = "mdsf" - # MDTN = Multiday minimum temperature (Fahrenheit or Celsius - # as per user preference ; use with DATN) + # Multiday minimum temperature (Fahrenheit or Celsius as per user preference ; use with DATN) TEMPERATURE_AIR_MIN_200_MULTIDAY = "mdtn" - # MDTX = Multiday maximum temperature (Fahrenheit or Celsius - # as per user preference ; use with DATX) + # Multiday maximum temperature (Fahrenheit or Celsius as per user preference ; use with DATX) TEMPERATURE_AIR_MAX_200_MULTIDAY = "mdtx" - # MDWM = Multiday wind movement (miles or km as per user - # preference) + # Multiday wind movement (miles or km as per user preference) WIND_MOVEMENT_MULTIDAY = "mdwm" # km - # MNPN = Daily minimum temperature of water in an evaporation - # pan (Fahrenheit or Celsius as per user preference) + # Daily minimum temperature of water in an evaporation pan (Fahrenheit or Celsius as per user preference) TEMPERATURE_WATER_EVAPORATION_PAN_MIN = "mnpn" - # MXPN = Daily maximum temperature of water in an evaporation - # pan (Fahrenheit or Celsius as per user preference) + # Daily maximum temperature of water in an evaporation pan (Fahrenheit or Celsius as per user preference) TEMPERATURE_WATER_EVAPORATION_PAN_MAX = "mxpn" - # PGTM = Peak gust time (hours and minutes, i.e., HHMM) + # Peak gust time (hours and minutes, i.e., HHMM) TIME_WIND_GUST_MAX = "pgtm" - # PSUN = Daily percent of possible sunshine (percent) + # Daily percent of possible sunshine (percent) SUNSHINE_DURATION_RELATIVE = "psun" # TODO: @@ -319,52 +292,41 @@ class DAILY(Enum): TEMPERATURE_SOIL_MAX_BARE_MUCK_150 = "sx86" # °C TEMPERATURE_SOIL_MAX_BARE_MUCK_180 = "sx87" # °C - # THIC = Thickness of ice on water (inches or mm as per user preference) + # Thickness of ice on water (inches or mm as per user preference) ICE_ON_WATER_THICKNESS = "thic" - # TOBS = Temperature at the time of observation (Fahrenheit or Celsius - # as per user preference) + # Temperature at the time of observation (Fahrenheit or Celsius as per user preference) TEMPERATURE_AIR_200 = "tobs" - # TSUN = Daily total sunshine (minutes) + # Daily total sunshine (minutes) SUNSHINE_DURATION = "tsun" - # WDF5 = Direction - # of fastest 5-second wind (degrees) + # Direction of fastest 5-second wind (degrees) WIND_DIRECTION_GUST_MAX_5SEC = "wdf5" - # WDF1 = Direction of fastest - # 1-minute wind (degrees) + # Direction of fastest 1-minute wind (degrees) WIND_DIRECTION_GUST_MAX_1MIN = "wdf1" - # WDF2 = Direction of fastest 2-minute wind (degrees) + # Direction of fastest 2-minute wind (degrees) WIND_DIRECTION_GUST_MAX_2MIN = "wdf2" - # WDFG = Direction of peak wind gust (degrees) + # Direction of peak wind gust (degrees) WIND_DIRECTION_GUST_MAX = "wdfg" - # WDFI = Direction of highest instantaneous wind (degrees) + # Direction of highest instantaneous wind (degrees) WIND_DIRECTION_GUST_MAX_INSTANT = "wdfi" - # WDFM = Fastest mile wind direction (degrees) + # Fastest mile wind direction (degrees) WIND_DIRECTION_GUST_MAX_1MILE = "wdfm" - # WDMV = 24-hour wind movement (km or miles as per user preference, - # miles on Daily Form pdf file) + # 24-hour wind movement (km or miles as per user preference, miles on Daily Form pdf file) WIND_MOVEMENT_24HOUR = "wdmv" - # WESD = Water equivalent of snow on the ground (inches or mm as per - # user preference) + # Water equivalent of snow on the ground (inches or mm as per user preference) WATER_EQUIVALENT_SNOW_DEPTH = "wesd" - # WESF = Water equivalent of snowfall (inches or mm as per user preference) + # Water equivalent 
of snowfall (inches or mm as per user preference) WATER_EQUIVALENT_SNOW_DEPTH_NEW = "wesf" - # WSF1 = Fastest 1-minute wind speed (miles per hour or meters per second - # as per user preference) + # Fastest 5-second wind speed (miles per hour or meters per second as per user preference) WIND_GUST_MAX_5SEC = "wsf5" - # WSF2 = Fastest 2-minute wind speed (miles per hour or meters per second - # as per user preference) + # Fastest 1-minute wind speed (miles per hour or meters per second as per user preference) WIND_GUST_MAX_1MIN = "wsf1" - # WSF5 = Fastest 5-second wind speed (miles per hour or meters per second - # as per user preference) + # Fastest 2-minute wind speed (miles per hour or meters per second as per user preference) WIND_GUST_MAX_2MIN = "wsf2" - # WSFG = Peak guest wind speed (miles per hour or meters per second as - # per user preference) + # Peak gust wind speed (miles per hour or meters per second as per user preference) WIND_GUST_MAX = "wsfg" - # WSFI = Highest instantaneous wind speed (miles per hour or meters per - # second as per user preference) + # Highest instantaneous wind speed (miles per hour or meters per second as per user preference) WIND_GUST_MAX_INSTANT = "wsfi" - # WSFM = Fastest mile wind speed (miles per hour or meters per second as - # per user preference) + # Fastest mile wind speed (miles per hour or meters per second as per user preference) WIND_GUST_MAX_1MILE = "wsfm" """ diff --git a/wetterdienst/provider/noaa/ghcn/unit.py b/wetterdienst/provider/noaa/ghcn/unit.py index b184ffece..3f1269d94 100644 --- a/wetterdienst/provider/noaa/ghcn/unit.py +++ b/wetterdienst/provider/noaa/ghcn/unit.py @@ -12,34 +12,29 @@ class NoaaGhcnUnit(DatasetTreeCore): class DAILY(UnitEnum): # The five core values are: - # PRCP = Precipitation (mm or inches as per user preference, - # inches to hundredths on Daily Form pdf file) + # Precipitation (mm or inches as per user preference, inches to hundredths on Daily Form pdf file) PRECIPITATION_HEIGHT = ( OriginUnit.MILLIMETER.value, SIUnit.KILOGRAM_PER_SQUARE_METER.value, ) - # SNOW = Snowfall (mm or inches as per user preference, - # inches to tenths on Daily Form pdf file) + # Snowfall (mm or inches as per user preference, inches to tenths on Daily Form pdf file) SNOW_DEPTH_NEW = ( OriginUnit.MILLIMETER.value, SIUnit.KILOGRAM_PER_SQUARE_METER.value, ) - # SNWD = Snow depth (mm or inches as per user preference, - # inches on Daily Form pdf file) + # Snow depth (mm or inches as per user preference, inches on Daily Form pdf file) SNOW_DEPTH = ( OriginUnit.MILLIMETER.value, SIUnit.KILOGRAM_PER_SQUARE_METER.value, ) - # TMAX = Maximum temperature (Fahrenheit or Celsius as - # per user preference, - # Fahrenheit to tenths on Daily Form pdf file + # Maximum temperature (Fahrenheit or Celsius as per user preference, + # Fahrenheit to tenths on Daily Form pdf file) TEMPERATURE_AIR_MAX_200 = ( OriginUnit.DEGREE_CELSIUS.value, SIUnit.DEGREE_KELVIN.value, ) - # TMIN = Minimum temperature (Fahrenheit or Celsius as - # per user preference, - # Fahrenheit to tenths on Daily Form pdf file + # Minimum temperature (Fahrenheit or Celsius as per user preference, + # Fahrenheit to tenths on Daily Form pdf file) TEMPERATURE_AIR_MIN_200 = ( OriginUnit.DEGREE_CELSIUS.value, SIUnit.DEGREE_KELVIN.value, @@ -51,154 +46,127 @@ class DAILY(UnitEnum): # Additional parameters: - # ACMC = Average cloudiness midnight to midnight from 30-second - # ceilometer data (percent) + # Average cloudiness midnight to midnight from 30-second ceilometer data (percent)
         CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT = (
             OriginUnit.PERCENT.value,
             SIUnit.PERCENT.value,
         )
-        # ACMH = Average cloudiness midnight to midnight from manual
-        # observations (percent)
+        # Average cloudiness midnight to midnight from manual observations (percent)
         CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL = (
             OriginUnit.PERCENT.value,
             SIUnit.PERCENT.value,
         )
-        # ACSC = Average cloudiness sunrise to sunset from 30-second
-        # ceilometer data (percent)
+        # Average cloudiness sunrise to sunset from 30-second ceilometer data (percent)
         CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET = (
             OriginUnit.PERCENT.value,
             SIUnit.PERCENT.value,
         )
-        # ACSH = Average cloudiness sunrise to sunset from manual
-        # observations (percent)
+        # Average cloudiness sunrise to sunset from manual observations (percent)
         CLOUD_COVER_TOTAL_SUNRISE_TO_SUNSET_MANUAL = (
             OriginUnit.PERCENT.value,
             SIUnit.PERCENT.value,
         )
         # TODO: use one CLOUD_COVER_TOTAL parameter that builds one time series
         #  from the multiple existing parameters
-        # cloud cover total is usually measured on a daily basis ending at midnight
-        # so this is a synonym for midnight-to-midnight
-        # CLOUD_COVER_TOTAL = CLOUD_COVER_TOTAL_MIDNIGHT_TO_MIDNIGHT_MANUAL
+        # cloud cover total is usually measured on a daily basis ending at midnight
+        # so this is a synonym for midnight-to-midnight
 
-        # AWND = Average daily wind speed (meters per second or miles
-        # per hour as per user preference)
+        # Average daily wind speed (meters per second or miles per hour as per user preference)
         WIND_SPEED = OriginUnit.METER_PER_SECOND.value, SIUnit.METER_PER_SECOND.value
-        # DAEV = Number of days included in the multiday evaporation
-        # total (MDEV)
+        # Number of days included in the multiday evaporation total (MDEV)
         COUNT_DAYS_MULTIDAY_EVAPORATION = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # DAPR = Number of days included in the multiday precipitation
-        # total (MDPR)
+        # Number of days included in the multiday precipitation total (MDPR)
         COUNT_DAYS_MULTIDAY_PRECIPITATION = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # DASF = Number of days included in the multiday snowfall
-        # total (MDSF)
+        # Number of days included in the multiday snowfall total (MDSF)
         COUNT_DAYS_MULTIDAY_SNOW_DEPTH_NEW = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # DATN = Number of days included in the multiday minimum
-        # temperature (MDTN)
+        # Number of days included in the multiday minimum temperature (MDTN)
         COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MIN_200 = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # DATX = Number of days included in the multiday maximum
-        # temperature (MDTX)
+        # Number of days included in the multiday maximum temperature (MDTX)
         COUNT_DAYS_MULTIDAY_TEMPERATURE_AIR_MAX_200 = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # DAWM = Number of days included in the multiday wind
-        # movement (MDWM)
+        # Number of days included in the multiday wind movement (MDWM)
         COUNT_DAYS_MULTIDAY_WIND_MOVEMENT = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # DWPR = Number of days with non-zero precipitation included
-        # in multiday precipitation total (MDPR)
+        # Number of days with non-zero precipitation included in multiday precipitation total (MDPR)
         COUNT_DAYS_MULTIDAY_PRECIPITATION_HEIGHT_GT_0 = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # EVAP = Evaporation of water from evaporation pan (mm or
-        # inches as per user preference, or hundredths of inches
-        # on Daily Form pdf file)
+        # Evaporation of water from evaporation pan (mm or inches as per user preference, or hundredths of inches
+        # on Daily Form pdf file)
         EVAPORATION_HEIGHT = (
             OriginUnit.MILLIMETER.value,
             SIUnit.KILOGRAM_PER_SQUARE_METER.value,
         )
-        # FMTM = Time of fastest mile or fastest 1-minute wind
-        # (hours and minutes, i.e., HHMM)
+        # Time of fastest mile or fastest 1-minute wind (hours and minutes, i.e., HHMM)
         TIME_WIND_GUST_MAX_1MILE_OR_1MIN = (
             OriginUnit.DIMENSIONLESS.value,
             SIUnit.DIMENSIONLESS.value,
         )
-        # FRGB = Base of frozen ground layer (cm or inches as per
-        # user preference)
+        # Base of frozen ground layer (cm or inches as per user preference)
         FROZEN_GROUND_LAYER_BASE = OriginUnit.CENTIMETER.value, SIUnit.METER.value
-        # FRGT = Top of frozen ground layer (cm or inches as per
-        # user preference)
+        # Top of frozen ground layer (cm or inches as per user preference)
         FROZEN_GROUND_LAYER_TOP = OriginUnit.CENTIMETER.value, SIUnit.METER.value
-        # FRTH = Thickness of frozen ground layer (cm or inches as
-        # per user preference)
+        # Thickness of frozen ground layer (cm or inches as per user preference)
         FROZEN_GROUND_LAYER_THICKNESS = OriginUnit.CENTIMETER.value, SIUnit.METER.value
-        # GAHT = Difference between river and gauge height (cm or
-        # inches as per user preference)
+        # Difference between river and gauge height (cm or inches as per user preference)
         DISTANCE_RIVER_GAUGE_HEIGHT = OriginUnit.CENTIMETER.value, SIUnit.METER.value
-        # MDEV = Multiday evaporation total (mm or inches as per
-        # user preference; use with DAEV)
+        # Multiday evaporation total (mm or inches as per user preference; use with DAEV)
         EVAPORATION_HEIGHT_MULTIDAY = (
             OriginUnit.MILLIMETER.value,
             SIUnit.KILOGRAM_PER_SQUARE_METER.value,
         )
-        # MDPR = Multiday precipitation total (mm or inches as per
-        # user preference; use with DAPR and DWPR, if available)
+        # Multiday precipitation total (mm or inches as per user preference; use with DAPR and DWPR, if available)
         PRECIPITATION_HEIGHT_MULTIDAY = (
             OriginUnit.MILLIMETER.value,
             SIUnit.KILOGRAM_PER_SQUARE_METER.value,
         )
-        # MDSF = Multiday snowfall total (mm or inches as per user
-        # preference)
+        # Multiday snowfall total (mm or inches as per user preference)
         SNOW_DEPTH_NEW_MULTIDAY = (
             OriginUnit.MILLIMETER.value,
             SIUnit.KILOGRAM_PER_SQUARE_METER.value,
         )
-        # MDTN = Multiday minimum temperature (Fahrenheit or Celsius
-        # as per user preference ; use with DATN)
+        # Multiday minimum temperature (Fahrenheit or Celsius as per user preference; use with DATN)
         TEMPERATURE_AIR_MIN_200_MULTIDAY = (
             OriginUnit.DEGREE_CELSIUS.value,
             SIUnit.DEGREE_KELVIN.value,
         )
-        # MDTX = Multiday maximum temperature (Fahrenheit or Celsius
-        # as per user preference ; use with DATX)
+        # Multiday maximum temperature (Fahrenheit or Celsius as per user preference; use with DATX)
         TEMPERATURE_AIR_MAX_200_MULTIDAY = (
             OriginUnit.DEGREE_CELSIUS.value,
             SIUnit.DEGREE_KELVIN.value,
         )
-        # MDWM = Multiday wind movement (miles or km as per user
-        # preference)
+        # Multiday wind movement (miles or km as per user preference)
         WIND_MOVEMENT_MULTIDAY = OriginUnit.KILOMETER.value, SIUnit.METER.value
-        # MNPN = Daily minimum temperature of water in an evaporation
-        # pan (Fahrenheit or Celsius as per user preference)
+        # Daily minimum temperature of water in an evaporation pan (Fahrenheit or Celsius as per user preference)
         TEMPERATURE_WATER_EVAPORATION_PAN_MIN = (
             OriginUnit.DEGREE_CELSIUS.value,
             SIUnit.DEGREE_KELVIN.value,
         )
-        # MXPN = Daily maximum temperature of water in an evaporation
-        # pan (Fahrenheit or Celsius as per user preference)
+        # Daily maximum temperature of water in an evaporation pan (Fahrenheit or Celsius as per user preference)
         TEMPERATURE_WATER_EVAPORATION_PAN_MAX = (
             OriginUnit.DEGREE_CELSIUS.value,
             SIUnit.DEGREE_KELVIN.value,
         )
-        # PGTM = Peak gust time (hours and minutes, i.e., HHMM)
+        # Peak gust time (hours and minutes, i.e., HHMM)
         TIME_WIND_GUST_MAX = OriginUnit.SECOND.value, SIUnit.SECOND.value
-        # PSUN = Daily percent of possible sunshine (percent)
+        # Daily percent of possible sunshine (percent)
         SUNSHINE_DURATION_RELATIVE = OriginUnit.PERCENT.value, SIUnit.PERCENT.value
 
     """
@@ -764,91 +732,80 @@ class DAILY(UnitEnum):
             SIUnit.DEGREE_KELVIN.value,
         )
 
-        # THIC = Thickness of ice on water (inches or mm as per user preference)
+        # Thickness of ice on water (inches or mm as per user preference)
         ICE_ON_WATER_THICKNESS = OriginUnit.MILLIMETER.value, SIUnit.METER.value
-        # TOBS = Temperature at the time of observation (Fahrenheit or Celsius
-        # as per user preference)
+        # Temperature at the time of observation (Fahrenheit or Celsius as per user preference)
         TEMPERATURE_AIR_200 = (
             OriginUnit.DEGREE_CELSIUS.value,
             SIUnit.DEGREE_KELVIN.value,
         )
-        # TSUN = Daily total sunshine (minutes)
+        # Daily total sunshine (minutes)
         SUNSHINE_DURATION = OriginUnit.MINUTE.value, SIUnit.SECOND.value
-        # WDF5 = Direction
-        # of fastest 5-second wind (degrees)
+        # Direction of fastest 5-second wind (degrees)
         WIND_DIRECTION_GUST_MAX_5SEC = (
             OriginUnit.DEGREE.value,
             SIUnit.WIND_DIRECTION.value,
         )
-        # WDF1 = Direction of fastest
-        # 1-minute wind (degrees)
+        # Direction of fastest 1-minute wind (degrees)
         WIND_DIRECTION_GUST_MAX_1MIN = (
             OriginUnit.DEGREE.value,
             SIUnit.WIND_DIRECTION.value,
         )
-        # WDF2 = Direction of fastest 2-minute wind (degrees)
+        # Direction of fastest 2-minute wind (degrees)
         WIND_DIRECTION_GUST_MAX_2MIN = (
             OriginUnit.DEGREE.value,
             SIUnit.WIND_DIRECTION.value,
         )
-        # WDFG = Direction of peak wind gust (degrees)
+        # Direction of peak wind gust (degrees)
         WIND_DIRECTION_GUST_MAX = (
             OriginUnit.DEGREE.value,
             SIUnit.WIND_DIRECTION.value,
         )
-        # WDFI = Direction of highest instantaneous wind (degrees)
+        # Direction of highest instantaneous wind (degrees)
         WIND_DIRECTION_GUST_MAX_INSTANT = (
             OriginUnit.DEGREE.value,
             SIUnit.WIND_DIRECTION.value,
         )
-        # WDFM = Fastest mile wind direction (degrees)
+        # Fastest mile wind direction (degrees)
         WIND_DIRECTION_GUST_MAX_1MILE = (
             OriginUnit.DEGREE.value,
             SIUnit.WIND_DIRECTION.value,
         )
-        # WDMV = 24-hour wind movement (km or miles as per user preference,
-        # miles on Daily Form pdf file)
+        # 24-hour wind movement (km or miles as per user preference, miles on Daily Form pdf file)
         WIND_MOVEMENT_24HOUR = OriginUnit.KILOMETER.value, SIUnit.METER.value
-        # WESD = Water equivalent of snow on the ground (inches or mm as per
-        # user preference)
+        # Water equivalent of snow on the ground (inches or mm as per user preference)
         WATER_EQUIVALENT_SNOW_DEPTH = (
             OriginUnit.MILLIMETER.value,
             SIUnit.KILOGRAM_PER_SQUARE_METER.value,
         )
-        # WESF = Water equivalent of snowfall (inches or mm as per user preference)
+        # Water equivalent of snowfall (inches or mm as per user preference)
         WATER_EQUIVALENT_SNOW_DEPTH_NEW = (
             OriginUnit.MILLIMETER.value,
             SIUnit.KILOGRAM_PER_SQUARE_METER.value,
         )
-        # WSF1 = Fastest 1-minute wind speed (miles per hour or meters per second
-        # as per user preference)
+        # Fastest 5-second wind speed (miles per hour or meters per second as per user preference)
         WIND_GUST_MAX_5SEC = (
             OriginUnit.METER_PER_SECOND.value,
             SIUnit.METER_PER_SECOND.value,
         )
-        # WSF2 = Fastest 2-minute wind speed (miles per hour or meters per second
-        # as per user preference)
+        # Fastest 1-minute wind speed (miles per hour or meters per second as per user preference)
         WIND_GUST_MAX_1MIN = (
             OriginUnit.METER_PER_SECOND.value,
             SIUnit.METER_PER_SECOND.value,
         )
-        # WSF5 = Fastest 5-second wind speed (miles per hour or meters per second
-        # as per user preference)
+        # Fastest 2-minute wind speed (miles per hour or meters per second as per user preference)
         WIND_GUST_MAX_2MIN = (
             OriginUnit.METER_PER_SECOND.value,
             SIUnit.METER_PER_SECOND.value,
         )
-        # WSFG = Peak guest wind speed (miles per hour or meters per second as
-        # per user preference)
+        # Peak gust wind speed (miles per hour or meters per second as per user preference)
         WIND_GUST_MAX = OriginUnit.METER_PER_SECOND.value, SIUnit.METER_PER_SECOND.value
-        # WSFI = Highest instantaneous wind speed (miles per hour or meters per
-        # second as per user preference)
+        # Highest instantaneous wind speed (miles per hour or meters per second as per user preference)
         WIND_GUST_MAX_INSTANT = (
             OriginUnit.METER_PER_SECOND.value,
             SIUnit.METER_PER_SECOND.value,
         )
-        # WSFM = Fastest mile wind speed (miles per hour or meters per second as
-        # per user preference)
+        # Fastest mile wind speed (miles per hour or meters per second as per user preference)
         WIND_GUST_MAX_1MILE = (
             OriginUnit.METER_PER_SECOND.value,
             SIUnit.METER_PER_SECOND.value,
diff --git a/wetterdienst/ui/cli.py b/wetterdienst/ui/cli.py
index b19829bbb..dce7b204d 100644
--- a/wetterdienst/ui/cli.py
+++ b/wetterdienst/ui/cli.py
@@ -57,12 +57,10 @@ def get_api(provider: str, kind: str):
     :return:
     """
     try:
-        api = Wetterdienst(provider, kind)
+        return Wetterdienst(provider, kind)
     except ProviderError as e:
-        click.Abort(e.str)
+        raise click.Abort(str(e))
 
-    return api
-
 
 def station_options(command):
     """
@@ -86,9 +84,7 @@ def station_options(command):
         ),
         cloup.option_group(
             "Latitude-Longitude rank/distance filtering",
-            cloup.option(
-                "--coordinates", metavar="LATITUDE,LONGITUDE", type=click.STRING
-            ),
+            cloup.option("--coordinates", metavar="LATITUDE,LONGITUDE", type=click.STRING),
             cloup.option("--rank", type=click.INT),
             cloup.option("--distance", type=click.FLOAT),
             help="Provide --coordinates plus either --rank or --distance.",
@@ -355,8 +351,6 @@ def info():
 def version():
     print(__version__)  # noqa: T001
 
-    return
-
 
 @cli.command("restapi")
 @cloup.option("--listen", type=click.STRING, default=None)
@@ -431,7 +425,7 @@ def coverage(provider, kind, filter_, debug):
 
     print(cov)  # noqa: T001
 
-    return
+    return None
 
 
 @about.command("fields")
@@ -449,9 +443,7 @@ def coverage(provider, kind, filter_, debug):
 def fields(provider, kind, dataset, resolution, period, language, **kwargs):
     api = get_api(provider, kind)
 
-    if not (
-        api.provider == Provider.DWD and api.kind == Kind.OBSERVATION
-    ) and kwargs.get("fields"):
+    if not (api.provider == Provider.DWD and api.kind == Kind.OBSERVATION) and kwargs.get("fields"):
         raise click.BadParameter("'fields' command only available for provider 'DWD'")
 
     metadata = api.describe_fields(
diff --git a/wetterdienst/ui/core.py b/wetterdienst/ui/core.py
index a2e10bb4f..a93feb263 100644
--- a/wetterdienst/ui/core.py
+++ b/wetterdienst/ui/core.py
@@ -45,9 +45,7 @@ def unpack_parameter(par: str) -> Union[str, Tuple[str, str]]:
         except AttributeError:
             pass
 
-    parameter = [unpack_parameter(p) for p in parameter]
-
-    return parameter
+    return [unpack_parameter(p) for p in parameter]
 
 
 def get_stations(
@@ -103,17 +101,14 @@ def get_stations(
         else:
             res = Resolution.HOUR_6
     else:
-        res = parse_enumeration_from_template(
-            resolution, api._resolution_base, Resolution
-        )
+        res = parse_enumeration_from_template(resolution, api._resolution_base, Resolution)
 
     # Split date string into start and end date string
     start_date, end_date = create_date_range(date=date, resolution=res)
 
     if api._data_range == DataRange.LOOSELY and not start_date and not end_date:
         raise TypeError(
-            f"Combination of provider {api.provider.name} and kind {api.kind.name} "
-            f"requires start and end date"
+            f"Combination of provider {api.provider.name} and kind {api.kind.name} requires start and end date"
         )
 
     # Todo: We may have to apply other measures to allow for
@@ -142,19 +137,19 @@ def get_stations(
     r = api(**kwargs)
 
     if all_:
-        request = r.all()
+        return r.all()
 
     elif station_id:
-        request = r.filter_by_station_id(station_id)
+        return r.filter_by_station_id(station_id)
 
     elif name:
-        request = r.filter_by_name(name)
+        return r.filter_by_name(name)
 
     # Use coordinates twice in main if-elif to get same KeyError
     elif coordinates and rank:
         lat, lon = coordinates.split(",")
 
-        request = r.filter_by_rank(
+        return r.filter_by_rank(
             latitude=float(lat),
             longitude=float(lon),
             rank=rank,
@@ -163,7 +158,7 @@
     elif coordinates and distance:
         lat, lon = coordinates.split(",")
 
-        request = r.filter_by_distance(
+        return r.filter_by_distance(
             latitude=float(lat),
             longitude=float(lon),
             distance=distance,
@@ -175,7 +170,7 @@
         except ValueError as e:
             raise ValueError("bbox requires four floats separated by comma") from e
 
-        request = r.filter_by_bbox(
+        return r.filter_by_bbox(
             left=float(left),
             bottom=float(bottom),
             right=float(right),
@@ -183,7 +178,7 @@
         )
 
     elif sql:
-        request = r.filter_by_sql(sql)
+        return r.filter_by_sql(sql)
 
     else:
         param_options = [
@@ -196,8 +191,6 @@
         ]
         raise KeyError(f"Give one of the parameters: {', '.join(param_options)}")
 
-    return request
-
 
 def get_values(
     api: ScalarRequestCore,
diff --git a/wetterdienst/ui/explorer/app.py b/wetterdienst/ui/explorer/app.py
index 74751008c..905492538 100644
--- a/wetterdienst/ui/explorer/app.py
+++ b/wetterdienst/ui/explorer/app.py
@@ -70,12 +70,7 @@ def fetch_stations(parameter: str, resolution: str, period: str):
 
     The data will be stored on a hidden div within the browser DOM.
     """
-    log.info(
-        f"Requesting stations for "
-        f"parameter={parameter}, "
-        f"resolution={resolution}, "
-        f"period={period}"
-    )
+    log.info(f"Requesting stations for parameter={parameter}, resolution={resolution}, period={period}")
     try:
         stations = DwdObservationRequest(
             parameter=DwdObservationDataset(parameter),
@@ -204,9 +199,7 @@ def render_navigation_variables(payload):
         Input("dataframe-stations", "children"),
     ],
 )
-def render_status_response_stations(
-    parameter: str, resolution: str, period: str, payload: str
-):
+def render_status_response_stations(parameter: str, resolution: str, period: str, payload: str):
     """
     Report about the status of the query.
""" @@ -278,9 +271,7 @@ def render_status_response_values( missing.append(candidate) if missing: - empty_message.append( - html.Span(f"Please select all of the missing options {missing}.") - ) + empty_message.append(html.Span(f"Please select all of the missing options {missing}.")) messages += [html.Div(empty_message), html.Br()] @@ -338,7 +329,7 @@ def render_map(payload): return fig log.info(f"Rendering stations map from {frame_summary(stations_data)}") - fig = go.Figure( + return go.Figure( data=go.Scattermapbox( lat=stations_data[Columns.LATITUDE.value], lon=stations_data[Columns.LONGITUDE.value], @@ -356,8 +347,6 @@ def render_map(payload): layout=layout_germany, ) - return fig - @app.callback( Output("graph-values", "figure"), @@ -374,9 +363,7 @@ def render_graph(variable, payload): except ValueError: climate_data = pd.DataFrame() - log.info( - f"Rendering graph for variable={variable} from {frame_summary(climate_data)}" - ) + log.info(f"Rendering graph for variable={variable} from {frame_summary(climate_data)}") fig = default_figure(climate_data, variable) @@ -392,9 +379,7 @@ def render_graph(variable, payload): return fig -def start_service( - listen_address: Optional[str] = None, reload: Optional[bool] = False -): # pragma: no cover +def start_service(listen_address: Optional[str] = None, reload: Optional[bool] = False): # pragma: no cover """ This entrypoint will be used by `wetterdienst.cli`. """ diff --git a/wetterdienst/ui/explorer/layout/main.py b/wetterdienst/ui/explorer/layout/main.py index 6dc5adaf3..11534f62e 100644 --- a/wetterdienst/ui/explorer/layout/main.py +++ b/wetterdienst/ui/explorer/layout/main.py @@ -46,7 +46,6 @@ def get_app_layout(): html.Div( [ html.H1("Wetterdienst Explorer"), - # html.P("Hello world"), dbc.Navbar( [dbc.NavLink("About", id="open-about")], id="navbar", diff --git a/wetterdienst/ui/explorer/layout/observations_germany.py b/wetterdienst/ui/explorer/layout/observations_germany.py index 40162be09..f4e81f313 100644 --- a/wetterdienst/ui/explorer/layout/observations_germany.py +++ b/wetterdienst/ui/explorer/layout/observations_germany.py @@ -15,25 +15,17 @@ def get_parameters(): return sorted( - [ - {"label": param.value, "value": param.value} - for param in DwdObservationDataset - ], + [{"label": param.value, "value": param.value} for param in DwdObservationDataset], key=operator.itemgetter("label"), ) def get_resolutions(): - return [ - {"label": param.value, "value": param.value} - for param in DwdObservationResolution - ] + return [{"label": param.value, "value": param.value} for param in DwdObservationResolution] def get_periods(): - return [ - {"label": param.value, "value": param.value} for param in DwdObservationPeriod - ] + return [{"label": param.value, "value": param.value} for param in DwdObservationPeriod] def dashboard_layout() -> html: @@ -122,7 +114,6 @@ def dashboard_layout() -> html: [ html.Div( [ - # html.P("Time-series graph", style={"text-align": "center"}), dcc.Graph(id="graph-values"), ], id="graph", diff --git a/wetterdienst/ui/explorer/library.py b/wetterdienst/ui/explorer/library.py index 9ed04f2a8..54d92c6b1 100644 --- a/wetterdienst/ui/explorer/library.py +++ b/wetterdienst/ui/explorer/library.py @@ -39,13 +39,7 @@ def default_figure(climate_data: pd.DataFrame, column: str) -> go.Figure: add_annotation_no_data(fig) return fig - fig = go.Figure( - data=[ - go.Scatter( - x=climate_data.date, y=climate_data.loc[:, column], hoverinfo="x+y" - ) - ] - ) + fig = go.Figure(data=[go.Scatter(x=climate_data.date, 
y=climate_data.loc[:, column], hoverinfo="x+y")]) fig.update_layout(yaxis={"title": f"{column}"}, showlegend=False) return fig diff --git a/wetterdienst/ui/restapi.py b/wetterdienst/ui/restapi.py index 3c888ff3a..eaf4492a9 100644 --- a/wetterdienst/ui/restapi.py +++ b/wetterdienst/ui/restapi.py @@ -36,9 +36,7 @@ def index(): for provider in Provider: shortname = provider.name _, name, country, copyright_, url = provider.value - sources += ( - f"
  • {shortname} ({name}, {country}) - {copyright_}
  • " - ) + sources += f"
  • {shortname} ({name}, {country}) - {copyright_}
  • " return f""" @@ -67,7 +65,7 @@ def index(): - """ # noqa:E501,B950 + """ # noqa:B950 @app.get("/robots.txt", response_class=PlainTextResponse) @@ -90,19 +88,12 @@ def coverage( if not provider or not kind: cov = Wetterdienst.discover() - return Response( - content=json.dumps(cov, indent=4), media_type="application/json" - ) + return Response(content=json.dumps(cov, indent=4), media_type="application/json") api = get_api(provider=provider, kind=kind) - # dataset = kwargs.get("dataset") - # if dataset: - # dataset = read_list(dataset) - cov = api.discover( filter_=filter_, - # dataset=dataset, flatten=False, ) @@ -137,8 +128,7 @@ def stations( if parameter is None or resolution is None: raise HTTPException( status_code=400, - detail="Query arguments 'parameter', 'resolution' " - "and 'period' are required", + detail="Query arguments 'parameter', 'resolution' " "and 'period' are required", ) if fmt not in ("json", "geojson"): @@ -193,10 +183,6 @@ def stations( f"parameter(s) {parameter} and resolution {resolution}.", ) - # Postprocessing. - # if sql is not None: - # results.filter_by_sql(sql) - stations_.fill_gaps() indent = None @@ -233,7 +219,6 @@ def values( bbox: str = Query(default=None), sql: str = Query(default=None), sql_values: str = Query(alias="sql-values", default=None), - # fmt: str = Query(alias="format", default="json"), missing geojson support humanize: bool = Query(default=True), tidy: bool = Query(default=True), si_units: bool = Query(alias="si-units", default=True), @@ -279,8 +264,7 @@ def values( if parameter is None or resolution is None: raise HTTPException( status_code=400, - detail="Query arguments 'parameter', 'resolution' " - "and 'date' are required", + detail="Query arguments 'parameter', 'resolution' " "and 'date' are required", ) if fmt not in ("json", "geojson"): @@ -356,7 +340,7 @@ def make_json_response(data, provider): """ name_local, name_english, country, copyright_, url = provider.value - response = { + return { "meta": { "provider": { "name_local": name_local, @@ -373,12 +357,10 @@ def make_json_response(data, provider): }, "data": data, } - return response -def start_service( - listen_address: Optional[str] = None, reload: Optional[bool] = False -): # pragma: no cover +def start_service(listen_address: Optional[str] = None, reload: Optional[bool] = False): # pragma: no cover + from uvicorn.main import run setup_logging() @@ -387,6 +369,5 @@ def start_service( host, port = listen_address.split(":") port = int(port) - from uvicorn.main import run run(app="wetterdienst.ui.restapi:app", host=host, port=port, reload=reload) diff --git a/wetterdienst/util/cache.py b/wetterdienst/util/cache.py index f45929c37..45746672e 100644 --- a/wetterdienst/util/cache.py +++ b/wetterdienst/util/cache.py @@ -73,33 +73,25 @@ class CacheExpiry(Enum): # Define cache regions. 
-metaindex_cache = make_region(
-    function_key_generator=kwarg_function_key_generator
-).configure(
+metaindex_cache = make_region(function_key_generator=kwarg_function_key_generator).configure(
     backend,
     expiration_time=60 * 60 * 12,
     arguments={"filename": os.path.join(cache_dir, "dogpile", "metaindex.dbm")},
 )
 
-fileindex_cache_five_minutes = make_region(
-    function_key_generator=kwarg_function_key_generator
-).configure(
+fileindex_cache_five_minutes = make_region(function_key_generator=kwarg_function_key_generator).configure(
     backend,
     expiration_time=60 * 5,
     arguments={"filename": os.path.join(cache_dir, "dogpile", "fileindex_5m.dbm")},
 )
 
-fileindex_cache_twelve_hours = make_region(
-    function_key_generator=kwarg_function_key_generator
-).configure(
+fileindex_cache_twelve_hours = make_region(function_key_generator=kwarg_function_key_generator).configure(
     backend,
     expiration_time=60 * 60 * 12,
     arguments={"filename": os.path.join(cache_dir, "dogpile", "fileindex_12h.dbm")},
 )
 
-payload_cache_twelve_hours = make_region(
-    function_key_generator=kwarg_function_key_generator
-).configure(
+payload_cache_twelve_hours = make_region(function_key_generator=kwarg_function_key_generator).configure(
     backend,
     expiration_time=60 * 60 * 12,
     arguments={"filename": os.path.join(cache_dir, "dogpile", "payload_12h.dbm")},
diff --git a/wetterdienst/util/cli.py b/wetterdienst/util/cli.py
index 6cbe0ecb6..d0088f49f 100644
--- a/wetterdienst/util/cli.py
+++ b/wetterdienst/util/cli.py
@@ -4,7 +4,7 @@
 """ A set of utility functions """
 import logging
 import sys
-from typing import List
+from typing import List, Optional
 
 
 def setup_logging(level=logging.INFO) -> None:
@@ -16,13 +16,13 @@ def setup_logging(level=logging.INFO) -> None:
     numexpr_logger.setLevel(logging.WARN)
 
 
-def read_list(data: str, separator: str = u",") -> List[str]:
+def read_list(data: Optional[str], separator: str = u",") -> List[str]:
     if data is None:
         return []
 
     result = [x.strip() for x in data.split(separator)]
 
     if len(result) == 1 and not result[0]:
-        result = []
+        return []
 
     return result
diff --git a/wetterdienst/util/datetime.py b/wetterdienst/util/datetime.py
index 42dc1e9c9..695e401d9 100644
--- a/wetterdienst/util/datetime.py
+++ b/wetterdienst/util/datetime.py
@@ -41,9 +41,7 @@ def raster_minutes(timestamp: datetime, value: int):
     if timestamp.minute < value:
         timestamp = timestamp - timedelta(hours=1)
 
-    timestamp = timestamp.replace(minute=value)
-
-    return timestamp
+    return timestamp.replace(minute=value)
 
 
 def mktimerange(
@@ -77,8 +75,6 @@ def mktimerange(
         date_to = date_to + relativedelta(day=31)
 
     else:
-        raise NotImplementedError(
-            "mktimerange only implemented for annual and monthly time ranges"
-        )
+        raise NotImplementedError("mktimerange only implemented for annual and monthly time ranges")
 
     return date_from, date_to
diff --git a/wetterdienst/util/enumeration.py b/wetterdienst/util/enumeration.py
index 7a18d5f45..627f53438 100644
--- a/wetterdienst/util/enumeration.py
+++ b/wetterdienst/util/enumeration.py
@@ -56,9 +56,7 @@ def parse_enumeration_from_template(
         else:
             enum_parsed = intermediate(enum_)
     except ValueError:
-        raise InvalidEnumeration(
-            f"{enum_} could not be parsed from {intermediate.__name__}."
-        )
+        raise InvalidEnumeration(f"{enum_} could not be parsed from {intermediate.__name__}.")
 
     if base:
         try:
@@ -67,9 +65,7 @@ def parse_enumeration_from_template(
             try:
                 enum_parsed = base(enum_parsed)
             except ValueError:
-                raise InvalidEnumeration(
-                    f"{enum_parsed} could not be parsed from {base.__name__}."
-                )
+                raise InvalidEnumeration(f"{enum_parsed} could not be parsed from {base.__name__}.")
 
     return enum_parsed
diff --git a/wetterdienst/util/geo.py b/wetterdienst/util/geo.py
index 9f8932cbd..e003cacab 100644
--- a/wetterdienst/util/geo.py
+++ b/wetterdienst/util/geo.py
@@ -38,9 +38,7 @@ def get_coordinates_in_radians(self):
         return np.radians(self.get_coordinates())
 
     def __eq__(self, other):
-        return np.array_equal(self.latitudes, other.latitudes) and np.array_equal(
-            self.longitudes, other.longitudes
-        )
+        return np.array_equal(self.latitudes, other.latitudes) and np.array_equal(self.longitudes, other.longitudes)
 
 
 def derive_nearest_neighbours(
@@ -67,9 +65,7 @@ def derive_nearest_neighbours(
     """
     points = np.c_[np.radians(latitudes), np.radians(longitudes)]
     distance_tree = cKDTree(points)
-    return distance_tree.query(
-        coordinates.get_coordinates_in_radians(), k=number_nearby
-    )
+    return distance_tree.query(coordinates.get_coordinates_in_radians(), k=number_nearby)
 
 
 def convert_dm_to_dd(dms: float) -> float:
diff --git a/wetterdienst/util/io.py b/wetterdienst/util/io.py
index ffeff6e18..88dd02f10 100644
--- a/wetterdienst/util/io.py
+++ b/wetterdienst/util/io.py
@@ -3,7 +3,7 @@ def read_in_chunks(file_object, chunk_size=1024):
     Lazy function (generator) to read a file piece by piece.
     Default chunk size: 1k.
 
-    -- https://stackoverflow.com/questions/519633/lazy-method-for-reading-big-file-in-python/519653#519653  # Noqa: E501, B950
+    -- https://stackoverflow.com/questions/519633/lazy-method-for-reading-big-file-in-python/519653#519653
     """
     while True:
         data = file_object.read(chunk_size)
diff --git a/wetterdienst/util/logging.py b/wetterdienst/util/logging.py
index 69bd92244..cbac43f85 100644
--- a/wetterdienst/util/logging.py
+++ b/wetterdienst/util/logging.py
@@ -10,7 +10,7 @@ class TqdmToLogger(io.StringIO):
     Output stream for TQDM which will output to logger module instead of
     the StdOut.
 
-    Source: https://stackoverflow.com/questions/14897756/python-progress-bar-through-logging-module  # noqa: E501
+    Source: https://stackoverflow.com/questions/14897756/python-progress-bar-through-logging-module
     """
 
     logger = None
diff --git a/wetterdienst/util/network.py b/wetterdienst/util/network.py
index dd02a5786..9f80a88f9 100644
--- a/wetterdienst/util/network.py
+++ b/wetterdienst/util/network.py
@@ -47,9 +47,7 @@ def register(cls, ttl=CacheExpiry.NO_CACHE):
         ttl_name, ttl_value = cls.resolve_ttl(ttl)
         key = f"ttl-{ttl_name}"
         real_cache_dir = os.path.join(cache_dir, "fsspec", key)
-        filesystem_real = HTTPFileSystem(
-            use_listings_cache=True, client_kwargs=FSSPEC_CLIENT_KWARGS
-        )
+        filesystem_real = HTTPFileSystem(use_listings_cache=True, client_kwargs=FSSPEC_CLIENT_KWARGS)
         if WD_CACHE_DISABLE or ttl is CacheExpiry.NO_CACHE:
             filesystem_effective = filesystem_real
         else:
@@ -91,9 +89,7 @@ def list_remote_files_legacy(url: str, recursive: bool) -> List[str]:
 
     soup = BeautifulSoup(r.text, "lxml")
 
-    files_and_folders = [
-        link.get("href") for link in soup.find_all("a") if link.get("href") != "../"
-    ]
+    files_and_folders = [link.get("href") for link in soup.find_all("a") if link.get("href") != "../"]
 
     files = []
     folders = []
@@ -105,9 +101,7 @@ def list_remote_files_legacy(url: str, recursive: bool) -> List[str]:
             folders.append(urljoin(url, f))
 
     if recursive:
-        files_in_folders = [
-            list_remote_files_legacy(folder, recursive) for folder in folders
-        ]
+        files_in_folders = [list_remote_files_legacy(folder, recursive) for folder in folders]
 
         for files_in_folder in files_in_folders:
             files.extend(files_in_folder)
@@ -116,9 +110,7 @@ def list_remote_files_legacy(url: str, recursive: bool) -> List[str]:
 
 
 # v2: "Remote directory index" implementation based on FSSPEC.
-def list_remote_files_fsspec(
-    url: str, recursive: bool = False, ttl: CacheExpiry = CacheExpiry.FILEINDEX
-) -> List[str]:
+def list_remote_files_fsspec(url: str, recursive: bool = False, ttl: CacheExpiry = CacheExpiry.FILEINDEX) -> List[str]:
     """
     A function used to create a listing of all files of a given path on the server.
 
@@ -148,9 +140,7 @@ def list_remote_files_fsspec(
            remote_urls.remove(url)
        except ValueError:
            pass
-    remote_urls = [i for i in remote_urls if not i.endswith("/")]
-
-    return remote_urls
+    return [i for i in remote_urls if not i.endswith("/")]
 
 
 def download_file(url: str, ttl: Optional[int] = CacheExpiry.NO_CACHE) -> BytesIO:
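For context on the cache regions reformatted in wetterdienst/util/cache.py above: these are standard dogpile.cache regions, and the rest of the code base consumes them through dogpile's documented decorator API. The sketch below is illustrative only and is not part of this patch; the function name is hypothetical, and the in-memory backend is an assumption chosen so the example is self-contained (the project itself configures dbm file backends, as the hunk above shows).

    from dogpile.cache import make_region

    # Configure a region the same way as in cache.py, but with dogpile's
    # built-in in-memory backend so the sketch runs without a cache directory.
    region = make_region().configure(
        "dogpile.cache.memory",
        expiration_time=60 * 5,  # cached entries expire after five minutes
    )

    @region.cache_on_arguments()
    def list_files(url: str) -> list:
        # Stand-in for an expensive remote listing; the decorator caches the
        # return value per distinct ``url`` until the expiration time elapses.
        return [url]

    list_files("https://example.org/data/")  # first call computes the result
    list_files("https://example.org/data/")  # second call is served from the cache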