Merge pull request #3255 from dopplershift/silence-warnings
Cleanup warnings from tests
dcamron committed Dec 14, 2023
2 parents 34d14b2 + b3b2c3f commit 6a9cddc
Showing 37 changed files with 309 additions and 260 deletions.
6 changes: 6 additions & 0 deletions .github/actions/install-pypi/action.yml
@@ -44,6 +44,12 @@ runs:
shell: bash
run: echo "PIP_NO_BINARY=shapely" >> $GITHUB_ENV

- name: Upgrade pip and setuptools
shell: bash
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade setuptools
- name: Set dependency groups for install
shell: bash
run: |
9 changes: 3 additions & 6 deletions ci/download_cartopy_maps.py
@@ -21,9 +21,6 @@ def grab_ne(category, feature, res):
for r in ['110m', '50m', '10m']:
grab_ne('cultural', feat, r)

for feat, r in [('coastline', '10m'), ('coastline', '50m'), ('coastline', '110m'),
('lakes', '10m'), ('lakes', '50m'),
('land', '10m'), ('land', '50m'), ('land', '110m'),
('ocean', '110m'), ('ocean', '50m'),
('rivers_lake_centerlines', '10m'), ('rivers_lake_centerlines', '110m')]:
grab_ne('physical', feat, r)
for feat in ['coastline', 'lakes', 'land', 'ocean', 'rivers_lake_centerlines']:
for r in ['110m', '50m', '10m']:
grab_ne('physical', feat, r)
16 changes: 2 additions & 14 deletions conftest.py
@@ -111,9 +111,8 @@ def test_da_xy():
'lambert_conformal': ([], '')},
coords={
'time': xarray.DataArray(
numpy.array([numpy.datetime64('2018-07-01T00:00'),
numpy.datetime64('2018-07-01T06:00'),
numpy.datetime64('2018-07-01T12:00')]),
numpy.array(['2018-07-01T00:00', '2018-07-01T06:00', '2018-07-01T12:00'],
dtype='datetime64[ns]'),
name='time',
dims=['time']
),
@@ -153,17 +152,6 @@ def test_da_xy():
return ds.metpy.parse_cf('temperature')


@pytest.fixture()
def set_agg_backend():
"""Fixture to ensure the Agg backend is active."""
prev_backend = matplotlib.pyplot.get_backend()
try:
matplotlib.pyplot.switch_backend('agg')
yield
finally:
matplotlib.pyplot.switch_backend(prev_backend)


@pytest.fixture(params=['dask', 'xarray', 'masked', 'numpy'])
def array_type(request):
"""Return an array type for testing calc functions."""
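For context, a minimal NumPy-only sketch of the dtype difference the conftest fixture change targets: building the time coordinate as nanosecond-precision values up front spares xarray from converting the coarser precision that bare numpy.datetime64 literals carry.

```python
import numpy as np

# A bare datetime64 literal defaults to the precision implied by its string
# (minutes here); xarray would later convert it to nanoseconds with a warning.
coarse = np.array([np.datetime64('2018-07-01T00:00')])
print(coarse.dtype)   # datetime64[m]

# Constructing the array as datetime64[ns] directly sidesteps that conversion.
precise = np.array(['2018-07-01T00:00', '2018-07-01T06:00', '2018-07-01T12:00'],
                   dtype='datetime64[ns]')
print(precise.dtype)  # datetime64[ns]
```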
4 changes: 2 additions & 2 deletions docs/conf.py
@@ -9,7 +9,7 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

from datetime import datetime
from datetime import datetime, timezone
import inspect
import os
from pathlib import Path
@@ -129,7 +129,7 @@

# The encoding of source files.
# source_encoding = 'utf-8-sig'
cur_date = datetime.utcnow()
cur_date = datetime.now(timezone.utc)

# The main toctree document.
master_doc = 'index'
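As a quick standard-library illustration of the pattern applied throughout this commit: datetime.utcnow() is deprecated as of Python 3.12 because it returns a naive datetime, and datetime.now(timezone.utc) is the timezone-aware replacement.

```python
from datetime import datetime, timezone

# Deprecated: a naive datetime that is only implicitly UTC.
# stamp = datetime.utcnow()

# Replacement used here: an explicitly timezone-aware UTC datetime.
stamp = datetime.now(timezone.utc)
print(stamp.tzinfo)  # UTC
```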
2 changes: 1 addition & 1 deletion examples/meteogram_metpy.py
@@ -46,7 +46,7 @@ def __init__(self, fig, dates, probeid, time=None, axis=0):
axis: number that controls the new axis to be plotted (FOR FUTURE)
"""
if not time:
time = dt.datetime.utcnow()
time = dt.datetime.now(dt.timezone.utc)
self.start = dates[0]
self.fig = fig
self.end = dates[-1]
20 changes: 16 additions & 4 deletions pyproject.toml
@@ -28,13 +28,13 @@ requires-python = ">=3.9"
dependencies = [
"matplotlib>=3.5.0",
"numpy>=1.20.0",
"pandas>=1.2.0",
"pint>=0.15",
"pandas>=1.4.0",
"pint>=0.17",
"pooch>=1.2.0",
"pyproj>=3.0.0",
"scipy>=1.6.0",
"scipy>=1.8.0",
"traitlets>=5.0.5",
"xarray>=0.18.0"
"xarray>=0.21.0"
]

[project.entry-points."xarray.backends"]
@@ -103,6 +103,18 @@ norecursedirs = "build docs .idea"
doctest_optionflags = "NORMALIZE_WHITESPACE"
mpl-results-path = "test_output"
xfail_strict = true
filterwarnings = [
"error",
"ignore:numpy.ndarray size changed:RuntimeWarning",
# To be removed in the next python-dateutil release.
# See: https://github.com/dateutil/dateutil/issues/1314
'ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz:37',
# Numpy deprecation triggered by Pint: https://github.com/hgrecco/pint/pull/1880
"ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pint.facets.plain.quantity:575",
# PyProj automatically dispatching for single point, will be waiting for NumPy 2.0 to address
# See: https://github.com/pyproj4/pyproj/issues/1309
"ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pyproj.geod:404"
]

[tool.ruff]
line-length = 95
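For reference, each filterwarnings entry above follows pytest's action:message:category:module:lineno form, with "error" promoting any unmatched warning to a test failure. A rough programmatic equivalent of the Pint entry, sketched with only the standard warnings module (the trailing line number is dropped here):

```python
import warnings

# Ignore the NumPy "ndim > 0 to a scalar" DeprecationWarning, but only when it
# originates from Pint's plain-quantity module; everything else still escalates
# under the "error" entry in pyproject.toml.
warnings.filterwarnings(
    'ignore',
    message='Conversion of an array with ndim > 0 to a scalar is deprecated',
    category=DeprecationWarning,
    module='pint.facets.plain.quantity',
)
```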
11 changes: 8 additions & 3 deletions src/metpy/calc/basic.py
@@ -93,7 +93,7 @@ def wind_direction(u, v, convention='from'):
origshape = wdir.shape
wdir = np.atleast_1d(wdir)

# Handle oceanographic convection
# Handle oceanographic convention
if convention == 'to':
wdir -= units.Quantity(180., 'deg')
elif convention not in ('to', 'from'):
@@ -405,8 +405,8 @@ def apparent_temperature(temperature, relative_humidity, speed, face_level_winds
# NB: older numpy.ma.where does not return a masked array
app_temperature = masked_array(
np.ma.where(masked_array(wind_chill_temperature).mask,
heat_index_temperature.to(temperature.units),
wind_chill_temperature.to(temperature.units)
heat_index_temperature.m_as(temperature.units),
wind_chill_temperature.m_as(temperature.units)
), temperature.units)

# If mask_undefined is False, then set any masked values to the temperature
@@ -829,6 +829,9 @@ def smooth_gaussian(scalar_grid, n):
num_ax = len(scalar_grid.shape)
# Assume the last two axes represent the horizontal directions
sgma_seq = [sgma if i > num_ax - 3 else 0 for i in range(num_ax)]
# Drop units as necessary to avoid warnings from scipy doing so--units will be reattached
# if necessary by wrapper
scalar_grid = getattr(scalar_grid, 'magnitude', scalar_grid)

filter_args = {'sigma': sgma_seq, 'truncate': 2 * np.sqrt(2)}
if hasattr(scalar_grid, 'mask'):
@@ -1104,6 +1107,8 @@ def zoom_xarray(input_field, zoom, output=None, order=3, mode='constant', cval=0
available.
"""
# Dequantify input to avoid warnings and make sure units propagate
input_field = input_field.metpy.dequantify()
# Zoom data
zoomed_data = scipy_zoom(
input_field.data, zoom, output=output, order=order, mode=mode, cval=cval,
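For context on the apparent_temperature change, a small sketch with a standalone Pint registry (not MetPy's configured units object): Quantity.to() returns another Quantity, while Quantity.m_as() returns the converted bare magnitude, which numpy.ma.where can combine without emitting unit-stripping warnings.

```python
import pint

ureg = pint.UnitRegistry()
temp = ureg.Quantity(300.0, 'kelvin')

print(temp.to('degC'))    # still a Quantity, roughly 26.85 degree_Celsius
print(temp.m_as('degC'))  # bare float (~26.85), safe to hand to np.ma.where
```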
4 changes: 2 additions & 2 deletions src/metpy/calc/cross_sections.py
@@ -123,8 +123,8 @@ def unit_vectors_from_cross_section(cross, index='index'):
"""
x, y = distances_from_cross_section(cross)
dx_di = first_derivative(x, axis=index).values
dy_di = first_derivative(y, axis=index).values
dx_di = first_derivative(x, axis=index).data
dy_di = first_derivative(y, axis=index).data
tangent_vector_mag = np.hypot(dx_di, dy_di)
unit_tangent_vector = np.vstack([dx_di / tangent_vector_mag, dy_di / tangent_vector_mag])
unit_normal_vector = np.vstack([-dy_di / tangent_vector_mag, dx_di / tangent_vector_mag])
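The .values → .data switch matters because .values always coerces to a plain NumPy array, stripping any Pint units with a warning, while .data returns whatever array is underneath. A small sketch, assuming MetPy's xarray accessor is importable:

```python
import numpy as np
import xarray as xr
import metpy.xarray  # noqa: F401  (registers the .metpy accessor)

da = xr.DataArray(np.arange(3.0), dims='x',
                  attrs={'units': 'meter'}).metpy.quantify()

print(type(da.data))    # Pint Quantity wrapping the ndarray -- units kept
print(type(da.values))  # plain numpy.ndarray -- units stripped, with a warning
```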
2 changes: 1 addition & 1 deletion src/metpy/calc/thermo.py
@@ -1638,7 +1638,7 @@ def saturation_equivalent_potential_temperature(pressure, temperature):
e = saturation_vapor_pressure(temperature).to('hPa').magnitude
r = saturation_mixing_ratio(pressure, temperature).magnitude

th_l = t * (1000 / (p - e)) ** mpconsts.kappa
th_l = t * (1000 / (p - e)) ** mpconsts.nounit.kappa
th_es = th_l * np.exp((3036. / t - 1.78) * r * (1 + 0.448 * r))

return units.Quantity(th_es, units.kelvin)
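The exponent now comes from the mpconsts.nounit namespace, which carries the same constants without attached units, so exponentiating a bare ndarray no longer triggers Pint's unit-stripping warning. A sketch of the difference, assuming a MetPy checkout that ships nounit (as this branch does):

```python
import metpy.constants as mpconsts

# kappa (Rd / Cp) as a dimensionless Pint Quantity...
print(mpconsts.kappa)

# ...and as an unadorned number from the nounit namespace, suitable for use
# as an exponent on a plain NumPy array.
print(mpconsts.nounit.kappa)
```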
32 changes: 15 additions & 17 deletions src/metpy/calc/tools.py
@@ -25,19 +25,19 @@

UND = 'UND'
UND_ANGLE = -999.
DIR_STRS = (
DIR_STRS = [
'N', 'NNE', 'NE', 'ENE',
'E', 'ESE', 'SE', 'SSE',
'S', 'SSW', 'SW', 'WSW',
'W', 'WNW', 'NW', 'NNW',
UND
) # note the order matters!
] # note the order matters!

MAX_DEGREE_ANGLE = units.Quantity(360, 'degree')
BASE_DEGREE_MULTIPLIER = units.Quantity(22.5, 'degree')

DIR_DICT = {dir_str: i * BASE_DEGREE_MULTIPLIER for i, dir_str in enumerate(DIR_STRS)}
DIR_DICT[UND] = np.nan
DIR_DICT[UND] = units.Quantity(np.nan, 'degree')


@exporter.export
@@ -1773,16 +1773,15 @@ def parse_angle(input_dir):
"""
if isinstance(input_dir, str):
# abb_dirs = abbrieviated directions
abb_dirs = _clean_direction([_abbrieviate_direction(input_dir)])
abb_dir = _clean_direction([_abbreviate_direction(input_dir)])[0]
return DIR_DICT[abb_dir]
elif hasattr(input_dir, '__len__'): # handle np.array, pd.Series, list, and array-like
input_dir_str = ','.join(_clean_direction(input_dir, preprocess=True))
abb_dir_str = _abbrieviate_direction(input_dir_str)
abb_dir_str = _abbreviate_direction(input_dir_str)
abb_dirs = _clean_direction(abb_dir_str.split(','))
return units.Quantity.from_list(itemgetter(*abb_dirs)(DIR_DICT))
else: # handle unrecognizable scalar
return np.nan

return itemgetter(*abb_dirs)(DIR_DICT)
return units.Quantity(np.nan, 'degree')


def _clean_direction(dir_list, preprocess=False):
@@ -1795,7 +1794,7 @@ def _clean_direction(dir_list, preprocess=False):
for the_dir in dir_list]


def _abbrieviate_direction(ext_dir_str):
def _abbreviate_direction(ext_dir_str):
"""Convert extended (non-abbreviated) directions to abbreviation."""
return (ext_dir_str
.upper()
@@ -1846,11 +1845,10 @@ def angle_to_direction(input_angle, full=False, level=3):

# clean any numeric strings, negatives, and None does not handle strings with alphabet
input_angle = units.Quantity(np.array(input_angle).astype(float), origin_units)
input_angle[input_angle < 0] = units.Quantity(np.nan, origin_units)
input_angle[input_angle < 0] = np.nan

# normalizer used for angles > 360 degree to normalize between 0 - 360
normalizer = np.array(input_angle.m / MAX_DEGREE_ANGLE.m, dtype=int)
norm_angles = abs(input_angle - MAX_DEGREE_ANGLE * normalizer)
# Normalize between 0 - 360
norm_angles = input_angle % MAX_DEGREE_ANGLE

if level == 3:
nskip = 1
@@ -1889,12 +1887,12 @@ def angle_to_direction(input_angle, full=False, level=3):
return dir_str_arr

dir_str_arr = ','.join(dir_str_arr)
dir_str_arr = _unabbrieviate_direction(dir_str_arr)
dir_str_arr = _unabbreviate_direction(dir_str_arr)
return dir_str_arr.replace(',', ' ') if scalar else dir_str_arr.split(',')


def _unabbrieviate_direction(abb_dir_str):
"""Convert abbrieviated directions to non-abbrieviated direction."""
def _unabbreviate_direction(abb_dir_str):
"""Convert abbreviated directions to non-abbreviated direction."""
return (abb_dir_str
.upper()
.replace(UND, 'Undefined ')
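The angle_to_direction change replaces the old count-the-multiples-of-360-and-subtract normalization with a single modulo, which NumPy (and Pint) apply elementwise to map every angle into [0, 360). A minimal NumPy sketch:

```python
import numpy as np

angles = np.array([45., 360., 370., 725.])

# One modulo does the whole normalization; in the actual function, negative
# inputs have already been set to NaN before this step.
print(angles % 360.)  # [45.  0. 10.  5.]
```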
4 changes: 2 additions & 2 deletions src/metpy/interpolate/points.py
@@ -8,7 +8,7 @@

import numpy as np
from scipy.interpolate import griddata, Rbf
from scipy.spatial import cKDTree, ConvexHull, Delaunay, qhull
from scipy.spatial import cKDTree, ConvexHull, Delaunay, QhullError

from . import geometry, tools
from ..package_tools import Exporter
@@ -153,7 +153,7 @@ def natural_neighbor_point(xp, yp, variable, grid_loc, tri, neighbors, circumcen

area_list.append(cur_area * value[0])

except (ZeroDivisionError, qhull.QhullError) as e:
except (ZeroDivisionError, QhullError) as e:
message = ('Error during processing of a grid. '
'Interpolation will continue but be mindful '
f'of errors in output. {e}')
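The import change tracks SciPy's reorganization: with the scipy>=1.8.0 pin added above, QhullError is importable directly from scipy.spatial rather than the deprecated scipy.spatial.qhull submodule. A quick standalone check:

```python
from scipy.spatial import Delaunay, QhullError

try:
    # Collinear points cannot be triangulated, so Qhull raises QhullError.
    Delaunay([[0, 0], [1, 1], [2, 2]])
except QhullError as err:
    print(type(err).__name__)  # QhullError
```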
8 changes: 2 additions & 6 deletions src/metpy/interpolate/slices.py
@@ -7,7 +7,6 @@
import xarray as xr

from ..package_tools import Exporter
from ..units import is_quantity, units
from ..xarray import check_axis

exporter = Exporter(globals())
@@ -50,17 +49,14 @@ def interpolate_to_slice(data, points, interp_type='linear'):
'your data has been parsed by MetPy with proper x and y '
'dimension coordinates.') from None

data = data.metpy.dequantify()
data_sliced = data.interp({
x.name: xr.DataArray(points[:, 0], dims='index', attrs=x.attrs),
y.name: xr.DataArray(points[:, 1], dims='index', attrs=y.attrs)
}, method=interp_type)
data_sliced.coords['index'] = range(len(points))

# Bug in xarray: interp strips units
if is_quantity(data.data) and not is_quantity(data_sliced.data):
data_sliced.data = units.Quantity(data_sliced.data, data.data.units)

return data_sliced
return data_sliced.metpy.quantify()


@exporter.export
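The manual unit-reattachment workaround gives way to a dequantify/quantify round trip: units are folded into the attrs before DataArray.interp (which would otherwise strip the Pint array), then restored afterwards. A small sketch of the round trip, assuming MetPy's xarray accessor is importable:

```python
import numpy as np
import xarray as xr
import metpy.xarray  # noqa: F401  (registers the .metpy accessor)

da = xr.DataArray(np.linspace(250., 270., 5), dims='x',
                  coords={'x': np.arange(5.)},
                  attrs={'units': 'kelvin'}).metpy.quantify()

# Move units into attrs, interpolate on plain arrays, then reattach the units.
interped = da.metpy.dequantify().interp(x=[0.5, 1.5]).metpy.quantify()
print(interped.data.units)  # kelvin
```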
5 changes: 3 additions & 2 deletions src/metpy/io/nexrad.py
@@ -6,7 +6,7 @@
import bz2
from collections import defaultdict, namedtuple, OrderedDict
import contextlib
import datetime
from datetime import datetime, timezone
import logging
import pathlib
import re
@@ -75,7 +75,8 @@ def bzip_blocks_decompress_all(data):
def nexrad_to_datetime(julian_date, ms_midnight):
"""Convert NEXRAD date time format to python `datetime.datetime`."""
# Subtracting one from julian_date is because epoch date is 1
return datetime.datetime.utcfromtimestamp((julian_date - 1) * day + ms_midnight * milli)
return datetime.fromtimestamp((julian_date - 1) * day + ms_midnight * milli,
tz=timezone.utc).replace(tzinfo=None)


def remap_status(val):
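Similarly, datetime.utcfromtimestamp() is deprecated as of Python 3.12; building an aware UTC datetime from the timestamp and then dropping tzinfo keeps the naive result the NEXRAD reader has always returned. A standard-library sketch:

```python
from datetime import datetime, timezone

ts = 86_400.5  # an arbitrary POSIX timestamp, in seconds

# Old (deprecated): datetime.utcfromtimestamp(ts)
# New: make an aware UTC datetime, then strip tzinfo to stay naive downstream.
when = datetime.fromtimestamp(ts, tz=timezone.utc).replace(tzinfo=None)
print(when)  # 1970-01-02 00:00:00.500000
```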
4 changes: 2 additions & 2 deletions src/metpy/io/text.py
@@ -4,7 +4,7 @@
"""Support reading information from various text file formats."""

import contextlib
from datetime import datetime
from datetime import datetime, timezone
import re
import string

@@ -95,7 +95,7 @@ def parse_wpc_surface_bulletin(bulletin, year=None):
text = file.read().decode('utf-8')

parsed_text = []
valid_time = datetime.utcnow()
valid_time = datetime.now(timezone.utc).replace(tzinfo=None)
for parts in _regroup_lines(text.splitlines()):
# A single file may have multiple sets of data that are valid at different times. Set
# the valid_time string that will correspond to all the following lines parsed, until
6 changes: 3 additions & 3 deletions src/metpy/plots/_util.py
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: BSD-3-Clause
"""Utilities for use in making plots."""

from datetime import datetime
from datetime import datetime, timezone

from matplotlib.collections import LineCollection
import matplotlib.patheffects as mpatheffects
@@ -24,7 +24,7 @@ def add_timestamp(ax, time=None, x=0.99, y=-0.04, ha='right', high_contrast=Fals
ax : `matplotlib.axes.Axes`
The `Axes` instance used for plotting
time : `datetime.datetime` (or any object with a compatible ``strftime`` method)
Specific time to be plotted - datetime.utcnow will be use if not specified
Specific time to be plotted - ``datetime.now(UTC)`` will be used if not specified
x : float
Relative x position on the axes of the timestamp
y : float
@@ -52,7 +52,7 @@ def add_timestamp(ax, time=None, x=0.99, y=-0.04, ha='right', high_contrast=Fals
text_args = {}
text_args.update(**kwargs)
if not time:
time = datetime.utcnow()
time = datetime.now(timezone.utc)
timestr = time.strftime(time_format)
# If we don't have a time string after that, assume xarray/numpy and see if item
if not isinstance(timestr, str):
