[PR]: Remove deprecated features and APIs for next release #628

Merged 8 commits on Apr 2, 2024
1 change: 0 additions & 1 deletion conda-env/ci.yml
@@ -10,7 +10,6 @@ dependencies:
- cf_xarray >=0.7.3 # Constrained because https://github.com/xarray-contrib/cf-xarray/issues/467
- cftime
- dask
- lxml # TODO: Remove this in v0.7.0 once cdml/XML support is dropped
- netcdf4
- numpy >=1.23.0 # This version of numpy includes support for Python 3.11.
- pandas
1 change: 0 additions & 1 deletion conda-env/dev.yml
@@ -10,7 +10,6 @@ dependencies:
- cf_xarray >=0.7.3 # Constrained because https://github.com/xarray-contrib/cf-xarray/issues/467
- cftime
- dask
- lxml # TODO: Remove this in v0.7.0 once cdml/XML support is dropped
- netcdf4
- numpy >=1.23.0 # This version of numpy includes support for Python 3.11.
- pandas
117 changes: 0 additions & 117 deletions tests/test_dataset.py
@@ -5,7 +5,6 @@
import numpy as np
import pytest
import xarray as xr
from lxml import etree

from tests.fixtures import generate_dataset
from xcdat._logger import _setup_custom_logger
@@ -77,9 +76,6 @@ def test_skips_adding_bounds(self):
ds = generate_dataset(decode_times=True, cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path)

result = open_dataset(self.file_path, add_bounds=False)
assert result.identical(ds)

result = open_dataset(self.file_path, add_bounds=None)
assert result.identical(ds)

@@ -324,48 +320,6 @@ def test_keeps_specified_var_and_preserves_bounds(self):

assert result.identical(expected)

def test_raises_deprecation_warning_when_passing_add_bounds_true(self):
ds_no_bounds = generate_dataset(
decode_times=True, cf_compliant=True, has_bounds=False
)
ds_no_bounds.to_netcdf(self.file_path)

with warnings.catch_warnings(record=True) as w:
result = open_dataset(self.file_path, add_bounds=True)

assert len(w) == 1
assert issubclass(w[0].category, DeprecationWarning)
assert str(w[0].message) == (
"`add_bounds=True` will be deprecated after v0.6.0. Please use a list "
"of axis strings instead (e.g., `add_bounds=['X', 'Y']`)."
)

expected = generate_dataset(
decode_times=True, cf_compliant=True, has_bounds=True
)
expected = expected.drop_vars("time_bnds")
del expected["time"].attrs["bounds"]

assert result.identical(expected)

def test_raises_deprecation_warning_when_passing_add_bounds_false(self):
ds_no_bounds = generate_dataset(
decode_times=True, cf_compliant=True, has_bounds=False
)
ds_no_bounds.to_netcdf(self.file_path)

with warnings.catch_warnings(record=True) as w:
result = open_dataset(self.file_path, add_bounds=False)

assert len(w) == 1
assert issubclass(w[0].category, DeprecationWarning)
assert str(w[0].message) == (
"`add_bounds=False` will be deprecated after v0.6.0. Please use "
"`add_bounds=None` instead."
)

assert result.identical(ds_no_bounds)


class TestOpenMfDataset:
@pytest.fixture(autouse=True)
@@ -410,80 +364,9 @@ def test_skips_adding_bounds(self):
ds = generate_dataset(decode_times=True, cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path1)

result = open_mfdataset(self.file_path1, add_bounds=False)
assert result.identical(ds)

result = open_mfdataset(self.file_path1, add_bounds=None)
assert result.identical(ds)

def test_raises_error_if_xml_does_not_have_root_directory_attr(self):
ds1 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds1.to_netcdf(self.file_path1)
ds2 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds2 = ds2.rename_vars({"ts": "tas"})
ds2.to_netcdf(self.file_path2)

# Create the XML file
xml_path = f"{self.dir}/datasets.xml"
page = etree.Element("dataset")
doc = etree.ElementTree(page)
doc.write(xml_path, xml_declaration=True, encoding="utf-16")

with pytest.raises(KeyError):
open_mfdataset(xml_path, decode_times=True)

def test_opens_datasets_from_xml_using_str_path(self):
ds1 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds1.to_netcdf(self.file_path1)
ds2 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds2 = ds2.rename_vars({"ts": "tas"})
ds2.to_netcdf(self.file_path2)

# Create the XML file
xml_path = f"{self.dir}/datasets.xml"
page = etree.Element("dataset", directory=str(self.dir))
doc = etree.ElementTree(page)
doc.write(xml_path, xml_declaration=True, encoding="utf-16")

result = open_mfdataset(xml_path, decode_times=True)
expected = ds1.merge(ds2)

result.identical(expected)

def test_opens_datasets_from_xml_raises_deprecation_warning(self):
ds1 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds1.to_netcdf(self.file_path1)
ds2 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds2 = ds2.rename_vars({"ts": "tas"})
ds2.to_netcdf(self.file_path2)

# Create the XML file
xml_path = f"{self.dir}/datasets.xml"
page = etree.Element("dataset", directory=str(self.dir))
doc = etree.ElementTree(page)
doc.write(xml_path, xml_declaration=True, encoding="utf-16")

with pytest.warns(DeprecationWarning):
open_mfdataset(xml_path, decode_times=True)

def test_opens_datasets_from_xml_using_pathlib_path(self):
ds1 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds1.to_netcdf(self.file_path1)
ds2 = generate_dataset(decode_times=False, cf_compliant=False, has_bounds=True)
ds2 = ds2.rename_vars({"ts": "tas"})
ds2.to_netcdf(self.file_path2)

# Create the XML file
xml_path = self.dir / "datasets.xml"
page = etree.Element("dataset", directory=str(self.dir))
doc = etree.ElementTree(page)
doc.write(xml_path, xml_declaration=True, encoding="utf-16")

result = open_mfdataset(xml_path, decode_times=True)
expected = ds1.merge(ds2)

result.identical(expected)

def test_raises_error_if_directory_has_no_netcdf_files(self):
with pytest.raises(ValueError):
open_mfdataset(str(self.dir), decode_times=True)
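
Note on the removals above: the deleted tests exercised the deprecated `add_bounds=True`/`add_bounds=False` flags and the cdml/XML path support in `open_mfdataset`, both dropped by this PR. A minimal sketch of the replacement usage, based on the deprecation messages quoted in the removed tests (the file paths and glob pattern below are illustrative, not taken from this PR):

```python
import xcdat

# Instead of the deprecated `add_bounds=True`, pass a list of axis keys
# (per the removed deprecation message, e.g. add_bounds=["X", "Y"]).
ds = xcdat.open_dataset("input.nc", add_bounds=["X", "Y"])

# Instead of the deprecated `add_bounds=False`, pass None to skip adding
# bounds entirely.
ds_no_bounds = xcdat.open_dataset("input.nc", add_bounds=None)

# With cdml/XML support removed, point open_mfdataset at netCDF files
# (a glob, a list of paths, or a directory) rather than an .xml descriptor.
ds_multi = xcdat.open_mfdataset("data/*.nc", decode_times=True)
```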
113 changes: 21 additions & 92 deletions tests/test_regrid.py
@@ -1,7 +1,6 @@
import datetime
import re
import sys
import warnings
from unittest import mock

import numpy as np
@@ -38,7 +37,8 @@ class TestXGCMRegridder:
def setup(self):
self.ds = fixtures.generate_lev_dataset()

self.output_grid = grid.create_grid(lev=np.linspace(10000, 2000, 2))
z = grid.create_axis("lev", np.linspace(10000, 2000, 2), generate_bounds=False)
self.output_grid = grid.create_grid(z=z)

def test_multiple_z_axes(self):
self.ds = self.ds.assign_coords({"ilev": self.ds.lev.copy().rename("ilev")})
@@ -891,73 +891,6 @@ def test_create_grid_wrong_axis_value(self):
):
grid.create_grid(x=(self.lon, self.lon_bnds, self.lat)) # type: ignore[arg-type]

def test_deprecated_unexpected_coordinate(self):
lev = np.linspace(1000, 1, 2)

with pytest.raises(
ValueError,
match="Coordinate mass is not valid, reference `xcdat.axis.VAR_NAME_MAP` for valid options.",
):
grid.create_grid(lev=lev, mass=np.linspace(10, 20, 2))

def test_deprecated_create_grid_lev(self):
lev = np.linspace(1000, 1, 2)
lev_bnds = np.array([[1499.5, 500.5], [500.5, -498.5]])

with warnings.catch_warnings(record=True) as w:
new_grid = grid.create_grid(lev=(lev, lev_bnds))

assert len(w) == 1
assert issubclass(w[0].category, DeprecationWarning)
assert (
str(w[0].message)
== "**kwargs will be deprecated, see docstring and use 'x', 'y', or 'z' arguments"
)

assert np.array_equal(new_grid.lev, lev)
assert np.array_equal(new_grid.lev_bnds, lev_bnds)

def test_deprecated_create_grid(self):
lat = np.array([-45, 0, 45])
lon = np.array([30, 60, 90, 120, 150])
lat_bnds = np.array([[-67.5, -22.5], [-22.5, 22.5], [22.5, 67.5]])
lon_bnds = np.array([[15, 45], [45, 75], [75, 105], [105, 135], [135, 165]])

new_grid = grid.create_grid(lat=lat, lon=lon)

assert np.array_equal(new_grid.lat, lat)
assert np.array_equal(new_grid.lat_bnds, lat_bnds)
assert new_grid.lat.units == "degrees_north"
assert np.array_equal(new_grid.lon, lon)
assert np.array_equal(new_grid.lon_bnds, lon_bnds)
assert new_grid.lon.units == "degrees_east"

da_lat = xr.DataArray(
name="lat",
data=lat,
dims=["lat"],
attrs={"units": "degrees_north", "axis": "Y"},
)
da_lon = xr.DataArray(
name="lon",
data=lon,
dims=["lon"],
attrs={"units": "degrees_east", "axis": "X"},
)
da_lat_bnds = xr.DataArray(name="lat_bnds", data=lat_bnds, dims=["lat", "bnds"])
da_lon_bnds = xr.DataArray(name="lon_bnds", data=lon_bnds, dims=["lon", "bnds"])

new_grid = grid.create_grid(
lat=(da_lat, da_lat_bnds), lon=(da_lon, da_lon_bnds)
)

assert np.array_equal(new_grid.lat, lat)
assert np.array_equal(new_grid.lat_bnds, lat_bnds)
assert new_grid.lat.units == "degrees_north"
assert np.array_equal(new_grid.lon, lon)
assert np.array_equal(new_grid.lon_bnds, lon_bnds)
assert new_grid.lon.units == "degrees_east"

def test_uniform_grid(self):
new_grid = grid.create_uniform_grid(-90, 90, 4.0, -180, 180, 5.0)

@@ -986,10 +919,14 @@ def test_gaussian_grid(self):
assert uneven_grid.lon.shape == (67,)

def test_global_mean_grid(self):
source_grid = grid.create_grid(
lat=np.array([-80, -40, 0, 40, 80]),
lon=np.array([0, 45, 90, 180, 270, 360]),
x = grid.create_axis(
"lon", np.array([0, 45, 90, 180, 270, 360]), generate_bounds=True
)
y = grid.create_axis(
"lat", np.array([-80, -40, 0, 40, 80]), generate_bounds=True
)

source_grid = grid.create_grid(x=x, y=y)

mean_grid = grid.create_global_mean_grid(source_grid)

@@ -1068,9 +1005,14 @@ def test_raises_error_for_global_mean_grid_if_an_axis_has_multiple_dimensions(self):
grid.create_global_mean_grid(source_grid_with_2_lons)

def test_zonal_grid(self):
source_grid = grid.create_grid(
lat=np.array([-80, -40, 0, 40, 80]), lon=np.array([-160, -80, 80, 160])
x = grid.create_axis(
"lon", np.array([-160, -80, 80, 160]), generate_bounds=True
)
y = grid.create_axis(
"lat", np.array([-80, -40, 0, 40, 80]), generate_bounds=True
)

source_grid = grid.create_grid(x=x, y=y)

zonal_grid = grid.create_zonal_grid(source_grid)

@@ -1194,7 +1136,9 @@ def test_horizontal(self):
assert output_data.ts.shape == (15, 4, 4)

def test_vertical(self):
output_grid = grid.create_grid(lev=np.linspace(10000, 2000, 2))
z = grid.create_axis("lev", np.linspace(10000, 2000, 2), generate_bounds=False)

output_grid = grid.create_grid(z=z)

output_data = self.vertical_ds.regridder.vertical(
"so", output_grid, tool="xgcm", method="linear"
@@ -1210,7 +1154,8 @@ def test_vertical(self):
assert output_data.so.shape == (15, 4, 4, 4)

def test_vertical_multiple_z_axes(self):
output_grid = grid.create_grid(lev=np.linspace(10000, 2000, 2))
z = grid.create_axis("lev", np.linspace(10000, 2000, 2), generate_bounds=False)
output_grid = grid.create_grid(z=z)

self.vertical_ds = self.vertical_ds.assign_coords(
{"ilev": self.vertical_ds.lev.copy().rename("ilev")}
@@ -1312,22 +1257,6 @@ def test_vertical_tool_check(self, _get_input_grid):
):
self.ac.vertical("ts", mock_data, tool="dummy", target_data=None) # type: ignore

@pytest.mark.filterwarnings("ignore:.*invalid value.*divide.*:RuntimeWarning")
def test_convenience_methods(self):
ds = fixtures.generate_dataset(
decode_times=True, cf_compliant=False, has_bounds=True
)

out_grid = grid.create_gaussian_grid(32)

output_xesmf = ds.regridder.horizontal_xesmf("ts", out_grid, method="bilinear")

assert output_xesmf.ts.shape == (15, 32, 65)

output_regrid2 = ds.regridder.horizontal_regrid2("ts", out_grid)

assert output_regrid2.ts.shape == (15, 32, 65)


class TestBase:
def test_preserve_bounds(self):
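
Note on the removals above: the deleted tests covered the deprecated keyword-argument form of `create_grid` (e.g. `create_grid(lat=..., lon=..., lev=...)`) and the `horizontal_xesmf`/`horizontal_regrid2` convenience methods. A minimal sketch of the replacement calls, mirroring the `create_axis`/`create_grid` usage added elsewhere in this diff; the input path and the `tool`/`method` options are illustrative assumptions, not taken from this PR:

```python
import numpy as np
import xcdat
from xcdat.regridder import grid

# Build each axis explicitly and pass it via the 'x'/'y' (and 'z') arguments,
# replacing the deprecated **kwargs form of create_grid().
x = grid.create_axis("lon", np.array([0, 45, 90, 180, 270, 360]), generate_bounds=True)
y = grid.create_axis("lat", np.array([-80, -40, 0, 40, 80]), generate_bounds=True)
output_grid = grid.create_grid(x=x, y=y)

# The removed horizontal_xesmf()/horizontal_regrid2() convenience methods map
# onto the single accessor method with an explicit `tool` argument.
ds = xcdat.open_dataset("input.nc")  # illustrative path; any dataset with a "ts" variable
output_xesmf = ds.regridder.horizontal("ts", output_grid, tool="xesmf", method="bilinear")
output_regrid2 = ds.regridder.horizontal("ts", output_grid, tool="regrid2")
```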