
Commit

Merge branch 'main' of github.com:pytroll/satpy into feature_add_alpha_def_support
ameraner committed Jun 27, 2024
2 parents b93d0ce + dd80145 commit ee0da69
Showing 6 changed files with 64 additions and 27 deletions.
8 changes: 4 additions & 4 deletions satpy/composites/__init__.py
@@ -764,8 +764,8 @@ def _get_coszen_blending_weights(
self,
projectables: Sequence[xr.DataArray],
) -> xr.DataArray:
-        lim_low = np.cos(np.deg2rad(self.lim_low))
-        lim_high = np.cos(np.deg2rad(self.lim_high))
+        lim_low = float(np.cos(np.deg2rad(self.lim_low)))
+        lim_high = float(np.cos(np.deg2rad(self.lim_high)))
try:
coszen = np.cos(np.deg2rad(projectables[2 if self.day_night == "day_night" else 1]))
self._has_sza = True
@@ -775,8 +775,8 @@ def _get_coszen_blending_weights(
# Get chunking that matches the data
coszen = get_cos_sza(projectables[0])
# Calculate blending weights
-        coszen -= np.min((lim_high, lim_low))
-        coszen /= np.abs(lim_low - lim_high)
+        coszen -= min(lim_high, lim_low)
+        coszen /= abs(lim_low - lim_high)
return coszen.clip(0, 1)

def _get_data_for_single_side_product(
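
Note (not part of the commit): the switch to float() and the builtin min/abs is presumably about dtype preservation, since NumPy scalars take part in dtype promotion while plain Python floats do not. A minimal sketch of the difference, assuming NumPy 2's promotion rules (NEP 50):

import numpy as np

coszen = np.zeros((2, 2), dtype=np.float32)
lim = np.cos(np.deg2rad(80.0))        # numpy.float64 scalar
print((coszen - lim).dtype)           # float64 under NumPy 2 promotion (NEP 50)
print((coszen - float(lim)).dtype)    # float32: Python floats do not promote
print(type(np.min((0.1, 0.9))))       # <class 'numpy.float64'>
print(type(min(0.1, 0.9)))            # <class 'float'>
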
19 changes: 13 additions & 6 deletions satpy/readers/viirs_edr.py
@@ -128,18 +128,13 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray:
"""Get the dataset."""
data_arr = self.nc[info["file_key"]]
data_arr = self._mask_invalid(data_arr, info)
+        data_arr = self._sanitize_metadata(data_arr, info)
units = info.get("units", data_arr.attrs.get("units"))
if units is None or units == "unitless":
units = "1"
if units == "%" and data_arr.attrs.get("units") in ("1", "unitless"):
data_arr *= 100.0 # turn into percentages
data_arr.attrs["units"] = units
if "standard_name" in info:
data_arr.attrs["standard_name"] = info["standard_name"]
self._decode_flag_meanings(data_arr)
data_arr.attrs["platform_name"] = self.platform_name
data_arr.attrs["sensor"] = self.sensor_name
data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr)
if data_arr.attrs.get("standard_name") in ("longitude", "latitude"):
# recursive swath definitions are a problem for the base reader right now
# delete the coordinates here so the base reader doesn't try to
@@ -157,6 +152,18 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray:
return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1]))
return data_arr

+    def _sanitize_metadata(self, data_arr: xr.DataArray, info: dict) -> xr.DataArray:
+        if "valid_range" in data_arr.attrs:
+            # don't use numpy arrays for simple metadata
+            data_arr.attrs["valid_range"] = tuple(data_arr.attrs["valid_range"])
+        if "standard_name" in info:
+            data_arr.attrs["standard_name"] = info["standard_name"]
+        self._decode_flag_meanings(data_arr)
+        data_arr.attrs["platform_name"] = self.platform_name
+        data_arr.attrs["sensor"] = self.sensor_name
+        data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr)
+        return data_arr

@staticmethod
def _decode_flag_meanings(data_arr: xr.DataArray):
flag_meanings = data_arr.attrs.get("flag_meanings", None)
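
Note (not part of the commit): converting valid_range to a tuple avoids carrying numpy arrays as plain metadata, where elementwise equality makes naive attribute comparisons ambiguous. A small standalone illustration:

import numpy as np

vr_array = np.array([-0.5, 0.5])
vr_tuple = (-0.5, 0.5)
print(vr_tuple == (-0.5, 0.5))            # True: one unambiguous boolean
print(vr_array == np.array([-0.5, 0.5]))  # [ True  True]: elementwise result
try:
    bool(vr_array == np.array([-0.5, 0.5]))  # what a naive comparison ends up doing
except ValueError as err:
    print(err)  # truth value of an array with more than one element is ambiguous
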
8 changes: 5 additions & 3 deletions satpy/tests/modifier_tests/test_angles.py
@@ -371,15 +371,17 @@ def test_relative_azimuth_calculation(self):
assert isinstance(raa, xr.DataArray)
np.testing.assert_allclose(expected_raa, raa)

-    def test_solazi_correction(self):
+    @pytest.mark.parametrize("dtype", [np.float32, np.float64])
+    def test_solazi_correction(self, dtype):
"""Test that solar azimuth angles are corrected into the right range."""
from satpy.modifiers.angles import _get_sun_azimuth_ndarray

-        lats = np.array([-80, 40, 0, 40, 80])
-        lons = np.array([-80, 40, 0, 40, 80])
+        lats = np.array([-80, 40, 0, 40, 80], dtype=dtype)
+        lons = np.array([-80, 40, 0, 40, 80], dtype=dtype)

date = dt.datetime(2022, 1, 5, 12, 50, 0)

azi = _get_sun_azimuth_ndarray(lats, lons, date)

assert np.all(azi > 0)
+        assert azi.dtype == dtype
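
Note (generic illustration, not satpy code): the new dtype assertion relies on NumPy's trigonometric ufuncs preserving float32 inputs, so the azimuth computation can be expected to return the dtype it was given:

import numpy as np

lats32 = np.array([-80, 40, 0, 40, 80], dtype=np.float32)
assert np.cos(np.deg2rad(lats32)).dtype == np.float32
assert np.cos(np.deg2rad(lats32.astype(np.float64))).dtype == np.float64
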
3 changes: 2 additions & 1 deletion satpy/tests/reader_tests/test_netcdf_utils.py
@@ -233,7 +233,8 @@ def test_filenotfound(self):
"""Test that error is raised when file not found."""
from satpy.readers.netcdf_utils import NetCDF4FileHandler

-        with pytest.raises(IOError, match=".*No such file or directory.*"):
+        # NOTE: Some versions of NetCDF C report unknown file format on Windows
+        with pytest.raises(IOError, match=".*(No such file or directory|Unknown file format).*"):
NetCDF4FileHandler("/thisfiledoesnotexist.nc", {}, {})

def test_get_and_cache_npxr_is_xr(self):
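Note (not part of the commit): pytest.raises applies its match= pattern to the exception text with re.search, so the alternation accepts either platform-dependent NetCDF error message. A sketch with assumed example messages:

import re

pattern = ".*(No such file or directory|Unknown file format).*"
assert re.search(pattern, "[Errno 2] No such file or directory: '/thisfiledoesnotexist.nc'")
assert re.search(pattern, "NetCDF: Unknown file format: '/thisfiledoesnotexist.nc'")
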
44 changes: 35 additions & 9 deletions satpy/tests/reader_tests/test_viirs_edr.py
@@ -221,7 +221,12 @@ def aod_file(tmp_path_factory: TempPathFactory) -> Path:
"""Generate fake AOD VIIRs EDR file."""
fn = f"JRR-AOD_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc"
data_vars = _create_continuous_variables(
("AOD550",)
("AOD550",),
data_attrs={
"valid_range": [-0.5, 0.5],
"units": "1",
"_FillValue": -999.999,
}
)
qc_data = np.zeros(data_vars["AOD550"].shape, dtype=np.int8)
qc_data[-1, -1] = 2
@@ -255,30 +260,40 @@ def _create_lst_variables() -> dict[str, xr.DataArray]:
return data_vars


-def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataArray]:
+def _create_continuous_variables(
+        var_names: Iterable[str],
+        data_attrs: None | dict = None
+) -> dict[str, xr.DataArray]:
dims = ("Rows", "Columns")

lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9}
lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9}
cont_attrs = {"units": "Kelvin", "_FillValue": -9999,
"scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)}
cont_attrs = data_attrs
if cont_attrs is None:
cont_attrs = {"units": "Kelvin", "_FillValue": -9999,
"scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)}

m_data = RANDOM_GEN.random((M_ROWS, M_COLS)).astype(np.float32)
data_arrs = {
"Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs),
"Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs),
}
+    cont_data = m_data
+    if "valid_range" in cont_attrs:
+        valid_range = cont_attrs["valid_range"]
+        # scale 0-1 random data to fit in valid_range
+        cont_data = cont_data * (valid_range[1] - valid_range[0]) + valid_range[0]
for var_name in var_names:
-        data_arrs[var_name] = xr.DataArray(m_data, dims=dims, attrs=cont_attrs)
+        data_arrs[var_name] = xr.DataArray(cont_data, dims=dims, attrs=cont_attrs)
for data_arr in data_arrs.values():
if "_FillValue" in data_arr.attrs:
data_arr.encoding["_FillValue"] = data_arr.attrs.pop("_FillValue")
data_arr.encoding["coordinates"] = "Longitude Latitude"
if "scale_factor" not in data_arr.attrs:
continue
data_arr.encoding["dtype"] = np.int16
data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor")
data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset")
data_arr.encoding["coordinates"] = "Longitude Latitude"
return data_arrs


@@ -498,10 +513,16 @@ def _check_surf_refl_data_arr(
def _check_continuous_data_arr(data_arr: xr.DataArray) -> None:
_array_checks(data_arr)

-    # random sample should be between 0 and 1 only if factor/offset applied
+    if "valid_range" not in data_arr.attrs and "valid_min" not in data_arr.attrs:
+        # random sample should be between 0 and 1 only if factor/offset applied
+        exp_range = (0, 1)
+    else:
+        # if there is a valid range then we shouldn't be outside it
+        exp_range = data_arr.attrs.get("valid_range",
+                                       (data_arr.attrs.get("valid_min"), data_arr.attrs.get("valid_max")))
data = data_arr.data.compute()
-    assert not (data < 0).any()
-    assert not (data > 1).any()
+    assert not (data < exp_range[0]).any()
+    assert not (data > exp_range[1]).any()

_shared_metadata_checks(data_arr)

@@ -536,6 +557,11 @@ def _shared_metadata_checks(data_arr: xr.DataArray) -> None:
assert lats.min() >= -90.0
assert lats.max() <= 90.0

if "valid_range" in data_arr.attrs:
valid_range = data_arr.attrs["valid_range"]
assert isinstance(valid_range, tuple)
assert len(valid_range) == 2


def _is_mband_res(data_arr: xr.DataArray) -> bool:
return "I" not in data_arr.attrs["name"] # includes NDVI and EVI
9 changes: 5 additions & 4 deletions satpy/tests/test_composites.py
@@ -606,11 +606,12 @@ def test_day_only_area_without_alpha(self):
"""Test compositor with day portion without alpha_band when SZA data is not provided."""
from satpy.composites import DayNightCompositor

-        with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
-            comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False)
-            res = comp((self.data_a,))
-            res = res.compute()
+        # with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
+        comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False)
+        res_dask = comp((self.data_a,))
+        res = res_dask.compute()
expected = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32)
+        assert res_dask.dtype == res.dtype
assert res.dtype == np.float32
np.testing.assert_allclose(res.values[0], expected)
assert "A" not in res.bands
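
Note (generic illustration, not satpy code): the added assertion compares the dtype dask advertises for the lazy result with the dtype of the computed values, guarding against graphs that silently upcast at compute time.

import dask.array as da
import numpy as np

lazy = da.zeros((2, 2), dtype=np.float32)
computed = lazy.compute()
assert lazy.dtype == computed.dtype == np.float32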
