Skip to content

Commit

Permalink
Fixed is_polar() error improperly identifying ABI as polar.
Browse files Browse the repository at this point in the history
Updated tests to use fake GOES data to test existing alias creation

Reduced aliases to ABI and VIIRS channels
  • Loading branch information
joleenf committed May 5, 2023
1 parent c6d0122 commit 8706601
Show file tree
Hide file tree
Showing 4 changed files with 79 additions and 53 deletions.
23 changes: 23 additions & 0 deletions satpy/etc/enhancements/generic.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -262,6 +262,29 @@ enhancements:
stretch: linear
cutoffs: [0.005, 0.005]

# Palettized rendering of the CLAVR-x four-level cloud mask
# (clear / probably clear / probably cloudy / cloudy, plus fill).
four_level_cloud_mask:
standard_name: cloud_mask
reader: clavrx
operations:
- name: palettize
method: !!python/name:satpy.enhancements.palettize
kwargs:
palettes:
- {'values': [-127,# Fill Value
0, # Clear
1, # Probably Clear
2, # Probably Cloudy
3, # Cloudy
],
'colors': [[ 0, 0, 0], # black,-127 = Fill Value
[ 94, 79, 162], # blue, 0 = Clear
[ 73, 228, 242], # cyan, 1 = Probably Clear
[158, 1, 66], # red, 2 = Probably Cloudy
[255, 255, 255], # white, 3 = Cloudy
],
'color_scale': 255,
}

sar-ice:
standard_name: sar-ice
operations:
Expand Down
81 changes: 42 additions & 39 deletions satpy/readers/clavrx.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,21 +69,18 @@
}

CHANNEL_ALIASES = {
"abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47},
"refl_0_65um_nom": {"name": "C02", "wavelength": 0.64},
"refl_0_86um_nom": {"name": "C03", "wavelength": 0.865},
"refl_1_38um_nom": {"name": "C04", "wavelength": 1.378},
"refl_1_60um_nom": {"name": "C05", "wavelength": 1.61},
"refl_2_10um_nom": {"name": "C06", "wavelength": 2.25},
"abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47, "modifiers": ("sunz_corrected",)},
"refl_0_65um_nom": {"name": "C02", "wavelength": 0.64, "modifiers": ("sunz_corrected",)},
"refl_0_86um_nom": {"name": "C03", "wavelength": 0.865, "modifiers": ("sunz_corrected",)},
"refl_1_38um_nom": {"name": "C04", "wavelength": 1.38, "modifiers": ("sunz_corrected",)},
"refl_1_60um_nom": {"name": "C05", "wavelength": 1.61, "modifiers": ("sunz_corrected",)},
"refl_2_10um_nom": {"name": "C06", "wavelength": 2.25, "modifiers": ("sunz_corrected",)},
},
"ahi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47},
"refl_0_55um_nom": {"name": "C02", "wavelength": 0.51},
"refl_0_65um_nom": {"name": "C03", "wavelength": 0.64},
"refl_0_86um_nom": {"name": "C04", "wavelength": 0.86},
"refl_1_60um_nom": {"name": "C05", "wavelength": 1.61},
"refl_2_10um_nom": {"name": "C06", "wavelength": 2.25}
},
}
"viirs": {"refl_0_65um_nom": {"name": "I01", "wavelength": 0.64, "modifiers": ("sunz_corrected",)},
"refl_1_38um_nom": {"name": "M09", "wavelength": 1.38, "modifiers": ("sunz_corrected",)},
"refl_1_60um_nom": {"name": "I03", "wavelength": 1.61, "modifiers": ("sunz_corrected",)}
}
}


def _get_sensor(sensor: str) -> str:
Expand Down Expand Up @@ -143,24 +140,21 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray:
factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0])
offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0])
valid_range = attrs.get('valid_range', [None])
if isinstance(valid_range, np.ndarray):
attrs["valid_range"] = valid_range.tolist()

flags = not data.attrs.get("SCALED", 1) and any(data.attrs.get("flag_values", [None]))
if not flags:
data = data.where(data != fill)
data = _CLAVRxHelper._scale_data(data, factor, offset)
# don't need _FillValue if it has been applied.
attrs.pop('_FillValue', None)

if all(valid_range):
valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset)
valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset)
if flags:
data = data.where((data >= valid_min) & (data <= valid_max), fill)
else:
if isinstance(valid_range, np.ndarray):
valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset)
valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset)

Check warning on line 152 in satpy/readers/clavrx.py

View check run for this annotation

Codecov / codecov/patch

satpy/readers/clavrx.py#L152

Added line #L152 was not covered by tests
data = data.where((data >= valid_min) & (data <= valid_max))
attrs['valid_range'] = [valid_min, valid_max]
else:

Check warning on line 154 in satpy/readers/clavrx.py

View check run for this annotation

Codecov / codecov/patch

satpy/readers/clavrx.py#L154

Added line #L154 was not covered by tests
flag_values = attrs.get('flag_values', None)
if flag_values is not None and isinstance(flag_values, np.ndarray):
data = data.where((data >= flag_values[0]) & (data <= flag_values[-1]), fill)

Check warning on line 157 in satpy/readers/clavrx.py

View check run for this annotation

CodeScene Delta Analysis / CodeScene Cloud Delta Analysis (main)

❌ New issue: Bumpy Road Ahead

_CLAVRxHelper._get_data has 2 blocks with nested conditional logic. Any nesting of 2 or deeper is considered. Threshold is one single, nested block per function. The Bumpy Road code smell is a function that contains multiple chunks of nested conditional logic. The deeper the nesting and the more bumps, the lower the code health.

data.attrs = _CLAVRxHelper._remove_attributes(attrs)

Expand Down Expand Up @@ -330,6 +324,15 @@ def get_nadir_resolution(self, sensor):
elif res is not None:
return int(res)

def _available_aliases(self, ds_info, current_var):
    """Yield an alias dataset-info dict when *current_var* has a channel alias.

    Looks up *current_var* in ``CHANNEL_ALIASES`` for the current sensor
    and, on a match, yields ``(True, ds_info)`` with the alias metadata
    (band name, wavelength, ...) merged in, keeping the original variable
    name as ``file_key`` so the data can still be read from the file.
    """
    alias_info = CHANNEL_ALIASES.get(self.sensor, {}).get(current_var)
    if alias_info is not None:
        # Copy before updating: alias_info is a shared module-level dict,
        # and mutating it here would leak file-specific keys (file_key,
        # resolution) into CHANNEL_ALIASES for every subsequent file.
        alias_info = dict(alias_info)
        alias_info["file_key"] = current_var
        alias_info["resolution"] = self.get_nadir_resolution(self.sensor)
        ds_info.update(alias_info)
        yield True, ds_info

Check warning on line 335 in satpy/readers/clavrx.py

View check run for this annotation

Codecov / codecov/patch

satpy/readers/clavrx.py#L332-L335

Added lines #L332 - L335 were not covered by tests
def available_datasets(self, configured_datasets=None):
"""Automatically determine datasets provided by this file."""
self.sensor = _get_sensor(self.file_content.get('/attr/sensor'))
Expand Down Expand Up @@ -375,6 +378,9 @@ def available_datasets(self, configured_datasets=None):
ds_info['coordinates'] = ['longitude', 'latitude']
yield True, ds_info

if CHANNEL_ALIASES.get(self.sensor) is not None:
yield from self._available_aliases(ds_info, var_name)

Check warning on line 382 in satpy/readers/clavrx.py

View check run for this annotation

CodeScene Delta Analysis / CodeScene Cloud Delta Analysis (main)

❌ Getting worse: Complex Method

CLAVRXHDF4FileHandler.available_datasets increases in cyclomatic complexity from 13 to 14, threshold = 9. This function has many conditional statements (e.g. if, for, while), leading to lower code health. Avoid adding more conditionals and code to it without refactoring.

def get_shape(self, dataset_id, ds_info):
"""Get the shape."""
var_name = ds_info.get('file_key', dataset_id['name'])
Expand Down Expand Up @@ -425,11 +431,20 @@ def __init__(self, filename, filename_info, filetype_info):
{"name": "longitude"})

def _get_ds_info_for_data_arr(self, var_name):
    """Yield dataset info for *var_name*, plus an alias dataset when one exists.

    Always yields ``(True, ds_info)`` for the raw file variable.  When the
    current sensor has a channel alias registered for *var_name* (e.g. a
    reflectance variable mapped to an ABI/VIIRS band name), a second
    ``(True, ds_info)`` is yielded with the alias metadata merged in and
    the raw variable name kept as ``file_key``.
    """
    ds_info = {
        'file_type': self.filetype_info['file_type'],
        'name': var_name,
    }
    yield True, ds_info

    channel_info = CHANNEL_ALIASES.get(self.sensor, {}).get(var_name)
    if channel_info is not None:
        # Build a fresh dict for the alias entry: updating ds_info in place
        # would also corrupt the already-yielded base entry (same object),
        # and writing into channel_info would mutate the shared module-level
        # CHANNEL_ALIASES table.
        alias_info = {**ds_info, **channel_info, "file_key": var_name}
        yield True, alias_info

@staticmethod
def _is_2d_yx_data_array(data_arr):
Expand All @@ -450,19 +465,7 @@ def _available_new_datasets(self, handled_vars):
# we need 'traditional' y/x dimensions currently
continue

ds_info = self._get_ds_info_for_data_arr(var_name)
ds_info.update({"file_key": var_name})
yield True, ds_info

alias_info = CHANNEL_ALIASES[self.sensor].get(var_name, None)
if alias_info is not None:
alias_info.update({"file_key": var_name})
if "RESOLUTION_KM" in self.nc.attrs:
alias_info["resolution"] = self.nc.attrs["RESOLUTION_KM"] * 1000.
else:
alias_info["resolution"] = NADIR_RESOLUTION[self.sensor]
ds_info.update(alias_info)
yield True, ds_info
yield from self._get_ds_info_for_data_arr(var_name)

def available_datasets(self, configured_datasets=None):
"""Dynamically discover what variables can be loaded from this file.
Expand All @@ -488,7 +491,7 @@ def _is_polar(self):
l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)),
str(self.nc.attrs.get('sensor', None)))

return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None)
return (inst_att not in ['ABI', 'AHI'] and 'GOES' not in inst_att) or (l1b_att is None)

def get_area_def(self, key):
"""Get the area definition of the data at hand."""
Expand Down
2 changes: 1 addition & 1 deletion satpy/tests/reader_tests/test_clavrx.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,7 +386,7 @@ def test_load_all_old_donor(self):
else:
self.assertNotIn('_FillValue', v.attrs)
if v.attrs["name"] == 'variable1':
self.assertIsInstance(v.attrs["valid_range"], list)
self.assertIsInstance(v.attrs["valid_range"], tuple)
else:
self.assertNotIn('valid_range', v.attrs)
if 'flag_values' in v.attrs:
Expand Down
26 changes: 13 additions & 13 deletions satpy/tests/reader_tests/test_clavrx_nc.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,17 +36,17 @@
DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc'
ABI_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc'
FILL_VALUE = -32768


def fake_test_content(filename, **kwargs):
"""Mimic reader input file content."""
attrs = {
'platform': 'HIM8',
'sensor': 'AHI',
'platform': 'G16',
'sensor': 'ABI',
# this is a Level 2 file that came from a L1B file
'L1B': 'clavrx_H08_20210603_1500_B01_FLDK_R',
'L1B': '"clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173',
}

longitude = xr.DataArray(DEFAULT_LON_DATA,
Expand Down Expand Up @@ -127,7 +127,7 @@ def setup_method(self):

@pytest.mark.parametrize(
("filenames", "expected_loadables"),
[([AHI_FILE], 1)]
[([ABI_FILE], 1)]
)
def test_reader_creation(self, filenames, expected_loadables):
"""Test basic initialization."""
Expand All @@ -143,7 +143,7 @@ def test_reader_creation(self, filenames, expected_loadables):

@pytest.mark.parametrize(
("filenames", "expected_datasets"),
[([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'variable3']), ]
[([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'variable3']), ]
)
def test_available_datasets(self, filenames, expected_datasets):
"""Test that variables are dynamically discovered."""
Expand All @@ -157,12 +157,12 @@ def test_available_datasets(self, filenames, expected_datasets):
for var_name in expected_datasets:
assert var_name in avails
# check extra datasets created by alias or coordinates
for var_name in ["latitude", "longitude", "C03"]:
for var_name in ["latitude", "longitude"]:
assert var_name in avails

@pytest.mark.parametrize(
("filenames", "loadable_ids"),
[([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'C03', 'variable3']), ]
[([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'C02', 'variable3']), ]
)
def test_load_all_new_donor(self, filenames, loadable_ids):
"""Test loading all test datasets with new donor."""
Expand All @@ -181,8 +181,8 @@ def test_load_all_new_donor(self, filenames, loadable_ids):
semi_major_axis=6378137,
semi_minor_axis=6356752.3142,
perspective_point_height=35791000,
longitude_of_projection_origin=140.7,
sweep_angle_axis='y',
longitude_of_projection_origin=-137.2,
sweep_angle_axis='x',
)
d.return_value = fake_donor = mock.MagicMock(
variables={'goes_imager_projection': proj, 'x': x, 'y': y},
Expand All @@ -194,15 +194,15 @@ def test_load_all_new_donor(self, filenames, loadable_ids):
assert 'calibration' not in v.attrs
assert "units" in v.attrs
assert isinstance(v.attrs['area'], AreaDefinition)
assert v.attrs['platform_name'] == 'himawari8'
assert v.attrs['sensor'] == 'ahi'
assert v.attrs['platform_name'] == 'GOES-16'
assert v.attrs['sensor'] == 'abi'
assert 'rows_per_scan' not in v.coords.get('longitude').attrs
if v.attrs["name"] == 'variable1':
assert "valid_range" not in v.attrs
assert v.dtype == np.float64
assert "_FillValue" not in v.attrs
# should have file variable and one alias for reflectance
elif v.attrs["name"] in ["refl_0_65um_nom", "C03"]:
elif v.attrs["name"] in ["refl_0_65um_nom", "C02"]:
assert isinstance(v.attrs["valid_range"], list)
assert v.dtype == np.float64
assert "_FillValue" not in v.attrs.keys()
Expand Down

0 comments on commit 8706601

Please sign in to comment.