Merge pull request #2100 from djhoese/bugfix-viirs-l1b-available
Fix handling of non-existent reflectance bands in 'viirs_l1b' reader
djhoese committed May 4, 2022
2 parents e5a71d5 + d8ed23a commit 56eb21b
Showing 2 changed files with 87 additions and 46 deletions.
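For context, here is roughly how the fix surfaces to users: reflectance bands that are absent from night-time granules are simply not reported as available, instead of showing up and then failing to load. A minimal sketch using the public satpy Scene API; the granule file names below are stand-ins for real VIIRS L1B files:

from satpy import Scene

# Hypothetical night-time M-band granule plus its geolocation file;
# substitute paths to real VIIRS L1B files.
filenames = [
    "VL1BM_snpp_d20161130_t012400_c20161130054822.nc",
    "VGEOM_snpp_d20161130_t012400_c20161130054822.nc",
]
scn = Scene(reader="viirs_l1b", filenames=filenames)

# With this fix, reflectance-only bands (e.g. M01-M11, I01-I03) that are
# missing from night granules no longer appear here.
print(scn.available_dataset_names())

# Brightness temperature bands are still present and load as before.
scn.load(["M15"])
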
35 changes: 31 additions & 4 deletions satpy/readers/viirs_l1b.py
@@ -96,7 +96,7 @@ def adjust_scaling_factors(self, factors, file_units, output_units):

def get_shape(self, ds_id, ds_info):
"""Get shape."""
var_path = ds_info.get('file_key', 'observation_data/{}'.format(ds_id['name']))
var_path = self._dataset_name_to_var_path(ds_id['name'], ds_info)
return self.get(var_path + '/shape', 1)

@property
@@ -170,12 +170,12 @@ def _get_dataset_valid_range(self, dataset_id, ds_info, var_path):

def get_metadata(self, dataset_id, ds_info):
"""Get metadata."""
var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name']))
var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info)
shape = self.get_shape(dataset_id, ds_info)
file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path)

# Get extra metadata
if '/dimension/number_of_scans' in self:
if self._is_scan_based_array(shape):
rows_per_scan = int(shape[0] / self['/dimension/number_of_scans'])
ds_info.setdefault('rows_per_scan', rows_per_scan)

@@ -194,9 +194,12 @@ def get_metadata(self, dataset_id, ds_info):
i.update(dataset_id.to_dict())
return i

def _is_scan_based_array(self, shape):
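# get_shape() falls back to 1 when the variable is missing from the file
# (e.g. reflectance bands in night granules), so only a real, non-empty
# tuple shape is treated as a scan-based array.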
return '/dimension/number_of_scans' in self and isinstance(shape, tuple) and shape

def get_dataset(self, dataset_id, ds_info):
"""Get dataset."""
var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name']))
var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info)
metadata = self.get_metadata(dataset_id, ds_info)

valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path)
@@ -236,3 +239,27 @@ def get_dataset(self, dataset_id, ds_info):
if 'number_of_lines' in data.dims:
data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'})
return data

def available_datasets(self, configured_datasets=None):
"""Generate dataset info and their availablity.
See
:meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets`
for details.
"""
for is_avail, ds_info in (configured_datasets or []):
if is_avail is not None:
# some other file handler said it has this dataset
# we don't know any more information than the previous
# file handler so let's yield early
yield is_avail, ds_info
continue
ft_matches = self.file_type_matches(ds_info['file_type'])
var_path = self._dataset_name_to_var_path(ds_info['name'], ds_info)
is_in_file = var_path in self
yield ft_matches and is_in_file, ds_info

@staticmethod
def _dataset_name_to_var_path(dataset_name: str, ds_info: dict) -> str:
return ds_info.get('file_key', 'observation_data/{}'.format(dataset_name))
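
For orientation, the availability decision added above boils down to: a configured dataset is reported as available only when this handler's file type matches and the variable actually exists in the file. A minimal standalone sketch of that logic (function and argument names here are illustrative, not satpy API):

def dataset_is_available(ds_info, file_type_matches, file_contents):
    # Same path convention as the reader: an explicit 'file_key' wins,
    # otherwise fall back to observation_data/<dataset name>.
    var_path = ds_info.get('file_key', 'observation_data/{}'.format(ds_info['name']))
    return bool(file_type_matches(ds_info['file_type'])) and var_path in file_contents

# Example: a night granule that is missing the M05 reflectance band.
night_file = {'observation_data/M15': object()}
assert dataset_is_available({'name': 'M15', 'file_type': 'vl1bm'}, lambda ft: True, night_file)
assert not dataset_is_available({'name': 'M05', 'file_type': 'vl1bm'}, lambda ft: True, night_file)
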
98 changes: 56 additions & 42 deletions satpy/tests/reader_tests/test_viirs_l1b.py
@@ -37,25 +37,20 @@
DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)


class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler):
class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler):
"""Swap-in NetCDF4 File Handler."""

M_REFL_BANDS = [f"M{band_num:02d}" for band_num in range(1, 12)]
M_BT_BANDS = [f"M{band_num:02d}" for band_num in range(12, 17)]
M_BANDS = M_REFL_BANDS + M_BT_BANDS
I_REFL_BANDS = [f"I{band_num:02d}" for band_num in range(1, 4)]
I_BT_BANDS = [f"I{band_num:02d}" for band_num in range(4, 6)]
I_BANDS = I_REFL_BANDS + I_BT_BANDS

def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0))
file_type = filename[:5].lower()
# num_lines = {
# 'vl1bi': 3248 * 2,
# 'vl1bm': 3248,
# 'vl1bd': 3248,
# }[file_type]
# num_pixels = {
# 'vl1bi': 6400,
# 'vl1bm': 3200,
# 'vl1bd': 4064,
# }[file_type]
# num_scans = 203
# num_luts = 65536
num_lines = DEFAULT_FILE_SHAPE[0]
num_pixels = DEFAULT_FILE_SHAPE[1]
num_scans = 5
@@ -72,14 +67,11 @@ def get_test_content(self, filename, filename_info, filetype_info):
'/attr/platform': 'Suomi-NPP',
}
self._fill_contents_with_default_data(file_content, file_type)

self._set_dataset_specific_metadata(file_content)

convert_file_content_to_data_array(file_content)
return file_content

@staticmethod
def _fill_contents_with_default_data(file_content, file_type):
def _fill_contents_with_default_data(self, file_content, file_type):
"""Fill file contents with default data."""
if file_type.startswith('vgeo'):
file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number')
@@ -93,28 +85,11 @@ def _fill_contents_with_default_data(file_content, file_type):
file_content['geolocation_data/lunar_zenith'] = DEFAULT_LON_DATA
file_content['geolocation_data/lunar_azimuth'] = DEFAULT_LON_DATA
elif file_type == 'vl1bm':
file_content['observation_data/M01'] = DEFAULT_FILE_DATA
file_content['observation_data/M02'] = DEFAULT_FILE_DATA
file_content['observation_data/M03'] = DEFAULT_FILE_DATA
file_content['observation_data/M04'] = DEFAULT_FILE_DATA
file_content['observation_data/M05'] = DEFAULT_FILE_DATA
file_content['observation_data/M06'] = DEFAULT_FILE_DATA
file_content['observation_data/M07'] = DEFAULT_FILE_DATA
file_content['observation_data/M08'] = DEFAULT_FILE_DATA
file_content['observation_data/M09'] = DEFAULT_FILE_DATA
file_content['observation_data/M10'] = DEFAULT_FILE_DATA
file_content['observation_data/M11'] = DEFAULT_FILE_DATA
file_content['observation_data/M12'] = DEFAULT_FILE_DATA
file_content['observation_data/M13'] = DEFAULT_FILE_DATA
file_content['observation_data/M14'] = DEFAULT_FILE_DATA
file_content['observation_data/M15'] = DEFAULT_FILE_DATA
file_content['observation_data/M16'] = DEFAULT_FILE_DATA
for m_band in self.M_BANDS:
file_content[f'observation_data/{m_band}'] = DEFAULT_FILE_DATA
elif file_type == 'vl1bi':
file_content['observation_data/I01'] = DEFAULT_FILE_DATA
file_content['observation_data/I02'] = DEFAULT_FILE_DATA
file_content['observation_data/I03'] = DEFAULT_FILE_DATA
file_content['observation_data/I04'] = DEFAULT_FILE_DATA
file_content['observation_data/I05'] = DEFAULT_FILE_DATA
for i_band in self.I_BANDS:
file_content[f'observation_data/{i_band}'] = DEFAULT_FILE_DATA
elif file_type == 'vl1bd':
file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA
file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian'
@@ -151,18 +126,32 @@ def _set_dataset_specific_metadata(file_content):
file_content[k + '/attr/add_offset'] = 0.1


class TestVIIRSL1BReader:
class FakeNetCDF4FileHandlerNight(FakeNetCDF4FileHandlerDay):
"""Same as the day file handler, but some day-only bands are missing.
This matches what happens in real world files where reflectance bands
are removed in night data to save space.
"""

M_BANDS = FakeNetCDF4FileHandlerDay.M_BT_BANDS
I_BANDS = FakeNetCDF4FileHandlerDay.I_BT_BANDS


class TestVIIRSL1BReaderDay:
"""Test VIIRS L1B Reader."""

yaml_file = "viirs_l1b.yaml"
fake_cls = FakeNetCDF4FileHandlerDay
has_reflectance_bands = True

def setup_method(self):
"""Wrap NetCDF4 file handler with our own fake handler."""
from satpy._config import config_search_paths
from satpy.readers.viirs_l1b import VIIRSL1BFileHandler
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (FakeNetCDF4FileHandler2,))
self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (self.fake_cls,))
self.fake_handler = self.p.start()
self.p.is_local = True

@@ -182,6 +171,20 @@ def test_init(self):
# make sure we have some files
assert r.file_handlers

def test_available_datasets_m_bands(self):
"""Test available datasets for M band files."""
from satpy.readers import load_reader
r = load_reader(self.reader_configs)
loadables = r.select_files_from_pathnames([
'VL1BM_snpp_d20161130_t012400_c20161130054822.nc',
'VGEOM_snpp_d20161130_t012400_c20161130054822.nc',
])
r.create_filehandlers(loadables)
avail_names = r.available_dataset_names
angles = {"satellite_azimuth_angle", "satellite_zenith_angle", "solar_azimuth_angle", "solar_zenith_angle"}
geo = {"m_lon", "m_lat"}
assert set(avail_names) == set(self.fake_cls.M_BANDS) | angles | geo

def test_load_every_m_band_bt(self):
"""Test loading all M band brightness temperatures."""
from satpy.readers import load_reader
@@ -225,7 +228,7 @@ def test_load_every_m_band_refl(self):
'M09',
'M10',
'M11'])
assert len(datasets) == 11
assert len(datasets) == (11 if self.has_reflectance_bands else 0)
for v in datasets.values():
assert v.attrs['calibration'] == 'reflectance'
assert v.attrs['units'] == '%'
@@ -260,7 +263,7 @@ def test_load_every_m_band_rad(self):
make_dataid(name='M14', calibration='radiance'),
make_dataid(name='M15', calibration='radiance'),
make_dataid(name='M16', calibration='radiance')])
assert len(datasets) == 16
assert len(datasets) == (16 if self.has_reflectance_bands else 5)
for v in datasets.values():
assert v.attrs['calibration'] == 'radiance'
assert v.attrs['units'] == 'W m-2 um-1 sr-1'
@@ -311,3 +314,14 @@ def test_load_dnb_angles(self):
assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2
assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2
assert v.attrs['sensor'] == "viirs"


class TestVIIRSL1BReaderDayNight(TestVIIRSL1BReaderDay):
"""Test VIIRS L1b with night data.
Night data files don't have reflectance bands in them.
"""

fake_cls = FakeNetCDF4FileHandlerNight
has_reflectance_bands = False
