Factorize and improve modis reader's interpolation #802

Merged 3 commits on Jun 10, 2019
Changes from 1 commit
74 changes: 41 additions & 33 deletions satpy/readers/hdfeos_base.py
@@ -15,6 +15,9 @@

# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.

"""Base HDF-EOS reader."""

import re
import logging

@@ -32,6 +35,7 @@


def interpolate(clons, clats, csatz, src_resolution, dst_resolution):
"""Interpolate two parallel datasets jointly."""
from geotiepoints.modisinterpolator import modis_1km_to_250m, modis_1km_to_500m, modis_5km_to_1km

interpolation_functions = {
@@ -53,8 +57,10 @@ def interpolate(clons, clats, csatz, src_resolution, dst_resolution):


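The body of `interpolate` (collapsed in this diff) dispatches on the (source, destination) resolution pair before handing the work to the geotiepoints helpers imported above. The sketch below illustrates that dispatch pattern; the mapping contents and the error handling are assumptions for illustration, not text taken from the PR.

```python
from geotiepoints.modisinterpolator import (modis_1km_to_250m,
                                            modis_1km_to_500m,
                                            modis_5km_to_1km)


def interpolate_sketch(clons, clats, csatz, src_resolution, dst_resolution):
    """Illustrative dispatch on (src, dst) resolution; the mapping is assumed."""
    interpolation_functions = {
        (5000, 1000): modis_5km_to_1km,
        (1000, 500): modis_1km_to_500m,
        (1000, 250): modis_1km_to_250m,
    }
    try:
        interpolation_function = interpolation_functions[(src_resolution, dst_resolution)]
    except KeyError:
        raise NotImplementedError("Interpolation from {}m to {}m not implemented".format(
            src_resolution, dst_resolution))
    # Each geotiepoints helper is assumed to take the two coarse arrays plus the
    # sensor zenith angle and to return both arrays interpolated to the finer grid.
    return interpolation_function(clons, clats, csatz)
```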
class HDFEOSBaseFileReader(BaseFileHandler):
"""Base file handler for HDF EOS data for both L1b and L2 products. """
"""Base file handler for HDF EOS data for both L1b and L2 products."""

def __init__(self, filename, filename_info, filetype_info):
"""Initialize the base reader."""
BaseFileHandler.__init__(self, filename, filename_info, filetype_info)
try:
self.sd = SD(self.filename)
@@ -73,6 +79,7 @@ def __init__(self, filename, filename_info, filetype_info):

@staticmethod
def read_mda(attribute):
"""Read the EOS metadata."""
lines = attribute.split('\n')
mda = {}
current_dict = mda
@@ -116,12 +123,14 @@ def read_mda(attribute):

@property
def start_time(self):
"""Get the start time of the dataset."""
date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' +
self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE'])
return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f')

@property
def end_time(self):
"""Get the end time of the dataset."""
date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' +
self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE'])
return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f')
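As a quick illustration of the metadata layout these two properties rely on, the snippet below builds a hypothetical subtree with the same nesting and parses it with the same format string; only the nesting and the strptime format come from the code above, the values are invented.

```python
from datetime import datetime

# Hypothetical metadata subtree shaped like the parsed EOS inventory metadata.
metadata = {'INVENTORYMETADATA': {'RANGEDATETIME': {
    'RANGEBEGINNINGDATE': {'VALUE': '2019-06-10'},
    'RANGEBEGINNINGTIME': {'VALUE': '10:20:30.123456'},
}}}

date = (metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' +
        metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE'])
print(datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f'))
# 2019-06-10 10:20:30.123456
```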
@@ -137,7 +146,7 @@ def _read_dataset_in_file(self, dataset_name):
return dataset

def load_dataset(self, dataset_name):
"""Load the dataset from HDF EOS file. """
"""Load the dataset from HDF EOS file."""
from satpy.readers.hdf4_utils import from_sds

dataset = self._read_dataset_in_file(dataset_name)
@@ -164,7 +173,7 @@ def load_dataset(self, dataset_name):


class HDFEOSGeoReader(HDFEOSBaseFileReader):
"""Handler for the geographical datasets. """
"""Handler for the geographical datasets."""

# list of geographical datasets handled by the georeader
# mapping to the default variable name if not specified in YAML
@@ -178,11 +187,13 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader):
}

def __init__(self, filename, filename_info, filetype_info):
"""Initialize the geographical reader."""
HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info)
self.cache = {}

@staticmethod
def read_geo_resolution(metadata):
"""Parses metadata to find the geolocation resolution.
"""Parse metadata to find the geolocation resolution.

It is implemented as a staticmethod to match the read_mda pattern.

@@ -217,7 +228,7 @@ def geo_resolution(self):
return self.read_geo_resolution(self.metadata)

def _load_ds_by_name(self, ds_name):
"""Helper to attempt loading using multiple common names."""
"""Attempt loading using multiple common names."""
var_names = self.DATASET_NAMES[ds_name]
if isinstance(var_names, (list, tuple)):
try:
@@ -226,6 +237,21 @@ def _load_ds_by_name(self, ds_name):
return self.load_dataset(var_names[1])
return self.load_dataset(var_names)
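The fallback above relies on DATASET_NAMES entries that are either a single variable name or a pair of candidate names (the mapping itself is collapsed in this diff). The sketch below mimics that lookup order with invented entries; only the "string or pair of strings" shape is taken from the code above.

```python
# Invented entries for illustration; the real mapping is not shown in this diff.
DATASET_NAMES = {
    'longitude': 'Longitude',                            # single name: loaded directly
    'solar_zenith_angle': ('SolarZenith', 'SolZenAng'),  # pair: try the first, fall back to the second
}


def pick_variable(ds_name, available):
    """Mimic the try/fallback order used by _load_ds_by_name."""
    var_names = DATASET_NAMES[ds_name]
    if isinstance(var_names, (list, tuple)):
        return var_names[0] if var_names[0] in available else var_names[1]
    return var_names


print(pick_variable('solar_zenith_angle', {'SolZenAng'}))  # SolZenAng
```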

def get_interpolated_dataset(self, name1, name2, resolution, sensor_zenith, offset=0):
"""Load and interpolate datasets."""
try:
result1 = self.cache[(name1, resolution)]
result2 = self.cache[(name2, resolution)]
except KeyError:
result1 = self._load_ds_by_name(name1)
result2 = self._load_ds_by_name(name2) - offset
result1, result2 = interpolate(
result1, result2, sensor_zenith,
self.geo_resolution, resolution
)
self.cache[(name1, resolution)] = result1
self.cache[(name2, resolution)] = result2 + offset
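A minimal usage sketch of this new caching helper, not part of the diff: `geo` is assumed to be an HDFEOSGeoReader built from a real 1 km geolocation file. One call interpolates a pair of datasets jointly and caches both, so a later request for either member at the same resolution is served from the cache.

```python
# Hypothetical driver code; `geo` is an already-constructed HDFEOSGeoReader.
sensor_zenith = geo._load_ds_by_name('satellite_zenith_angle')

# Interpolate azimuth and (zenith - 90) jointly to 500 m, then cache both,
# with the 90 degree offset added back to the zenith angles.
geo.get_interpolated_dataset('satellite_azimuth_angle', 'satellite_zenith_angle',
                             500, sensor_zenith, offset=90)

azimuth_500 = geo.cache[('satellite_azimuth_angle', 500)]
zenith_500 = geo.cache[('satellite_zenith_angle', 500)]
```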

def get_dataset(self, dataset_keys, dataset_info):
"""Get the geolocation dataset."""
# Name of the dataset as it appears in the HDF EOS file
@@ -249,39 +275,21 @@ def get_dataset(self, dataset_keys, dataset_info):
"configured".format(dataset_name))

# The data must be interpolated
interpolated_dataset = {}
sensor_zenith = self._load_ds_by_name(dataset_name)
sensor_zenith = self._load_ds_by_name('satellite_zenith_angle')
logger.debug("Loading %s", dataset_name)
if dataset_name in ['longitude', 'latitude']:
longitude = self._load_ds_by_name('longitude')
latitude = self._load_ds_by_name('latitude')
longitude, latitude = interpolate(
longitude, latitude, sensor_zenith,
self.geo_resolution, resolution
)
interpolated_dataset['longitude'] = longitude
interpolated_dataset['latitude'] = latitude
self.get_interpolated_dataset('longitude', 'latitude',
resolution, sensor_zenith)
elif dataset_name in ['satellite_azimuth_angle', 'satellite_zenith_angle']:
# Sensor dataset names differ between L1b and L2 products
sensor_azimuth_a = self._load_ds_by_name('satellite_azimuth_angle')
sensor_azimuth_b = self._load_ds_by_name('satellite_zenith_angle') - 90
sensor_azimuth_a, sensor_azimuth_b = interpolate(
sensor_azimuth_a, sensor_azimuth_b, sensor_zenith,
self.geo_resolution, resolution
)
interpolated_dataset['satellite_azimuth_angle'] = sensor_azimuth_a
interpolated_dataset['satellite_zenith_angle'] = sensor_azimuth_b + 90
self.get_interpolated_dataset('satellite_azimuth_angle', 'satellite_zenith_angle',
resolution, sensor_zenith, offset=90)
elif dataset_name in ['solar_azimuth_angle', 'solar_zenith_angle']:
# Sensor dataset names differ between L1b and L2 products
solar_azimuth_a = self._load_ds_by_name('solar_azimuth_angle')
solar_azimuth_b = self._load_ds_by_name('solar_zenith_angle') - 90
solar_azimuth_a, solar_azimuth_b = interpolate(
solar_azimuth_a, solar_azimuth_b, sensor_zenith,
self.geo_resolution, resolution
)
interpolated_dataset['solar_azimuth_angle'] = solar_azimuth_a
interpolated_dataset['solar_zenith_angle'] = solar_azimuth_b + 90

data = interpolated_dataset[dataset_name]
self.get_interpolated_dataset('solar_azimuth_angle', 'solar_zenith_angle',
resolution, sensor_zenith, offset=90)

data = self.cache[dataset_name, resolution]
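The offset=90 used for the angle pairs above presumably reflects that the geotiepoints routines were written for (longitude, latitude) pairs: zenith angles span 0 to 180 degrees, so they are shifted into a latitude-like -90 to 90 range before interpolation and shifted back when the cache is filled. The rationale is an inference, but the arithmetic below mirrors the offset handling in get_interpolated_dataset.

```python
import numpy as np

zenith = np.array([0.0, 45.0, 135.0, 180.0])  # zenith angles in degrees
latitude_like = zenith - 90                   # [-90, -45, 45, 90]: latitude-like range
# ... joint interpolation with the azimuth angles would happen here ...
restored = latitude_like + 90                 # original range recovered when filling the cache
print(restored)                               # [  0.  45. 135. 180.]
```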

for key in ('standard_name', 'units'):
if key in dataset_info: