Add assembled_lat_bounds, assembled_lon_bounds and time variables #1091

Merged: 16 commits, Mar 25, 2020
20 changes: 20 additions & 0 deletions satpy/etc/readers/tropomi_l2.yaml
@@ -24,3 +24,23 @@ datasets:
    file_key: 'PRODUCT/longitude'
    coordinates: [longitude, latitude]
    standard_name: longitude
  latitude_bounds:
    name: 'latitude_bounds'
    file_type: tropomi_l2
    file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'
    standard_name: latitude_bounds
  longitude_bounds:
    name: 'longitude_bounds'
    file_type: tropomi_l2
    file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'
    standard_name: longitude_bounds
  assembled_lat_bounds:
    name: 'assembled_lat_bounds'
    file_type: tropomi_l2
    file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'
    standard_name: assembled_latitude_bounds
  assembled_lon_bounds:
    name: 'assembled_lon_bounds'
    file_type: tropomi_l2
    file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'
    standard_name: assembled_longitude_bounds
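
With these YAML entries in place, the new datasets can be requested by name through Satpy's usual Scene interface. A minimal sketch, assuming a local TROPOMI NO2 granule (the path and glob pattern are placeholders):

from glob import glob
from satpy import Scene

# Path is hypothetical; any file matched by the tropomi_l2 reader works
filenames = glob('/data/S5P_OFFL_L2__NO2____*.nc')
scn = Scene(filenames=filenames, reader='tropomi_l2')
# Load both the raw per-pixel corner bounds and the assembled corner grids
scn.load(['latitude_bounds', 'longitude_bounds',
          'assembled_lat_bounds', 'assembled_lon_bounds'])
print(scn['assembled_lat_bounds'].shape)  # one larger than the data in y and x
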
44 changes: 41 additions & 3 deletions satpy/readers/tropomi_l2.py
@@ -32,6 +32,9 @@
from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4
import logging
import numpy as np
import xarray as xr
import dask.array as da
from satpy import CHUNK_SIZE

logger = logging.getLogger(__name__)

@@ -51,7 +54,7 @@ def end_time(self):

    @property
    def platform_shortname(self):
        """Get platform shortname."""
        return self.filename_info['platform_shortname']

    @property
@@ -78,6 +81,8 @@ def available_datasets(self, configured_datasets=None):

        # update previously configured datasets
        logger.debug("Starting previously configured variables loop...")
        # if bounds exist, we can assemble them later
        bounds_exist = 'latitude_bounds' in self and 'longitude_bounds' in self
        for is_avail, ds_info in (configured_datasets or []):

            # some other file handler knows how to load this
@@ -89,9 +94,13 @@ def available_datasets(self, configured_datasets=None):
            matches = self.file_type_matches(ds_info['file_type'])
            # we can confidently say that we can provide this dataset and can
            # provide more info
            assembled = var_name in ['assembled_lat_bounds', 'assembled_lon_bounds']
            if (matches and var_name in self) or (assembled and bounds_exist):
                logger.debug("Handling previously configured variable: %s", var_name)
                if not assembled:
                    # The assembled variables share their file_key with the raw
                    # bounds, so only the raw bounds are marked as handled here.
                    handled_variables.add(var_name)
                new_info = ds_info.copy()  # don't mess up the above yielded
                yield True, new_info
            elif is_avail is None:
@@ -158,6 +167,33 @@ def _rename_dims(self, data_arr):
            dims_dict['scanline'] = 'y'
        return data_arr.rename(dims_dict)

    def prepare_geo(self, bounds_data):
        """Prepare lat/lon bounds for pcolormesh.

        lat/lon bounds are ordered in the following way::

            3----2
            |    |
            0----1

        Extend longitudes and latitudes with one element to support
        "pcolormesh"::

            (X[i+1, j], Y[i+1, j])       (X[i+1, j+1], Y[i+1, j+1])
                          +--------+
                          | C[i,j] |
                          +--------+
            (X[i, j], Y[i, j])           (X[i, j+1], Y[i, j+1])

        """
        # Create the bottom array: corner 0 of every pixel plus corner 1 of
        # the last column
        bottom = np.hstack([bounds_data[:, :, 0], bounds_data[:, -1:, 1]])
        # Create the top array: corner 3 along the last scanline plus corner 2
        # of its last pixel
        top = np.hstack([bounds_data[-1, :, 3], bounds_data[-1, -1, 2]])
        # Stack vertically into an (n_scanline + 1, n_pixel + 1) corner grid
        dest = np.vstack([top, bottom])
        # Convert to DataArray, keeping the original attributes
        dask_dest = da.from_array(dest, chunks=CHUNK_SIZE)
        dest = xr.DataArray(dask_dest,
                            dims=('y', 'x'),
                            attrs=bounds_data.attrs
                            )
        return dest
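
Because the assembled grids have one more row and column than the data, they can be passed directly as the X and Y corner arguments of matplotlib's pcolormesh, which is the use case the docstring describes. A short plotting sketch, assuming a Scene scn with the NO2 product and both assembled bounds already loaded (the dataset names mirror the YAML above):

import matplotlib.pyplot as plt

lon_b = scn['assembled_lon_bounds'].values  # shape (ny + 1, nx + 1)
lat_b = scn['assembled_lat_bounds'].values  # shape (ny + 1, nx + 1)
no2 = scn['nitrogen_dioxide_total_column'].values  # shape (ny, nx)

# Each cell C[i, j] is drawn inside the quadrilateral spanned by the four
# surrounding corners, so X and Y must be one larger in each dimension
plt.pcolormesh(lon_b, lat_b, no2)
plt.colorbar()
plt.show()
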

    def get_dataset(self, ds_id, ds_info):
        """Get dataset."""
        logger.debug("Getting data for: %s", ds_id.name)
@@ -168,4 +204,6 @@ def get_dataset(self, ds_id, ds_info):
        data = data.squeeze()
        data = data.where(data != fill)
        data = self._rename_dims(data)
        if ds_id.name in ['assembled_lat_bounds', 'assembled_lon_bounds']:
            data = self.prepare_geo(data)
        return data
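
The masking step above uses xarray's where, which keeps values satisfying the condition and replaces the rest with NaN. A tiny standalone illustration of the idiom (the values are made up):

import numpy as np
import xarray as xr

fill = -999.0
data = xr.DataArray(np.array([[1.0, -999.0], [3.0, 4.0]]), dims=('y', 'x'))
masked = data.where(data != fill)
print(masked.values)  # [[ 1. nan]
                      #  [ 3.  4.]]
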
41 changes: 40 additions & 1 deletion satpy/tests/reader_tests/test_tropomi_l2.py
@@ -31,6 +31,8 @@
DEFAULT_FILE_SHAPE = (3246, 450)
DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
                              dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE)
DEFAULT_BOUND_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] * 4,
                               dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE + (4,))


class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler):
@@ -51,6 +53,8 @@ def get_test_content(self, filename, filename_info, filetype_info):

        file_content['PRODUCT/latitude'] = DEFAULT_FILE_DATA
        file_content['PRODUCT/longitude'] = DEFAULT_FILE_DATA
        file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'] = DEFAULT_BOUND_DATA
        file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'] = DEFAULT_BOUND_DATA

        if 'NO2' in filename:
            file_content['PRODUCT/nitrogen_dioxide_total_column'] = DEFAULT_FILE_DATA
@@ -65,12 +69,16 @@ def get_test_content(self, filename, filename_info, filetype_info):
        # convert to xarrays
        for key, val in file_content.items():
            if isinstance(val, np.ndarray):
                if 1 < val.ndim <= 2:
                    file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel'))
                elif val.ndim > 2:
                    file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel', 'corner'))
                else:
                    file_content[key] = DataArray(val)
        file_content['PRODUCT/latitude'].attrs['_FillValue'] = -999.0
        file_content['PRODUCT/longitude'].attrs['_FillValue'] = -999.0
        file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'].attrs['_FillValue'] = -999.0
        file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'].attrs['_FillValue'] = -999.0
        if 'NO2' in filename:
            file_content['PRODUCT/nitrogen_dioxide_total_column'].attrs['_FillValue'] = -999.0
        if 'SO2' in filename:
@@ -148,3 +156,34 @@ def test_load_so2(self):
        self.assertIsNotNone(d.attrs['area'])
        self.assertIn('y', d.dims)
        self.assertIn('x', d.dims)

    def test_load_bounds(self):
        """Load bounds dataset."""
        from satpy.readers import load_reader
        r = load_reader(self.reader_configs)
        with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray):
            loadables = r.select_files_from_pathnames([
                'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc',
            ])
            r.create_filehandlers(loadables)
        keys = ['latitude_bounds', 'longitude_bounds']
        ds = r.load(keys)
        self.assertEqual(len(ds), 2)
        for key in keys:
            self.assertEqual(ds[key].attrs['platform_shortname'], 'S5P')
            self.assertIn('y', ds[key].dims)
            self.assertIn('x', ds[key].dims)
            self.assertIn('corner', ds[key].dims)
            # check assembled bounds
            bottom = np.hstack([ds[key][:, :, 0], ds[key][:, -1:, 1]])
            top = np.hstack([ds[key][-1, :, 3], ds[key][-1, -1, 2]])
            dest = np.vstack([top, bottom])
            dest = xr.DataArray(dest,
                                dims=('y', 'x')
                                )
            dest.attrs = ds[key].attrs
            self.assertEqual(dest.attrs['platform_shortname'], 'S5P')
            self.assertIn('y', dest.dims)
            self.assertIn('x', dest.dims)
            self.assertEqual(DEFAULT_FILE_SHAPE[0] + 1, dest.shape[0])
            self.assertEqual(DEFAULT_FILE_SHAPE[1] + 1, dest.shape[1])
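
The final shape assertions follow directly from the assembly: the hstack appends one column of corner-1 values and the vstack prepends one row of corner-3/corner-2 values. A quick standalone check on a tiny (2, 3, 4) array, purely illustrative:

import numpy as np

bounds = np.arange(2 * 3 * 4).reshape(2, 3, 4)  # (scanline, ground_pixel, corner)
bottom = np.hstack([bounds[:, :, 0], bounds[:, -1:, 1]])  # (2, 4)
top = np.hstack([bounds[-1, :, 3], bounds[-1, -1, 2]])    # (4,)
dest = np.vstack([top, bottom])                           # (3, 4)
assert dest.shape == (bounds.shape[0] + 1, bounds.shape[1] + 1)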