Remove the time_bnds #1246
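
Removes automatic `time_bnds` generation from the CF writer: the `make_time_bounds()` helper, the `start_times`/`end_times` bookkeeping in `_collect_datasets()`, the default `time_bnds` encoding, and the corresponding tests are all deleted, and the minimum xarray requirement is raised to 0.14.1.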

Closed · wants to merge 9 commits
102 changes: 6 additions & 96 deletions satpy/tests/writer_tests/test_cf.py
@@ -87,8 +87,7 @@ def test_save_with_compression(self):
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
-with mock.patch('satpy.writers.cf_writer.xr.Dataset') as xrdataset,\
-mock.patch('satpy.writers.cf_writer.make_time_bounds'):
+with mock.patch('satpy.writers.cf_writer.xr.Dataset') as xrdataset:
scn['test-array'] = xr.DataArray([1, 2, 3],
attrs=dict(start_time=start_time,
end_time=end_time,
@@ -279,91 +278,6 @@ def test_single_time_value(self):
scn.save_datasets(filename=filename, writer='cf')
with xr.open_dataset(filename, decode_cf=True) as f:
np.testing.assert_array_equal(f['time'], scn['test-array']['time'])
-bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]')
-np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
-
-def test_bounds(self):
-"""Test setting time bounds."""
-from satpy import Scene
-import xarray as xr
-scn = Scene()
-start_time = datetime(2018, 5, 30, 10, 0)
-end_time = datetime(2018, 5, 30, 10, 15)
-test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
-scn['test-array'] = xr.DataArray(test_array,
-dims=['x', 'y', 'time'],
-coords={'time': [np.datetime64('2018-05-30T10:05:00')]},
-attrs=dict(start_time=start_time,
-end_time=end_time))
-with TempFile() as filename:
-scn.save_datasets(filename=filename, writer='cf')
-# Check decoded time coordinates & bounds
-with xr.open_dataset(filename, decode_cf=True) as f:
-bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]')
-np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
-self.assertEqual(f['time'].attrs['bounds'], 'time_bnds')
-
-# Check raw time coordinates & bounds
-with xr.open_dataset(filename, decode_cf=False) as f:
-np.testing.assert_almost_equal(f['time_bnds'], [[-0.0034722, 0.0069444]])
-
-# User-specified time encoding should have preference
-with TempFile() as filename:
-time_units = 'seconds since 2018-01-01'
-scn.save_datasets(filename=filename, encoding={'time': {'units': time_units}},
-writer='cf')
-with xr.open_dataset(filename, decode_cf=False) as f:
-np.testing.assert_array_equal(f['time_bnds'], [[12909600, 12910500]])
-
-def test_bounds_minimum(self):
-"""Test minimum bounds."""
-from satpy import Scene
-import xarray as xr
-scn = Scene()
-start_timeA = datetime(2018, 5, 30, 10, 0)  # expected to be used
-end_timeA = datetime(2018, 5, 30, 10, 20)
-start_timeB = datetime(2018, 5, 30, 10, 3)
-end_timeB = datetime(2018, 5, 30, 10, 15)  # expected to be used
-test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
-test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1)
-scn['test-arrayA'] = xr.DataArray(test_arrayA,
-dims=['x', 'y', 'time'],
-coords={'time': [np.datetime64('2018-05-30T10:05:00')]},
-attrs=dict(start_time=start_timeA,
-end_time=end_timeA))
-scn['test-arrayB'] = xr.DataArray(test_arrayB,
-dims=['x', 'y', 'time'],
-coords={'time': [np.datetime64('2018-05-30T10:05:00')]},
-attrs=dict(start_time=start_timeB,
-end_time=end_timeB))
-with TempFile() as filename:
-scn.save_datasets(filename=filename, writer='cf')
-with xr.open_dataset(filename, decode_cf=True) as f:
-bounds_exp = np.array([[start_timeA, end_timeB]], dtype='datetime64[m]')
-np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
-
-def test_bounds_missing_time_info(self):
-"""Test time bounds generation in case of missing time."""
-from satpy import Scene
-import xarray as xr
-scn = Scene()
-start_timeA = datetime(2018, 5, 30, 10, 0)
-end_timeA = datetime(2018, 5, 30, 10, 15)
-test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
-test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1)
-scn['test-arrayA'] = xr.DataArray(test_arrayA,
-dims=['x', 'y', 'time'],
-coords={'time': [np.datetime64('2018-05-30T10:05:00')]},
-attrs=dict(start_time=start_timeA,
-end_time=end_timeA))
-scn['test-arrayB'] = xr.DataArray(test_arrayB,
-dims=['x', 'y', 'time'],
-coords={'time': [np.datetime64('2018-05-30T10:05:00')]})
-with TempFile() as filename:
-scn.save_datasets(filename=filename, writer='cf')
-with xr.open_dataset(filename, decode_cf=True) as f:
-bounds_exp = np.array([[start_timeA, end_timeA]], dtype='datetime64[m]')
-np.testing.assert_array_equal(f['time_bnds'], bounds_exp)

def test_encoding_kwarg(self):
"""Test 'encoding' keyword argument."""
@@ -627,13 +541,11 @@ def test_collect_datasets(self, *mocks):

# Collect datasets
writer = CFWriter()
-datas, start_times, end_times = writer._collect_datasets(datasets, include_lonlats=True)
+datas = writer._collect_datasets(datasets, include_lonlats=True)

# Test results
self.assertEqual(len(datas), 3)
self.assertEqual(set(datas.keys()), {'var1', 'var2', 'geos'})
-self.assertListEqual(start_times, [None, tstart, None])
-self.assertListEqual(end_times, [None, tend, None])
var1 = datas['var1']
var2 = datas['var2']
self.assertEqual(var1.name, 'var1')
@@ -1147,8 +1059,8 @@ def test_collect_datasets_with_latitude_named_lat(self, *mocks):

# Collect datasets
writer = CFWriter()
-datas, start_times, end_times = writer._collect_datasets(self.datasets_list, include_lonlats=True)
-datas2, start_times, end_times = writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True)
+datas = writer._collect_datasets(self.datasets_list, include_lonlats=True)
+datas2 = writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True)
# Test results

self.assertEqual(len(datas), 5)
@@ -1234,10 +1146,8 @@ def test_with_time(self):
'bar': {'chunksizes': (1, 1, 1)},
'time': {'_FillValue': None,
'calendar': 'proleptic_gregorian',
-'units': 'days since 2009-07-01 12:15:00'},
-'time_bnds': {'_FillValue': None,
-'calendar': 'proleptic_gregorian',
-'units': 'days since 2009-07-01 12:15:00'}})
+'units': 'days since 2009-07-01 12:15:00'}}
+)

# User-defined encoding may not be altered
self.assertDictEqual(kwargs['encoding'], {'bar': {'chunksizes': (1, 1, 1)}})
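
Note: with the bounds tests above removed, files produced by the CF writer no longer contain a `time_bnds` variable. For downstream code that still wants CF-style time bounds, here is a minimal sketch of attaching them by hand; all names, times, and the output filename are illustrative, not part of satpy's API:

```python
import numpy as np
import xarray as xr

# Illustrative values; real code would take these from the scene/dataset attrs.
start_time = np.datetime64('2018-05-30T10:00:00')
end_time = np.datetime64('2018-05-30T10:15:00')

ds = xr.Dataset(coords={'time': [np.datetime64('2018-05-30T10:05:00')]})
# CF conventions: bounds live in a (time, 2) variable that the time
# coordinate points to via its 'bounds' attribute.
ds['time_bnds'] = xr.DataArray([[start_time, end_time]], dims=['time', 'bnds'])
ds['time'].attrs['bounds'] = 'time_bnds'
ds.to_netcdf('scene_with_bounds.nc')
```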
26 changes: 2 additions & 24 deletions satpy/writers/cf_writer.py
@@ -208,17 +208,6 @@ def area2cf(dataarray, strict=False, got_lonlats=False):
return res


-def make_time_bounds(start_times, end_times):
-"""Create time bounds for the current *dataarray*."""
-start_time = min(start_time for start_time in start_times
-if start_time is not None)
-end_time = min(end_time for end_time in end_times
-if end_time is not None)
-data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]],
-dims=['time', 'bnds_1d'])
-return data
-
-
def assert_xy_unique(datas):
"""Check that all datasets share the same projection coordinates x/y."""
unique_x = set()
@@ -470,11 +459,7 @@ def _set_default_time_encoding(encoding, dataset):
default = CFDatetimeCoder().encode(xr.DataArray(dtnp64))
time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']}
time_enc.update(encoding.get('time', {}))
-bounds_enc = {'units': time_enc['units'],
-'calendar': time_enc['calendar'],
-'_FillValue': None}
encoding['time'] = time_enc
-encoding['time_bnds'] = bounds_enc  # FUTURE: Not required anymore with xarray-0.14+


def _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
@@ -630,8 +615,6 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_
ds_collection.update(get_extra_ds(ds))
got_lonlats = has_projection_coords(ds_collection)
datas = {}
-start_times = []
-end_times = []
# sort by name, but don't use the name
for _, ds in sorted(ds_collection.items()):
if ds.dtype not in CF_DTYPES:
@@ -644,8 +627,6 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_
except KeyError:
new_datasets = [ds]
for new_ds in new_datasets:
-start_times.append(new_ds.attrs.get("start_time", None))
-end_times.append(new_ds.attrs.get("end_time", None))
new_var = self.da2cf(new_ds, epoch=epoch, flatten_attrs=flatten_attrs,
exclude_attrs=exclude_attrs, compression=compression,
include_orig_name=include_orig_name,
@@ -657,7 +638,7 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_
link_coords(datas)
datas = make_alt_coords_unique(datas, pretty=pretty)

-return datas, start_times, end_times
+return datas

def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH,
flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False,
@@ -753,15 +734,12 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
# Write datasets to groups (appending to the file; group=None means no group)
for group_name, group_datasets in groups_.items():
# XXX: Should we combine the info of all datasets?
-datas, start_times, end_times = self._collect_datasets(
+datas = self._collect_datasets(
group_datasets, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs,
include_lonlats=include_lonlats, pretty=pretty, compression=compression,
include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix)
dataset = xr.Dataset(datas)
if 'time' in dataset:
-dataset['time_bnds'] = make_time_bounds(start_times,
-end_times)
-dataset['time'].attrs['bounds'] = "time_bnds"
dataset['time'].attrs['standard_name'] = "time"
else:
grp_str = ' of group {}'.format(group_name) if group_name is not None else ''
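
Dropping the default `time_bnds` encoding ties in with the deleted `# FUTURE: Not required anymore with xarray-0.14+` comment: newer xarray versions are expected to encode a coordinate's bounds variable with the coordinate's own units/calendar, per CF conventions. A rough sketch of the behavior this relies on (filename and values illustrative; assumes xarray >= 0.14.1, the new floor in setup.py):

```python
import numpy as np
import xarray as xr

ds = xr.Dataset(coords={'time': [np.datetime64('2018-05-30T10:05:00')]})
ds['time_bnds'] = xr.DataArray(
    [[np.datetime64('2018-05-30T10:00:00'),
      np.datetime64('2018-05-30T10:15:00')]],
    dims=['time', 'bnds'])
ds['time'].attrs['bounds'] = 'time_bnds'

# Only 'time' gets an explicit encoding; xarray is expected to encode the
# bounds variable with the same units/calendar, so no separate 'time_bnds'
# encoding entry is needed anymore.
ds.to_netcdf('out.nc', encoding={'time': {'units': 'seconds since 2018-01-01'}})
```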
2 changes: 1 addition & 1 deletion setup.py
@@ -31,7 +31,7 @@
pass

requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.11.0', 'trollsift',
-'trollimage >1.10.1', 'pykdtree', 'pyyaml', 'xarray >=0.10.1, !=0.13.0',
+'trollimage >1.10.1', 'pykdtree', 'pyyaml', 'xarray >=0.14.1, !=0.13.0',
'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs',
'pooch']

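The raised xarray floor (`>=0.14.1`) is consistent with the removed FUTURE comment in cf_writer.py: per that comment, the explicit `time_bnds` encoding deleted here appears to have been needed only on older xarray releases.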