Remove the time_bnds #1246

Closed · wants to merge 9 commits
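Summary of the change, as read from the diff below: the CF writer no longer builds a time_bnds variable automatically. The make_time_bounds() helper is deleted, _collect_datasets() stops gathering per-dataset start_time/end_time attributes, the default time encoding drops its time_bnds entry, and save_datasets() no longer attaches time_bnds to the output dataset. For reference, a minimal sketch of what the removed helper produced, with hypothetical input values (illustration only, not satpy API); note that, as in the deleted code, min() is used for the end time as well:

import numpy as np
import xarray as xr

start_times = [np.datetime64('2020-01-01T12:00'), None]
end_times = [np.datetime64('2020-01-01T12:15'), None]

# Pick the earliest non-None start/end time, as the removed helper did.
start_time = min(t for t in start_times if t is not None)
end_time = min(t for t in end_times if t is not None)  # min(), mirroring the deleted code
time_bnds = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]],
                         dims=['time', 'bnds_1d'])
# -> DataArray of shape (time: 1, bnds_1d: 2) holding the bounds pair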
satpy/writers/cf_writer.py (26 changes: 2 additions & 24 deletions)
@@ -208,17 +208,6 @@ def area2cf(dataarray, strict=False, got_lonlats=False):
     return res
 
 
-def make_time_bounds(start_times, end_times):
-    """Create time bounds for the current *dataarray*."""
-    start_time = min(start_time for start_time in start_times
-                     if start_time is not None)
-    end_time = min(end_time for end_time in end_times
-                   if end_time is not None)
-    data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]],
-                        dims=['time', 'bnds_1d'])
-    return data
-
-
 def assert_xy_unique(datas):
     """Check that all datasets share the same projection coordinates x/y."""
     unique_x = set()
@@ -470,11 +459,7 @@ def _set_default_time_encoding(encoding, dataset):
     default = CFDatetimeCoder().encode(xr.DataArray(dtnp64))
     time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']}
     time_enc.update(encoding.get('time', {}))
-    bounds_enc = {'units': time_enc['units'],
-                  'calendar': time_enc['calendar'],
-                  '_FillValue': None}
     encoding['time'] = time_enc
-    encoding['time_bnds'] = bounds_enc  # FUTURE: Not required anymore with xarray-0.14+
 
 
 def _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
@@ -630,8 +615,6 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_
             ds_collection.update(get_extra_ds(ds))
         got_lonlats = has_projection_coords(ds_collection)
         datas = {}
-        start_times = []
-        end_times = []
         # sort by name, but don't use the name
         for _, ds in sorted(ds_collection.items()):
             if ds.dtype not in CF_DTYPES:
@@ -644,8 +627,6 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_
             except KeyError:
                 new_datasets = [ds]
             for new_ds in new_datasets:
-                start_times.append(new_ds.attrs.get("start_time", None))
-                end_times.append(new_ds.attrs.get("end_time", None))
                 new_var = self.da2cf(new_ds, epoch=epoch, flatten_attrs=flatten_attrs,
                                      exclude_attrs=exclude_attrs, compression=compression,
                                      include_orig_name=include_orig_name,
@@ -657,7 +638,7 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_
         link_coords(datas)
         datas = make_alt_coords_unique(datas, pretty=pretty)
 
-        return datas, start_times, end_times
+        return datas
 
     def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH,
                       flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False,
@@ -753,15 +734,12 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
         # Write datasets to groups (appending to the file; group=None means no group)
         for group_name, group_datasets in groups_.items():
             # XXX: Should we combine the info of all datasets?
-            datas, start_times, end_times = self._collect_datasets(
+            datas = self._collect_datasets(
                 group_datasets, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs,
                 include_lonlats=include_lonlats, pretty=pretty, compression=compression,
                 include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix)
             dataset = xr.Dataset(datas)
             if 'time' in dataset:
-                dataset['time_bnds'] = make_time_bounds(start_times,
-                                                        end_times)
-                dataset['time'].attrs['bounds'] = "time_bnds"
                 dataset['time'].attrs['standard_name'] = "time"
             else:
                 grp_str = ' of group {}'.format(group_name) if group_name is not None else ''
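With this change, callers that still want CF time bounds in the written file would need to attach them themselves before saving, since save_datasets() no longer does it. A hedged sketch under that assumption, mirroring the attribute wiring removed above (the dataset contents and the output filename are hypothetical):

import numpy as np
import xarray as xr

# Build a dataset with a single time step and attach CF-style bounds manually.
dataset = xr.Dataset(coords={'time': [np.datetime64('2020-01-01T12:00')]})
dataset['time_bnds'] = xr.DataArray(
    [[np.datetime64('2020-01-01T12:00'), np.datetime64('2020-01-01T12:15')]],
    dims=['time', 'bnds_1d'])
dataset['time'].attrs['bounds'] = "time_bnds"    # CF convention: coordinate -> bounds variable
dataset['time'].attrs['standard_name'] = "time"
dataset.to_netcdf('scene.nc')                    # hypothetical output path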