
Handle annual/monthly summary products with EO3 metadata correctly.
phaesler committed Oct 25, 2019
1 parent 73b3572 commit 3a9f2e9
Showing 5 changed files with 67 additions and 23 deletions.
52 changes: 31 additions & 21 deletions datacube_wms/data.py
@@ -18,7 +18,7 @@

from datacube_wms.cube_pool import cube

from datacube_wms.wms_layers import get_service_cfg
from datacube_wms.wms_layers import get_service_cfg, TIMERES_RAW, TIMERES_YR, TIMERES_MON
from datacube_wms.wms_utils import img_coords_to_geopoint , GetMapParameters, \
GetFeatureInfoParameters, solar_correct_data
from datacube_wms.ogc_utils import resp_headers, local_solar_date_range, local_date, dataset_center_time, \
@@ -34,22 +34,6 @@

tracer = get_opencensus_tracer()

# Read data for given datasets and measurements per the output_geobox
@log_call
@opencensus_trace_call(tracer=tracer)
def read_data(datasets, measurements, geobox, resampling=Resampling.nearest, **kwargs):
if not hasattr(datasets, "__iter__"):
datasets = [datasets]

datasets = datacube.Datacube.group_datasets(datasets, 'solar_day')
data = datacube.Datacube.load_data(
datasets,
geobox,
measurements=measurements,
fuse_func=kwargs.get('fuse_func', None))
# maintain compatibility with functions not expecting the data to have a time dimension
return data.squeeze(dim='time', drop=True)


class DataStacker():
@log_call
@@ -64,7 +48,13 @@ def __init__(self, product, geobox, time, resampling=None, style=None, bands=Non
self._needed_bands = [ self._product.band_idx.band(b) for b in bands ]
else:
self._needed_bands = self._product.band_idx.native_bands.index
self._time = local_solar_date_range(geobox, time)

if self._product.time_resolution == TIMERES_MON:
self._time = time
elif self._product.time_resolution == TIMERES_YR:
self._time = str(time.year)
else:
self._time = local_solar_date_range(geobox, time)

def needed_bands(self):
return self._needed_bands
@@ -128,7 +118,7 @@ def data(self, datasets, mask=False, manual_merge=False, skip_corrections=False,
# process the data for the datasets individually to do solar correction.
merged = None
for ds in datasets:
d = read_data(ds, measurements, self._geobox, **kwargs)
d = self.read_data(ds, measurements, self._geobox, **kwargs)
for band in self.needed_bands():
if band != self._product.pq_band:
d[band] = solar_correct_data(d[band], ds)
@@ -138,7 +128,7 @@ def data(self, datasets, mask=False, manual_merge=False, skip_corrections=False,
merged = merged.combine_first(d)
return merged
else:
data = read_data(datasets, measurements, self._geobox, self._resampling, **kwargs)
data = self.read_data(datasets, measurements, self._geobox, self._resampling, **kwargs)
return data

@log_call
@@ -152,7 +142,7 @@ def manual_data_stack(self, datasets, measurements, mask, skip_corrections, **kw
else:
bands = self.needed_bands()
for ds in datasets:
d = read_data(ds, measurements, self._geobox, **kwargs)
d = self.read_data(ds, measurements, self._geobox, **kwargs)
extent_mask = None
for band in bands:
for f in self._product.extent_mask_func:
@@ -175,6 +165,26 @@ def manual_data_stack(self, datasets, measurements, mask, skip_corrections, **kw
merged[band].attrs = d[band].attrs
return merged

# Read data for given datasets and measurements per the output_geobox
@log_call
@opencensus_trace_call(tracer=tracer)
def read_data(self, datasets, measurements, geobox, resampling=Resampling.nearest, **kwargs):
if not hasattr(datasets, "__iter__"):
datasets = [datasets]

if self._product.time_resolution == TIMERES_RAW:
datasets = datacube.Datacube.group_datasets(datasets, 'solar_day')
else:
datasets = datacube.Datacube.group_datasets(datasets, 'time')
data = datacube.Datacube.load_data(
datasets,
geobox,
measurements=measurements,
fuse_func=kwargs.get('fuse_func', None))
# maintain compatibility with functions not expecting the data to have a time dimension
return data.squeeze(dim='time', drop=True)



def bbox_to_geom(bbox, crs):
return datacube.utils.geometry.box(bbox.left, bbox.bottom, bbox.right, bbox.top, crs)
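
The heart of the change is above: read_data is now a DataStacker method so that it can consult the product's time_resolution, grouping raw scenes by 'solar_day' but pre-summarised datasets by plain 'time', while __init__ builds a matching time query. A minimal, self-contained sketch of that query selection (build_time_query and solar_day_range are illustrative stand-ins, not names from the codebase):

    from datetime import date

    TIMERES_RAW, TIMERES_MON, TIMERES_YR = "raw", "month", "year"

    def build_time_query(time_resolution, t, solar_day_range=None):
        if time_resolution == TIMERES_MON:
            # Monthly summary: pass the date through; ODC matches the
            # dataset whose validity period covers it.
            return t
        if time_resolution == TIMERES_YR:
            # Yearly summary: a bare year string such as "2018".
            return str(t.year)
        # Raw sub-day data: a local solar-day range, computed from the
        # geobox by ogc_utils.local_solar_date_range in the real code.
        return solar_day_range

    assert build_time_query(TIMERES_YR, date(2018, 6, 1)) == "2018"
    assert build_time_query(TIMERES_MON, date(2018, 6, 1)) == date(2018, 6, 1)
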
8 changes: 6 additions & 2 deletions datacube_wms/ogc_utils.py
@@ -22,15 +22,19 @@
tf = TimezoneFinder(in_memory=True)

# Use metadata time if possible as this is what WMS uses to calculate its temporal extents
# datacube-core center time accessed through the dataset API is caluclated and may
# datacube-core center time accessed through the dataset API is calculated and may
# not agree with the metadata document
def dataset_center_time(dataset):
center_time = dataset.center_time
try:
metadata_time = dataset.metadata_doc['extent']['center_dt']
center_time = parse(metadata_time)
except KeyError:
pass
try:
metadata_time = dataset.metadata_doc['properties']['dtr:start_datetime']
center_time = parse(metadata_time)
except KeyError:
pass
return center_time


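
The ogc_utils.py hunk makes dataset_center_time EO3-aware: after the old eodataset extent.center_dt lookup, it now also tries properties['dtr:start_datetime'], and because that lookup runs last it takes precedence when both keys are present. A self-contained condensation (center_time_from_doc is a hypothetical name for illustration):

    from dateutil.parser import parse

    def center_time_from_doc(metadata_doc, default=None):
        center_time = default
        try:
            # Old-style eodataset metadata.
            center_time = parse(metadata_doc['extent']['center_dt'])
        except KeyError:
            pass
        try:
            # EO3-style metadata: the start of the summary period wins.
            center_time = parse(metadata_doc['properties']['dtr:start_datetime'])
        except KeyError:
            pass
        return center_time

    eo3_doc = {"properties": {"dtr:start_datetime": "2018-01-01T00:00:00Z"}}
    print(center_time_from_doc(eo3_doc))  # 2018-01-01 00:00:00+00:00
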
13 changes: 13 additions & 0 deletions datacube_wms/wms_cfg_example.py
@@ -293,6 +293,19 @@
# Bands to always fetch from the Datacube, even if it is not used by the active style.
# Useful for when a particular band is always needed for the extent_mask_func,
"always_fetch_bands": [ ],
# Time resolution of the product. Controls the way ODC time query parameters are generated.
#
# Defaults to "raw". Supported values are:
#
# "raw": The default. For sub-day time resolution, e.g. raw (unsummarised) EO data.
# Also works fine for DEA packaged summary data where the "from" and "to" dates
# are both set to the start of the summary time period.
#
# "month":
# "year" : For data summarised to monthly or yearly resolution respectively.
# Needed for e.g. summary data with EO3-style metadata with the "from" and "to"
# dates set to the start and end of the summary time period.
"time_resolution": "raw",
# Apply corrections for solar angle, for "Level 1" products.
# (Defaults to false - should not be used for NBAR/NBAR-T or other Analysis Ready products
"apply_solar_corrections": False,
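
Put together, a product entry for an annual summary product gains just one line. The snippet below is hypothetical (product and layer names are illustrative, and the many other required keys are omitted):

    {
        "label": "Annual Geomedian",
        "name": "ls8_geomedian_annual",
        "product_name": "ls8_geomedian_annual",
        # EO3 summary metadata spans the whole calendar year, so query
        # ODC by year rather than by local solar day:
        "time_resolution": "year",
    },
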
11 changes: 11 additions & 0 deletions datacube_wms/wms_layers.py
@@ -122,6 +122,13 @@ def __init__(self, cfg):
self.format = cfg["format"]


TIMERES_RAW = "raw"
TIMERES_MON = "month"
TIMERES_YR = "year"

TIMERES_VALS = [ TIMERES_RAW, TIMERES_MON, TIMERES_YR ]


class ProductLayerDef(object):
# pylint: disable=invalid-name, too-many-instance-attributes, bare-except, too-many-statements
def __init__(self, product_cfg, platform_def, dc):
@@ -170,6 +177,10 @@ def __init__(self, product_cfg, platform_def, dc):
self.pq_name = self.pq_names[0] if self.pq_names is not None and len(self.pq_names) > 0 else None
self.pq_band = product_cfg.get("pq_band")

self.time_resolution = product_cfg.get("time_resolution", TIMERES_RAW)
if self.time_resolution not in TIMERES_VALS:
raise ProductLayerException("Invalid time resolution value: %s" % self.time_resolution)

self.min_zoom = product_cfg.get("min_zoom_factor", 300.0)
self.max_datasets_wms = product_cfg.get("max_datasets_wms", 0)
self.zoom_fill = product_cfg.get("zoomed_out_fill_colour", [150, 180, 200])
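
These constants and the config check above are the whole contract for the new key: anything outside the three supported values fails at config-load time rather than at query time. A quick sketch of the behaviour (with ValueError standing in for ProductLayerException):

    TIMERES_VALS = ["raw", "month", "year"]

    def check_time_resolution(product_cfg):
        time_resolution = product_cfg.get("time_resolution", "raw")
        if time_resolution not in TIMERES_VALS:
            raise ValueError("Invalid time resolution value: %s" % time_resolution)
        return time_resolution

    assert check_time_resolution({}) == "raw"                            # default
    assert check_time_resolution({"time_resolution": "year"}) == "year"
    # check_time_resolution({"time_resolution": "day"})  ->  ValueError
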
6 changes: 6 additions & 0 deletions update_ranges.py
@@ -3,6 +3,7 @@
from datacube_wms.product_ranges import update_all_ranges, get_sqlconn, add_product_range, add_multiproduct_range, add_all, update_range
from datacube import Datacube
from psycopg2.sql import SQL, Identifier
import os
import click

@click.command()
@@ -29,6 +30,11 @@ def main(product, multiproduct, merge_only, calculate_extent, schema, role):
print("Sorry, cannot update schema without specifying a role")
return 1

if os.environ.get("PYDEV_DEBUG"):
import pydevd_pycharm
pydevd_pycharm.settrace('172.17.0.1', port=12321, stdoutToServer=True, stderrToServer=True)


dc = Datacube(app="wms_update_ranges")
if schema:
print("Checking schema....")
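
The update_ranges.py hunk is a developer convenience unrelated to the metadata fix: when the PYDEV_DEBUG environment variable is set, the script attaches to a PyCharm remote-debug server before doing any work (172.17.0.1 is typically the Docker bridge gateway, where the IDE would listen on port 12321). Keeping the import inside the guard means pydevd_pycharm need not be installed for normal runs; a debug session would be started with something like PYDEV_DEBUG=1 python update_ranges.py.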
