odc-geo migration #1424

Merged
merged 24 commits on May 5, 2023
1 change: 1 addition & 0 deletions conda-environment.yml
@@ -36,3 +36,4 @@ dependencies:
- GeoAlchemy2
- xarray >=0.9
- toolz
- odc-geo
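
The rest of this PR imports from this new dependency. A minimal sketch of an import check (assuming odc-geo is installed from the environment above) covering the names used in the diffs below:

    # Quick check that the odc-geo surface this migration relies on resolves.
    from odc.geo import CRS, XY, Resolution              # core types
    from odc.geo.geom import box, point, line, polygon   # geometry constructors
    from odc.geo.geobox import GeoBox, GeoboxTiles       # gridded extents
    from odc.geo.xr import xr_coords                      # xarray coordinate helper

    print(CRS("EPSG:4326"))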
29 changes: 18 additions & 11 deletions datacube/api/core.py
@@ -15,10 +15,11 @@
from datacube.config import LocalConfig
from datacube.storage import reproject_and_fuse, BandInfo
from datacube.utils import ignore_exceptions_if
from datacube.utils import geometry
from odc.geo import CRS, XY, Resolution
from odc.geo.xr import xr_coords
from datacube.utils.dates import normalise_dt
from datacube.utils.geometry import intersects, GeoBox
from datacube.utils.geometry.gbox import GeoboxTiles
from odc.geo.geom import intersects, box, bbox_union
from odc.geo.geobox import GeoBox, GeoboxTiles
from datacube.model import ExtraDimensions
from datacube.model.utils import xr_apply

@@ -344,7 +345,7 @@ def load(self, product=None, measurements=None, output_crs=None, resolution=None

:param xarray.Dataset like:
Use the output of a previous :meth:`load()` to load data into the same spatial grid and
resolution (i.e. :class:`datacube.utils.geometry.GeoBox`).
resolution (i.e. :class:`odc.geo.geobox.GeoBox`).
E.g.::

pq = dc.load(product='ls5_pq_albers', like=nbar_dataset)
@@ -604,7 +605,7 @@ def empty_func(m, shape):
dims_default = tuple(coords) + geobox.dimensions

shape_default = tuple(c.size for k, c in coords.items() if k in dims_default) + geobox.shape
coords_default = OrderedDict(**coords, **geobox.xr_coords(with_crs=spatial_ref))
coords_default = OrderedDict(**coords, **xr_coords(geobox, spatial_ref))

arrays = []
ds_coords = deepcopy(coords_default)
@@ -880,7 +881,7 @@ def output_geobox(like=None, output_crs=None, resolution=None, align=None,
if output_crs is not None:
if resolution is None:
raise ValueError("Must specify 'resolution' when specifying 'output_crs'")
crs = geometry.CRS(output_crs)
crs = CRS(output_crs)
elif grid_spec is not None:
# specification from grid_spec
crs = grid_spec.crs
@@ -907,7 +908,13 @@

geopolygon = get_bounds(datasets, crs)

return geometry.GeoBox.from_geopolygon(geopolygon, resolution, crs, align)
if resolution is not None and type(resolution) is not Resolution:
resolution = Resolution(*resolution)

if align is not None:
align = XY(*align)

return GeoBox.from_geopolygon(geopolygon, resolution, crs, align)


def select_datasets_inside_polygon(datasets, polygon):
Expand Down Expand Up @@ -961,8 +968,8 @@ def _fuse_measurement(dest, datasets, geobox, measurement,


def get_bounds(datasets, crs):
bbox = geometry.bbox_union(ds.extent.to_crs(crs).boundingbox for ds in datasets)
return geometry.box(*bbox, crs=crs)
bbox = bbox_union(ds.extent.to_crs(crs).boundingbox for ds in datasets)
return box(*bbox, crs=crs)


def _calculate_chunk_sizes(sources: xarray.DataArray,
Expand Down Expand Up @@ -1048,11 +1055,11 @@ def _mk_empty(shape: Tuple[int, ...]) -> str:
key_prefix = (dsk_name, *irr_index)

# all spatial chunks
for idx in numpy.ndindex(gbt.shape):
for idx in numpy.ndindex(gbt.shape.shape):
dss = tiled_dss.get(idx, None)

if dss is None:
val = _mk_empty(gbt.chunk_shape(idx))
val = _mk_empty(gbt.chunk_shape(idx).xy)
# 3D case
if 'extra_dim' in measurement:
index_subset = extra_dims.measurements_index(measurement.extra_dim)
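A condensed sketch of the new output_geobox() path shown above, assuming the odc-geo constructors behave as used in this diff (CRS, extent and resolution values are invented for illustration):

    from odc.geo import CRS, XY, Resolution
    from odc.geo.geom import box
    from odc.geo.geobox import GeoBox
    from odc.geo.xr import xr_coords

    crs = CRS("EPSG:3577")
    geopolygon = box(1_500_000, -4_000_000, 1_600_000, -3_900_000, crs=crs)

    resolution = (-25, 25)   # (y, x) tuple as accepted by dc.load()
    align = (0, 0)

    # Plain tuples are normalised to odc-geo types before building the GeoBox,
    # mirroring the new code in output_geobox().
    resolution = Resolution(*resolution)
    align = XY(*align)

    gbox = GeoBox.from_geopolygon(geopolygon, resolution, crs, align)

    # geobox.xr_coords(with_crs=...) becomes the free function xr_coords(geobox, ...).
    coords = xr_coords(gbox, "spatial_ref")
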
4 changes: 2 additions & 2 deletions datacube/api/grid_workflow.py
@@ -7,7 +7,7 @@
from collections import OrderedDict
import pandas as pd

from datacube.utils.geometry import intersects
from odc.geo.geom import intersects
from .query import Query, query_group_by
from .core import Datacube

@@ -169,7 +169,7 @@ def cell_observations(self, cell_index=None, geopolygon=None, tile_buffer=None,
"""
List datasets, grouped by cell.
:param datacube.utils.Geometry geopolygon:
:param odc.geo.geom.Geometry geopolygon:
Only return observations with data inside polygon.
:param (float,float) tile_buffer:
buffer tiles by (y, x) in CRS units
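The intersects() predicate now comes from odc.geo.geom. A minimal sketch of the kind of overlap check cell_observations() performs (the geometries are invented for illustration):

    from odc.geo import CRS
    from odc.geo.geom import box, intersects

    crs = CRS("EPSG:4326")
    tile_extent = box(148.0, -36.0, 149.0, -35.0, crs=crs)
    query_poly = box(148.5, -35.5, 150.0, -34.0, crs=crs)

    # Only keep tiles whose extent overlaps the query polygon.
    if intersects(tile_extent, query_poly):
        print("tile overlaps the query polygon")
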
30 changes: 19 additions & 11 deletions datacube/api/query.py
@@ -20,9 +20,17 @@


from ..model import Range, Dataset
from ..utils import geometry
from ..utils.dates import normalise_dt, tz_aware

from odc.geo import CRS, Geometry
from odc.geo.geom import (
lonlat_bounds,
point,
line,
polygon,
mid_longitude,
)

_LOG = logging.getLogger(__name__)


@@ -142,7 +150,7 @@ def search_terms(self):
kwargs = {}
kwargs.update(self.search)
if self.geopolygon:
geo_bb = geometry.lonlat_bounds(self.geopolygon, resolution=100_000) # TODO: pick resolution better
geo_bb = lonlat_bounds(self.geopolygon, resolution=100_000) # TODO: pick resolution better
if geo_bb.bottom != geo_bb.top:
kwargs['lat'] = Range(geo_bb.bottom, geo_bb.top)
else:
@@ -250,21 +258,21 @@ def _range_to_geopolygon(**kwargs):
if key in ['longitude', 'lon', 'long', 'x']:
input_coords['left'], input_coords['right'] = _value_to_range(value)
if key in ['crs', 'coordinate_reference_system']:
input_crs = geometry.CRS(value)
input_crs = input_crs or geometry.CRS('EPSG:4326')
input_crs = CRS(value)
input_crs = input_crs or CRS('EPSG:4326')
if any(v is not None for v in input_coords.values()):
if input_coords['left'] == input_coords['right']:
if input_coords['top'] == input_coords['bottom']:
return geometry.point(input_coords['left'], input_coords['top'], crs=input_crs)
return point(input_coords['left'], input_coords['top'], crs=input_crs)
else:
points = [(input_coords['left'], input_coords['bottom']),
(input_coords['left'], input_coords['top'])]
return geometry.line(points, crs=input_crs)
return line(points, crs=input_crs)
else:
if input_coords['top'] == input_coords['bottom']:
points = [(input_coords['left'], input_coords['top']),
(input_coords['right'], input_coords['top'])]
return geometry.line(points, crs=input_crs)
return line(points, crs=input_crs)
else:
points = [
(input_coords['left'], input_coords['top']),
@@ -273,7 +281,7 @@ def _range_to_geopolygon(**kwargs):
(input_coords['left'], input_coords['bottom']),
(input_coords['left'], input_coords['top'])
]
return geometry.polygon(points, crs=input_crs)
return polygon(points, crs=input_crs)
return None


@@ -389,7 +397,7 @@ def solar_day(dataset: Dataset, longitude: Optional[float] = None) -> np.datetime64:
return np.datetime64(solar_time.date(), 'D')


def solar_offset(geom: Union[geometry.Geometry, Dataset],
def solar_offset(geom: Union[Geometry, Dataset],
precision: str = 'h') -> datetime.timedelta:
"""
Given a geometry or a Dataset compute offset to add to UTC timestamp to get solar day right.
@@ -399,8 +407,8 @@ def solar_offset(geom: Union[geometry.Geometry, Dataset],
:param geom: Geometry with defined CRS
:param precision: one of ``'h'`` or ``'s'``, defaults to hour precision
"""
if isinstance(geom, geometry.Geometry):
lon = geometry.mid_longitude(geom)
if isinstance(geom, Geometry):
lon = mid_longitude(geom)
else:
_lon = _ds_mid_longitude(geom)
if _lon is None:
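A small sketch of the geometry helpers query.py now pulls from odc.geo.geom, using invented coordinates:

    from odc.geo import CRS
    from odc.geo.geom import polygon, lonlat_bounds, mid_longitude

    crs = CRS("EPSG:4326")

    # Closed ring from a lat/lon range query, as _range_to_geopolygon() builds it.
    geopolygon = polygon(
        [(148.0, -34.0), (149.0, -34.0), (149.0, -36.0), (148.0, -36.0), (148.0, -34.0)],
        crs=crs,
    )

    # search_terms uses lonlat_bounds() to turn the polygon into lat/lon Ranges.
    bb = lonlat_bounds(geopolygon, resolution=100_000)
    print(bb.left, bb.bottom, bb.right, bb.top)

    # solar_offset() uses the central longitude to approximate local solar time.
    print(mid_longitude(geopolygon))
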
2 changes: 1 addition & 1 deletion datacube/drivers/_types.py
@@ -14,7 +14,7 @@
from affine import Affine
from concurrent.futures import Future
from datacube.storage import BandInfo
from datacube.utils.geometry import CRS
from odc.geo.crs import CRS

# pylint: disable=invalid-name,unsubscriptable-object,pointless-statement

91 changes: 0 additions & 91 deletions datacube/drivers/netcdf/_safestrings.py

This file was deleted.

2 changes: 1 addition & 1 deletion datacube/drivers/netcdf/_write.py
@@ -40,7 +40,7 @@ def create_netcdf_storage_unit(filename,
Create a NetCDF file on disk.

:param pathlib.Path filename: filename to write to
:param datacube.utils.geometry.CRS crs: Datacube CRS object defining the spatial projection
:param odc.geo.crs.CRS crs: Datacube CRS object defining the spatial projection
:param dict coordinates: Dict of named `datacube.model.Coordinate`s to create
:param dict variables: Dict of named `datacube.model.Variable`s to create
:param dict variable_params:
11 changes: 7 additions & 4 deletions datacube/drivers/netcdf/writer.py
@@ -13,8 +13,11 @@
import numpy

from datacube.utils.masking import describe_flags_def
from datacube.utils import geometry, data_resolution_and_offset
from ._safestrings import SafeStringsDataset as Dataset
from datacube.utils import data_resolution_and_offset
from netCDF4 import Dataset

from odc.geo import CRS
from odc.geo.geom import box

from datacube import __version__

@@ -228,7 +231,7 @@ def _create_projected_grid_mapping_variable(nco, crs, name=DEFAULT_GRID_MAPPING):


def _write_geographical_extents_attributes(nco, extent):
geo_extents = extent.to_crs(geometry.CRS("EPSG:4326"))
geo_extents = extent.to_crs(CRS("EPSG:4326"))
nco.geospatial_bounds = geo_extents.wkt
nco.geospatial_bounds_crs = "EPSG:4326"

@@ -274,7 +277,7 @@ def create_grid_mapping_variable(nco, crs, name=DEFAULT_GRID_MAPPING):

left, right = nco[dims[1]][0] - 0.5 * xres, nco[dims[1]][-1] + 0.5 * xres
bottom, top = nco[dims[0]][0] - 0.5 * yres, nco[dims[0]][-1] + 0.5 * yres
_write_geographical_extents_attributes(nco, geometry.box(left, bottom, right, top, crs=crs))
_write_geographical_extents_attributes(nco, box(left, bottom, right, top, crs=crs))

return crs_var

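The NetCDF writer now builds its geographic extent attributes directly from odc-geo geometry. A minimal sketch of that conversion (the projected extent is illustrative):

    from odc.geo import CRS
    from odc.geo.geom import box

    # Dataset extent in a projected CRS (Australian Albers); values are invented.
    extent = box(1_500_000, -4_000_000, 1_600_000, -3_900_000, crs=CRS("EPSG:3577"))

    # _write_geographical_extents_attributes() reprojects to EPSG:4326 and stores the WKT.
    geo_extents = extent.to_crs(CRS("EPSG:4326"))
    print(geo_extents.wkt)
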
2 changes: 1 addition & 1 deletion datacube/drivers/postgis/_api.py
@@ -26,7 +26,7 @@

from datacube.index.fields import OrExpression
from datacube.model import Range
from datacube.utils.geometry import CRS, Geometry
from odc.geo import CRS, Geometry
from datacube.utils.uris import split_uri
from datacube.index.abstract import DSID
from datacube.model.lineage import LineageRelation, LineageDirection
4 changes: 2 additions & 2 deletions datacube/drivers/postgis/_connections.py
@@ -26,7 +26,7 @@
import datacube
from datacube.index.exceptions import IndexSetupError
from datacube.utils import jsonify_document
from datacube.utils.geometry import CRS
from odc.geo import CRS

from . import _api
from . import _core
@@ -219,7 +219,7 @@ def spindexes(self) -> Mapping[CRS, Type[SpatialIndex]]:
self._refresh_spindexes()
return self._spindexes

def create_spatial_index(self, crs: "datacube.utils.geometry.CRS") -> Optional[Type[SpatialIndex]]:
def create_spatial_index(self, crs: CRS) -> Optional[Type[SpatialIndex]]:
"""
Create a spatial index across the database, for the named CRS.

3 changes: 2 additions & 1 deletion datacube/drivers/postgis/_spatial.py
@@ -18,7 +18,8 @@
from sqlalchemy import Column
from sqlalchemy.orm import Session

from datacube.utils.geometry import CRS, Geometry as Geom, multipolygon, polygon
from odc.geo import CRS, Geometry as Geom
from odc.geo.geom import multipolygon, polygon
from ._core import METADATA
from .sql import SCHEMA_NAME
from ._schema import orm_registry, Dataset, SpatialIndex, SpatialIndexRecord
2 changes: 1 addition & 1 deletion datacube/drivers/rio/_reader.py
@@ -16,7 +16,7 @@
import rasterio.crs # type: ignore[import]

from datacube.storage import BandInfo
from datacube.utils.geometry import CRS
from odc.geo import CRS
from datacube.utils import (
uri_to_local_path,
get_part_from_uri,