Skip to content

Commit

Permalink
Merge b069e02 into 86da56e
Browse files Browse the repository at this point in the history
  • Loading branch information
ashoka1234 committed Mar 13, 2019
2 parents 86da56e + b069e02 commit 4ccc5d8
Show file tree
Hide file tree
Showing 27 changed files with 2,011 additions and 202 deletions.
11 changes: 11 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,11 @@ addons:
services:
- postgresql
apt:
sources:
- sourceline: ppa:nextgis/ppa
packages:
- gdal-bin
- gdal-data
- libhdf5-serial-dev
- libnetcdf-dev
- libproj-dev
Expand All @@ -35,6 +39,13 @@ before_install:

- export CPLUS_INCLUDE_PATH="/usr/include/gdal"
- export C_INCLUDE_PATH="/usr/include/gdal"

# The following is a temporary fix for broken gdal shared libraries due to the upgrade of GDAL to 2.4.0.
# This may need to be removed if a more recent version fixes this issue, or alternative isolation
# approaches between postgis and GDAL could be implemented instead.
- sudo ln -s /usr/lib/x86_64-linux-gnu/libgdal.so /usr/lib/x86_64-linux-gnu/libgdal.so.1
- sudo /sbin/ldconfig

- travis_retry pip install --upgrade pip
- travis_retry pip install --progress-bar off coveralls codecov
- travis_retry pip install --progress-bar off --requirement requirements-test.txt
Expand Down
9 changes: 9 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,15 @@ You can alter default [Flask](http://flask.pocoo.org/docs/1.0/config/) or

# Which theme to use (in the cubedash/themes folder)
CUBEDASH_THEME = 'odc'

# Customise '/stac' endpoint information
STAC_ENDPOINT_ID = 'my-odc-explorer'
STAC_ENDPOINT_TITLE = 'My ODC Explorer'
STAC_ENDPOINT_DESCRIPTION = 'Optional Longer description of this endpoint'

STAC_DEFAULT_PAGE_SIZE = 20
STAC_PAGE_SIZE_LIMIT = 1000


[Sentry](https://sentry.io/) error reporting is supported by adding a `SENTRY_CONFIG` section.
See [their documentation](https://docs.sentry.io/clients/python/integrations/flask/#settings).
Expand Down
37 changes: 34 additions & 3 deletions cubedash/_api.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import logging
from flask import Blueprint, abort, request

from flask import Blueprint, abort

from cubedash import _utils
from . import _model
from ._utils import as_geojson

_MAX_DATASET_RETURN = 2000

_LOG = logging.getLogger(__name__)
bp = Blueprint('api', __name__, url_prefix='/api')

Expand All @@ -19,7 +21,36 @@ def datasets_geojson(
month: int = None,
day: int = None,
):
return as_geojson(_model.get_datasets_geojson(product_name, year, month, day))
limit = request.args.get('limit', default=500, type=int)
if limit > _MAX_DATASET_RETURN:
limit = _MAX_DATASET_RETURN

time = _utils.as_time_range(
year, month, day,
tzinfo=_model.STORE.grouping_timezone
)

return as_geojson(dict(
type='FeatureCollection',
features=[s.as_geojson() for s in _model.STORE.search_items(
product_name=product_name,
time=time,
limit=limit,
) if s.geom_geojson is not None]
))

# TODO: replace this api with stac?
# Stac includes much more information in records, so has to join the
# dataset table, so is slower, but does it matter?
# Can trivially redirect to stac as its return value is still geojson:
# return flask.redirect(
# flask.url_for(
# 'stac.stac_search',
# product_name=product_name,
# time=_unparse_time_range(time) if time else None,
# limit=limit,
# )
# )


@bp.route('/footprint/<product_name>')
Expand Down
5 changes: 4 additions & 1 deletion cubedash/_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import logging

import flask
from flask import Blueprint
from flask import Blueprint, abort

from . import _utils as utils
from . import _model
Expand All @@ -21,6 +21,9 @@ def dataset_page(id_):
index = _model.STORE.index
dataset = index.datasets.get(id_, include_sources=True)

if dataset is None:
abort(404, f"No dataset found with id {id_}")

source_list = list(dataset.metadata.sources.items())
if len(source_list) > PROVENANCE_DISPLAY_LIMIT:
source_dataset_overflow = len(source_list) - PROVENANCE_DISPLAY_LIMIT
Expand Down
38 changes: 5 additions & 33 deletions cubedash/_model.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,19 @@
import time

from functools import partial
from pathlib import Path
from typing import Dict, Optional, Counter
from typing import Iterable, Tuple

import dateutil.parser
import flask
import flask_themes
import pyproj
import shapely
import shapely.geometry
import shapely.ops
import shapely.prepared
import shapely.wkb
import structlog
from flask_caching import Cache
from pathlib import Path
from shapely.geometry import MultiPolygon
from shapely.ops import transform
from typing import Dict, Optional, Counter
from typing import Iterable, Tuple

from cubedash.summary import TimePeriodOverview, SummaryStore
from cubedash.summary._extents import RegionInfo
Expand Down Expand Up @@ -71,23 +67,6 @@ def get_product_summary(product_name: str) -> ProductSummary:
return STORE.get_product_summary(product_name)


@cache.memoize(timeout=60)
def get_datasets_geojson(
product_name: str,
year: Optional[int] = None,
month: Optional[int] = None,
day: Optional[int] = None,
limit: int = 500
) -> Dict:
return STORE.get_dataset_footprints(
product_name,
year,
month,
day,
limit=limit
)


@cache.memoize(timeout=120)
def get_last_updated():
# Drop a text file in to override the "updated time": for example, when we know it's an old clone of our DB.
Expand Down Expand Up @@ -181,21 +160,14 @@ def get_regions_geojson(
return regions


def _get_footprint(period: TimePeriodOverview):
def _get_footprint(period: TimePeriodOverview) -> Optional[MultiPolygon]:
if not period or not period.dataset_count:
return None

if not period.footprint_geometry:
return None
start = time.time()
tranform_wrs84 = partial(
pyproj.transform,
pyproj.Proj(init=period.footprint_crs),
pyproj.Proj(init='epsg:4326')
)
# It's possible to get self-intersection after transformation, presumably due to
# rounding, so we buffer 0.
footprint_wrs84 = transform(tranform_wrs84, period.footprint_geometry).buffer(0)
footprint_wrs84 = period.footprint_wrs84
_LOG.info(
'overview.footprint_size_diff',
from_len=len(period.footprint_geometry.wkt),
Expand Down
5 changes: 3 additions & 2 deletions cubedash/_pages.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from cubedash.summary._stores import ProductSummary
from datacube.model import DatasetType, Range
from datacube.scripts.dataset import build_dataset_info
from . import _filters, _dataset, _product, _platform, _api, _model, _reports
from . import _filters, _dataset, _product, _platform, _api, _model, _reports, _stac
from . import _utils as utils
from ._utils import as_rich_json

Expand All @@ -28,13 +28,14 @@
app.register_blueprint(_platform.bp)
app.register_blueprint(_reports.bp)
app.register_blueprint(_audit.bp)
app.register_blueprint(_stac.bp)

_LOG = structlog.getLogger()

_HARD_SEARCH_LIMIT = app.config.get('CUBEDASH_HARD_SEARCH_LIMIT', 150)

# Add server timings to http headers.
if app.debug or app.config.get('CUBEDASH_SHOW_PERF_TIMES', False):
if app.config.get('CUBEDASH_SHOW_PERF_TIMES', False):
_monitoring.init_app_monitoring()


Expand Down

0 comments on commit 4ccc5d8

Please sign in to comment.