Skip to content

Commit

Permalink
Merge pull request #205 from jeremyh/uri-schema
Browse files Browse the repository at this point in the history
Add schema migration for uri search changes
  • Loading branch information
andrewdhicks committed Mar 15, 2017
2 parents d49fb24 + 0bd6819 commit 2fa297e
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 8 deletions.
28 changes: 22 additions & 6 deletions datacube/index/postgres/tables/_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,20 +124,20 @@ def schema_is_latest(engine):
"""
Is the schema up-to-date?
"""
is_unification = _pg_exists(engine, schema_qualified('dataset_type'))
is_updated = not _pg_exists(engine, schema_qualified('uq_dataset_source_dataset_ref'))

# We may have versioned schema in the future.
# For now, we know updates ahve been applied if the dataset_type table exists,
return is_unification and is_updated
# For now, we know updates have been applied if certain objects exist,

has_dataset_source_update = not _pg_exists(engine, schema_qualified('uq_dataset_source_dataset_ref'))
has_uri_searches = _pg_exists(engine, schema_qualified('ix_agdc_dataset_location_dataset_ref'))
return has_dataset_source_update and has_uri_searches


def update_schema(engine):
is_unification = _pg_exists(engine, schema_qualified('dataset_type'))
if not is_unification:
raise ValueError('Pre-unification database cannot be updated.')

# Remove surrogate key from dataset_source: it makes the table larger for no benefit.
# Removal of surrogate key from dataset_source: it makes the table larger for no benefit.
if _pg_exists(engine, schema_qualified('uq_dataset_source_dataset_ref')):
_LOG.info('Applying surrogate-key update')
engine.execute("""
Expand All @@ -150,9 +150,25 @@ def update_schema(engine):
""")
_LOG.info('Completed surrogate-key update')

# float8range is needed if the user uses the double-range field type.
if not engine.execute("SELECT 1 FROM pg_type WHERE typname = 'float8range'").scalar():
engine.execute(TYPES_INIT_SQL)

# Update uri indexes to allow dataset search-by-uri.
if not _pg_exists(engine, schema_qualified('ix_agdc_dataset_location_dataset_ref')):
_LOG.info('Applying uri-search update')
engine.execute("""
begin;
-- Add a separate index by dataset.
create index ix_agdc_dataset_location_dataset_ref on agdc.dataset_location (dataset_ref);
-- Replace (dataset, uri) index with (uri, dataset) index.
alter table agdc.dataset_location add constraint uq_dataset_location_uri_scheme unique (uri_scheme, uri_body, dataset_ref);
alter table agdc.dataset_location drop constraint uq_dataset_location_dataset_ref;
commit;
""")
_LOG.info('Completed uri-search update')


def _ensure_role(engine, name, inherits_from=None, add_user=False, create_db=False):
if has_role(engine, name):
Expand Down
7 changes: 5 additions & 2 deletions docs/about/whats_new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,7 @@ v1.2.3

- Updated the Postgres product views to include the whole dataset metadata document.

- `init` now recreates the product views by default every time it is run: rerun init on
your datacube to get the improved views.
- `init` now recreates the product views by default every time it is run.

- Updated `init` to support Postgres 9.6

Expand All @@ -23,6 +22,10 @@ v1.2.3

- We are now part of Open Data Cube, and have a new home at https://github.com/opendatacube/datacube-core

This release requires the uri index changes to be applied: you will be prompted to rerun `init` as
an administrator to update your existing cubes: `datacube -v system init`. (This command can be run without
affecting read-only users, but will briefly pause writes.)

v1.2.2
------

Expand Down

0 comments on commit 2fa297e

Please sign in to comment.