replace usage of index.datasets.types with index.products
v0lat1le committed Jan 4, 2017
1 parent 6f0dbff commit 9f676e0
Showing 6 changed files with 35 additions and 35 deletions.
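
In short, every call site that reached product (dataset type) operations through the nested index.datasets.types resource now uses the top-level index.products resource instead. A minimal before/after sketch of the call-site change, assuming an already-initialised datacube.index._api.Index instance named index, and a hypothetical product-definition dict product_doc (not part of this commit):

    # product_doc: hypothetical product-definition document (a dict), e.g. loaded from YAML
    # Old resource path: product operations were reached via the datasets resource
    product = index.datasets.types.add_document(product_doc)
    product = index.datasets.types.get_by_name('ls5_nbar_albers')

    # New resource path (this commit): the same operations hang directly off index.products
    product = index.products.add_document(product_doc)
    product = index.products.get_by_name('ls5_nbar_albers')

The other product methods touched in the hunks below (update_document, search, search_robust, get_with_fields) move the same way.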
4 changes: 2 additions & 2 deletions integration_tests/conftest.py
@@ -152,7 +152,7 @@ def ls5_nbar_gtiff_doc(default_metadata_type):

@pytest.fixture
def ls5_nbar_gtiff_type(index, ls5_nbar_gtiff_doc):
-return index.datasets.types.add_document(ls5_nbar_gtiff_doc)
+return index.products.add_document(ls5_nbar_gtiff_doc)


@pytest.fixture
@@ -226,7 +226,7 @@ def indexed_ls5_scene_dataset_type(index, default_metadata_type):
dataset_types = load_test_dataset_types(DATASET_TYPES / 'ls5_scenes.yaml')

for dataset_type in dataset_types:
-index.datasets.types.add_document(dataset_type)
+index.products.add_document(dataset_type)

return None

50 changes: 25 additions & 25 deletions integration_tests/index/test_config_docs.py
@@ -101,16 +101,16 @@ def test_idempotent_add_dataset_type(index, ls5_nbar_gtiff_type, ls5_nbar_gtiff_doc):
:type ls5_nbar_gtiff_type: datacube.model.DatasetType
:type index: datacube.index._api.Index
"""
-assert index.datasets.types.get_by_name(ls5_nbar_gtiff_type.name) is not None
+assert index.products.get_by_name(ls5_nbar_gtiff_type.name) is not None

# Re-add should have no effect, because it's equal to the current one.
-index.datasets.types.add_document(ls5_nbar_gtiff_doc)
+index.products.add_document(ls5_nbar_gtiff_doc)

# But if we add the same type with differing properties we should get an error:
different_telemetry_type = copy.deepcopy(ls5_nbar_gtiff_doc)
different_telemetry_type['metadata']['ga_label'] = 'something'
with pytest.raises(ValueError):
-index.datasets.types.add_document(different_telemetry_type)
+index.products.add_document(different_telemetry_type)

# TODO: Support for adding/changing search fields?

@@ -120,50 +120,50 @@ def test_update_dataset_type(index, ls5_nbar_gtiff_type, ls5_nbar_gtiff_doc, default_metadata_type_doc):
:type ls5_nbar_gtiff_type: datacube.model.DatasetType
:type index: datacube.index._api.Index
"""
-assert index.datasets.types.get_by_name(ls5_nbar_gtiff_type.name) is not None
+assert index.products.get_by_name(ls5_nbar_gtiff_type.name) is not None

# Update with a new description
ls5_nbar_gtiff_doc['description'] = "New description"
-index.datasets.types.update_document(ls5_nbar_gtiff_doc)
+index.products.update_document(ls5_nbar_gtiff_doc)
# Ensure was updated
-assert index.datasets.types.get_by_name(ls5_nbar_gtiff_type.name).definition['description'] == "New description"
+assert index.products.get_by_name(ls5_nbar_gtiff_type.name).definition['description'] == "New description"

# Remove some match rules (looser rules -- that match more datasets -- should be allowed)
assert 'format' in ls5_nbar_gtiff_doc['metadata']
del ls5_nbar_gtiff_doc['metadata']['format']['name']
del ls5_nbar_gtiff_doc['metadata']['format']
-index.datasets.types.update_document(ls5_nbar_gtiff_doc)
+index.products.update_document(ls5_nbar_gtiff_doc)
# Ensure was updated
-updated_type = index.datasets.types.get_by_name(ls5_nbar_gtiff_type.name)
+updated_type = index.products.get_by_name(ls5_nbar_gtiff_type.name)
assert updated_type.definition['metadata'] == ls5_nbar_gtiff_doc['metadata']

# Specifying metadata type definition (rather than name) should be allowed
full_doc = copy.deepcopy(ls5_nbar_gtiff_doc)
full_doc['metadata_type'] = default_metadata_type_doc
-index.datasets.types.update_document(full_doc)
+index.products.update_document(full_doc)

# Remove fixed field, forcing a new index to be created (as datasets can now differ for the field).
assert not _object_exists(index._db, 'dix_ls5_nbart_p54_gtiff_product_type')
del ls5_nbar_gtiff_doc['metadata']['product_type']
-index.datasets.types.update_document(ls5_nbar_gtiff_doc)
+index.products.update_document(ls5_nbar_gtiff_doc)
# Ensure was updated
assert _object_exists(index._db, 'dix_ls5_nbart_p54_gtiff_product_type')
-updated_type = index.datasets.types.get_by_name(ls5_nbar_gtiff_type.name)
+updated_type = index.products.get_by_name(ls5_nbar_gtiff_type.name)
assert updated_type.definition['metadata'] == ls5_nbar_gtiff_doc['metadata']

# But if we make metadata more restrictive we get an error:
different_telemetry_type = copy.deepcopy(ls5_nbar_gtiff_doc)
assert 'ga_label' not in different_telemetry_type['metadata']
different_telemetry_type['metadata']['ga_label'] = 'something'
with pytest.raises(ValueError):
-index.datasets.types.update_document(different_telemetry_type)
+index.products.update_document(different_telemetry_type)
# Check was not updated.
-updated_type = index.datasets.types.get_by_name(ls5_nbar_gtiff_type.name)
+updated_type = index.products.get_by_name(ls5_nbar_gtiff_type.name)
assert 'ga_label' not in updated_type.definition['metadata']

# But works when unsafe updates are allowed.
-index.datasets.types.update_document(different_telemetry_type, allow_unsafe_updates=True)
-updated_type = index.datasets.types.get_by_name(ls5_nbar_gtiff_type.name)
+index.products.update_document(different_telemetry_type, allow_unsafe_updates=True)
+updated_type = index.products.get_by_name(ls5_nbar_gtiff_type.name)
assert updated_type.definition['metadata']['ga_label'] == 'something'


@@ -211,11 +211,11 @@ def test_filter_types_by_fields(index, ls5_nbar_gtiff_type):
:type ls5_nbar_gtiff_type: datacube.model.DatasetType
:type index: datacube.index._api.Index
"""
-assert index.datasets.types
-res = list(index.datasets.types.get_with_fields(['lat', 'lon', 'platform']))
+assert index.products
+res = list(index.products.get_with_fields(['lat', 'lon', 'platform']))
assert res == [ls5_nbar_gtiff_type]

-res = list(index.datasets.types.get_with_fields(['lat', 'lon', 'platform', 'favorite_icecream']))
+res = list(index.products.get_with_fields(['lat', 'lon', 'platform', 'favorite_icecream']))
assert len(res) == 0


@@ -224,21 +224,21 @@ def test_filter_types_by_search(index, ls5_nbar_gtiff_type):
:type ls5_nbar_gtiff_type: datacube.model.DatasetType
:type index: datacube.index._api.Index
"""
-assert index.datasets.types
+assert index.products

# No arguments, return all.
-res = list(index.datasets.types.search())
+res = list(index.products.search())
assert res == [ls5_nbar_gtiff_type]

# Matching fields
-res = list(index.datasets.types.search(
+res = list(index.products.search(
product_type='nbart',
product='ls5_nbart_p54_gtiff'
))
assert res == [ls5_nbar_gtiff_type]

# Matching fields and non-available fields
-res = list(index.datasets.types.search(
+res = list(index.products.search(
product_type='nbart',
product='ls5_nbart_p54_gtiff',
lat=Range(142.015625, 142.015625),
@@ -247,7 +247,7 @@ def test_filter_types_by_search(index, ls5_nbar_gtiff_type):
assert res == []

# Matching fields and available fields
-[(res, q)] = list(index.datasets.types.search_robust(
+[(res, q)] = list(index.products.search_robust(
product_type='nbart',
product='ls5_nbart_p54_gtiff',
lat=Range(142.015625, 142.015625),
@@ -258,13 +258,13 @@ def test_filter_types_by_search(index, ls5_nbar_gtiff_type):
assert 'lon' in q

# Or expression test
-res = list(index.datasets.types.search(
+res = list(index.products.search(
product_type=['nbart', 'nbar'],
))
assert res == [ls5_nbar_gtiff_type]

# Mismatching fields
-res = list(index.datasets.types.search(
+res = list(index.products.search(
product_type='nbar',
))
assert res == []
8 changes: 4 additions & 4 deletions integration_tests/index/test_index_data.py
@@ -67,7 +67,7 @@


def test_archive_datasets(index, db, local_config, default_metadata_type):
-dataset_type = index.datasets.types.add_document(_pseudo_telemetry_dataset_type)
+dataset_type = index.products.add_document(_pseudo_telemetry_dataset_type)
with db.begin() as transaction:
was_inserted = transaction.insert_dataset(
_telemetry_dataset,
@@ -95,7 +95,7 @@ def test_index_duplicate_dataset(index, db, local_config, default_metadata_type):
:type index: datacube.index._api.Index
:type db: datacube.index.postgres._api.PostgresDb
"""
-dataset_type = index.datasets.types.add_document(_pseudo_telemetry_dataset_type)
+dataset_type = index.products.add_document(_pseudo_telemetry_dataset_type)
assert not index.datasets.has(_telemetry_uuid)

with db.begin() as transaction:
@@ -126,7 +126,7 @@ def test_transactions(index, db, local_config, default_metadata_type):
"""
assert not index.datasets.has(_telemetry_uuid)

-dataset_type = index.datasets.types.add_document(_pseudo_telemetry_dataset_type)
+dataset_type = index.products.add_document(_pseudo_telemetry_dataset_type)
with db.begin() as transaction:
was_inserted = transaction.insert_dataset(
_telemetry_dataset,
@@ -173,7 +173,7 @@ def test_index_dataset_with_location(index, default_metadata_type):
first_file = Path('/tmp/first/something.yaml').absolute()
second_file = Path('/tmp/second/something.yaml').absolute()

-type_ = index.datasets.types.add_document(_pseudo_telemetry_dataset_type)
+type_ = index.products.add_document(_pseudo_telemetry_dataset_type)
dataset = Dataset(type_, _telemetry_dataset, first_file.as_uri())
index.datasets.add(dataset)
stored = index.datasets.get(dataset.id)
4 changes: 2 additions & 2 deletions integration_tests/index/test_search.py
@@ -22,7 +22,7 @@

@pytest.fixture
def pseudo_telemetry_type(index, default_metadata_type):
-index.datasets.types.add_document({
+index.products.add_document({
'name': 'ls8_telemetry',
'description': 'telemetry test',
'metadata': {
@@ -39,7 +39,7 @@ def pseudo_telemetry_type(index, default_metadata_type):
},
'metadata_type': default_metadata_type.name # 'eo'
})
-return index.datasets.types.get_by_name('ls8_telemetry')
+return index.products.get_by_name('ls8_telemetry')


@pytest.fixture
2 changes: 1 addition & 1 deletion integration_tests/test_end_to_end.py
@@ -232,7 +232,7 @@ def calc_max_change(da):

def check_open_with_grid_workflow(index):
type_name = 'ls5_nbar_albers'
-dt = index.datasets.types.get_by_name(type_name)
+dt = index.products.get_by_name(type_name)

from datacube.api.grid_workflow import GridWorkflow
gw = GridWorkflow(index, dt.grid_spec)
2 changes: 1 addition & 1 deletion integration_tests/test_full_ingestion.py
@@ -170,7 +170,7 @@ def check_open_with_api(index):
dc = Datacube(index=index)

input_type_name = 'ls5_nbar_albers'
-input_type = dc.index.datasets.types.get_by_name(input_type_name)
+input_type = dc.index.products.get_by_name(input_type_name)

geobox = GeoBox(200, 200, Affine(25, 0.0, 1500000, 0.0, -25, -3900000), CRS('EPSG:3577'))
observations = dc.find_datasets(product='ls5_nbar_albers', geopolygon=geobox.extent)
