Change product finding to handle lists of products including test
alexgleith committed Sep 29, 2021
1 parent d268f49 commit ab80f02
Showing 3 changed files with 32 additions and 9 deletions.
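
For context, the user-facing effect of this change, sketched with the same product names the new integration test below uses (a minimal illustration assuming a configured datacube index; not code from the commit):

from datacube import Datacube

dc = Datacube()  # assumes a datacube index is configured and reachable

# A single product name is handled as before
datasets = dc.find_datasets(product='ls5_nbar_scene')

# After this change, a list of product names is also accepted
datasets = dc.find_datasets(product=['ls5_nbar_scene', 'ls5_level1_scene'])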
10 changes: 5 additions & 5 deletions datacube/api/query.py
@@ -99,12 +99,12 @@ def __init__(self, index=None, product=None, geopolygon=None, like=None, **searc
         # Retrieve known keys for extra dimensions
         known_dim_keys = set()
         if product is not None:
-            datacube_product = index.products.get_by_name(product)
-            if datacube_product is not None:
-                known_dim_keys.update(datacube_product.extra_dimensions.dims.keys())
+            datacube_products = index.products.search(product=product)
         else:
-            for datacube_product in index.products.get_all():
-                known_dim_keys.update(datacube_product.extra_dimensions.dims.keys())
+            datacube_products = index.products.get_all()
+
+        for datacube_product in datacube_products:
+            known_dim_keys.update(datacube_product.extra_dimensions.dims.keys())
 
         remaining_keys -= known_dim_keys

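Taken together, the patched block above amounts to something like the following standalone sketch (illustrative only, not code from the repository); it assumes index.products.search(product=...) accepts either a single product name or a list and yields every matching product:

def _known_extra_dim_keys(index, product=None):
    # Illustrative helper: collect extra-dimension keys across all matching products
    known_dim_keys = set()
    if product is not None:
        # Assumed to handle a single name or a list of names
        datacube_products = index.products.search(product=product)
    else:
        datacube_products = index.products.get_all()

    for datacube_product in datacube_products:
        known_dim_keys.update(datacube_product.extra_dimensions.dims.keys())

    return known_dim_keys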
15 changes: 15 additions & 0 deletions integration_tests/index/test_search.py
@@ -32,6 +32,8 @@

 from datacube.testutils import load_dataset_definition
 
+from datacube import Datacube
+
 
 @pytest.fixture
 def pseudo_ls8_type(index, ga_metadata_type):
@@ -1181,6 +1183,19 @@ def test_csv_structure(clirunner, pseudo_ls8_type, ls5_telem_type,
     assert lines[0] == _EXPECTED_OUTPUT_HEADER
 
 
+def test_query_dataset_multi_product(index: Index, ls5_dataset_w_children: Dataset):
+    # We have one ls5 level1 and its child nbar
+    dc = Datacube(index)
+
+    # Can we query a single product name?
+    datasets = dc.find_datasets(product='ls5_nbar_scene')
+    assert len(datasets) == 1
+
+    # Can we query multiple products?
+    datasets = dc.find_datasets(product=['ls5_nbar_scene', 'ls5_level1_scene'])
+    assert len(datasets) == 2
+
+
 def _cli_csv_search(args, clirunner):
     # Do a CSV search from the cli, returning results as a list of dictionaries
     output = _csv_search_raw(args, clirunner)
16 changes: 12 additions & 4 deletions tests/api/test_query.py
@@ -15,17 +15,20 @@
 from datacube.utils.geometry import CRS
 
 
+@pytest.fixture
+def mock_index():
+    from unittest.mock import MagicMock
+    return MagicMock()
+
+
 def test_datetime_to_timestamp():
     assert _datetime_to_timestamp((1990, 1, 7)) == 631670400
     assert _datetime_to_timestamp(datetime.datetime(1990, 1, 7)) == 631670400
     assert _datetime_to_timestamp(631670400) == 631670400
     assert _datetime_to_timestamp('1990-01-07T00:00:00.0Z') == 631670400
 
 
-def test_query_kwargs():
-    from unittest.mock import MagicMock
-
-    mock_index = MagicMock()
+def test_query_kwargs(mock_index):
     mock_index.datasets.get_field_names = lambda: {u'product', u'lat', u'sat_path', 'type_id', u'time', u'lon',
                                                    u'orbit', u'instrument', u'sat_row', u'platform', 'metadata_type',
                                                    u'gsi', 'type', 'id'}
@@ -213,3 +216,8 @@ def test_dateline_query_building():
 def test_query_issue_1146():
     q = Query(k='AB')
     assert q.search['k'] == 'AB'
+
+
+def test_query_multiple_products(mock_index):
+    q = Query(index=mock_index, product=['ls5_nbar_albers', 'ls7_nbar_albers'])
+    assert q.product == ['ls5_nbar_albers', 'ls7_nbar_albers']
