Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Null template index driver #1236

Merged
merged 7 commits into from Mar 1, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
31 changes: 30 additions & 1 deletion datacube/index/abstract.py
Expand Up @@ -3,6 +3,7 @@
# Copyright (c) 2015-2022 ODC Contributors
# SPDX-License-Identifier: Apache-2.0
import datetime
from pathlib import Path

from abc import ABC, abstractmethod
from typing import (Any, Iterable, Iterator,
Expand All @@ -12,6 +13,7 @@

from datacube.model import Dataset, MetadataType, Range
from datacube.model import DatasetType as Product
from datacube.utils import read_documents, InvalidDocException
from datacube.utils.changes import AllowPolicy, Change, Offset


Expand Down Expand Up @@ -64,6 +66,12 @@ def list_users(self) -> Iterable[Tuple[str, str, str]]:
:return: Iterable of (role, username, description) tuples
"""

_DEFAULT_METADATA_TYPES_PATH = Path(__file__).parent.joinpath('default-metadata-types.yaml')


def default_metadata_type_docs(path=_DEFAULT_METADATA_TYPES_PATH):
    """
    A list of the bare dictionary format of default :class:`datacube.model.MetadataType`

    :param path: Path of a YAML document containing metadata-type definitions.
                 Defaults to the definitions shipped with datacube, so existing
                 callers are unaffected.
    :return: list of metadata-type definition dictionaries
    """
    # read_documents yields (path, doc) pairs; only the documents are needed.
    return [doc for (_, doc) in read_documents(path)]


class AbstractMetadataTypeResource(ABC):
"""
Expand Down Expand Up @@ -240,14 +248,34 @@ class and implement all abstract methods.
raise a NotImplementedError)
"""

@abstractmethod
def from_doc(self, definition: Mapping[str, Any]) -> Product:
    """
    Construct unpersisted Product model from product metadata dictionary

    :param definition: a Product metadata dictionary
    :return: Unpersisted product model
    :raises InvalidDocException: if the named metadata type is not known to this index
    """
    # NOTE(review): this is decorated @abstractmethod yet carries a full
    # default implementation — confirm the decorator is intentional and not
    # a stale line left over from the diff.
    # This column duplication is getting out of hand:
    Product.validate(definition)
    # Validate extra dimension metadata
    Product.validate_extra_dims(definition)

    metadata_type = definition['metadata_type']

    # They either specified the name of a metadata type, or specified a metadata type.
    # Is it a name?
    if isinstance(metadata_type, str):
        # Name lookup may return None; that case is caught by the check below.
        metadata_type = self.metadata_type_resource.get_by_name(metadata_type)
    else:
        # Otherwise they embedded a document, add it if needed:
        metadata_type = self.metadata_type_resource.from_doc(metadata_type)
        # Normalise the stored definition to reference the type by name only.
        definition = definition.copy()
        definition['metadata_type'] = metadata_type.name

    if not metadata_type:
        raise InvalidDocException('Unknown metadata type: %r' % definition['metadata_type'])

    return Product(metadata_type, definition)

@abstractmethod
def add(self,
Expand Down Expand Up @@ -937,3 +965,4 @@ def metadata_type_from_doc(
definition: dict
) -> MetadataType:
pass

7 changes: 7 additions & 0 deletions datacube/index/null/__init__.py
@@ -0,0 +1,7 @@
# This file is part of the Open Data Cube, see https://opendatacube.org for more information
#
# Copyright (c) 2015-2022 ODC Contributors
# SPDX-License-Identifier: Apache-2.0
"""
Module
"""
116 changes: 116 additions & 0 deletions datacube/index/null/_datasets.py
@@ -0,0 +1,116 @@
# This file is part of the Open Data Cube, see https://opendatacube.org for more information
#
# Copyright (c) 2015-2020 ODC Contributors
# SPDX-License-Identifier: Apache-2.0
from typing import Iterable, Union, Optional
from uuid import UUID

from datacube.index.abstract import AbstractDatasetResource
from datacube.model import Dataset, DatasetType


class DatasetResource(AbstractDatasetResource):
    """
    Null implementation of the dataset resource.

    The null index holds no datasets: every read returns an empty result
    (or ``None``/``False``/``0`` as appropriate) and every write raises
    :class:`NotImplementedError`.
    """

    def __init__(self, product_resource):
        # Stored for API parity with other index drivers; never queried here.
        self.types = product_resource

    def get(self, id_: Union[str, UUID], include_sources=False):
        # No dataset ever exists, so lookup always misses.
        return None

    def bulk_get(self, ids):
        return []

    def get_derived(self, id_):
        return []

    def has(self, id_):
        return False

    def bulk_has(self, ids_):
        # One False per requested id.  (Loop variable renamed from ``id``,
        # which shadowed the builtin of the same name.)
        return [False for _ in ids_]

    def add(self, dataset: Dataset,
            with_lineage: Optional[bool] = None,
            **kwargs) -> Dataset:
        # Writes are not supported by the null driver.
        raise NotImplementedError()

    def search_product_duplicates(self, product: DatasetType, *args):
        return []

    def can_update(self, dataset, updates_allowed=None):
        raise NotImplementedError()

    def update(self, dataset: Dataset, updates_allowed=None):
        raise NotImplementedError()

    def archive(self, ids):
        raise NotImplementedError()

    def restore(self, ids):
        raise NotImplementedError()

    def purge(self, ids: Iterable[UUID]):
        raise NotImplementedError()

    def get_all_dataset_ids(self, archived: bool):
        return []

    def get_field_names(self, product_name=None):
        return []

    def get_locations(self, id_):
        return []

    def get_archived_locations(self, id_):
        return []

    def get_archived_location_times(self, id_):
        return []

    def add_location(self, id_, uri):
        raise NotImplementedError()

    def get_datasets_for_location(self, uri, mode=None):
        return []

    def remove_location(self, id_, uri):
        raise NotImplementedError()

    def archive_location(self, id_, uri):
        raise NotImplementedError()

    def restore_location(self, id_, uri):
        raise NotImplementedError()

    def search_by_metadata(self, metadata):
        return []

    def search(self, limit=None, **query):
        return []

    def search_by_product(self, **query):
        return []

    def search_returning(self, field_names, limit=None, **query):
        return []

    def count(self, **query):
        return 0

    def count_by_product(self, **query):
        return []

    def count_by_product_through_time(self, period, **query):
        return []

    def count_product_through_time(self, period, **query):
        return []

    def search_summaries(self, **query):
        return []

    def get_product_time_bounds(self, product: str):
        raise NotImplementedError()

    # pylint: disable=redefined-outer-name
    def search_returning_datasets_light(self, field_names: tuple, custom_offsets=None, limit=None, **query):
        return []
40 changes: 40 additions & 0 deletions datacube/index/null/_metadata_types.py
@@ -0,0 +1,40 @@
# This file is part of the Open Data Cube, see https://opendatacube.org for more information
#
# Copyright (c) 2015-2022 ODC Contributors
# SPDX-License-Identifier: Apache-2.0

from datacube.index.abstract import AbstractMetadataTypeResource
from datacube.model import MetadataType


class MetadataTypeResource(AbstractMetadataTypeResource):
    """
    Null implementation of the metadata-type resource.

    Reads behave as if no metadata types exist; writes are unsupported.
    """

    def __init__(self):
        pass

    def from_doc(self, definition):
        # The null driver cannot persist metadata types.
        raise NotImplementedError()

    def add(self, metadata_type, allow_table_lock=False):
        raise NotImplementedError()

    def can_update(self, metadata_type, allow_unsafe_updates=False):
        raise NotImplementedError()

    def update(self, metadata_type: MetadataType, allow_unsafe_updates=False, allow_table_lock=False):
        raise NotImplementedError()

    def update_document(self, definition, allow_unsafe_updates=False):
        raise NotImplementedError()

    def get_unsafe(self, id_):
        # Nothing is stored, so every lookup misses.
        raise KeyError(id_)

    def get_by_name_unsafe(self, name):
        raise KeyError(name)

    def check_field_indexes(self, allow_table_lock=False, rebuild_all=None,
                            rebuild_views=False, rebuild_indexes=False):
        raise NotImplementedError()

    def get_all(self):
        # The null index never contains any metadata types.
        return []
44 changes: 44 additions & 0 deletions datacube/index/null/_products.py
@@ -0,0 +1,44 @@
# This file is part of the Open Data Cube, see https://opendatacube.org for more information
#
# Copyright (c) 2015-2022 ODC Contributors
# SPDX-License-Identifier: Apache-2.0
import logging

from datacube.index.abstract import AbstractProductResource
from datacube.model import DatasetType

from typing import Iterable

_LOG = logging.getLogger(__name__)


class ProductResource(AbstractProductResource):
    """
    Null implementation of the product resource.

    Lookups behave as if no products exist; mutating operations raise
    :class:`NotImplementedError`.
    """

    def __init__(self, metadata_type_resource):
        # Referenced by the base class's from_doc() implementation.
        self.metadata_type_resource = metadata_type_resource

    def add(self, product, allow_table_lock=False):
        # Persisting products is not supported by the null driver.
        raise NotImplementedError()

    def can_update(self, product, allow_unsafe_updates=False):
        raise NotImplementedError()

    def update(self, product: DatasetType, allow_unsafe_updates=False, allow_table_lock=False):
        raise NotImplementedError()

    def update_document(self, definition, allow_unsafe_updates=False, allow_table_lock=False):
        raise NotImplementedError()

    def get_unsafe(self, id_):  # type: ignore
        # Nothing is stored, so every lookup misses.
        raise KeyError(id_)

    def get_by_name_unsafe(self, name):  # type: ignore
        raise KeyError(name)

    def get_with_fields(self, field_names):
        return []

    def search_robust(self, **query):
        return []

    def get_all(self) -> Iterable[DatasetType]:
        # The null index never contains any products.
        return []
23 changes: 23 additions & 0 deletions datacube/index/null/_users.py
@@ -0,0 +1,23 @@
# This file is part of the Open Data Cube, see https://opendatacube.org for more information
#
# Copyright (c) 2015-2020 ODC Contributors
# SPDX-License-Identifier: Apache-2.0
from typing import Iterable, Optional, Tuple
from datacube.index.abstract import AbstractUserResource

class UserResource(AbstractUserResource):
    """
    Null implementation of the user resource.

    The null index has no users and does not support user management.
    """

    def __init__(self) -> None:
        pass

    def grant_role(self, role: str, *usernames: str) -> None:
        # User management is unsupported by the null driver.
        raise NotImplementedError()

    def create_user(self, username: str, password: str,
                    role: str, description: Optional[str] = None) -> None:
        raise NotImplementedError()

    def delete_user(self, *usernames: str) -> None:
        raise NotImplementedError()

    def list_users(self) -> Iterable[Tuple[str, str, str]]:
        # No users exist in a null index.
        return []
82 changes: 82 additions & 0 deletions datacube/index/null/index.py
@@ -0,0 +1,82 @@
# This file is part of the Open Data Cube, see https://opendatacube.org for more information
#
# Copyright (c) 2015-2022 ODC Contributors
# SPDX-License-Identifier: Apache-2.0
import logging

from datacube.index.null._datasets import DatasetResource # type: ignore
from datacube.index.null._metadata_types import MetadataTypeResource
from datacube.index.null._products import ProductResource
from datacube.index.null._users import UserResource
from datacube.index.abstract import AbstractIndex, AbstractIndexDriver
from datacube.model import MetadataType

_LOG = logging.getLogger(__name__)


class Index(AbstractIndex):
    """
    (Sub-)Minimal (non-)implementation of the Index API.

    Wires together the null resource implementations: nothing is ever
    stored, and every read produces an empty result.
    """

    def __init__(self) -> None:
        self._user_resource = UserResource()
        self._metadata_type_resource = MetadataTypeResource()
        self._product_resource = ProductResource(self._metadata_type_resource)
        self._dataset_resource = DatasetResource(self._product_resource)

    @property
    def users(self) -> UserResource:
        return self._user_resource

    @property
    def metadata_types(self) -> MetadataTypeResource:
        return self._metadata_type_resource

    @property
    def products(self) -> ProductResource:
        return self._product_resource

    @property
    def datasets(self) -> DatasetResource:
        return self._dataset_resource

    @property
    def url(self) -> str:
        # There is no backing store to point at.
        return "null"

    @classmethod
    def from_config(cls, config, application_name=None, validate_connection=True):
        # All arguments are accepted for API compatibility and ignored.
        return cls()

    @classmethod
    def get_dataset_fields(cls, doc):
        # A null index exposes no searchable dataset fields.
        return {}

    def init_db(self, with_default_types=True, with_permissions=True):
        # Nothing to initialise; report success.
        return True

    def close(self):
        # No connection to release.
        pass

    def __repr__(self):
        return "Index<null>"


class NullIndexDriver(AbstractIndexDriver):
    """Index driver exposing the null (do-nothing) :class:`Index`."""

    @staticmethod
    def connect_to_index(config, application_name=None, validate_connection=True):
        # Delegates to Index.from_config, which ignores its arguments.
        return Index.from_config(config, application_name, validate_connection)

    @staticmethod
    def metadata_type_from_doc(definition: dict) -> MetadataType:
        """
        Construct an unpersisted MetadataType from its dictionary definition.

        :param definition:
        """
        MetadataType.validate(definition)  # type: ignore
        search_fields = Index.get_dataset_fields(definition)
        return MetadataType(definition, dataset_search_fields=search_fields)


def index_driver_init():
    """Entry point used by the index-driver plugin machinery."""
    return NullIndexDriver()