Skip to content

Commit

Permalink
Merge 549e231 into 8a81b2b
Browse files Browse the repository at this point in the history
  • Loading branch information
omad committed Dec 10, 2019
2 parents 8a81b2b + 549e231 commit c348251
Show file tree
Hide file tree
Showing 11 changed files with 192 additions and 139 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -1,10 +1,10 @@
repos:
- repo: https://github.com/adrienverge/yamllint.git
-    sha: v1.15.0
+    rev: v1.19.0
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v2.1.0
+    rev: v2.4.0
hooks:
- id: flake8
- id: end-of-file-fixer
Expand All @@ -18,6 +18,6 @@ repos:
- id: check-added-large-files
- id: check-merge-conflict
- repo: https://github.com/pre-commit/mirrors-pylint
-    rev: 'v2.3.1'  # Use the sha / tag you want to point at
+    rev: v2.4.4  # Use the sha / tag you want to point at
hooks:
- id: pylint
174 changes: 91 additions & 83 deletions datacube/drivers/postgres/_api.py

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions datacube/drivers/postgres/_dynamic.py
Expand Up @@ -9,7 +9,7 @@
from sqlalchemy import select

from ._core import schema_qualified
-from ._schema import DATASET, DATASET_TYPE, METADATA_TYPE
+from ._schema import DATASET, PRODUCT, METADATA_TYPE
from .sql import pg_exists, CreateView

_LOG = logging.getLogger(__name__)
Expand Down Expand Up @@ -51,7 +51,7 @@ def _ensure_view(conn, fields, name, replace_existing, where_expression):
[field.alchemy_expression.label(field.name) for field in fields.values()
if not field.affects_row_selection]
).select_from(
-        DATASET.join(DATASET_TYPE).join(METADATA_TYPE)
+        DATASET.join(PRODUCT).join(METADATA_TYPE)
).where(where_expression)
)
)
Expand Down
4 changes: 2 additions & 2 deletions datacube/drivers/postgres/_schema.py
Expand Up @@ -31,7 +31,7 @@
CheckConstraint(r"name ~* '^\w+$'", name='alphanumeric_name'),
)

-DATASET_TYPE = Table(
+PRODUCT = Table(
'dataset_type', _core.METADATA,
Column('id', SmallInteger, primary_key=True, autoincrement=True),

Expand Down Expand Up @@ -60,7 +60,7 @@
Column('id', postgres.UUID(as_uuid=True), primary_key=True),

Column('metadata_type_ref', None, ForeignKey(METADATA_TYPE.c.id), nullable=False),
-    Column('dataset_type_ref', None, ForeignKey(DATASET_TYPE.c.id), index=True, nullable=False),
+    Column('dataset_type_ref', None, ForeignKey(PRODUCT.c.id), index=True, nullable=False),

Column('metadata', postgres.JSONB, index=False, nullable=False),

Expand Down
10 changes: 5 additions & 5 deletions datacube/index/_products.py
Expand Up @@ -98,7 +98,7 @@ def add(self, product, allow_table_lock=False):
metadata_type = self.metadata_type_resource.add(product.metadata_type,
allow_table_lock=allow_table_lock)
with self._db.connect() as connection:
-            connection.insert_dataset_type(
+            connection.insert_product(
name=product.name,
metadata=product.metadata_doc,
metadata_type_id=metadata_type.id,
Expand Down Expand Up @@ -207,7 +207,7 @@ def update(self, product, allow_unsafe_updates=False, allow_table_lock=False):
# TODO: should we add metadata type here?
assert metadata_type, "TODO: should we add metadata type here?"
with self._db.connect() as conn:
-            conn.update_dataset_type(
+            conn.update_product(
name=product.name,
metadata=product.metadata_doc,
metadata_type_id=metadata_type.id,
Expand Down Expand Up @@ -279,7 +279,7 @@ def get_by_name(self, name):
# pylint: disable=method-hidden
def get_unsafe(self, id_): # type: ignore
with self._db.connect() as connection:
-            result = connection.get_dataset_type(id_)
+            result = connection.get_product(id_)
if not result:
raise KeyError('"%s" is not a valid Product id' % id_)
return self._make(result)
Expand All @@ -288,7 +288,7 @@ def get_unsafe(self, id_): # type: ignore
# pylint: disable=method-hidden
def get_by_name_unsafe(self, name): # type: ignore
with self._db.connect() as connection:
-            result = connection.get_dataset_type_by_name(name)
+            result = connection.get_product_by_name(name)
if not result:
raise KeyError('"%s" is not a valid Product name' % name)
return self._make(result)
Expand Down Expand Up @@ -366,7 +366,7 @@ def get_all(self) -> Iterable[DatasetType]:
Retrieve all Products
"""
with self._db.connect() as connection:
-            return (self._make(record) for record in connection.get_all_dataset_types())
+            return (self._make(record) for record in connection.get_all_products())

def _make_many(self, query_rows):
return (self._make(c) for c in query_rows)
Expand Down
11 changes: 3 additions & 8 deletions integration_tests/conftest.py
Expand Up @@ -123,20 +123,15 @@ def uninitialised_postgres_db(local_config, request):


@pytest.fixture
-def index(local_config, uninitialised_postgres_db):
-    """
-    :type initialised_postgres_db: datacube.drivers.postgres._connections.PostgresDb
-    """
+def index(local_config,
+          uninitialised_postgres_db: PostgresDb):
index = index_connect(local_config, validate_connection=False)
index.init_db()
return index


@pytest.fixture
-def index_empty(local_config, uninitialised_postgres_db):
-    """
-    :type initialised_postgres_db: datacube.drivers.postgres._connections.PostgresDb
-    """
+def index_empty(local_config, uninitialised_postgres_db: PostgresDb):
index = index_connect(local_config, validate_connection=False)
index.init_db(with_default_types=False)
return index
Expand Down
2 changes: 1 addition & 1 deletion integration_tests/test_config_tool.py
Expand Up @@ -21,7 +21,7 @@

def _dataset_type_count(db):
with db.connect() as connection:
-        return len(list(connection.get_all_dataset_types()))
+        return len(list(connection.get_all_products()))


def test_add_example_dataset_types(clirunner, initialised_postgres_db, default_metadata_type):
Expand Down
111 changes: 77 additions & 34 deletions pylintrc
Expand Up @@ -4,40 +4,83 @@ ignore=ndexpr
extension-pkg-whitelist=SharedArray,zstd,ciso8601

[MESSAGES CONTROL]
-enable=python3
-
-disable=no-self-use,
-        comprehension-escape,
-        star-args,
-        duplicate-code,
-        unused-argument,
-        missing-docstring,
-        no-member,
-        unused-variable,
-        unused-import,
-        locally-disabled,
-        no-name-in-module,
-        fixme,
-        no-value-for-parameter,
-        file-ignored,
-        wrong-import-order,
-        ungrouped-imports,
-        len-as-condition,
-        no-else-return,
-        import-error,
-        keyword-arg-before-vararg,  # TODO: investigate differences between python versions for this
-        round-builtin,  # py2.7
-        map-builtin-not-iterating,  # py2.7
-        range-builtin-not-iterating,  # py2.7
-        zip-builtin-not-iterating,  # py2.7
-        nonzero-method,  # py2.7
-        eq-without-hash,  # py2.7
-        useless-object-inheritance,
-        no-absolute-import,
-        useless-return,
-        old-division,
-        too-many-function-args,
-        consider-using-in,
+disable=all

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=print-statement,
parameter-unpacking,
unpacking-in-except,
old-raise-syntax,
backtick,
long-suffix,
old-ne-operator,
old-octal-literal,
import-star-module-level,
non-ascii-bytes-literal,
invalid-unicode-literal,
c-extension-no-member,
apply-builtin,
basestring-builtin,
buffer-builtin,
cmp-builtin,
coerce-builtin,
execfile-builtin,
file-builtin,
long-builtin,
raw_input-builtin,
reduce-builtin,
standarderror-builtin,
unicode-builtin,
xrange-builtin,
coerce-method,
delslice-method,
getslice-method,
setslice-method,
dict-iter-method,
dict-view-method,
next-method-called,
metaclass-assignment,
indexing-exception,
raising-string,
reload-builtin,
oct-method,
hex-method,
# nonzero-method,
cmp-method,
input-builtin,
# round-builtin,
intern-builtin,
unichr-builtin,
# map-builtin-not-iterating,
# zip-builtin-not-iterating,
# range-builtin-not-iterating,
filter-builtin-not-iterating,
using-cmp-argument,
# eq-without-hash,
div-method,
idiv-method,
rdiv-method,
exception-message-attribute,
invalid-str-codec,
sys-max-int,
bad-python3-import,
deprecated-string-function,
deprecated-str-translate-call,
deprecated-itertools-function,
deprecated-types-field,
next-method-defined,
dict-items-not-iterating,
dict-keys-not-iterating,
dict-values-not-iterating,
deprecated-operator-function,
deprecated-urllib-function,
xreadlines-attribute,
deprecated-sys-function,
exception-escape,
useless-import-alias



Expand Down
4 changes: 4 additions & 0 deletions setup.cfg
Expand Up @@ -13,3 +13,7 @@ universal=1

[flake8]
max-line-length = 120
ignore =
# E711: comparison to None should be 'if cond is None:'
# SQLAlchemy requires using 'if cond == None:'
E711
4 changes: 3 additions & 1 deletion tests/test_driver.py
Expand Up @@ -51,7 +51,7 @@ def test_reader_drivers():
assert isinstance(available_drivers, list)

pytest.importorskip('datacube.drivers.s3.storage.s3aio.s3lio')
-    assert 's3aio' not in available_drivers  # TODO: remove once s3aio moved out of legacy
+    assert 's3aio' not in available_drivers


def test_writer_drivers():
Expand All @@ -63,6 +63,8 @@ def test_writer_drivers():
def test_index_drivers():
available_drivers = index_drivers()
assert 'default' in available_drivers

pytest.importorskip('datacube.drivers.s3.storage.s3aio.s3lio')
assert 's3aio_index' in available_drivers


Expand Down
1 change: 1 addition & 0 deletions tests/test_utils_rio.py
Expand Up @@ -71,6 +71,7 @@ def test_rio_env_aws():
assert get_rio_env() == {}


@pytest.mark.xfail(reason='This test fails if a default region is set in `~/.aws/config`')
@mock.patch('datacube.utils.aws.botocore_default_region',
return_value=None)
def test_rio_env_aws_auto_region(*mocks):
Expand Down

0 comments on commit c348251

Please sign in to comment.