Responding to Damien's comments.
SpacemanPaul authored and omad committed Feb 27, 2023
1 parent 4e5cd7d commit e58edce
Showing 11 changed files with 31 additions and 30 deletions.
2 changes: 1 addition & 1 deletion datacube/drivers/postgis/_api.py
@@ -1160,7 +1160,7 @@ def list_users(self):
             order by group_role.oid asc, user_role.oid asc;
             """))
         for row in result:
-            yield _core.from_pg_role(row._mapping['role_name']), row._mapping['user_name'], row._mapping['description']
+            yield _core.from_pg_role(row.role_name), row.user_name, row.description
 
     def create_user(self, username, password, role, description=None):
         pg_role = _core.to_pg_role(role)
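For context, this `_mapping` → attribute change leans on SQLAlchemy 1.4's `Row` objects supporting both the `_mapping` dictionary view and named-tuple attribute access, so the two spellings are equivalent. A minimal sketch, assuming an in-memory SQLite engine and made-up column names (not datacube's schema):

# Minimal sketch of the two Row access styles (SQLAlchemy 1.4+).
# The engine URL and column names are illustrative placeholders.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as conn:
    result = conn.execute(text("select 'odc_user' as role_name, 'alice' as user_name"))
    for row in result:
        # Dictionary-style access through the _mapping view (the old spelling)...
        assert row._mapping['role_name'] == 'odc_user'
        # ...and the equivalent named-tuple attribute access used after this commit.
        assert row.role_name == 'odc_user'
        assert row.user_name == 'alice'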
2 changes: 1 addition & 1 deletion datacube/drivers/postgis/_connections.py
@@ -136,7 +136,7 @@ def _create_engine(url, application_name=None, iam_rds_auth=False, iam_rds_timeo
         # than assuming it's still open. Allows servers to close idle connections without clients
         # getting errors.
         pool_recycle=pool_timeout,
-        connect_args={'application_name': application_name}
+        connect_args={'application_name': application_name},
     )
 
     if iam_rds_auth:
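As background to this hunk: `pool_recycle` makes SQLAlchemy replace pooled connections older than the given number of seconds (which is what lets servers close idle connections without client errors), and `connect_args` passes `application_name` through to the PostgreSQL driver. A minimal sketch of the same construction, with a placeholder URL, timeout, and application name:

# Illustrative engine construction; the URL, timeout value, and application
# name are placeholders, not datacube defaults.
from sqlalchemy import create_engine

pool_timeout = 60  # seconds; pooled connections older than this are re-established
engine = create_engine(
    "postgresql+psycopg2://user:password@localhost/datacube",
    pool_recycle=pool_timeout,
    connect_args={'application_name': 'my-odc-client'},
)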
7 changes: 4 additions & 3 deletions datacube/drivers/postgis/_core.py
@@ -94,7 +94,8 @@ def ensure_db(engine, with_permissions=True):
     if not has_schema(engine):
         is_new = True
         try:
-            c.execute(text('begin'))
+            # TODO: Switch to SQLAlchemy-2.0/Future style connections and transactions.
+            sqla_txn = c.begin()
             if with_permissions:
                 # Switch to 'odc_admin', so that all items are owned by them.
                 c.execute(text('set role odc_admin'))
@@ -107,10 +108,10 @@ def ensure_db(engine, with_permissions=True):
             orm_registry.metadata.create_all(c, tables=ALL_STATIC_TABLES)
             _LOG.info("Creating triggers.")
             install_timestamp_trigger(c)
-            c.execute(text('commit'))
+            sqla_txn.commit()
         except:  # noqa: E722
             _LOG.error("Unhandled SQLAlchemy error.")
-            c.execute(text('rollback'))
+            sqla_txn.rollback()
             raise
         finally:
             if with_permissions:
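The `ensure_db` change swaps literal `begin`/`commit`/`rollback` statements for SQLAlchemy's `Connection.begin()`, which returns a `Transaction` object that is committed or rolled back explicitly. A minimal sketch of that pattern, using an in-memory SQLite engine and illustrative DDL rather than datacube's schema:

# Minimal sketch of explicit Transaction objects (SQLAlchemy 1.4 style).
# The engine URL and statements are illustrative placeholders.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as c:
    sqla_txn = c.begin()       # replaces c.execute(text('begin'))
    try:
        c.execute(text("create table example (id integer primary key)"))
        c.execute(text("insert into example values (1)"))
        sqla_txn.commit()      # replaces c.execute(text('commit'))
    except Exception:
        sqla_txn.rollback()    # replaces c.execute(text('rollback'))
        raise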
16 changes: 8 additions & 8 deletions datacube/drivers/postgres/_api.py
@@ -954,11 +954,11 @@ def check_dynamic_fields(self, concurrently=False, rebuild_views=False, rebuild_
         search_fields = {}
 
         for metadata_type in self.get_all_metadata_types():
-            fields = get_dataset_fields(metadata_type._mapping['definition'])
-            search_fields[metadata_type._mapping['id']] = fields
+            fields = get_dataset_fields(metadata_type.definition)
+            search_fields[metadata_type.id] = fields
             self._setup_metadata_type_fields(
-                metadata_type._mapping['id'],
-                metadata_type._mapping['name'],
+                metadata_type.id,
+                metadata_type.name,
                 fields,
                 rebuild_indexes=rebuild_indexes,
                 rebuild_views=rebuild_views,
@@ -978,10 +978,10 @@ def _setup_metadata_type_fields(self, id_, name, fields,
 
         for product in self._get_products_for_metadata_type(id_):
             self._setup_product_fields(
-                product._mapping['id'],
-                product._mapping['name'],
+                product.id,
+                product.name,
                 fields,
-                product._mapping['definition']['metadata'],
+                product.definition['metadata'],
                 rebuild_view=rebuild_views,
                 rebuild_indexes=rebuild_indexes,
                 concurrently=concurrently
@@ -1140,7 +1140,7 @@ def list_users(self):
             order by group_role.oid asc, user_role.oid asc;
             """))
         for row in result:
-            yield _core.from_pg_role(row._mapping['role_name']), row._mapping['user_name'], row._mapping['description']
+            yield _core.from_pg_role(row.role_name), row.user_name, row.description
 
     def create_user(self, username, password, role, description=None):
         pg_role = _core.to_pg_role(role)
6 changes: 3 additions & 3 deletions datacube/drivers/postgres/_core.py
@@ -100,7 +100,7 @@ def ensure_db(engine, with_permissions=True):
     if not has_schema(engine):
         is_new = True
         try:
-            c.execute(text('begin'))
+            sqla_txn = c.begin()
             if with_permissions:
                 # Switch to 'agdc_admin', so that all items are owned by them.
                 c.execute(text('set role agdc_admin'))
@@ -113,9 +113,9 @@
             install_timestamp_trigger(c)
             _LOG.info("Creating added column.")
             install_added_column(c)
-            c.execute(text('commit'))
+            sqla_txn.commit()
         except:  # noqa: E722
-            c.execute(text('rollback'))
+            sqla_txn.rollback()
             raise
         finally:
             if with_permissions:
6 changes: 3 additions & 3 deletions datacube/index/postgis/_datasets.py
@@ -67,7 +67,7 @@ def get(self, id_: Union[str, UUID], include_sources=False):
                 dataset = connection.get_dataset(id_)
                 return self._make(dataset, full_info=True) if dataset else None
 
-            datasets = {result._mapping['id']: (self._make(result, full_info=True), result)
+            datasets = {result.id: (self._make(result, full_info=True), result)
                         for result in connection.get_dataset_sources(id_)}
 
         if not datasets:
@@ -77,11 +77,11 @@ def get(self, id_: Union[str, UUID], include_sources=False):
         for dataset, result in datasets.values():
             dataset.metadata.sources = {
                 classifier: datasets[source][0].metadata_doc
-                for source, classifier in zip(result._mapping['sources'], result._mapping['classes']) if source
+                for source, classifier in zip(result.sources, result.classes) if source
             }
             dataset.sources = {
                 classifier: datasets[source][0]
-                for source, classifier in zip(result._mapping['sources'], result._mapping['classes']) if source
+                for source, classifier in zip(result.sources, result.classes) if source
             }
         return datasets[id_][0]
 
2 changes: 1 addition & 1 deletion datacube/index/postgis/_metadata_types.py
@@ -227,7 +227,7 @@ def _make_from_query_row(self, query_row):
         """
         :rtype: datacube.model.MetadataType
         """
-        return self._make(query_row._mapping['definition'], query_row._mapping['id'])
+        return self._make(query_row.definition, query_row.id)
 
     def _make(self, definition, id_=None):
         """
6 changes: 3 additions & 3 deletions datacube/index/postgis/_products.py
@@ -348,7 +348,7 @@ def _make_many(self, query_rows):
 
     def _make(self, query_row) -> Product:
         return Product(
-            definition=query_row._mapping['definition'],
-            metadata_type=cast(MetadataType, self.metadata_type_resource.get(query_row._mapping['metadata_type_ref'])),
-            id_=query_row._mapping['id'],
+            definition=query_row.definition,
+            metadata_type=cast(MetadataType, self.metadata_type_resource.get(query_row.metadata_type_ref)),
+            id_=query_row.id,
         )
6 changes: 3 additions & 3 deletions datacube/index/postgres/_datasets.py
@@ -62,7 +62,7 @@ def get(self, id_: Union[str, UUID], include_sources=False):
                 dataset = connection.get_dataset(id_)
                 return self._make(dataset, full_info=True) if dataset else None
 
-            datasets = {result._mapping['id']: (self._make(result, full_info=True), result)
+            datasets = {result.id: (self._make(result, full_info=True), result)
                         for result in connection.get_dataset_sources(id_)}
 
         if not datasets:
@@ -72,11 +72,11 @@ def get(self, id_: Union[str, UUID], include_sources=False):
         for dataset, result in datasets.values():
             dataset.metadata.sources = {
                 classifier: datasets[source][0].metadata_doc
-                for source, classifier in zip(result._mapping['sources'], result._mapping['classes']) if source
+                for source, classifier in zip(result.sources, result.classes) if source
             }
             dataset.sources = {
                 classifier: datasets[source][0]
-                for source, classifier in zip(result._mapping['sources'], result._mapping['classes']) if source
+                for source, classifier in zip(result.sources, result.classes) if source
             }
         return datasets[id_][0]
 
2 changes: 1 addition & 1 deletion datacube/index/postgres/_metadata_types.py
@@ -226,7 +226,7 @@ def _make_from_query_row(self, query_row):
         """
         :rtype: datacube.model.MetadataType
         """
-        return self._make(query_row._mapping['definition'], query_row._mapping['id'])
+        return self._make(query_row.definition, query_row.id)
 
     def _make(self, definition, id_=None):
         """
6 changes: 3 additions & 3 deletions datacube/index/postgres/_products.py
@@ -339,7 +339,7 @@ def _make_many(self, query_rows):
 
     def _make(self, query_row) -> Product:
         return Product(
-            definition=query_row._mapping['definition'],
-            metadata_type=cast(MetadataType, self.metadata_type_resource.get(query_row._mapping['metadata_type_ref'])),
-            id_=query_row._mapping['id'],
+            definition=query_row.definition,
+            metadata_type=cast(MetadataType, self.metadata_type_resource.get(query_row.metadata_type_ref)),
+            id_=query_row.id,
         )
