Skip to content

Commit

Permalink
Moved row inserts in WorkingCopyGPKG into a separate function.
Browse files Browse the repository at this point in the history
  • Loading branch information
OrangeOranges committed Sep 1, 2020
1 parent b078e66 commit 61a0bdd
Showing 1 changed file with 46 additions and 9 deletions.
55 changes: 46 additions & 9 deletions sno/working_copy.py
Expand Up @@ -5,6 +5,7 @@
import time
from datetime import datetime
from pathlib import Path
from enum import Enum

import pygit2
from osgeo import gdal
Expand Down Expand Up @@ -90,6 +91,26 @@ def __str__(self):
return f"Working Copy is tree {self.working_copy_tree_id}; expecting {self.match_tree_id}"


class SQLCommand(Enum):
    """The SQL insert verbs accepted by sql_insert_dict()."""

    INSERT = "INSERT"
    INSERT_OR_REPLACE = "INSERT OR REPLACE"


def sql_insert_dict(dbcur, sql_command, table_name, row_dict):
    """
    Inserts a single row into a database table.

    dbcur - database cursor the statement is executed on.
    sql_command - a member of SQLCommand (INSERT or INSERT OR REPLACE).
    table_name - destination table. NOTE(review): interpolated unescaped into
        the SQL, so it must come from trusted code, never user input.
    row_dict - non-empty mapping of column name -> value.
    """
    if not row_dict:
        # zip(*{}.items()) would raise a cryptic "not enough values to unpack";
        # fail with an explicit message instead.
        raise ValueError(f"Can't insert an empty row into {table_name}")
    keys, values = zip(*row_dict.items())
    # Column names are identifier-quoted; values are bound as ? parameters.
    sql = f"""
        {sql_command.value} INTO {table_name}
            ({','.join([gpkg.ident(k) for k in keys])})
        VALUES
            ({','.join(['?'] * len(keys))});
        """
    dbcur.execute(sql, values)


class WorkingCopyGPKG(WorkingCopy):
def __init__(self, repo, path):
self.repo = repo
Expand Down Expand Up @@ -212,7 +233,7 @@ def _get_columns(self, dataset):
pk_field = col["name"]
# TODO: Should INTEGER PRIMARY KEYs ever be non-AUTOINCREMENT?
# See https://github.com/koordinates/sno/issues/188
if col['type'] == "INTEGER":
if col["type"] == "INTEGER":
col_spec += " AUTOINCREMENT"
if col["notnull"]:
col_spec += " NOT NULL"
Expand Down Expand Up @@ -313,7 +334,7 @@ def write_meta(self, dataset):
identifier_prefix = f"{dataset.table_name}: "
if not gpkg_contents["identifier"].startswith(identifier_prefix):
gpkg_contents["identifier"] = (
identifier_prefix + gpkg_contents['identifier']
identifier_prefix + gpkg_contents["identifier"]
)

gpkg_geometry_columns = dataset.get_gpkg_meta_item("gpkg_geometry_columns")
Expand All @@ -323,6 +344,10 @@ def write_meta(self, dataset):
dbcur = db.cursor()
# Update GeoPackage core tables
for o in gpkg_spatial_ref_sys:
sql_insert_dict(
dbcur, SQLCommand.INSERT_OR_REPLACE, "gpkg_spatial_ref_sys", o
)
'''
keys, values = zip(*o.items())
sql = f"""
INSERT OR REPLACE INTO gpkg_spatial_ref_sys
Expand All @@ -331,21 +356,32 @@ def write_meta(self, dataset):
({','.join(['?'] * len(keys))});
"""
dbcur.execute(sql, values)
'''

keys, values = zip(*gpkg_contents.items())
# our repo copy doesn't include all fields from gpkg_contents
# but the default value for last_change (now), and NULL for {min_x,max_x,min_y,max_y}
# should deal with the remaining fields

sql_insert_dict(dbcur, SQLCommand.INSERT, "gpkg_contents", gpkg_contents)
'''
keys, values = zip(*gpkg_contents.items())
sql = f"""
INSERT INTO gpkg_contents
({','.join([gpkg.ident(k) for k in keys])})
VALUES
({','.join(['?'] * len(keys))});
"""
dbcur.execute(sql, values)
'''

if gpkg_geometry_columns:
sql_insert_dict(
dbcur,
SQLCommand.INSERT,
"gpkg_geometry_columns",
gpkg_geometry_columns,
)
'''
keys, values = zip(*gpkg_geometry_columns.items())
sql = f"""
INSERT INTO gpkg_geometry_columns
Expand All @@ -354,6 +390,7 @@ def write_meta(self, dataset):
({','.join(['?']*len(keys))});
"""
dbcur.execute(sql, values)
'''

gpkg_metadata = dataset.get_gpkg_meta_item("gpkg_metadata")
gpkg_metadata_reference = dataset.get_gpkg_meta_item(
Expand Down Expand Up @@ -1025,13 +1062,13 @@ def _apply_meta_title(self, table_name, src_value, dest_value, db, dbcur):
# TODO - find a better way to roundtrip titles while keeping them unique
identifier = f"{table_name}: {dest_value}"
dbcur.execute(
'''UPDATE gpkg_contents SET identifier = ? WHERE table_name = ?''',
"""UPDATE gpkg_contents SET identifier = ? WHERE table_name = ?""",
(identifier, table_name),
)

def _apply_meta_description(self, table_name, src_value, dest_value, db, dbcur):
dbcur.execute(
'''UPDATE gpkg_contents SET description = ? WHERE table_name = ?''',
"""UPDATE gpkg_contents SET description = ? WHERE table_name = ?""",
(dest_value, table_name),
)

Expand All @@ -1050,7 +1087,7 @@ def _apply_meta_schema_json(self, table_name, src_value, dest_value, db, dbcur):
src_name = src_schema[col_id].name
dest_name = dest_schema[col_id].name
dbcur.execute(
f'''ALTER TABLE {gpkg.ident(table_name)} RENAME COLUMN {gpkg.ident(src_name)} TO {gpkg.ident(dest_name)}'''
f"""ALTER TABLE {gpkg.ident(table_name)} RENAME COLUMN {gpkg.ident(src_name)} TO {gpkg.ident(dest_name)}"""
)

def _apply_meta_metadata_dataset_json(
Expand Down Expand Up @@ -1078,7 +1115,7 @@ def _apply_meta_diff(self, dataset, meta_diff, db, dbcur):
table_name = dataset.table_name
for key in meta_diff:
func_key = key.replace("/", "_").replace(".", "_")
func = getattr(self, f'_apply_meta_{func_key}')
func = getattr(self, f"_apply_meta_{func_key}")
delta = meta_diff[key]
func(table_name, delta.old_value, delta.new_value, db, dbcur)

Expand Down Expand Up @@ -1232,7 +1269,7 @@ def reset(
structural_changes = table_inserts | table_deletes
if track_changes_as_dirty and structural_changes:
# We don't yet support tracking changes as dirty if we delete, create, or rewrite an entire table.
structural_changes_text = '\n'.join(structural_changes)
structural_changes_text = "\n".join(structural_changes)
raise NotYetImplemented(
"Sorry, this operation is not possible when there are structural changes."
f"Structural changes are affecting:\n{structural_changes_text}"
Expand Down

0 comments on commit 61a0bdd

Please sign in to comment.