Move add_dataset tests into test_add_dataset.py
olsen232 committed Feb 8, 2024
1 parent 90031d5 commit e4b4de0
Showing 2 changed files with 192 additions and 187 deletions.
tests/test_add_dataset.py: 192 additions, 0 deletions
@@ -0,0 +1,192 @@
from typing import Any, Dict
import json

from kart.exceptions import (
NO_CHANGES,
NotFound,
InvalidOperation,
)
from kart.repo import KartRepo


def test_add_dataset_json_output__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message = "test commit"
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)
sess.execute(
f"""INSERT INTO {new_table} (test_id, field1, field2)
VALUES
(1, 'value1a', 'value1b'),
(2, 'value2a', 'value2b'),
(3, 'value3a', 'value3b'),
(4, 'value4a', 'value4b'),
(5, 'value5a', 'value5b');"""
)

r = cli_runner.invoke(
["add-dataset", new_table, "-m", message, "-o", "json"],
env={
"GIT_COMMITTER_DATE": "2010-1-1T00:00:00Z",
"GIT_AUTHOR_EMAIL": "user@example.com",
"GIT_COMMITTER_EMAIL": "committer@example.com",
},
)

assert r.exit_code == 0, r

        expected_output: Dict[str, Dict[str, Any]] = {
"kart.commit/v1": {
"commit": str(repo.head.target),
"abbrevCommit": str(repo.head.target)[:7],
"author": "user@example.com",
"committer": "committer@example.com",
"branch": "main",
"message": message,
"changes": {
new_table: {"meta": {"inserts": 1}, "feature": {"inserts": 5}}
},
"commitTime": "2010-01-01T00:00:00Z",
"commitTimeOffset": "+00:00",
}
}

assert json.loads(r.stdout) == expected_output


def test_add_dataset_text_output__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message = "test commit"
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)
sess.execute(
f"""INSERT INTO {new_table} (test_id, field1, field2)
VALUES
(1, 'value1a', 'value1b'),
(2, 'value2a', 'value2b'),
(3, 'value3a', 'value3b'),
(4, 'value4a', 'value4b'),
(5, 'value5a', 'value5b');"""
)

r = cli_runner.invoke(
["add-dataset", new_table, "-m", message, "-o", "text"],
env={
"GIT_COMMITTER_DATE": "2010-1-1T00:00:00Z",
},
)

assert r.exit_code == 0, r

diff = {new_table: {"meta": {"inserts": 1}, "feature": {"inserts": 5}}}

flat_diff = ""
for table, table_diff in diff.items():
flat_diff += f" {table}:\n"
for section, section_diff in table_diff.items():
for op, count in section_diff.items():
flat_diff += f" {section}:\n"
flat_diff += f" {count} {op}\n"

expected_output = f"[main {str(repo.head.target)[:7]}] {message}\n{flat_diff} Date: Fri Jan 1 00:00:00 2010 +0000\n"

assert r.stdout == expected_output


def test_add_dataset_nonexistent__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
wrong_table = "wrong_test_table"
message = "test commit"
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)

try:
cli_runner.invoke(["add-dataset", wrong_table, "-m", message, "-o", "text"])
except NotFound as e:
assert (
str(e)
== f"""Table '{wrong_table}' is not found\n\nTry running 'kart status --list-untracked-tables'\n"""
)
assert e.exit_code == NO_CHANGES


def test_add_dataset_twice__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message1 = "test commit1"
message2 = "test commit2"

with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)

try:
cli_runner.invoke(["add-dataset", new_table, "-m", message1, "-o", "text"])
cli_runner.invoke(["add-dataset", new_table, "-m", message2, "-o", "text"])
except InvalidOperation as e:
assert str(e) == f"Table '{new_table}' is already tracked\n"
assert e.exit_code == NO_CHANGES


def test_add_dataset_triggers__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message = "test commit"

# Test how diff handles an existing table with triggers
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)

# Test how diff handles a new table after add-dataset
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)
sess.execute(
f"""INSERT INTO {new_table} (test_id, field1, field2)
VALUES
(1, 'value1a', 'value1b'),
(2, 'value2a', 'value2b'),
(3, 'value3a', 'value3b'),
(4, 'value4a', 'value4b'),
(5, 'value5a', 'value5b');"""
)

r = cli_runner.invoke(["add-dataset", new_table, "-m", message, "-o", "text"])
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""DELETE FROM {new_table}
WHERE test_id = 1;"""
)
r = cli_runner.invoke(["diff", "-o", "json"])

output = json.loads(r.stdout)

expected = {
"test_table": {
"feature": [
{
"-": {
"test_id": 1,
"field1": "value1a",
"field2": "value1b",
}
},
]
}
}

assert output["kart.diff/v1+hexwkb"] == expected
tests/test_commit.py: 0 additions, 187 deletions
@@ -1,4 +1,3 @@
from ast import Dict
import json
import re
import shlex
@@ -13,9 +12,6 @@
NO_DATA,
NO_REPOSITORY,
SCHEMA_VIOLATION,
NO_CHANGES,
NotFound,
InvalidOperation,
)
from kart.commit import fallback_editor
from kart.repo import KartRepo
@@ -330,186 +326,3 @@ def test_commit_schema_violation(cli_runner, data_working_copy):
"nz_pa_points_topo_150k: In column 'macronated' value 'kinda' exceeds limit of 1 characters",
"Error: Schema violation - values do not match schema",
]


def test_add_dataset_json_output__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message = "test commit"
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)
sess.execute(
f"""INSERT INTO {new_table} (test_id, field1, field2)
VALUES
(1, 'value1a', 'value1b'),
(2, 'value2a', 'value2b'),
(3, 'value3a', 'value3b'),
(4, 'value4a', 'value4b'),
(5, 'value5a', 'value5b');"""
)

r = cli_runner.invoke(
["add-dataset", new_table, "-m", message, "-o", "json"],
env={
"GIT_COMMITTER_DATE": "2010-1-1T00:00:00Z",
"GIT_AUTHOR_EMAIL": "user@example.com",
"GIT_COMMITTER_EMAIL": "committer@example.com",
},
)

assert r.exit_code == 0, r

expected_output: Dict[str, Dict[str, any]] = {
"kart.commit/v1": {
"commit": str(repo.head.target),
"abbrevCommit": str(repo.head.target)[:7],
"author": "user@example.com",
"committer": "committer@example.com",
"branch": "main",
"message": message,
"changes": {
new_table: {"meta": {"inserts": 1}, "feature": {"inserts": 5}}
},
"commitTime": "2010-01-01T00:00:00Z",
"commitTimeOffset": "+00:00",
}
}

assert json.loads(r.stdout) == expected_output


def test_add_dataset_text_output__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message = "test commit"
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)
sess.execute(
f"""INSERT INTO {new_table} (test_id, field1, field2)
VALUES
(1, 'value1a', 'value1b'),
(2, 'value2a', 'value2b'),
(3, 'value3a', 'value3b'),
(4, 'value4a', 'value4b'),
(5, 'value5a', 'value5b');"""
)

r = cli_runner.invoke(
["add-dataset", new_table, "-m", message, "-o", "text"],
env={
"GIT_COMMITTER_DATE": "2010-1-1T00:00:00Z",
},
)

assert r.exit_code == 0, r

diff = {new_table: {"meta": {"inserts": 1}, "feature": {"inserts": 5}}}

flat_diff = ""
for table, table_diff in diff.items():
flat_diff += f" {table}:\n"
for section, section_diff in table_diff.items():
for op, count in section_diff.items():
flat_diff += f" {section}:\n"
flat_diff += f" {count} {op}\n"

expected_output = f"[main {str(repo.head.target)[:7]}] {message}\n{flat_diff} Date: Fri Jan 1 00:00:00 2010 +0000\n"

assert r.stdout == expected_output


def test_add_dataset_nonexistent__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
wrong_table = "wrong_test_table"
message = "test commit"
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)

try:
cli_runner.invoke(["add-dataset", wrong_table, "-m", message, "-o", "text"])
except NotFound as e:
assert (
str(e)
== f"""Table '{wrong_table}' is not found\n\nTry running 'kart status --list-untracked-tables'\n"""
)
assert e.exit_code == NO_CHANGES


def test_add_dataset_twice__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message1 = "test commit1"
message2 = "test commit2"

with data_working_copy("points") as (path, wc):
repo = KartRepo(path)
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)

try:
cli_runner.invoke(["add-dataset", new_table, "-m", message1, "-o", "text"])
cli_runner.invoke(["add-dataset", new_table, "-m", message2, "-o", "text"])
except InvalidOperation as e:
assert str(e) == f"Table '{new_table}' is already tracked\n"
assert e.exit_code == NO_CHANGES


def test_add_dataset_triggers__gpkg(cli_runner, data_working_copy):
new_table = "test_table"
message = "test commit"

# Test how diff handles an existing table with triggers
with data_working_copy("points") as (path, wc):
repo = KartRepo(path)

# Test how diff handles a new table after add-dataset
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""CREATE TABLE IF NOT EXISTS {new_table} (test_id int primary key, field1 text, field2 text);"""
)
sess.execute(
f"""INSERT INTO {new_table} (test_id, field1, field2)
VALUES
(1, 'value1a', 'value1b'),
(2, 'value2a', 'value2b'),
(3, 'value3a', 'value3b'),
(4, 'value4a', 'value4b'),
(5, 'value5a', 'value5b');"""
)

r = cli_runner.invoke(["add-dataset", new_table, "-m", message, "-o", "text"])
with repo.working_copy.tabular.session() as sess:
sess.execute(
f"""DELETE FROM {new_table}
WHERE test_id = 1;"""
)
r = cli_runner.invoke(["diff", "-o", "json"])

output = json.loads(r.stdout)

expected = {
"test_table": {
"feature": [
{
"-": {
"test_id": 1,
"field1": "value1a",
"field2": "value1b",
}
},
]
}
}

assert output["kart.diff/v1+hexwkb"] == expected
