Fix a bunch of warnings in the tests, refs #541
simonw committed May 8, 2023
1 parent 4fc2f12 commit a256d7d
Showing 4 changed files with 52 additions and 30 deletions.
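
The warnings referenced in #541 appear to be largely ResourceWarnings from unclosed file handles: the tests wrote fixture files with open(path, "w").write(...) and never closed them, and the memory command never closed the file it opened for each input path. A minimal sketch of the before/after pattern applied throughout this commit (the file name and data below are illustrative, not taken from the repository):

    import json

    data = [{"id": 1, "name": "Cleo"}]

    # Before: the file object returned by open() is never closed explicitly,
    # so pytest may report "ResourceWarning: unclosed file" when the object
    # is garbage collected.
    open("dogs.json", "w").write(json.dumps(data))

    # After: a context manager closes the handle deterministically.
    with open("dogs.json", "w") as fp:
        json.dump(data, fp)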
25 changes: 14 additions & 11 deletions sqlite_utils/cli.py
@@ -1839,40 +1839,43 @@ def memory(
     stem_counts = {}
     for i, path in enumerate(paths):
         # Path may have a :format suffix
+        fp = None
         if ":" in path and path.rsplit(":", 1)[-1].upper() in Format.__members__:
             path, suffix = path.rsplit(":", 1)
             format = Format[suffix.upper()]
         else:
             format = None
         if path in ("-", "stdin"):
-            csv_fp = sys.stdin.buffer
-            csv_table = "stdin"
+            fp = sys.stdin.buffer
+            file_table = "stdin"
         else:
-            csv_path = pathlib.Path(path)
-            stem = csv_path.stem
+            file_path = pathlib.Path(path)
+            stem = file_path.stem
             if stem_counts.get(stem):
-                csv_table = "{}_{}".format(stem, stem_counts[stem])
+                file_table = "{}_{}".format(stem, stem_counts[stem])
             else:
-                csv_table = stem
+                file_table = stem
             stem_counts[stem] = stem_counts.get(stem, 1) + 1
-            csv_fp = csv_path.open("rb")
-        rows, format_used = rows_from_file(csv_fp, format=format, encoding=encoding)
+            fp = file_path.open("rb")
+        rows, format_used = rows_from_file(fp, format=format, encoding=encoding)
         tracker = None
         if format_used in (Format.CSV, Format.TSV) and not no_detect_types:
             tracker = TypeTracker()
             rows = tracker.wrap(rows)
         if flatten:
             rows = (_flatten(row) for row in rows)
-        db[csv_table].insert_all(rows, alter=True)
+        db[file_table].insert_all(rows, alter=True)
         if tracker is not None:
-            db[csv_table].transform(types=tracker.types)
+            db[file_table].transform(types=tracker.types)
         # Add convenient t / t1 / t2 views
         view_names = ["t{}".format(i + 1)]
         if i == 0:
             view_names.append("t")
         for view_name in view_names:
             if not db[view_name].exists():
-                db.create_view(view_name, "select * from [{}]".format(csv_table))
+                db.create_view(view_name, "select * from [{}]".format(file_table))
+        if fp:
+            fp.close()
 
     if analyze:
         _analyze(db, tables=None, columns=None, save=False)
18 changes: 12 additions & 6 deletions tests/test_cli.py
@@ -13,6 +13,11 @@
 from .utils import collapse_whitespace
 
 
+def write_json(file_path, data):
+    with open(file_path, "w") as fp:
+        json.dump(data, fp)
+
+
 def _supports_pragma_function_list():
     db = Database(memory=True)
     try:
@@ -1016,7 +1021,7 @@ def test_upsert(db_path, tmpdir):
         {"id": 1, "name": "Cleo", "age": 4},
         {"id": 2, "name": "Nixie", "age": 4},
     ]
-    open(json_path, "w").write(json.dumps(insert_dogs))
+    write_json(json_path, insert_dogs)
     result = CliRunner().invoke(
         cli.cli,
         ["insert", db_path, "dogs", json_path, "--pk", "id"],
@@ -1029,7 +1034,7 @@ def test_upsert(db_path, tmpdir):
         {"id": 1, "age": 5},
         {"id": 2, "age": 5},
     ]
-    open(json_path, "w").write(json.dumps(upsert_dogs))
+    write_json(json_path, upsert_dogs)
     result = CliRunner().invoke(
         cli.cli,
         ["upsert", db_path, "dogs", json_path, "--pk", "id"],
@@ -1048,7 +1053,7 @@ def test_upsert_pk_required(db_path, tmpdir):
         {"id": 1, "name": "Cleo", "age": 4},
         {"id": 2, "name": "Nixie", "age": 4},
     ]
-    open(json_path, "w").write(json.dumps(insert_dogs))
+    write_json(json_path, insert_dogs)
     result = CliRunner().invoke(
         cli.cli,
         ["upsert", db_path, "dogs", json_path],
@@ -1091,14 +1096,14 @@ def test_upsert_alter(db_path, tmpdir):
     json_path = str(tmpdir / "dogs.json")
     db = Database(db_path)
     insert_dogs = [{"id": 1, "name": "Cleo"}]
-    open(json_path, "w").write(json.dumps(insert_dogs))
+    write_json(json_path, insert_dogs)
     result = CliRunner().invoke(
         cli.cli, ["insert", db_path, "dogs", json_path, "--pk", "id"]
     )
     assert 0 == result.exit_code, result.output
     # Should fail with error code if no --alter
     upsert_dogs = [{"id": 1, "age": 5}]
-    open(json_path, "w").write(json.dumps(upsert_dogs))
+    write_json(json_path, upsert_dogs)
     result = CliRunner().invoke(
         cli.cli, ["upsert", db_path, "dogs", json_path, "--pk", "id"]
     )
@@ -1767,7 +1772,8 @@ def test_insert_encoding(tmpdir):
     )
     assert latin1_csv.decode("latin-1").split("\n")[2].split(",")[1] == "São Paulo"
     csv_path = str(tmpdir / "test.csv")
-    open(csv_path, "wb").write(latin1_csv)
+    with open(csv_path, "wb") as fp:
+        fp.write(latin1_csv)
     # First attempt should error:
     bad_result = CliRunner().invoke(
         cli.cli,
27 changes: 18 additions & 9 deletions tests/test_cli_insert.py
@@ -10,7 +10,8 @@
 def test_insert_simple(tmpdir):
     json_path = str(tmpdir / "dog.json")
     db_path = str(tmpdir / "dogs.db")
-    open(json_path, "w").write(json.dumps({"name": "Cleo", "age": 4}))
+    with open(json_path, "w") as fp:
+        fp.write(json.dumps({"name": "Cleo", "age": 4}))
     result = CliRunner().invoke(cli.cli, ["insert", db_path, "dogs", json_path])
     assert 0 == result.exit_code
     assert [{"age": 4, "name": "Cleo"}] == list(
@@ -78,7 +79,8 @@ def test_insert_json_flatten_nl(tmpdir):
 
 def test_insert_with_primary_key(db_path, tmpdir):
     json_path = str(tmpdir / "dog.json")
-    open(json_path, "w").write(json.dumps({"id": 1, "name": "Cleo", "age": 4}))
+    with open(json_path, "w") as fp:
+        fp.write(json.dumps({"id": 1, "name": "Cleo", "age": 4}))
     result = CliRunner().invoke(
         cli.cli, ["insert", db_path, "dogs", json_path, "--pk", "id"]
     )
@@ -93,7 +95,8 @@ def test_insert_with_primary_key(db_path, tmpdir):
 def test_insert_multiple_with_primary_key(db_path, tmpdir):
     json_path = str(tmpdir / "dogs.json")
     dogs = [{"id": i, "name": "Cleo {}".format(i), "age": i + 3} for i in range(1, 21)]
-    open(json_path, "w").write(json.dumps(dogs))
+    with open(json_path, "w") as fp:
+        fp.write(json.dumps(dogs))
     result = CliRunner().invoke(
         cli.cli, ["insert", db_path, "dogs", json_path, "--pk", "id"]
     )
@@ -109,7 +112,8 @@ def test_insert_multiple_with_compound_primary_key(db_path, tmpdir):
         {"breed": "mixed", "id": i, "name": "Cleo {}".format(i), "age": i + 3}
         for i in range(1, 21)
     ]
-    open(json_path, "w").write(json.dumps(dogs))
+    with open(json_path, "w") as fp:
+        fp.write(json.dumps(dogs))
     result = CliRunner().invoke(
         cli.cli, ["insert", db_path, "dogs", json_path, "--pk", "id", "--pk", "breed"]
     )
@@ -134,7 +138,8 @@ def test_insert_not_null_default(db_path, tmpdir):
         {"id": i, "name": "Cleo {}".format(i), "age": i + 3, "score": 10}
         for i in range(1, 21)
     ]
-    open(json_path, "w").write(json.dumps(dogs))
+    with open(json_path, "w") as fp:
+        fp.write(json.dumps(dogs))
     result = CliRunner().invoke(
         cli.cli,
         ["insert", db_path, "dogs", json_path, "--pk", "id"]
@@ -182,7 +187,8 @@ def test_insert_ignore(db_path, tmpdir):
     db = Database(db_path)
     db["dogs"].insert({"id": 1, "name": "Cleo"}, pk="id")
     json_path = str(tmpdir / "dogs.json")
-    open(json_path, "w").write(json.dumps([{"id": 1, "name": "Bailey"}]))
+    with open(json_path, "w") as fp:
+        fp.write(json.dumps([{"id": 1, "name": "Bailey"}]))
     # Should raise error without --ignore
     result = CliRunner().invoke(
         cli.cli, ["insert", db_path, "dogs", json_path, "--pk", "id"]
@@ -212,7 +218,8 @@ def test_insert_ignore(db_path, tmpdir):
 def test_insert_csv_tsv(content, options, db_path, tmpdir):
     db = Database(db_path)
     file_path = str(tmpdir / "insert.csv-tsv")
-    open(file_path, "w").write(content)
+    with open(file_path, "w") as fp:
+        fp.write(content)
     result = CliRunner().invoke(
         cli.cli,
         ["insert", db_path, "data", file_path] + options,
@@ -233,7 +240,8 @@
 )
 def test_only_allow_one_of_nl_tsv_csv(options, db_path, tmpdir):
     file_path = str(tmpdir / "insert.csv-tsv")
-    open(file_path, "w").write("foo")
+    with open(file_path, "w") as fp:
+        fp.write("foo")
     result = CliRunner().invoke(
         cli.cli, ["insert", db_path, "data", file_path] + options
     )
@@ -251,7 +259,8 @@ def test_insert_replace(db_path, tmpdir):
         {"id": 2, "name": "Insert replaced 2", "age": 4},
         {"id": 21, "name": "Fresh insert 21", "age": 6},
     ]
-    open(json_path, "w").write(json.dumps(insert_replace_dogs))
+    with open(json_path, "w") as fp:
+        fp.write(json.dumps(insert_replace_dogs))
     result = CliRunner().invoke(
         cli.cli, ["insert", db_path, "dogs", json_path, "--pk", "id", "--replace"]
     )
12 changes: 8 additions & 4 deletions tests/test_cli_memory.py
@@ -24,7 +24,8 @@ def test_memory_csv(tmpdir, sql_from, use_stdin):
         sql_from = "stdin"
     else:
         csv_path = str(tmpdir / "test.csv")
-        open(csv_path, "w").write(content)
+        with open(csv_path, "w") as fp:
+            fp.write(content)
     result = CliRunner().invoke(
         cli.cli,
         ["memory", csv_path, "select * from {}".format(sql_from), "--nl"],
@@ -46,7 +47,8 @@ def test_memory_tsv(tmpdir, use_stdin):
     else:
         input = None
         path = str(tmpdir / "chickens.tsv")
-        open(path, "w").write(data)
+        with open(path, "w") as fp:
+            fp.write(data)
         path = path + ":tsv"
         sql_from = "chickens"
     result = CliRunner().invoke(
@@ -71,7 +73,8 @@ def test_memory_json(tmpdir, use_stdin):
     else:
         input = None
         path = str(tmpdir / "chickens.json")
-        open(path, "w").write(data)
+        with open(path, "w") as fp:
+            fp.write(data)
         path = path + ":json"
         sql_from = "chickens"
     result = CliRunner().invoke(
@@ -96,7 +99,8 @@ def test_memory_json_nl(tmpdir, use_stdin):
     else:
         input = None
         path = str(tmpdir / "chickens.json")
-        open(path, "w").write(data)
+        with open(path, "w") as fp:
+            fp.write(data)
         path = path + ":nl"
         sql_from = "chickens"
     result = CliRunner().invoke(
