Merge pull request #929 from koordinates/non-checkout-status
Include non-checkout-datasets in status output
olsen232 committed Oct 30, 2023
2 parents 277f419 + ae7d6d8 commit 169261a
Showing 3 changed files with 110 additions and 41 deletions.
61 changes: 42 additions & 19 deletions kart/status.py
@@ -81,7 +81,7 @@ def get_type_counts(self):
@click.option(
"--list-untracked-tables",
is_flag=True,
help="Shows which tables haven't yet been tracked by Kart"
help="Shows which tables haven't yet been tracked by Kart",
)
def status(ctx, output_format, list_untracked_tables):
"""Show the working copy status"""
@@ -105,9 +105,10 @@ def status(ctx, output_format, list_untracked_tables):

if output_format == "json":
dump_json_output({"kart.status/v2": jdict}, sys.stdout)
else:
else:
click.echo(status_to_text(jdict))



def get_branch_status_json(repo):
output = {"commit": None, "abbrevCommit": None, "branch": None, "upstream": None}

@@ -138,32 +139,35 @@ def get_working_copy_status_json(repo, list_untracked_tables):

result = {
"parts": repo.working_copy.parts_status(),
"changes": get_diff_status_json(repo)
"changes": get_diff_status_json(repo),
"nonCheckoutDatasets": sorted(repo.non_checkout_datasets),
}
if list_untracked_tables:
result["untrackedTables"] = get_untracked_tables(repo)


return result


def get_untracked_tables(repo):
"""Check for any untracked tables in working copy"""
wc = repo.working_copy.tabular
wc = repo.working_copy.tabular
untracked_tables = []

if wc is not None and wc.session() is not None:
if wc is not None and wc.session() is not None:
with wc.session() as sess:
wc_items = wc.adapter.list_tables(sess)
# Get all tables in working copy
all_tables = [table_name for table_name, title in wc_items.items()]
# Get tables shown in kart data ls
datasets_paths = [dataset.table_name for dataset in repo.datasets(filter_dataset_type="table")]
datasets_paths = [
dataset.table_name for dataset in repo.datasets(filter_dataset_type="table")
]
# Get untracked tables
untracked_tables = list(set(all_tables) - set(datasets_paths))


return untracked_tables


def get_diff_status_json(repo):
"""
Returns a structured count of all the inserts, updates, and deletes (and spatialFilterConflicts)
@@ -191,13 +195,19 @@ def status_to_text(jdict):
if not is_merging and not is_empty:
status_list.append(working_copy_status_to_text(jdict["workingCopy"]))

if jdict["workingCopy"] is not None and "untrackedTables" in jdict["workingCopy"]:
if (
jdict["workingCopy"] is not None
and "untrackedTables" in jdict["workingCopy"]
):
if jdict["workingCopy"]["untrackedTables"]:
status_list.append(untracked_tables_status_to_text(jdict["workingCopy"]["untrackedTables"]))
status_list.append(
untracked_tables_status_to_text(
jdict["workingCopy"]["untrackedTables"]
)
)
else:
status_list.append("No untracked tables found.")


return "\n\n".join(status_list)


@@ -299,15 +309,27 @@ def working_copy_status_to_text(jdict):
if jdict is None:
return 'No working copy\n (use "kart checkout" to create a working copy)\n'

result_list = []

if jdict["nonCheckoutDatasets"]:
non_checkout_datasets = "\n".join(jdict["nonCheckoutDatasets"])
result_list.append(
"User configuration prevents the following datasets from being checked out\n"
f" (to overturn, use `kart checkout --dataset=DATASET`):\n{non_checkout_datasets}"
)

if not jdict["changes"]:
return "Nothing to commit, working copy clean"
result_list.append("Nothing to commit, working copy clean")

return (
"Changes in working copy:\n"
' (use "kart commit" to commit)\n'
' (use "kart restore" to discard changes)\n\n'
+ diff_status_to_text(jdict["changes"])
)
else:
result_list.append(
"Changes in working copy:\n"
' (use "kart commit" to commit)\n'
' (use "kart restore" to discard changes)\n\n'
+ diff_status_to_text(jdict["changes"])
)

return "\n\n".join(result_list)


def diff_status_to_text(jdict):
@@ -334,6 +356,7 @@ def diff_status_to_text(jdict):

return "\n".join(message)


def untracked_tables_status_to_text(jdict):
message = []
message.append("Untracked tables:")
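For context while reviewing: a minimal sketch of how a caller might read the new nonCheckoutDatasets field from the JSON status output. The repository path and the --output-format=json spelling are assumptions (the output-format option is defined above the first hunk and not shown in this diff); the kart.status/v2 and workingCopy keys match the code above.

    import json
    import subprocess

    # Run `kart status` in JSON mode inside an existing Kart repository.
    # Assumes the status command's JSON mode is selected with --output-format=json.
    raw = subprocess.run(
        ["kart", "status", "--output-format=json"],
        cwd="path/to/repo",  # assumed repository location
        capture_output=True,
        text=True,
        check=True,
    ).stdout

    status = json.loads(raw)["kart.status/v2"]
    working_copy = status["workingCopy"]
    if working_copy is not None:
        # New in this commit: datasets that user configuration excludes from checkout.
        for dataset in working_copy.get("nonCheckoutDatasets", []):
            print(f"not checked out: {dataset}")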
16 changes: 14 additions & 2 deletions tests/test_checkout.py
@@ -155,13 +155,25 @@ def test_non_checkout_datasets(data_working_copy, cli_runner):
r = cli_runner.invoke(
["checkout", "--not-dataset=census2016_sdhca_ot_sos_short"]
)
assert r.exit_code == 0
assert r.exit_code == 0, r.stderr

r = cli_runner.invoke(["status"])
assert r.exit_code == 0, r.stderr
assert r.stdout.splitlines() == [
"On branch branch1",
"",
"User configuration prevents the following datasets from being checked out",
" (to overturn, use `kart checkout --dataset=DATASET`):",
"census2016_sdhca_ot_sos_short",
"",
"Nothing to commit, working copy clean",
]

_check_workingcopy_contains_tables(repo, {"census2016_sdhca_ot_ra_short"})

# No WC changes are returned.
r = cli_runner.invoke(["diff", "--exit-code"])
assert r.exit_code == 0
assert r.exit_code == 0, r.stderr

r = cli_runner.invoke(
["checkout", "main", "--dataset=census2016_sdhca_ot_sos_short"]
74 changes: 54 additions & 20 deletions tests/test_status.py
@@ -58,7 +58,11 @@ def test_status(
"branch": "main",
"upstream": None,
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}},
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
},
}
}

@@ -77,7 +81,11 @@ def test_status(
"branch": None,
"upstream": None,
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}},
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
},
}
}

@@ -114,7 +122,11 @@ def test_status(
"behind": 0,
},
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}},
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
},
}
}

@@ -155,7 +167,11 @@ def test_status(
"behind": 1,
},
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}},
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
},
}
}

@@ -186,7 +202,11 @@ def test_status(
"behind": 1,
},
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}},
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
},
}
}

@@ -215,7 +235,11 @@ def test_status(
"behind": 0,
},
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}},
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
},
}
}

@@ -265,6 +289,7 @@ def test_status(
},
}
},
"nonCheckoutDatasets": [],
},
}
}
@@ -292,7 +317,11 @@ def test_status_empty(tmp_path, cli_runner, chdir):
"branch": "main",
"upstream": None,
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}},
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
},
}
}

@@ -381,25 +410,30 @@ def test_status_untracked_tables(data_working_copy, cli_runner):
parts_status = {
"tabular": {"location": str(wc), "type": "gpkg", "status": "ok"},
"workdir": {"status": "notFound"},
}
}

assert json.loads(r.stdout) == {
"kart.status/v2": {
"commit": H.POINTS.HEAD_SHA,
"abbrevCommit": H.POINTS.HEAD_SHA[:7],
"branch": "main",
"upstream": None,
"spatialFilter": None,
"workingCopy": {"parts": parts_status, "changes": {}, "untrackedTables": [new_table]}
}
"kart.status/v2": {
"commit": H.POINTS.HEAD_SHA,
"abbrevCommit": H.POINTS.HEAD_SHA[:7],
"branch": "main",
"upstream": None,
"spatialFilter": None,
"workingCopy": {
"parts": parts_status,
"changes": {},
"nonCheckoutDatasets": [],
"untrackedTables": [new_table],
},
}
}

r = cli_runner.invoke(["status", "--list-untracked-tables"])
assert r.stdout.splitlines() == [
"On branch main",
"",
"Nothing to commit, working copy clean",
"",
"Untracked tables:",
f" {new_table}"
]
f" {new_table}",
]
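As a rough end-to-end illustration of the workflow test_checkout.py above exercises, the sketch below drives the same commands via subprocess against a real repository. The repository path is a placeholder, and the dataset name is taken from the test fixture; this is an assumption-laden sketch, not part of the commit.

    import subprocess

    def kart(*args):
        # Run a kart command in the working-copy directory and return its output.
        result = subprocess.run(
            ["kart", *args],
            cwd="path/to/repo",  # assumed repository location
            capture_output=True,
            text=True,
            check=True,
        )
        print(result.stdout)
        return result.stdout

    # Exclude a dataset from checkout (flag shown in test_checkout.py above).
    kart("checkout", "--not-dataset=census2016_sdhca_ot_sos_short")

    # Status should now report it in the new non-checkout-datasets section.
    out = kart("status")
    assert "prevents the following datasets from being checked out" in out

    # Bring the dataset back into the working copy.
    kart("checkout", "main", "--dataset=census2016_sdhca_ot_sos_short")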
