Merge pull request #3996 from yarikoptic/bf-update-no-ds
BF: update -- do not save if no --merge, use save instead of Dataset(refds_path).add
yarikoptic committed Jan 11, 2020
2 parents 4776c27 + 41d36ac commit dc64c39
Showing 2 changed files with 19 additions and 2 deletions.
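
The gist of the fix, as a minimal sketch against the Python API (the superdataset location is illustrative; 'subm 1' is the submodule name used in the test below): a recursive update without merging is a pure fetch and no longer records anything in the superdataset, while merge=True merges and then saves the updated subdataset states.

from datalad.api import update

# pure fetch: with this fix, nothing gets saved in the superdataset
update(dataset='/path/to/super', path=['subm 1'], recursive=True)

# fetch + merge: updated subdataset states are saved in the superdataset
update(dataset='/path/to/super', path=['subm 1'], recursive=True, merge=True)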
13 changes: 13 additions & 0 deletions datalad/distribution/tests/test_update.py
@@ -104,6 +104,19 @@ def test_update_simple(origin, src_path, dst_path):
     dest.repo.get_file_key("update.txt")  # raises if unknown
     eq_([False], dest.repo.file_has_content(["update.txt"]))
 
+    # test that update doesn't crash if we specify only a single path (submod) to
+    # operate on
+    with chpwd(dest.path):
+        assert_result_count(
+            update(path=['subm 1'], recursive=True), 1,
+            status='ok', type='dataset')
+
+        # and with merge we would also try to save (but there would be no changes)
+        res_merge = update(path=['subm 1'], recursive=True, merge=True)
+        assert_result_count(res_merge, 2)
+        assert_in_results(res_merge, action='update', status='ok', type='dataset')
+        assert_in_results(res_merge, action='save', status='notneeded', type='dataset')
+
     # smoke-test if recursive update doesn't fail if submodule is removed
     # and that we can run it from within a dataset without providing it
     # explicitly
8 changes: 6 additions & 2 deletions datalad/distribution/update.py
@@ -94,6 +94,7 @@ def __call__(
             reobtain_data=False):
         """
         """
+        from datalad.api import save
         if fetch_all is not None:
             lgr.warning('update(fetch_all=...) called. Option has no effect, and will be removed')
 
@@ -186,15 +187,18 @@ def __call__(
                 res['status'] = 'ok'
                 yield res
                 save_paths.append(ap['path'])
-        if recursive:
+        # we need to save updated states only if merge was requested -- otherwise
+        # it was a pure fetch
+        if merge and recursive:
             save_paths = [p for p in save_paths if p != refds_path]
             if not save_paths:
                 return
             lgr.debug(
                 'Subdatasets where updated state may need to be '
                 'saved in the parent dataset: %s', save_paths)
-            for r in Dataset(refds_path).add(
+            for r in save(
                     path=save_paths,
+                    dataset=refds_path,
                     recursive=False,
                     message='[DATALAD] Save updated subdatasets'):
                 yield r
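
For context on the new test assertions: DataLad commands such as save() return result records (dictionaries carrying at least 'action', 'status', and 'path'), and when the merge leaves nothing to record the save record comes back with status='notneeded' rather than 'ok'. A small sketch of inspecting those records, with an illustrative superdataset path:

from datalad.api import save

# iterate the result records produced by the save step
for res in save(dataset='/path/to/super', path=['subm 1'],
                recursive=False,
                message='[DATALAD] Save updated subdatasets'):
    print(res['action'], res['status'], res['path'])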
