From 685a2eba64e69f016e97b491c8a30934c9ebb93e Mon Sep 17 00:00:00 2001
From: Tony Narlock
Date: Sun, 2 Nov 2025 17:10:04 -0600
Subject: [PATCH 1/6] tests/cli(test[add]): xfail regression for --no-merge data loss

why: Document the duplicate-root data loss bug surfaced by vcspull add
--no-merge.

what:
- add parametrized fixture mirroring the reported config layout to reproduce
  data loss
- mark the regression test xfail until the loader/writer preserve duplicate
  sections
---
 tests/cli/test_add.py | 113 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 113 insertions(+)

diff --git a/tests/cli/test_add.py b/tests/cli/test_add.py
index c488be9d..47a9e5fc 100644
--- a/tests/cli/test_add.py
+++ b/tests/cli/test_add.py
@@ -12,6 +12,7 @@
 
 import pytest
 
+from vcspull._internal.config_reader import DuplicateAwareConfigReader
 from vcspull.cli.add import add_repo, create_add_subparser, handle_add_command
 from vcspull.util import contract_user_home
 
@@ -772,3 +773,115 @@ def test_add_parser_rejects_extra_positional() -> None:
 
     with pytest.raises(SystemExit):
         parser.parse_args(["add", "name", "https://example.com/repo.git"])
+
+
+class NoMergePreservationFixture(t.NamedTuple):
+    """Fixture for asserting --no-merge keeps duplicate sections intact."""
+
+    test_id: str
+    initial_yaml: str
+    expected_original_repos: tuple[str, ...]
+    new_repo_name: str
+    new_repo_url: str
+    workspace_label: str
+
+
+NO_MERGE_PRESERVATION_FIXTURES: list[NoMergePreservationFixture] = [
+    NoMergePreservationFixture(
+        test_id="duplicate-root-yaml",
+        initial_yaml=textwrap.dedent(
+            """\
+            ~/study/python/:
+              Flexget:
+                repo: git+https://github.com/Flexget/Flexget.git
+              MyST-Parser:
+                repo: git@github.com:executablebooks/MyST-Parser.git
+              RootTheBox:
+                repo: git+https://github.com/moloch--/RootTheBox.git
+            ~/study/python/:
+              bubbles:
+                repo: git+https://github.com/Stiivi/bubbles.git
+              cubes:
+                repo: git+https://github.com/Stiivi/cubes.git
+            """
+        ),
+        expected_original_repos=(
+            "Flexget",
+            "MyST-Parser",
+            "RootTheBox",
+            "bubbles",
+            "cubes",
+        ),
+        new_repo_name="pytest-docker",
+        new_repo_url="git+https://github.com/avast/pytest-docker",
+        workspace_label="~/study/python/",
+    ),
+]
+
+
+@pytest.mark.xfail(
+    reason="vcspull add --no-merge overwrites earlier duplicate workspace sections (data loss bug)",
+)
+@pytest.mark.parametrize(
+    list(NoMergePreservationFixture._fields),
+    NO_MERGE_PRESERVATION_FIXTURES,
+    ids=[fixture.test_id for fixture in NO_MERGE_PRESERVATION_FIXTURES],
+)
+def test_add_repo_no_merge_preserves_duplicate_sections(
+    test_id: str,
+    initial_yaml: str,
+    expected_original_repos: tuple[str, ...],
+    new_repo_name: str,
+    new_repo_url: str,
+    workspace_label: str,
+    tmp_path: pathlib.Path,
+    monkeypatch: MonkeyPatch,
+) -> None:
+    """vcspull add should not drop duplicate workspace sections when --no-merge."""
+    monkeypatch.setenv("HOME", str(tmp_path))
+    monkeypatch.chdir(tmp_path)
+
+    config_file = tmp_path / ".vcspull.yaml"
+    config_file.write_text(initial_yaml, encoding="utf-8")
+
+    repo_path = tmp_path / "study/python" / new_repo_name
+    repo_path.mkdir(parents=True, exist_ok=True)
+
+    (
+        _initial_config,
+        initial_duplicates,
+    ) = DuplicateAwareConfigReader.load_with_duplicates(config_file)
+    assert workspace_label in initial_duplicates
+    assert len(initial_duplicates[workspace_label]) == 2
+
+    add_repo(
+        name=new_repo_name,
+        url=new_repo_url,
+        config_file_path_str=str(config_file),
+        path=str(repo_path),
+        workspace_root_path=workspace_label,
+        dry_run=False,
+        merge_duplicates=False,
+    )
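Note on the fixture: plain PyYAML mapping construction silently keeps only the
last occurrence of a duplicate top-level key, which is exactly the collapse
this regression reproduces. A minimal standalone sketch of the failure mode
(illustrative only, not vcspull code):

    import yaml

    doc = (
        "~/study/python/:\n"
        "  Flexget:\n"
        "    repo: git+https://github.com/Flexget/Flexget.git\n"
        "~/study/python/:\n"
        "  bubbles:\n"
        "    repo: git+https://github.com/Stiivi/bubbles.git\n"
    )

    # safe_load drops the first section without warning; only `bubbles` survives.
    print(yaml.safe_load(doc))
    # {'~/study/python/': {'bubbles': {'repo': 'git+https://github.com/Stiivi/bubbles.git'}}}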
+
+    (
+        _final_config,
+        duplicate_sections,
+    ) = DuplicateAwareConfigReader.load_with_duplicates(config_file)
+
+    assert workspace_label in duplicate_sections, f"{test_id}: workspace missing"
+    workspace_entries = duplicate_sections[workspace_label]
+    assert len(workspace_entries) == 2, f"{test_id}: duplicate sections collapsed"
+
+    combined_repos: set[str] = set()
+    contains_new_repo = False
+
+    for entry in workspace_entries:
+        assert isinstance(entry, dict), f"{test_id}: workspace entry not dict"
+        combined_repos.update(entry.keys())
+        if new_repo_name in entry:
+            contains_new_repo = True
+
+    expected_repos = set(expected_original_repos) | {new_repo_name}
+    assert combined_repos == expected_repos, f"{test_id}: repositories mismatch"
+    assert contains_new_repo, f"{test_id}: new repo missing from duplicate sections"

From fec187dcd438f179edd2aa4880d28b94dcd0e8e0 Mon Sep 17 00:00:00 2001
From: Tony Narlock
Date: Sun, 2 Nov 2025 17:16:47 -0600
Subject: [PATCH 2/6] tests/config(test[duplicates]): cover ordered loader items and writer

why: Drive duplicate-preserving loader and writer work via failing tests and
CLI regression coverage.

what:
- assert DuplicateAwareConfigReader exposes ordered top-level items
- add writer regression verifying duplicate sections persist on save
- extend CLI no-merge test expectations and discovery mocks for new loader API
---
 tests/cli/test_add.py       | 15 +++++++++--
 tests/cli/test_discover.py  |  2 +-
 tests/test_config_reader.py | 38 ++++++++++++++++++++++++++
 tests/test_config_writer.py | 54 +++++++++++++++++++++++++++++++++++++
 4 files changed, 106 insertions(+), 3 deletions(-)
 create mode 100644 tests/test_config_writer.py

diff --git a/tests/cli/test_add.py b/tests/cli/test_add.py
index 47a9e5fc..6a82772a 100644
--- a/tests/cli/test_add.py
+++ b/tests/cli/test_add.py
@@ -820,7 +820,10 @@ class NoMergePreservationFixture(t.NamedTuple):
 
 
 @pytest.mark.xfail(
-    reason="vcspull add --no-merge overwrites earlier duplicate workspace sections (data loss bug)",
+    reason=(
+        "vcspull add --no-merge overwrites earlier duplicate workspace sections "
+        "(data loss bug)"
+    ),
 )
 @pytest.mark.parametrize(
     list(NoMergePreservationFixture._fields),
@@ -837,7 +840,7 @@ def test_add_repo_no_merge_preserves_duplicate_sections(
     test_id: str,
     tmp_path: pathlib.Path,
     monkeypatch: MonkeyPatch,
 ) -> None:
-    """vcspull add should not drop duplicate workspace sections when --no-merge."""
+    """CLI add should not drop duplicate workspace sections when --no-merge."""
     monkeypatch.setenv("HOME", str(tmp_path))
     monkeypatch.chdir(tmp_path)
@@ -850,9 +853,11 @@ def test_add_repo_no_merge_preserves_duplicate_sections(
     (
         _initial_config,
         initial_duplicates,
+        initial_items,
     ) = DuplicateAwareConfigReader.load_with_duplicates(config_file)
     assert workspace_label in initial_duplicates
     assert len(initial_duplicates[workspace_label]) == 2
+    assert [key for key, _ in initial_items] == [workspace_label, workspace_label]
 
     add_repo(
         name=new_repo_name,
@@ -867,8 +872,14 @@ def test_add_repo_no_merge_preserves_duplicate_sections(
     (
         _final_config,
         duplicate_sections,
+        final_items,
     ) = DuplicateAwareConfigReader.load_with_duplicates(config_file)
 
+    assert [key for key, _ in final_items] == [
+        workspace_label,
+        workspace_label,
+    ], f"{test_id}: final items unexpectedly merged"
+
     assert workspace_label in duplicate_sections, f"{test_id}: workspace missing"
     workspace_entries = duplicate_sections[workspace_label]
     assert len(workspace_entries) == 2, f"{test_id}: duplicate sections collapsed"

diff --git a/tests/cli/test_discover.py b/tests/cli/test_discover.py
index 391e3231..f3cfcb09 100644
--- a/tests/cli/test_discover.py
+++ b/tests/cli/test_discover.py
@@ -441,7 +441,7 @@ def test_discover_config_load_edges(
     if mode == "non_dict":
         monkeypatch.setattr(
             "vcspull.cli.discover.DuplicateAwareConfigReader.load_with_duplicates",
-            lambda _path: (["invalid"], {}),
+            lambda _path: (["invalid"], {}, []),
         )
     else:  # mode == "exception"

diff --git a/tests/test_config_reader.py b/tests/test_config_reader.py
index a8769a95..a6aec62d 100644
--- a/tests/test_config_reader.py
+++ b/tests/test_config_reader.py
@@ -93,3 +93,41 @@ def test_duplicate_aware_reader_passes_through_json(tmp_path: pathlib.Path) -> None:
         },
     }
     assert reader.duplicate_sections == {}
+
+
+def test_duplicate_aware_reader_preserves_top_level_item_order(
+    tmp_path: pathlib.Path,
+) -> None:
+    """Loader should expose ordered top-level items so duplicates can be replayed."""
+    yaml_content = textwrap.dedent(
+        """\
+        ~/study/python/:
+          Flexget:
+            repo: git+https://github.com/Flexget/Flexget.git
+        ~/study/python/:
+          bubbles:
+            repo: git+https://github.com/Stiivi/bubbles.git
+        ~/study/python/:
+          cubes:
+            repo: git+https://github.com/Stiivi/cubes.git
+        """,
+    )
+    config_path = _write(tmp_path, "ordered.yaml", yaml_content)
+
+    reader = DuplicateAwareConfigReader.from_file(config_path)
+
+    items = reader.top_level_items
+    assert [key for key, _ in items] == [
+        "~/study/python/",
+        "~/study/python/",
+        "~/study/python/",
+    ]
+    assert items[0][1] == {
+        "Flexget": {"repo": "git+https://github.com/Flexget/Flexget.git"},
+    }
+    assert items[1][1] == {
+        "bubbles": {"repo": "git+https://github.com/Stiivi/bubbles.git"},
+    }
+    assert items[2][1] == {
+        "cubes": {"repo": "git+https://github.com/Stiivi/cubes.git"},
+    }

diff --git a/tests/test_config_writer.py b/tests/test_config_writer.py
new file mode 100644
index 00000000..6afb2471
--- /dev/null
+++ b/tests/test_config_writer.py
@@ -0,0 +1,54 @@
+"""Tests for duplicate-preserving config writer utilities."""
+
+from __future__ import annotations
+
+import pathlib
+import textwrap
+import typing as t
+
+import pytest
+
+from vcspull.config import save_config_yaml_with_items
+
+FixtureEntry = tuple[str, dict[str, t.Any]]
+
+
+@pytest.mark.parametrize(
+    ("entries", "expected_yaml"),
+    [
+        (
+            (
+                (
+                    "~/study/python/",
+                    {"Flexget": {"repo": "git+https://github.com/Flexget/Flexget.git"}},
+                ),
+                (
+                    "~/study/python/",
+                    {"bubbles": {"repo": "git+https://github.com/Stiivi/bubbles.git"}},
+                ),
+            ),
+            textwrap.dedent(
+                """\
+                ~/study/python/:
+                  Flexget:
+                    repo: git+https://github.com/Flexget/Flexget.git
+                ~/study/python/:
+                  bubbles:
+                    repo: git+https://github.com/Stiivi/bubbles.git
+                """,
+            ),
+        ),
+    ],
+)
+def test_save_config_yaml_with_items_preserves_duplicate_sections(
+    entries: tuple[FixtureEntry, ...],
+    expected_yaml: str,
+    tmp_path: pathlib.Path,
+) -> None:
+    """Writing duplicates should round-trip without collapsing sections."""
+    config_path = tmp_path / ".vcspull.yaml"
+
+    save_config_yaml_with_items(config_path, list(entries))
+
+    yaml_text = config_path.read_text(encoding="utf-8")
+    assert yaml_text == expected_yaml
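These tests pin down the loader contract before it exists: load_with_duplicates
is expected to grow a third return value that replays every top-level section
in file order. A sketch of the intended call shape (the config path is
illustrative):

    import pathlib

    from vcspull._internal.config_reader import DuplicateAwareConfigReader

    config, duplicates, items = DuplicateAwareConfigReader.load_with_duplicates(
        pathlib.Path("~/.vcspull.yaml").expanduser()
    )
    # `config` stays the merged dict existing callers expect; `items` keeps
    # one (label, section) pair per section, duplicates included:
    for label, section in items:
        print(label, sorted(section))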
From b963892fa1861f659fef3a521c7c8dd5dd0d1ad6 Mon Sep 17 00:00:00 2001
From: Tony Narlock
Date: Sun, 2 Nov 2025 17:16:59 -0600
Subject: [PATCH 3/6] src/config(fix[loader]): retain ordered duplicate workspace items

why: Provide structured metadata so writers can replay duplicate workspace
roots without loss.

what:
- extend DuplicateAwareConfigReader to capture ordered top-level items
- propagate new load_with_duplicates return signature through CLI/config
  callers
---
 src/vcspull/_internal/config_reader.py | 39 +++++++++++++++++++-------
 src/vcspull/cli/add.py                 |  1 +
 src/vcspull/cli/discover.py            |  1 +
 src/vcspull/cli/fmt.py                 |  2 +-
 src/vcspull/config.py                  |  2 +-
 5 files changed, 33 insertions(+), 12 deletions(-)

diff --git a/src/vcspull/_internal/config_reader.py b/src/vcspull/_internal/config_reader.py
index dc5339fe..f6494173 100644
--- a/src/vcspull/_internal/config_reader.py
+++ b/src/vcspull/_internal/config_reader.py
@@ -228,6 +228,7 @@ def __init__(self, stream: str) -> None:
         super().__init__(stream)
         self.top_level_key_values: dict[t.Any, list[t.Any]] = {}
         self._mapping_depth = 0
+        self.top_level_items: list[tuple[t.Any, t.Any]] = []
 
 
 def _duplicate_tracking_construct_mapping(
@@ -248,7 +249,9 @@ def _duplicate_tracking_construct_mapping(
         value = construct(value_node)
 
         if loader._mapping_depth == 1:
-            loader.top_level_key_values.setdefault(key, []).append(copy.deepcopy(value))
+            duplicated_value = copy.deepcopy(value)
+            loader.top_level_key_values.setdefault(key, []).append(duplicated_value)
+            loader.top_level_items.append((copy.deepcopy(key), duplicated_value))
 
         mapping[key] = value
 
@@ -270,20 +273,27 @@ def __init__(
         content: RawConfigData,
         *,
         duplicate_sections: dict[str, list[t.Any]] | None = None,
+        top_level_items: list[tuple[str, t.Any]] | None = None,
     ) -> None:
         super().__init__(content)
         self._duplicate_sections = duplicate_sections or {}
+        self._top_level_items = top_level_items or []
 
     @property
     def duplicate_sections(self) -> dict[str, list[t.Any]]:
         """Mapping of top-level keys to the list of duplicated values."""
         return self._duplicate_sections
 
+    @property
+    def top_level_items(self) -> list[tuple[str, t.Any]]:
+        """Ordered list of top-level items, including duplicates."""
+        return copy.deepcopy(self._top_level_items)
+
     @classmethod
     def _load_yaml_with_duplicates(
         cls,
         content: str,
-    ) -> tuple[dict[str, t.Any], dict[str, list[t.Any]]]:
+    ) -> tuple[dict[str, t.Any], dict[str, list[t.Any]], list[tuple[str, t.Any]]]:
         loader = _DuplicateTrackingSafeLoader(content)
 
         try:
@@ -306,33 +316,42 @@ def _load_yaml_with_duplicates(
             if len(values) > 1
         }
 
-        return loaded, duplicate_sections
+        top_level_items = [
+            (t.cast("str", key), copy.deepcopy(value))
+            for key, value in loader.top_level_items
+        ]
+
+        return loaded, duplicate_sections, top_level_items
 
     @classmethod
     def _load_from_path(
         cls,
         path: pathlib.Path,
-    ) -> tuple[dict[str, t.Any], dict[str, list[t.Any]]]:
+    ) -> tuple[dict[str, t.Any], dict[str, list[t.Any]], list[tuple[str, t.Any]]]:
         if path.suffix.lower() in {".yaml", ".yml"}:
             content = path.read_text(encoding="utf-8")
             return cls._load_yaml_with_duplicates(content)
 
-        return ConfigReader._from_file(path), {}
+        return ConfigReader._from_file(path), {}, []
 
     @classmethod
     def from_file(cls, path: pathlib.Path) -> DuplicateAwareConfigReader:
-        content, duplicate_sections = cls._load_from_path(path)
-        return cls(content, duplicate_sections=duplicate_sections)
+        content, duplicate_sections, top_level_items = cls._load_from_path(path)
+        return cls(
+            content,
+            duplicate_sections=duplicate_sections,
+            top_level_items=top_level_items,
+        )
 
     @classmethod
     def _from_file(cls, path: pathlib.Path) -> dict[str, t.Any]:
-        content, _ = cls._load_from_path(path)
+        content, _, _ = cls._load_from_path(path)
         return content
 
     @classmethod
     def load_with_duplicates(
         cls,
         path: pathlib.Path,
-    ) -> tuple[dict[str, t.Any], dict[str, list[t.Any]]]:
+    ) -> tuple[dict[str, t.Any], dict[str, list[t.Any]], list[tuple[str, t.Any]]]:
         reader = cls.from_file(path)
-        return reader.content, reader.duplicate_sections
+        return reader.content, reader.duplicate_sections, reader.top_level_items

diff --git a/src/vcspull/cli/add.py b/src/vcspull/cli/add.py
index b0459bf8..44b4c7ea 100644
--- a/src/vcspull/cli/add.py
+++ b/src/vcspull/cli/add.py
@@ -342,6 +342,7 @@ def add_repo(
             (
                 raw_config,
                 duplicate_root_occurrences,
+                _top_level_items,
             ) = DuplicateAwareConfigReader.load_with_duplicates(config_file_path)
         except TypeError:
             log.exception(

diff --git a/src/vcspull/cli/discover.py b/src/vcspull/cli/discover.py
index 6450aee4..bbaf16ea 100644
--- a/src/vcspull/cli/discover.py
+++ b/src/vcspull/cli/discover.py
@@ -203,6 +203,7 @@ def discover_repos(
             (
                 raw_config,
                 duplicate_root_occurrences,
+                _top_level_items,
             ) = DuplicateAwareConfigReader.load_with_duplicates(config_file_path)
         except TypeError:
             log.exception(

diff --git a/src/vcspull/cli/fmt.py b/src/vcspull/cli/fmt.py
index 31ca7170..d0dcd8eb 100644
--- a/src/vcspull/cli/fmt.py
+++ b/src/vcspull/cli/fmt.py
@@ -172,7 +172,7 @@ def format_single_config(
 
     # Load existing config
     try:
-        raw_config, duplicate_root_occurrences = (
+        raw_config, duplicate_root_occurrences, _top_level_items = (
             DuplicateAwareConfigReader.load_with_duplicates(config_file_path)
         )
     except TypeError:

diff --git a/src/vcspull/config.py b/src/vcspull/config.py
index ccb0efa0..d1425e0d 100644
--- a/src/vcspull/config.py
+++ b/src/vcspull/config.py
@@ -271,7 +271,7 @@ def load_configs(
             file = pathlib.Path(file)
         assert isinstance(file, pathlib.Path)
 
-        config_content, duplicate_roots = (
+        config_content, duplicate_roots, _top_level_items = (
             DuplicateAwareConfigReader.load_with_duplicates(file)
        )
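The key mechanics above: the custom SafeLoader counts mapping depth so only
depth-1 keys are recorded, and the ordered-items accessor deep-copies so
callers cannot mutate the reader's snapshot. For the three-section fixture
from the tests, the new surface behaves roughly like this (return values
abbreviated):

    reader = DuplicateAwareConfigReader.from_file(config_path)

    reader.top_level_items
    # [('~/study/python/', {'Flexget': {...}}),
    #  ('~/study/python/', {'bubbles': {...}}),
    #  ('~/study/python/', {'cubes': {...}})]

    reader.duplicate_sections
    # {'~/study/python/': [all three section dicts, in file order]}

    reader.content  # merged view; for a repeated root, the last section wins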
From 306562a4cb436bef66e78edb8d68d5bd1c81f5fa Mon Sep 17 00:00:00 2001
From: Tony Narlock
Date: Sun, 2 Nov 2025 17:22:09 -0600
Subject: [PATCH 4/6] tests/cli(test[add]): assert --no-merge preserves duplicate sections

why: Finalize the regression to ensure vcspull add keeps duplicate workspace
roots when --no-merge.

what:
- remove xfail and validate ordered items before and after the add operation
---
 tests/cli/test_add.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/tests/cli/test_add.py b/tests/cli/test_add.py
index 6a82772a..065d8f49 100644
--- a/tests/cli/test_add.py
+++ b/tests/cli/test_add.py
@@ -819,12 +819,6 @@ class NoMergePreservationFixture(t.NamedTuple):
 ]
 
 
-@pytest.mark.xfail(
-    reason=(
-        "vcspull add --no-merge overwrites earlier duplicate workspace sections "
-        "(data loss bug)"
-    ),
-)
 @pytest.mark.parametrize(
     list(NoMergePreservationFixture._fields),
     NO_MERGE_PRESERVATION_FIXTURES,
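Worth noting: under pytest's default non-strict xfail, the now-passing test
would merely report XPASS rather than fail the build, so removing the marker
is what makes CI actually enforce the fix. The regression can be exercised
directly with, e.g.:

    pytest tests/cli/test_add.py -k no_merge_preserves_duplicate_sections -q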
From c4707a9cf8b9c0b2aaa1bbcc6dad64f332bbb6cf Mon Sep 17 00:00:00 2001
From: Tony Narlock
Date: Sun, 2 Nov 2025 17:22:16 -0600
Subject: [PATCH 5/6] src/cli(fix[add]): preserve duplicate workspace sections when --no-merge

why: vcspull add should respect duplicate workspace roots instead of
rewriting configs when users opt out of merging.

what:
- track ordered top-level items from the loader and aggregate them for
  duplicate-aware operations
- introduce duplicate-preserving YAML writer and invoke it for --no-merge
  updates
---
 src/vcspull/cli/add.py | 61 ++++++++++++++++++++++++++++++++++++++----
 src/vcspull/config.py  | 23 ++++++++++++++++
 2 files changed, 79 insertions(+), 5 deletions(-)

diff --git a/src/vcspull/cli/add.py b/src/vcspull/cli/add.py
index 44b4c7ea..c18b5bef 100644
--- a/src/vcspull/cli/add.py
+++ b/src/vcspull/cli/add.py
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import copy
 import logging
 import pathlib
 import subprocess
@@ -18,6 +19,7 @@
     merge_duplicate_workspace_roots,
     normalize_workspace_roots,
     save_config_yaml,
+    save_config_yaml_with_items,
     workspace_root_label,
 )
 from vcspull.util import contract_user_home
@@ -335,6 +337,7 @@ def add_repo(
     # Load existing config
     raw_config: dict[str, t.Any]
     duplicate_root_occurrences: dict[str, list[t.Any]]
+    top_level_items: list[tuple[str, t.Any]]
 
     display_config_path = contract_user_home(config_file_path)
     if config_file_path.exists() and config_file_path.is_file():
@@ -342,7 +345,7 @@ def add_repo(
             (
                 raw_config,
                 duplicate_root_occurrences,
-                _top_level_items,
+                top_level_items,
             ) = DuplicateAwareConfigReader.load_with_duplicates(config_file_path)
         except TypeError:
             log.exception(
@@ -358,11 +361,32 @@ def add_repo(
     else:
         raw_config = {}
         duplicate_root_occurrences = {}
+        top_level_items = []
         log.info(
             "Config file %s not found. A new one will be created.",
             display_config_path,
         )
 
+    config_items: list[tuple[str, t.Any]] = (
+        [(label, copy.deepcopy(section)) for label, section in top_level_items]
+        if top_level_items
+        else [(label, copy.deepcopy(section)) for label, section in raw_config.items()]
+    )
+
+    def _aggregate_items(items: list[tuple[str, t.Any]]) -> dict[str, t.Any]:
+        aggregated: dict[str, t.Any] = {}
+        for label, section in items:
+            if isinstance(section, dict):
+                workspace_section = aggregated.setdefault(label, {})
+                for repo_name, repo_config in section.items():
+                    workspace_section[repo_name] = copy.deepcopy(repo_config)
+            else:
+                aggregated[label] = copy.deepcopy(section)
+        return aggregated
+
+    if not merge_duplicates:
+        raw_config = _aggregate_items(config_items)
+
     duplicate_merge_conflicts: list[str] = []
     duplicate_merge_changes = 0
     duplicate_merge_details: list[tuple[str, int]] = []
@@ -417,6 +441,10 @@ def add_repo(
     cwd = pathlib.Path.cwd()
     home = pathlib.Path.home()
 
+    aggregated_config = (
+        raw_config if merge_duplicates else _aggregate_items(config_items)
+    )
+
     if merge_duplicates:
         (
             raw_config,
@@ -424,7 +452,7 @@ def add_repo(
             merge_conflicts,
             merge_changes,
         ) = normalize_workspace_roots(
-            raw_config,
+            aggregated_config,
             cwd=cwd,
             home=home,
         )
@@ -436,7 +464,7 @@ def add_repo(
             merge_conflicts,
             _merge_changes,
         ) = normalize_workspace_roots(
-            raw_config,
+            aggregated_config,
             cwd=cwd,
             home=home,
         )
@@ -459,15 +487,24 @@ def add_repo(
         )
         workspace_map[workspace_path] = workspace_label
         raw_config.setdefault(workspace_label, {})
+        if not merge_duplicates:
+            config_items.append((workspace_label, {}))
 
     if workspace_label not in raw_config:
         raw_config[workspace_label] = {}
+        if not merge_duplicates:
+            config_items.append((workspace_label, {}))
     elif not isinstance(raw_config[workspace_label], dict):
         log.error(
             "Workspace root '%s' in configuration is not a dictionary. Aborting.",
             workspace_label,
        )
         return
 
+    workspace_sections: list[tuple[int, dict[str, t.Any]]] = [
+        (idx, section)
+        for idx, (label, section) in enumerate(config_items)
+        if label == workspace_label and isinstance(section, dict)
+    ]
 
     # Check if repo already exists
     if name in raw_config[workspace_label]:
@@ -518,7 +555,18 @@ def add_repo(
         return
 
     # Add the repository in verbose format
-    raw_config[workspace_label][name] = {"repo": url}
+    new_repo_entry = {"repo": url}
+    if merge_duplicates:
+        raw_config[workspace_label][name] = new_repo_entry
+    else:
+        target_section: dict[str, t.Any]
+        if workspace_sections:
+            _, target_section = workspace_sections[-1]
+        else:
+            target_section = {}
+            config_items.append((workspace_label, target_section))
+        target_section[name] = copy.deepcopy(new_repo_entry)
+        raw_config[workspace_label][name] = copy.deepcopy(new_repo_entry)
 
     # Save or preview config
     if dry_run:
@@ -541,7 +589,10 @@ def add_repo(
         )
     else:
         try:
-            save_config_yaml(config_file_path, raw_config)
+            if merge_duplicates:
+                save_config_yaml(config_file_path, raw_config)
+            else:
+                save_config_yaml_with_items(config_file_path, config_items)
             log.info(
                 "%s✓%s Successfully added %s'%s'%s (%s%s%s) to %s%s%s under '%s%s%s'.",
                 Fore.GREEN,
diff --git a/src/vcspull/config.py b/src/vcspull/config.py
index d1425e0d..6dc351e8 100644
--- a/src/vcspull/config.py
+++ b/src/vcspull/config.py
@@ -481,6 +481,29 @@ def save_config_yaml(config_file_path: pathlib.Path, data: dict[t.Any, t.Any]) -> None:
     config_file_path.write_text(yaml_content, encoding="utf-8")
 
 
+def save_config_yaml_with_items(
+    config_file_path: pathlib.Path,
+    items: list[tuple[str, t.Any]],
+) -> None:
+    """Persist configuration data while preserving duplicate top-level sections."""
+    documents: list[str] = []
+
+    for label, section in items:
+        dumped = ConfigReader._dump(
+            fmt="yaml",
+            content={label: section},
+            indent=2,
+        ).rstrip()
+        if dumped:
+            documents.append(dumped)
+
+    yaml_content = "\n".join(documents)
+    if yaml_content:
+        yaml_content += "\n"
+
+    config_file_path.write_text(yaml_content, encoding="utf-8")
+
+
 def merge_duplicate_workspace_root_entries(
     label: str,
     occurrences: list[t.Any],
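Taken together: `config_items` carries the ordered (label, section) pairs,
`_aggregate_items` rebuilds the merged dict that the pre-existing validation
paths still expect, and the new writer emits one YAML chunk per pair so
repeated roots survive the save. A rough round-trip sketch under these
changes (paths illustrative):

    import pathlib

    from vcspull.config import save_config_yaml_with_items

    items = [
        ("~/study/python/", {"Flexget": {"repo": "git+https://github.com/Flexget/Flexget.git"}}),
        ("~/study/python/", {"bubbles": {"repo": "git+https://github.com/Stiivi/bubbles.git"}}),
    ]
    save_config_yaml_with_items(pathlib.Path(".vcspull.yaml"), items)
    # The file keeps two `~/study/python/:` sections, in order; loading it back
    # via DuplicateAwareConfigReader.load_with_duplicates yields the same items.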
From f75d438c64b860fe9952e2e6c1fcefc32ccca582 Mon Sep 17 00:00:00 2001
From: Tony Narlock
Date: Sun, 2 Nov 2025 19:23:18 -0600
Subject: [PATCH 6/6] docs/CHANGES: document --no-merge duplicate fix (#482)

why: Surface the data-loss regression fix in the upcoming release notes.

what:
- add bug-fix entry under v1.45.x referencing PR #482
---
 CHANGES | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/CHANGES b/CHANGES
index 20de421f..5b230fd4 100644
--- a/CHANGES
+++ b/CHANGES
@@ -33,6 +33,15 @@ $ uvx --from 'vcspull' --prerelease allow vcspull
 
 _Upcoming changes will be written here._
 
+### Bug Fixes
+
+#### `vcspull add --no-merge` preserves duplicate workspace roots (#482)
+
+- Duplicate workspace sections are no longer flattened when adding a repository
+  with `--no-merge`; all previously configured repositories stay intact.
+- The configuration loader now exposes ordered duplicate entries so CLI writes
+  can target the correct section without data loss.
+
 ## vcspull v1.44.0 (2025-11-02)
 
 ### Improvements
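The net effect for the originally reported layout, assuming the
`vcspull add NAME URL --no-merge` argument order exercised by the parser
tests: adding pytest-docker against a config with repeated `~/study/python/:`
roots now appends to the last duplicate section instead of rewriting the file
around a single root, e.g.:

    ~/study/python/:
      Flexget:
        repo: git+https://github.com/Flexget/Flexget.git
    ~/study/python/:
      bubbles:
        repo: git+https://github.com/Stiivi/bubbles.git
      pytest-docker:
        repo: git+https://github.com/avast/pytest-docker

Earlier sections (here, Flexget's) are left untouched.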