From 9fab88d3483e2bac75b62280a8e23d255cabc56e Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:06 +0200 Subject: [PATCH 1/8] test(integration): add --global install/uninstall E2E coverage (G1+U1) Covers gap G1 (apm install -g real package + content verification) and U1 (apm uninstall pkg -g removes files from ~/.apm/). Existing global scope tests only validated directory plumbing and error paths; no test actually deployed a real package under user scope. Three cases: - install -g deploys microsoft/apm-sample-package; lockfile + primitive files appear under fake_home; cwd remains untouched. - uninstall -g removes lockfile entry, manifest entry, and all deployed primitive files. - Global + project installs of the same package coexist without collision. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_global_install_e2e.py | 267 +++++++++++++++++++ 1 file changed, 267 insertions(+) create mode 100644 tests/integration/test_global_install_e2e.py diff --git a/tests/integration/test_global_install_e2e.py b/tests/integration/test_global_install_e2e.py new file mode 100644 index 00000000..955b5e9a --- /dev/null +++ b/tests/integration/test_global_install_e2e.py @@ -0,0 +1,267 @@ +"""End-to-end integration tests for `apm install -g` / `apm uninstall -g`. + +Covers gaps that existing scope tests do not exercise: +- G1: real package install under user scope deploys primitive files to ~/.apm/ +- U1: uninstall under user scope removes deployed files from ~/.apm/ +- Cross-scope coexistence: a global install and a project install of the same + package live side by side without colliding. + +Uses the public `microsoft/apm-sample-package` repo (ref `main`) as the real +fixture, the same canonical sample referenced by other e2e suites. + +Requires network access and GITHUB_TOKEN/GITHUB_APM_PAT for GitHub API. 
+""" + +import os +import shutil +import subprocess +import sys +from pathlib import Path + +import pytest +import yaml + + +pytestmark = pytest.mark.skipif( + not os.environ.get("GITHUB_APM_PAT") and not os.environ.get("GITHUB_TOKEN"), + reason="GITHUB_APM_PAT or GITHUB_TOKEN required for GitHub API access", +) + + +SAMPLE_PKG = "microsoft/apm-sample-package" + + +@pytest.fixture +def apm_command(): + """Resolve the apm CLI executable.""" + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +@pytest.fixture +def fake_home(tmp_path): + """Isolated HOME directory so user-scope installs never touch the real home.""" + home_dir = tmp_path / "fakehome" + home_dir.mkdir() + return home_dir + + +def _env_with_home(fake_home): + env = os.environ.copy() + env["HOME"] = str(fake_home) + if sys.platform == "win32": + env["USERPROFILE"] = str(fake_home) + return env + + +def _run_apm(apm_command, args, cwd, fake_home, timeout=180): + return subprocess.run( + [apm_command] + args, + cwd=cwd, + capture_output=True, + text=True, + timeout=timeout, + env=_env_with_home(fake_home), + ) + + +def _write_user_manifest(fake_home, packages): + """Seed ~/.apm/apm.yml with the given APM dependency list.""" + apm_dir = fake_home / ".apm" + apm_dir.mkdir(parents=True, exist_ok=True) + (apm_dir / "apm.yml").write_text( + yaml.dump( + { + "name": "global-project", + "version": "1.0.0", + "dependencies": {"apm": packages, "mcp": []}, + }, + default_flow_style=False, + ), + encoding="utf-8", + ) + + +def _read_lockfile(directory): + lock_path = directory / "apm.lock.yaml" + if not lock_path.exists(): + return None + return yaml.safe_load(lock_path.read_text(encoding="utf-8")) + + +def _get_locked_dep(lockfile, repo_url): + if not lockfile or "dependencies" not in lockfile: + return None + deps = lockfile["dependencies"] + if 
isinstance(deps, list): + for entry in deps: + if entry.get("repo_url") == repo_url: + return entry + return None + + +def _existing_deployed_files(deploy_root, dep_entry): + """Return deployed_files entries that exist on disk under *deploy_root*. + + User-scope deploy_root is ``~/`` (Path.home()), not ``~/.apm/``: integrators + write to paths like ``~/.copilot/agents/...`` while metadata lives in + ``~/.apm/``. See ``apm_cli.core.scope.get_deploy_root``. + """ + if not dep_entry or not dep_entry.get("deployed_files"): + return [] + return [f for f in dep_entry["deployed_files"] if (deploy_root / f).exists()] + + +class TestGlobalInstallDeploysRealPackage: + """Verify `apm install -g` actually deploys primitive files under ~/.apm/.""" + + def test_install_global_deploys_real_package_to_user_scope( + self, apm_command, fake_home, tmp_path + ): + _write_user_manifest(fake_home, [SAMPLE_PKG]) + work_dir = tmp_path / "workdir" + work_dir.mkdir() + + result = _run_apm( + apm_command, ["install", "-g"], work_dir, fake_home + ) + assert result.returncode == 0, ( + f"global install failed:\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}" + ) + + apm_dir = fake_home / ".apm" + lockfile = _read_lockfile(apm_dir) + assert lockfile is not None, "~/.apm/apm.lock.yaml was not created" + dep = _get_locked_dep(lockfile, SAMPLE_PKG) + assert dep is not None, ( + f"{SAMPLE_PKG} not present in user-scope lockfile: {lockfile}" + ) + + deployed = _existing_deployed_files(fake_home, dep) + assert len(deployed) > 0, ( + f"No primitive files deployed under user-scope deploy root. " + f"deployed_files={dep.get('deployed_files')}\n" + f"STDOUT: {result.stdout}\nSTDERR: {result.stderr}" + ) + + # Cross-scope leakage check: the working directory must be untouched. 
+ assert not (work_dir / "apm.yml").exists(), "apm.yml leaked into cwd" + assert not (work_dir / "apm.lock.yaml").exists(), "lockfile leaked into cwd" + assert not (work_dir / "apm_modules").exists(), "apm_modules leaked into cwd" + + def test_uninstall_global_removes_deployed_files( + self, apm_command, fake_home, tmp_path + ): + _write_user_manifest(fake_home, [SAMPLE_PKG]) + work_dir = tmp_path / "workdir" + work_dir.mkdir() + + install_result = _run_apm( + apm_command, ["install", "-g"], work_dir, fake_home + ) + assert install_result.returncode == 0, ( + f"setup install failed:\nSTDOUT: {install_result.stdout}\n" + f"STDERR: {install_result.stderr}" + ) + + apm_dir = fake_home / ".apm" + dep_before = _get_locked_dep(_read_lockfile(apm_dir), SAMPLE_PKG) + assert dep_before is not None, "Package missing from lockfile after install" + deployed_before = _existing_deployed_files(fake_home, dep_before) + if not deployed_before: + pytest.skip("Sample package deployed no files; nothing to verify removal of") + + uninstall_result = _run_apm( + apm_command, + ["uninstall", SAMPLE_PKG, "-g"], + work_dir, + fake_home, + ) + assert uninstall_result.returncode == 0, ( + f"global uninstall failed:\nSTDOUT: {uninstall_result.stdout}\n" + f"STDERR: {uninstall_result.stderr}" + ) + + # Lockfile should no longer contain the package entry. + lockfile_after = _read_lockfile(apm_dir) + if lockfile_after is not None: + assert _get_locked_dep(lockfile_after, SAMPLE_PKG) is None, ( + "Package still in user-scope lockfile after uninstall" + ) + + # Manifest should no longer list the package. + manifest_after = yaml.safe_load((apm_dir / "apm.yml").read_text(encoding="utf-8")) + apm_deps = manifest_after.get("dependencies", {}).get("apm", []) or [] + assert SAMPLE_PKG not in apm_deps, ( + f"{SAMPLE_PKG} still in ~/.apm/apm.yml after uninstall: {apm_deps}" + ) + + # Previously deployed primitive files must be gone. 
+ for rel_path in deployed_before: + assert not (fake_home / rel_path).exists(), ( + f"Deployed file {rel_path} not removed by uninstall -g" + ) + + def test_install_global_then_project_install_does_not_collide( + self, apm_command, fake_home, tmp_path + ): + # Install globally first. + _write_user_manifest(fake_home, [SAMPLE_PKG]) + global_workdir = tmp_path / "global-workdir" + global_workdir.mkdir() + global_result = _run_apm( + apm_command, ["install", "-g"], global_workdir, fake_home + ) + assert global_result.returncode == 0, ( + f"global install failed:\nSTDOUT: {global_result.stdout}\n" + f"STDERR: {global_result.stderr}" + ) + + apm_dir = fake_home / ".apm" + global_dep = _get_locked_dep(_read_lockfile(apm_dir), SAMPLE_PKG) + assert global_dep is not None, "Global lockfile missing the package" + + # Now create a separate project and install the same package locally. + project_dir = tmp_path / "project" + project_dir.mkdir() + (project_dir / ".github").mkdir() + (project_dir / "apm.yml").write_text( + yaml.dump( + { + "name": "local-project", + "version": "1.0.0", + "dependencies": {"apm": [SAMPLE_PKG], "mcp": []}, + }, + default_flow_style=False, + ), + encoding="utf-8", + ) + + local_result = _run_apm( + apm_command, ["install"], project_dir, fake_home + ) + assert local_result.returncode == 0, ( + f"project install failed:\nSTDOUT: {local_result.stdout}\n" + f"STDERR: {local_result.stderr}" + ) + + # Both deployments must coexist. + project_dep = _get_locked_dep(_read_lockfile(project_dir), SAMPLE_PKG) + assert project_dep is not None, "Project lockfile missing the package" + + # Re-read the global lockfile and confirm it is still intact. 
+ global_dep_after = _get_locked_dep(_read_lockfile(apm_dir), SAMPLE_PKG) + assert global_dep_after is not None, ( + "Global lockfile entry disappeared after project install" + ) + assert (apm_dir / "apm_modules").exists(), ( + "Global apm_modules disappeared after project install" + ) + assert (project_dir / "apm_modules").exists(), ( + "Project apm_modules was not created" + ) From f1416aa19179c2a73da3da6bc31def0d3e593234 Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:06 +0200 Subject: [PATCH 2/8] test(integration): add apm install --dry-run E2E coverage (G2) Covers gap G2: presentation/dry_run.py (extracted in PR #764) had zero binary-level tests. Three cases lock the contract end-to-end: - Plain --dry-run lists APM deps, prints banner/footer, makes no changes on disk (no lockfile, no apm_modules/, no .github/ artifacts). - --dry-run --only=apm correctly suppresses MCP dependency lines. - --dry-run after manifest edit previews orphan removals (Files that would be removed: N) without actually deleting anything. Guards the NameError regression on the orphan-preview path. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_install_dry_run_e2e.py | 188 ++++++++++++++++++ 1 file changed, 188 insertions(+) create mode 100644 tests/integration/test_install_dry_run_e2e.py diff --git a/tests/integration/test_install_dry_run_e2e.py b/tests/integration/test_install_dry_run_e2e.py new file mode 100644 index 00000000..fd63ca56 --- /dev/null +++ b/tests/integration/test_install_dry_run_e2e.py @@ -0,0 +1,188 @@ +"""End-to-end integration tests for `apm install --dry-run`. + +Covers gap G2: presentation/dry_run.py (extracted in PR #764) was not +exercised against the binary. This test exists in part because a latent +NameError on the orphan-preview path slipped through review until it was +hardened. + +Uses the real `microsoft/apm-sample-package` from GitHub. 
Requires +GITHUB_APM_PAT or GITHUB_TOKEN for API access. +""" + +import os +import shutil +import subprocess + +import pytest +import yaml +from pathlib import Path + + +pytestmark = pytest.mark.skipif( + not os.environ.get("GITHUB_APM_PAT") and not os.environ.get("GITHUB_TOKEN"), + reason="GITHUB_APM_PAT or GITHUB_TOKEN required for GitHub API access", +) + + +@pytest.fixture +def apm_command(): + """Path to the APM CLI executable (PATH first, then venv fallback).""" + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +@pytest.fixture +def temp_project(tmp_path): + """Temp APM project with .github/ for VSCode target detection.""" + project_dir = tmp_path / "dry-run-test" + project_dir.mkdir() + (project_dir / ".github").mkdir() + return project_dir + + +def _run_apm(apm_command, args, cwd, timeout=180): + return subprocess.run( + [apm_command] + args, + cwd=cwd, + capture_output=True, + text=True, + timeout=timeout, + ) + + +def _write_apm_yml(project_dir, apm_packages, mcp_packages=None): + config = { + "name": "dry-run-test", + "version": "1.0.0", + "dependencies": { + "apm": apm_packages, + "mcp": mcp_packages or [], + }, + } + (project_dir / "apm.yml").write_text( + yaml.dump(config, default_flow_style=False), encoding="utf-8" + ) + + +def _assert_no_install_artifacts(project_dir): + """Dry-run must not create lockfile or deploy any files.""" + assert not (project_dir / "apm.lock.yaml").exists(), ( + "Dry-run created apm.lock.yaml" + ) + assert not (project_dir / "apm.lock").exists(), ( + "Dry-run created legacy apm.lock" + ) + assert not (project_dir / "apm_modules").exists(), ( + "Dry-run populated apm_modules/" + ) + copilot_instructions = project_dir / ".github" / "copilot-instructions.md" + assert not copilot_instructions.exists(), ( + "Dry-run wrote .github/copilot-instructions.md" + ) + + 
+class TestInstallDryRunE2E: + """End-to-end coverage for `apm install --dry-run`.""" + + def test_install_dry_run_lists_apm_dependencies_without_changes( + self, temp_project, apm_command + ): + """Dry-run prints the preview banner, lists the APM dep, and writes nothing.""" + _write_apm_yml(temp_project, ["microsoft/apm-sample-package"]) + + result = _run_apm(apm_command, ["install", "--dry-run"], temp_project) + assert result.returncode == 0, ( + f"Dry-run failed:\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}" + ) + + out = result.stdout + assert "Dry run mode" in out, f"Missing 'Dry run mode' banner:\n{out}" + assert "Dry run complete" in out, f"Missing 'Dry run complete' footer:\n{out}" + assert "APM dependencies" in out, f"Missing APM dependencies header:\n{out}" + assert "microsoft/apm-sample-package" in out, ( + f"Dep repo_url not mentioned in dry-run output:\n{out}" + ) + + _assert_no_install_artifacts(temp_project) + + def test_install_dry_run_with_only_packages_filter( + self, temp_project, apm_command + ): + """`--only=apm` suppresses MCP-dependency listing in the dry-run preview.""" + _write_apm_yml( + temp_project, + apm_packages=["microsoft/apm-sample-package"], + mcp_packages=["io.github.github/github-mcp-server"], + ) + + result = _run_apm( + apm_command, ["install", "--dry-run", "--only=apm"], temp_project + ) + assert result.returncode == 0, ( + f"Filtered dry-run failed:\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}" + ) + + out = result.stdout + assert "Dry run mode" in out + assert "APM dependencies" in out, f"APM section missing under --only=apm:\n{out}" + assert "microsoft/apm-sample-package" in out + assert "MCP dependencies" not in out, ( + f"MCP section should be hidden under --only=apm:\n{out}" + ) + assert "github-mcp-server" not in out, ( + f"MCP dep leaked into --only=apm dry-run:\n{out}" + ) + + _assert_no_install_artifacts(temp_project) + + def test_install_dry_run_previews_orphan_removals( + self, temp_project, apm_command + 
): + """After a real install, removing the dep + dry-run reports orphan files + and keeps them on disk (the orphan-preview NameError regression test).""" + _write_apm_yml(temp_project, ["microsoft/apm-sample-package"]) + real = _run_apm(apm_command, ["install"], temp_project) + assert real.returncode == 0, ( + f"Initial install failed:\nSTDOUT: {real.stdout}\nSTDERR: {real.stderr}" + ) + + lock_path = temp_project / "apm.lock.yaml" + assert lock_path.exists(), "apm.lock.yaml not created by initial install" + with open(lock_path, encoding="utf-8") as f: + lockfile = yaml.safe_load(f) + + deployed_files = [] + for entry in (lockfile.get("dependencies") or []): + if entry.get("repo_url") == "microsoft/apm-sample-package": + deployed_files = [ + f for f in (entry.get("deployed_files") or []) + if (temp_project / f).exists() + ] + break + if not deployed_files: + pytest.skip("apm-sample-package deployed no files; cannot verify orphans") + + _write_apm_yml(temp_project, []) + + result = _run_apm(apm_command, ["install", "--dry-run"], temp_project) + assert result.returncode == 0, ( + f"Orphan dry-run failed:\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}" + ) + + out = result.stdout + assert "Dry run mode" in out + assert "Dry run complete" in out + assert "Files that would be removed" in out, ( + f"Orphan-removal preview missing:\n{out}" + ) + + for rel_path in deployed_files: + full = temp_project / rel_path + assert full.exists(), ( + f"Dry-run unexpectedly deleted orphan file: {rel_path}" + ) From d08aa3069bbb36600875232762fce0764a410562 Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:07 +0200 Subject: [PATCH 3/8] test(integration): add apm deps update E2E coverage (Up1+Up2+Up3+G3) The canonical update command had zero CLI-level integration coverage; the closest existing test (test_apm_dependencies::test_dependency_update_workflow) called GitHubPackageDownloader directly rather than the binary. 
Four cases: - Up1: deps update (all packages) with real ref change bumps lockfile SHA and re-deploys files (pinned commit -> main). - Up2: deps update with two installed packages updates only the named one. - Up3: deps update -g respects user scope. Lockfile lands at ~/.apm/apm.lock.yaml; cwd remains clean (regression guard against the historical silent-deploy-to-project bug). - G3: deps update unknown-pkg exits non-zero with a clear error. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_deps_update_e2e.py | 327 ++++++++++++++++++++++ 1 file changed, 327 insertions(+) create mode 100644 tests/integration/test_deps_update_e2e.py diff --git a/tests/integration/test_deps_update_e2e.py b/tests/integration/test_deps_update_e2e.py new file mode 100644 index 00000000..1a090e88 --- /dev/null +++ b/tests/integration/test_deps_update_e2e.py @@ -0,0 +1,327 @@ +"""End-to-end integration tests for the `apm deps update` CLI command. + +Covers gaps Up1, Up2, Up3, G3 -- canonical update workflows that previously +had zero CLI-level coverage: + + Up1: `apm deps update` (no args) bumps the lockfile SHA across all packages + Up2: `apm deps update ` updates only the named package + Up3: `apm deps update -g` updates user-scope dependencies under ~/.apm/ + G3: unknown package argument exits non-zero with helpful message + +Requires network access and GITHUB_TOKEN/GITHUB_APM_PAT for GitHub API. 
+Uses real packages from GitHub: + - microsoft/apm-sample-package + - github/awesome-copilot/skills/aspire (only for selective-update test) +""" + +import os +import shutil +import subprocess +from pathlib import Path + +import pytest +import yaml + + +pytestmark = pytest.mark.skipif( + not os.environ.get("GITHUB_APM_PAT") and not os.environ.get("GITHUB_TOKEN"), + reason="GITHUB_APM_PAT or GITHUB_TOKEN required for GitHub API access", +) + + +SAMPLE_REPO_URL = "microsoft/apm-sample-package" +SAMPLE_GIT_URL = "https://github.com/microsoft/apm-sample-package.git" +# Initial commit of microsoft/apm-sample-package (older than current main). +OLD_SHA = "318a8439" +NEWER_REF = "main" + + +@pytest.fixture +def apm_command(): + """Get the path to the APM CLI executable.""" + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +@pytest.fixture +def temp_project(tmp_path): + """Create a temporary APM project with a .github/ marker.""" + project_dir = tmp_path / "deps-update-test" + project_dir.mkdir() + (project_dir / ".github").mkdir() + return project_dir + + +@pytest.fixture +def fake_home(tmp_path): + """Isolated HOME for user-scope tests.""" + home_dir = tmp_path / "fakehome" + home_dir.mkdir() + return home_dir + + +def _env_with_home(fake_home): + """Return an env dict with HOME/USERPROFILE pointing to *fake_home*.""" + import sys + env = os.environ.copy() + env["HOME"] = str(fake_home) + if sys.platform == "win32": + env["USERPROFILE"] = str(fake_home) + return env + + +def _run_apm(apm_command, args, cwd, env=None, timeout=180): + """Run an apm CLI command and return the result.""" + return subprocess.run( + [apm_command] + args, + cwd=cwd, + capture_output=True, + text=True, + timeout=timeout, + env=env if env is not None else os.environ.copy(), + ) + + +def _write_apm_yml(target_dir, packages): + """Write 
apm.yml at *target_dir* with the given list of APM package specs.""" + config = { + "name": "deps-update-test", + "version": "1.0.0", + "dependencies": {"apm": packages, "mcp": []}, + } + (target_dir / "apm.yml").write_text( + yaml.dump(config, default_flow_style=False), encoding="utf-8" + ) + + +def _read_lockfile(lock_dir): + """Read and parse apm.lock.yaml from *lock_dir*.""" + lock_path = lock_dir / "apm.lock.yaml" + if not lock_path.exists(): + return None + with open(lock_path, encoding="utf-8") as f: + return yaml.safe_load(f) + + +def _get_locked_dep(lockfile, repo_url): + """Return the lockfile entry for *repo_url* (or None).""" + if not lockfile or "dependencies" not in lockfile: + return None + deps = lockfile["dependencies"] + if isinstance(deps, list): + for entry in deps: + if entry.get("repo_url") == repo_url: + return entry + return None + + +# --------------------------------------------------------------------------- +# Up1: `apm deps update` bumps SHA for all packages after a ref change +# --------------------------------------------------------------------------- + + +def test_deps_update_all_packages_bumps_lockfile_sha(temp_project, apm_command): + """`apm deps update` (no args) re-resolves refs and bumps the lockfile SHA.""" + # Step 1: install pinned to an older commit SHA. 
+ _write_apm_yml(temp_project, [ + {"git": SAMPLE_GIT_URL, "ref": OLD_SHA} + ]) + result1 = _run_apm(apm_command, ["install"], temp_project) + assert result1.returncode == 0, ( + f"Initial install failed:\nSTDOUT: {result1.stdout}\nSTDERR: {result1.stderr}" + ) + lockfile1 = _read_lockfile(temp_project) + dep1 = _get_locked_dep(lockfile1, SAMPLE_REPO_URL) + assert dep1 is not None, "Sample package missing from lockfile after install" + old_commit = dep1.get("resolved_commit") + assert old_commit, "No resolved_commit recorded for initial install" + deployed_before = list(dep1.get("deployed_files") or []) + assert deployed_before, "No deployed files recorded -- cannot verify update" + + # Step 2: bump apm.yml to point at main. + _write_apm_yml(temp_project, [ + {"git": SAMPLE_GIT_URL, "ref": NEWER_REF} + ]) + + # Step 3: run `apm deps update` with no positional args. + result2 = _run_apm(apm_command, ["deps", "update"], temp_project) + assert result2.returncode == 0, ( + f"deps update failed:\nSTDOUT: {result2.stdout}\nSTDERR: {result2.stderr}" + ) + + # Step 4: lockfile SHA must differ from old. + lockfile2 = _read_lockfile(temp_project) + dep2 = _get_locked_dep(lockfile2, SAMPLE_REPO_URL) + assert dep2 is not None, "Sample package disappeared from lockfile after update" + new_commit = dep2.get("resolved_commit") + assert new_commit, "No resolved_commit recorded after update" + assert new_commit != old_commit, ( + f"Lockfile SHA did not change after deps update: {old_commit} == {new_commit}" + ) + + # Step 5: deployed files must still exist (re-integrated). 
+ package_dir = temp_project / "apm_modules" / "microsoft" / "apm-sample-package" + assert package_dir.exists(), "Package directory missing after update" + redeployed = [f for f in (dep2.get("deployed_files") or []) if (temp_project / f).exists()] + assert redeployed, "No deployed files exist after update -- re-integration failed" + + +# --------------------------------------------------------------------------- +# Up2: `apm deps update ` updates only the named package +# --------------------------------------------------------------------------- + + +def test_deps_update_single_package_selective(temp_project, apm_command): + """`apm deps update ` should accept the selective filter and succeed. + + With two packages installed, requesting an update for one must succeed and + must not error on the unrelated package. + """ + _write_apm_yml(temp_project, [ + {"git": SAMPLE_GIT_URL, "ref": OLD_SHA}, + "github/awesome-copilot/skills/aspire", + ]) + result1 = _run_apm(apm_command, ["install"], temp_project) + assert result1.returncode == 0, ( + f"Initial install failed:\nSTDOUT: {result1.stdout}\nSTDERR: {result1.stderr}" + ) + lockfile1 = _read_lockfile(temp_project) + dep_sample_before = _get_locked_dep(lockfile1, SAMPLE_REPO_URL) + assert dep_sample_before is not None, "sample package not in initial lockfile" + sample_old_sha = dep_sample_before.get("resolved_commit") + + # Bump the sample package ref so a real update is possible. + _write_apm_yml(temp_project, [ + {"git": SAMPLE_GIT_URL, "ref": NEWER_REF}, + "github/awesome-copilot/skills/aspire", + ]) + + result2 = _run_apm( + apm_command, + ["deps", "update", SAMPLE_REPO_URL], + temp_project, + ) + assert result2.returncode == 0, ( + f"Selective deps update failed:\nSTDOUT: {result2.stdout}\nSTDERR: {result2.stderr}" + ) + + # The sample package SHA should change (since we bumped its ref). 
+ lockfile2 = _read_lockfile(temp_project) + dep_sample_after = _get_locked_dep(lockfile2, SAMPLE_REPO_URL) + assert dep_sample_after is not None, "sample package missing after selective update" + sample_new_sha = dep_sample_after.get("resolved_commit") + assert sample_new_sha and sample_old_sha and sample_new_sha != sample_old_sha, ( + f"Selected package SHA did not change: {sample_old_sha} -> {sample_new_sha}" + ) + + +# --------------------------------------------------------------------------- +# Up3: `apm deps update -g` updates user-scope deps under ~/.apm/ +# --------------------------------------------------------------------------- + + +def test_deps_update_global_user_scope(tmp_path, fake_home, apm_command): + """`apm deps update -g` must update ~/.apm/apm.lock.yaml, not cwd lockfile. + + Regression guard: a historical bug deployed silently to the project even + when --global was set. cli.py:601-611 now passes scope=USER through. + """ + # Create the user manifest with an older pinned commit. + apm_dir = fake_home / ".apm" + apm_dir.mkdir(parents=True, exist_ok=True) + user_manifest = apm_dir / "apm.yml" + + def _write_user_manifest(ref): + user_manifest.write_text(yaml.dump({ + "name": "global-deps-update-test", + "version": "1.0.0", + "dependencies": { + "apm": [{"git": SAMPLE_GIT_URL, "ref": ref}], + "mcp": [], + }, + }), encoding="utf-8") + + _write_user_manifest(OLD_SHA) + + env = _env_with_home(fake_home) + + # Use a separate cwd that has NO project manifest, to confirm scope=USER + # is honored. + work_dir = tmp_path / "outside-project" + work_dir.mkdir() + + # Step 1: install -g to populate ~/.apm/apm.lock.yaml. 
+ result1 = _run_apm(apm_command, ["install", "-g"], work_dir, env=env) + assert result1.returncode == 0, ( + f"Global install failed:\nSTDOUT: {result1.stdout}\nSTDERR: {result1.stderr}" + ) + user_lockfile1 = _read_lockfile(apm_dir) + assert user_lockfile1 is not None, "~/.apm/apm.lock.yaml not created by install -g" + dep1 = _get_locked_dep(user_lockfile1, SAMPLE_REPO_URL) + assert dep1 is not None, "package missing from user-scope lockfile" + old_commit = dep1.get("resolved_commit") + assert old_commit, "no resolved_commit in user-scope lockfile" + + # Step 2: bump the user manifest to main. + _write_user_manifest(NEWER_REF) + + # Step 3: run `apm deps update -g` from a directory with no project. + result2 = _run_apm(apm_command, ["deps", "update", "-g"], work_dir, env=env) + assert result2.returncode == 0, ( + f"deps update -g failed:\nSTDOUT: {result2.stdout}\nSTDERR: {result2.stderr}" + ) + + # Step 4: ~/.apm/apm.lock.yaml must reflect the new SHA. + user_lockfile2 = _read_lockfile(apm_dir) + assert user_lockfile2 is not None, "~/.apm/apm.lock.yaml missing after update -g" + dep2 = _get_locked_dep(user_lockfile2, SAMPLE_REPO_URL) + assert dep2 is not None, "package disappeared from user-scope lockfile after update" + new_commit = dep2.get("resolved_commit") + assert new_commit and new_commit != old_commit, ( + f"User-scope lockfile SHA did not change: {old_commit} -> {new_commit}" + ) + + # Step 5: scope was respected -- no project lockfile in cwd. 
+ assert not (work_dir / "apm.lock.yaml").exists(), ( + "apm.lock.yaml leaked into cwd -- scope=USER not honored" + ) + assert not (work_dir / "apm.lock").exists(), ( + "Legacy apm.lock leaked into cwd -- scope=USER not honored" + ) + assert not (work_dir / "apm.yml").exists(), ( + "apm.yml leaked into cwd -- scope=USER not honored" + ) + + +# --------------------------------------------------------------------------- +# G3: unknown package argument exits non-zero +# --------------------------------------------------------------------------- + + +def test_deps_update_unknown_package_errors(temp_project, apm_command): + """`apm deps update ` should exit non-zero with a helpful error.""" + _write_apm_yml(temp_project, [SAMPLE_REPO_URL]) + result_install = _run_apm(apm_command, ["install"], temp_project) + assert result_install.returncode == 0, ( + f"Initial install failed:\nSTDOUT: {result_install.stdout}\n" + f"STDERR: {result_install.stderr}" + ) + + result = _run_apm( + apm_command, + ["deps", "update", "some/nonexistent-package"], + temp_project, + ) + assert result.returncode != 0, ( + f"Expected non-zero exit for unknown package, got 0\n" + f"STDOUT: {result.stdout}\nSTDERR: {result.stderr}" + ) + combined = (result.stdout + result.stderr).lower() + assert "not found in" in combined, ( + f"Expected 'not found in' in error output, got:\n{result.stdout}\n{result.stderr}" + ) From 89464a64e17b50c98235a4c0ebfb69a0d2d53291 Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:07 +0200 Subject: [PATCH 4/8] test(integration): add verbose install token redaction guard (G4) Covers gap G4: PR #764 fixed a PAT leak in install/validation.py:218-231 by piping git ls-remote stderr through ado_downloader._sanitize_git_error. Without an integration test, that fix could regress silently. 
Two cases inject a recognizable canary token via GITHUB_TOKEN / GITHUB_APM_PAT, run apm install --verbose against nonexistent repos, and assert the canary substring is fully absent from stdout/stderr: - 404 repo via shorthand (org/repo) - 404 repo via explicit git+https URL Allows redacted forms (***, [REDACTED]) but never the literal token. Verified the canary is fully scrubbed; no leak detected. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../test_install_verbose_redaction_e2e.py | 146 ++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 tests/integration/test_install_verbose_redaction_e2e.py diff --git a/tests/integration/test_install_verbose_redaction_e2e.py b/tests/integration/test_install_verbose_redaction_e2e.py new file mode 100644 index 00000000..b8f360ea --- /dev/null +++ b/tests/integration/test_install_verbose_redaction_e2e.py @@ -0,0 +1,146 @@ +"""End-to-end regression guard: `apm install --verbose` must not leak tokens. + +PR #764 fixed a leak at src/apm_cli/install/validation.py where git ls-remote +stderr could echo the embedded PAT in clone URLs. These tests force an auth / +404 failure path with a recognizable bogus canary token and assert that the +canary substring NEVER appears in stdout or stderr, even with --verbose. + +Allowed: the redacted form (e.g. `***`, `[REDACTED]`, `x-access-token:***@`). +Forbidden: the literal canary substring in any form. + +These tests intentionally use a bogus token, so they do not require a real +GITHUB_TOKEN to be configured in CI. 
+""" + +import os +import shutil +import subprocess + +import pytest +import yaml +from pathlib import Path + + +CANARY = "github_pat_BOGUS_REDACTION_CANARY_DO_NOT_LEAK" +CANARY_CORE = "BOGUS_REDACTION_CANARY_DO_NOT_LEAK" + + +@pytest.fixture +def apm_command(): + """Get the path to the APM CLI executable.""" + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +@pytest.fixture +def temp_project(tmp_path): + """Create a temporary APM project with .github/ for target detection.""" + project_dir = tmp_path / "verbose-redaction-test" + project_dir.mkdir() + (project_dir / ".github").mkdir() + return project_dir + + +def _bogus_env(): + """Build a subprocess env that exposes ONLY the bogus canary token.""" + env = os.environ.copy() + env["GITHUB_TOKEN"] = CANARY + env["GITHUB_APM_PAT"] = CANARY + env.pop("GITHUB_APM_PAT_OVERRIDE", None) + # Prevent gh CLI / askpass helpers from injecting the real developer token. 
+ env.pop("GH_TOKEN", None) + env["GIT_TERMINAL_PROMPT"] = "0" + return env + + +def _run_apm_with_env(apm_command, args, cwd, env, timeout=60): + return subprocess.run( + [apm_command] + args, + cwd=cwd, + capture_output=True, + text=True, + timeout=timeout, + env=env, + ) + + +def _write_apm_yml(project_dir, packages): + config = { + "name": "verbose-redaction-test", + "version": "1.0.0", + "dependencies": {"apm": packages, "mcp": []}, + } + (project_dir / "apm.yml").write_text( + yaml.dump(config, default_flow_style=False), encoding="utf-8" + ) + + +def _assert_no_canary(result): + combined = (result.stdout or "") + (result.stderr or "") + assert CANARY_CORE not in combined, ( + "Token canary leaked in apm install --verbose output!\n" + f"--- STDOUT ---\n{result.stdout}\n" + f"--- STDERR ---\n{result.stderr}" + ) + + +def _assert_install_failed(result): + """Confirm we exercised an error path (either non-zero exit or error marker).""" + combined = (result.stdout or "") + (result.stderr or "") + failed = ( + result.returncode != 0 + or "Installation failed" in combined + or "Failed to download" in combined + or "Authentication failed" in combined + ) + assert failed, ( + "Expected install to hit an error path, but it appeared to succeed.\n" + f"STDOUT: {result.stdout}\nSTDERR: {result.stderr}" + ) + + +class TestVerboseInstallTokenRedaction: + """Regression guard for PR #764 -- verbose install must redact tokens.""" + + def test_verbose_install_does_not_leak_token_on_404_repo( + self, temp_project, apm_command + ): + """API-probe path: nonexistent shorthand repo ref, auth fails.""" + _write_apm_yml( + temp_project, + ["microsoft/this-repo-definitely-does-not-exist-xyz123"], + ) + result = _run_apm_with_env( + apm_command, + ["install", "--verbose"], + temp_project, + _bogus_env(), + ) + _assert_install_failed(result) + _assert_no_canary(result) + + def test_verbose_install_does_not_leak_token_in_url_form( + self, temp_project, apm_command + ): + """URL-probe 
path: explicit git+https URL, auth fails.""" + _write_apm_yml( + temp_project, + [ + { + "git": "https://github.com/microsoft/this-also-does-not-exist-xyz789.git" + } + ], + ) + result = _run_apm_with_env( + apm_command, + ["install", "--verbose"], + temp_project, + _bogus_env(), + ) + _assert_install_failed(result) + _assert_no_canary(result) From caec5454cf0635b7de6caaf8e6bf68ad89f7b750 Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:29 +0200 Subject: [PATCH 5/8] test(integration): add marketplace CLI E2E coverage (G3.5, partial) The marketplace CLI flow had ~1604 LOC of unit coverage but zero binary-level integration tests. This adds three config-side cases plus one documented skip: - list shows seeded ~/.apm/marketplaces.json entries - add rejects invalid OWNER/REPO format before hitting network - remove clears the entry from disk and from list output - install plugin@marketplace deploy is skipped (needs a stable public marketplace.json fixture; documented in test for follow-up) Uses fake_home isolation pattern; no real network required for the three active tests. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_marketplace_e2e.py | 150 ++++++++++++++++++++++ 1 file changed, 150 insertions(+) create mode 100644 tests/integration/test_marketplace_e2e.py diff --git a/tests/integration/test_marketplace_e2e.py b/tests/integration/test_marketplace_e2e.py new file mode 100644 index 00000000..458ee6e4 --- /dev/null +++ b/tests/integration/test_marketplace_e2e.py @@ -0,0 +1,150 @@ +"""End-to-end binary-level tests for the `apm marketplace` CLI surface. + +Covers gap G3.5 -- the marketplace flow (`marketplace add` / `list` / +`remove`, then `install plugin@marketplace`) had no binary-level coverage +even though the underlying modules (registry, client, resolver) are +unit-tested. 
+ +Tests 1 and 3 seed `~/.apm/marketplaces.json` directly and exercise the +config-only commands (`list`, `remove`) that do not require network +access. Test 2 exercises the `add` command's input-validation path, +which also runs without network. + +The full `add -> install plugin@marketplace -> deploy` flow requires a +public marketplace.json hosted on GitHub plus a token; that scenario is +left intentionally as a follow-up since no public marketplace fixture is +maintained alongside this repository today (see `apm-sample-package`, +which is a plain APM package, not a marketplace). +""" + +import json +import os +import shutil +import subprocess +import sys +from pathlib import Path + +import pytest + + +SAMPLE_MARKETPLACE_NAME = "test-mkt" + + +@pytest.fixture +def apm_command(): + """Resolve the apm CLI executable (PATH first, then local venv).""" + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +@pytest.fixture +def fake_home(tmp_path): + """Isolated HOME so registry writes never touch the real user config.""" + home_dir = tmp_path / "fakehome" + home_dir.mkdir() + return home_dir + + +def _env_with_home(fake_home): + env = os.environ.copy() + env["HOME"] = str(fake_home) + if sys.platform == "win32": + env["USERPROFILE"] = str(fake_home) + return env + + +def _run_apm(apm_command, args, fake_home, cwd=None, timeout=60): + return subprocess.run( + [apm_command] + args, + cwd=str(cwd) if cwd else None, + capture_output=True, + text=True, + timeout=timeout, + env=_env_with_home(fake_home), + ) + + +def _seed_marketplace(fake_home, name=SAMPLE_MARKETPLACE_NAME, + owner="acme-org", repo="plugin-marketplace"): + """Write a valid marketplaces.json directly, bypassing the network call + that `apm marketplace add` performs.""" + apm_dir = fake_home / ".apm" + apm_dir.mkdir(parents=True, exist_ok=True) + 
payload = { + "marketplaces": [ + {"name": name, "owner": owner, "repo": repo} + ] + } + (apm_dir / "marketplaces.json").write_text( + json.dumps(payload, indent=2), encoding="utf-8" + ) + + +def test_marketplace_list_shows_seeded_entry(apm_command, fake_home): + """`apm marketplace list` surfaces entries persisted in the registry.""" + _seed_marketplace(fake_home) + + result = _run_apm(apm_command, ["marketplace", "list"], fake_home) + + assert result.returncode == 0, ( + f"stdout={result.stdout!r}\nstderr={result.stderr!r}" + ) + combined = result.stdout + result.stderr + assert SAMPLE_MARKETPLACE_NAME in combined + assert "acme-org/plugin-marketplace" in combined + + +def test_marketplace_add_rejects_invalid_format(apm_command, fake_home): + """`apm marketplace add` validates OWNER/REPO format without hitting the + network (validation happens before the GitHub fetch).""" + result = _run_apm( + apm_command, ["marketplace", "add", "not-a-valid-repo"], fake_home + ) + + assert result.returncode != 0 + combined = result.stdout + result.stderr + assert "Invalid format" in combined or "OWNER/REPO" in combined + + # Registry file must NOT have been created/populated + registry_file = fake_home / ".apm" / "marketplaces.json" + if registry_file.exists(): + data = json.loads(registry_file.read_text(encoding="utf-8")) + assert data.get("marketplaces", []) == [] + + +def test_marketplace_remove_clears_entry(apm_command, fake_home): + """`apm marketplace remove --yes` deletes the entry from the registry.""" + _seed_marketplace(fake_home) + + remove_result = _run_apm( + apm_command, + ["marketplace", "remove", SAMPLE_MARKETPLACE_NAME, "--yes"], + fake_home, + ) + assert remove_result.returncode == 0, ( + f"stdout={remove_result.stdout!r}\nstderr={remove_result.stderr!r}" + ) + + list_result = _run_apm(apm_command, ["marketplace", "list"], fake_home) + assert list_result.returncode == 0 + combined = list_result.stdout + list_result.stderr + assert SAMPLE_MARKETPLACE_NAME not 
in combined + + registry_file = fake_home / ".apm" / "marketplaces.json" + data = json.loads(registry_file.read_text(encoding="utf-8")) + assert data.get("marketplaces", []) == [] + + +@pytest.mark.skip( + reason="Full add->install->deploy flow needs a public marketplace.json " + "fixture on GitHub; no canonical public marketplace is maintained " + "alongside this repo. See gap G3.5 follow-up." +) +def test_marketplace_install_resolves_and_deploys(): + """Placeholder for the full end-to-end install path.""" + pass From ed96982e23b46a8ec61df15999f8ef46810539b9 Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:29 +0200 Subject: [PATCH 6/8] test(integration): add apm uninstall --dry-run E2E coverage (U2) Two cases: - Real install + uninstall --dry-run leaves all files, manifest, and lockfile untouched while emitting the dry-run preview. - uninstall --dry-run with an unknown package emits a warning and makes no mutations (locks current behavior: exit 0 with warning, NOT failure -- engine.py:60-101). Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../integration/test_uninstall_dry_run_e2e.py | 135 ++++++++++++++++++ 1 file changed, 135 insertions(+) create mode 100644 tests/integration/test_uninstall_dry_run_e2e.py diff --git a/tests/integration/test_uninstall_dry_run_e2e.py b/tests/integration/test_uninstall_dry_run_e2e.py new file mode 100644 index 00000000..d0b904d8 --- /dev/null +++ b/tests/integration/test_uninstall_dry_run_e2e.py @@ -0,0 +1,135 @@ +"""End-to-end integration tests for `apm uninstall --dry-run`. + +Covers gap U2: dry-run preview must list what would be removed without +mutating apm.yml, apm.lock.yaml, or any deployed files on disk. + +Requires network access and GITHUB_TOKEN/GITHUB_APM_PAT for GitHub API. +Uses the real microsoft/apm-sample-package. 
+""" + +import os +import shutil +import subprocess +from pathlib import Path + +import pytest +import yaml + + +pytestmark = pytest.mark.skipif( + not os.environ.get("GITHUB_APM_PAT") and not os.environ.get("GITHUB_TOKEN"), + reason="GITHUB_APM_PAT or GITHUB_TOKEN required for GitHub API access", +) + + +@pytest.fixture +def apm_command(): + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +@pytest.fixture +def temp_project(tmp_path): + project_dir = tmp_path / "uninstall-dry-run-test" + project_dir.mkdir() + (project_dir / ".github").mkdir() + return project_dir + + +def _run_apm(apm_command, args, cwd, timeout=180): + return subprocess.run( + [apm_command] + args, + cwd=cwd, + capture_output=True, + text=True, + timeout=timeout, + ) + + +def _write_apm_yml(project_dir, packages): + config = { + "name": "uninstall-dry-run-test", + "version": "1.0.0", + "dependencies": {"apm": packages, "mcp": []}, + } + (project_dir / "apm.yml").write_text( + yaml.dump(config, default_flow_style=False), encoding="utf-8" + ) + + +def _snapshot_files(project_dir): + """Return a set of relative file paths under project_dir.""" + files = set() + for path in project_dir.rglob("*"): + if path.is_file(): + files.add(path.relative_to(project_dir).as_posix()) + return files + + +SAMPLE_PKG = "microsoft/apm-sample-package#main" + + +def test_uninstall_dry_run_lists_files_without_removing(apm_command, temp_project): + _write_apm_yml(temp_project, [SAMPLE_PKG]) + + install = _run_apm(apm_command, ["install"], temp_project) + assert install.returncode == 0, f"install failed: {install.stderr}\n{install.stdout}" + + apm_yml_before = (temp_project / "apm.yml").read_text(encoding="utf-8") + lock_path = temp_project / "apm.lock.yaml" + assert lock_path.exists(), "lockfile should exist after install" + lock_before = 
lock_path.read_text(encoding="utf-8")
+    assert "apm-sample-package" in lock_before
+    files_before = _snapshot_files(temp_project)
+
+    result = _run_apm(
+        apm_command,
+        ["uninstall", "microsoft/apm-sample-package", "--dry-run"],
+        temp_project,
+    )
+    assert result.returncode == 0, f"dry-run failed: {result.stderr}\n{result.stdout}"
+
+    combined = result.stdout + result.stderr
+    assert "Dry run" in combined or "dry run" in combined.lower()
+    assert "microsoft/apm-sample-package" in combined
+    assert "no changes made" in combined.lower()
+
+    files_after = _snapshot_files(temp_project)
+    missing = files_before - files_after
+    assert not missing, f"dry-run removed files: {sorted(missing)}"
+
+    assert (temp_project / "apm.yml").read_text(encoding="utf-8") == apm_yml_before
+    assert lock_path.read_text(encoding="utf-8") == lock_before
+    assert "apm-sample-package" in lock_path.read_text(encoding="utf-8")
+
+
+def test_uninstall_dry_run_with_unknown_package(apm_command, temp_project):
+    _write_apm_yml(temp_project, [SAMPLE_PKG])
+
+    install = _run_apm(apm_command, ["install"], temp_project)
+    assert install.returncode == 0, f"install failed: {install.stderr}\n{install.stdout}"
+
+    files_before = _snapshot_files(temp_project)
+    apm_yml_before = (temp_project / "apm.yml").read_text(encoding="utf-8")
+
+    result = _run_apm(
+        apm_command,
+        ["uninstall", "some/nonexistent", "--dry-run"],
+        temp_project,
+    )
+    combined = result.stdout + result.stderr
+    # Locked behavior per module docstring: unknown pkg => exit 0 + warning.
+    assert result.returncode == 0 and "not found" in combined.lower(), (
+        f"expected exit 0 with 'not found' warning for unknown package\n"
+        f"stdout: {result.stdout}\nstderr: {result.stderr}"
+    )
+
+    files_after = _snapshot_files(temp_project)
+    assert files_after == files_before, "no files should change for unknown package dry-run"
+    assert (temp_project / "apm.yml").read_text(encoding="utf-8") == apm_yml_before
+    lock_path = temp_project / "apm.lock.yaml"
+    assert lock_path.exists() and "apm-sample-package" in lock_path.read_text(encoding="utf-8")

From 
0fd0d9d3f16fc92abea75c06cf4a7ccc7b4db217 Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:29 +0200 Subject: [PATCH 7/8] test(integration): add multi-package uninstall E2E coverage (U3) Two cases using two real public APM packages: - uninstall PKG_A PKG_B in a single command removes both from manifest, lockfile, and disk in one operation. - uninstall known unknown/pkg removes the known one and emits a warning for the unknown (exit 0, behavior locked). Packages: microsoft/apm-sample-package and github/awesome-copilot/skills/aspire (both used elsewhere in the integration suite). Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_uninstall_multi_e2e.py | 184 ++++++++++++++++++ 1 file changed, 184 insertions(+) create mode 100644 tests/integration/test_uninstall_multi_e2e.py diff --git a/tests/integration/test_uninstall_multi_e2e.py b/tests/integration/test_uninstall_multi_e2e.py new file mode 100644 index 00000000..6f6c2a45 --- /dev/null +++ b/tests/integration/test_uninstall_multi_e2e.py @@ -0,0 +1,184 @@ +"""End-to-end integration tests for multi-package `apm uninstall`. + +Covers gap U3: `apm uninstall pkg1 pkg2 ...` is documented but never +integration-tested. The engine handles list iteration; today only single-pkg +paths are tested. + +Requires network access and GITHUB_TOKEN/GITHUB_APM_PAT for GitHub API. 
+Uses two real public APM packages from GitHub: + - microsoft/apm-sample-package + - github/awesome-copilot/skills/aspire +""" + +import os +import shutil +import subprocess + +import pytest +import yaml +from pathlib import Path + + +pytestmark = pytest.mark.skipif( + not os.environ.get("GITHUB_APM_PAT") and not os.environ.get("GITHUB_TOKEN"), + reason="GITHUB_APM_PAT or GITHUB_TOKEN required for GitHub API access", +) + + +PKG_A = "microsoft/apm-sample-package" +PKG_B = "github/awesome-copilot/skills/aspire" + + +@pytest.fixture +def apm_command(): + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +@pytest.fixture +def temp_project(tmp_path): + project_dir = tmp_path / "uninstall-multi-test" + project_dir.mkdir() + (project_dir / ".github").mkdir() + return project_dir + + +def _run_apm(apm_command, args, cwd, timeout=180): + return subprocess.run( + [apm_command] + args, + cwd=cwd, + capture_output=True, + text=True, + timeout=timeout, + ) + + +def _write_apm_yml(project_dir, packages): + config = { + "name": "uninstall-multi-test", + "version": "1.0.0", + "dependencies": {"apm": packages, "mcp": []}, + } + (project_dir / "apm.yml").write_text( + yaml.dump(config, default_flow_style=False), encoding="utf-8" + ) + + +def _read_yaml(path): + if not path.exists(): + return None + with open(path, encoding="utf-8") as f: + return yaml.safe_load(f) + + +def _lock_dep_keys(lockfile): + """Return the set of dependency identifiers present in the lockfile.""" + if not lockfile or "dependencies" not in lockfile: + return set() + deps = lockfile["dependencies"] + if isinstance(deps, list): + return {entry.get("repo_url", "") for entry in deps if isinstance(entry, dict)} + if isinstance(deps, dict): + return set(deps.keys()) + return set() + + +def _deployed_files_for(lockfile, repo_substr): + """Return 
deployed_files for first lockfile dep whose identifier matches substr.""" + if not lockfile or "dependencies" not in lockfile: + return [] + deps = lockfile["dependencies"] + entries = deps.values() if isinstance(deps, dict) else deps + for entry in entries: + if not isinstance(entry, dict): + continue + ident = entry.get("repo_url", "") + if repo_substr in ident: + return entry.get("deployed_files", []) or [] + return [] + + +class TestUninstallMultiplePackages: + """Verify that `apm uninstall pkg1 pkg2` removes both in a single command.""" + + def test_uninstall_multiple_packages_in_one_command(self, temp_project, apm_command): + _write_apm_yml(temp_project, [PKG_A, PKG_B]) + result_install = _run_apm(apm_command, ["install"], temp_project) + assert result_install.returncode == 0, ( + f"Install failed:\nSTDOUT: {result_install.stdout}\nSTDERR: {result_install.stderr}" + ) + + lockfile_before = _read_yaml(temp_project / "apm.lock.yaml") + files_a_before = [ + f for f in _deployed_files_for(lockfile_before, "apm-sample-package") + if (temp_project / f).exists() + ] + files_b_before = [ + f for f in _deployed_files_for(lockfile_before, "awesome-copilot") + if (temp_project / f).exists() + ] + if not files_a_before or not files_b_before: + pytest.skip("One of the packages deployed no files; cannot verify cleanup") + + result_un = _run_apm(apm_command, ["uninstall", PKG_A, PKG_B], temp_project) + assert result_un.returncode == 0, ( + f"Uninstall failed:\nSTDOUT: {result_un.stdout}\nSTDERR: {result_un.stderr}" + ) + + manifest_after = _read_yaml(temp_project / "apm.yml") + apm_deps_after = manifest_after.get("dependencies", {}).get("apm") or [] + deps_text = yaml.dump(apm_deps_after) + assert "apm-sample-package" not in deps_text, ( + f"PKG_A still in apm.yml after multi-uninstall: {apm_deps_after}" + ) + assert "awesome-copilot" not in deps_text, ( + f"PKG_B still in apm.yml after multi-uninstall: {apm_deps_after}" + ) + + lockfile_after = _read_yaml(temp_project / 
"apm.lock.yaml") + keys_after = _lock_dep_keys(lockfile_after) + joined_keys = " ".join(keys_after) + assert "apm-sample-package" not in joined_keys, ( + f"PKG_A still in apm.lock after multi-uninstall: {keys_after}" + ) + assert "awesome-copilot" not in joined_keys, ( + f"PKG_B still in apm.lock after multi-uninstall: {keys_after}" + ) + + for rel_path in files_a_before + files_b_before: + assert not (temp_project / rel_path).exists(), ( + f"Deployed file {rel_path} not cleaned up by multi-uninstall" + ) + + def test_uninstall_partial_unknown_continues_safely(self, temp_project, apm_command): + """Engine warns on unknown package but still removes the known one (exit 0).""" + _write_apm_yml(temp_project, [PKG_A]) + result_install = _run_apm(apm_command, ["install"], temp_project) + assert result_install.returncode == 0, ( + f"Install failed:\nSTDOUT: {result_install.stdout}\nSTDERR: {result_install.stderr}" + ) + + result_un = _run_apm( + apm_command, + ["uninstall", PKG_A, "some/unknown-pkg-xyz789"], + temp_project, + ) + assert result_un.returncode == 0, ( + f"Partial-unknown uninstall failed:\nSTDOUT: {result_un.stdout}\nSTDERR: {result_un.stderr}" + ) + + combined = (result_un.stdout + result_un.stderr).lower() + assert "not found" in combined or "unknown" in combined or "warning" in combined, ( + f"Expected a not-found warning for unknown package; output:\n{result_un.stdout}\n{result_un.stderr}" + ) + + manifest_after = _read_yaml(temp_project / "apm.yml") + apm_deps_after = manifest_after.get("dependencies", {}).get("apm") or [] + assert "apm-sample-package" not in yaml.dump(apm_deps_after), ( + f"Known package not removed when batched with unknown one: {apm_deps_after}" + ) From de025145104827e30c9021cc1b53f856c65a4576 Mon Sep 17 00:00:00 2001 From: danielmeppiel Date: Sun, 19 Apr 2026 16:47:29 +0200 Subject: [PATCH 8/8] test(integration): add 3-level transitive chain E2E coverage (G5) Builds a real 3-level dependency chain using local-path APM deps so the 
test is fast and network-free: consumer -> pkg-a -> pkg-b -> pkg-c (leaf) Two cases exercise the post-#764 install/resolve walker end-to-end: - Resolution: all 3 packages materialize in apm_modules/_local/, lockfile records depth (1/2/3) and resolved_by chain, instructions deploy from every level into .github/instructions/. - Uninstall cascade: uninstall ../pkg-a removes pkg-b and pkg-c from disk, lockfile, and .github/instructions/ via _cleanup_transitive_orphans. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../integration/test_transitive_chain_e2e.py | 160 ++++++++++++++++++ 1 file changed, 160 insertions(+) create mode 100644 tests/integration/test_transitive_chain_e2e.py diff --git a/tests/integration/test_transitive_chain_e2e.py b/tests/integration/test_transitive_chain_e2e.py new file mode 100644 index 00000000..de4f53f3 --- /dev/null +++ b/tests/integration/test_transitive_chain_e2e.py @@ -0,0 +1,160 @@ +"""End-to-end coverage for APM transitive dependency chains (gap G5). + +Builds a 3-level local chain (pkg-a -> pkg-b -> pkg-c) using file-system +path dependencies and exercises the install + uninstall cascade through the +real CLI binary. Local paths keep the test deterministic (no network) while +still flowing through the same resolver/lockfile/integration code that +remote APM deps use. 
+""" + +import shutil +import subprocess +from pathlib import Path + +import pytest +import yaml + + +TIMEOUT = 180 + + +@pytest.fixture +def apm_command(): + """Resolve the APM CLI executable (PATH or local venv).""" + apm_on_path = shutil.which("apm") + if apm_on_path: + return apm_on_path + venv_apm = Path(__file__).parent.parent.parent / ".venv" / "bin" / "apm" + if venv_apm.exists(): + return str(venv_apm) + return "apm" + + +def _write_pkg(pkg_dir: Path, name: str, deps: list, primitive_name: str) -> None: + """Create a minimal APM package with one instructions primitive.""" + pkg_dir.mkdir(parents=True) + manifest = {"name": name, "version": "1.0.0", "description": f"{name} test package"} + if deps: + manifest["dependencies"] = {"apm": deps} + (pkg_dir / "apm.yml").write_text(yaml.dump(manifest)) + instructions = pkg_dir / ".apm" / "instructions" + instructions.mkdir(parents=True) + (instructions / f"{primitive_name}.instructions.md").write_text( + f"---\napplyTo: '**'\n---\n# {primitive_name}\nFrom {name}.\n" + ) + + +@pytest.fixture +def chain_workspace(tmp_path): + """Build workspace/{consumer, pkg-a, pkg-b, pkg-c} with a 3-level chain.""" + workspace = tmp_path / "workspace" + workspace.mkdir() + + consumer = workspace / "consumer" + consumer.mkdir() + (consumer / "apm.yml").write_text(yaml.dump({ + "name": "consumer-project", + "version": "1.0.0", + "dependencies": {"apm": []}, + })) + (consumer / ".github").mkdir() + + # Sibling layout: ../pkg-x from consumer resolves under workspace/. + # Transitive local paths are resolved against the consumer's project_root + # (see _copy_local_package), so chain hops also use ../pkg-y. 
+ _write_pkg(workspace / "pkg-c", "pkg-c", [], "leaf-skill") + _write_pkg(workspace / "pkg-b", "pkg-b", ["../pkg-c"], "middle-skill") + _write_pkg(workspace / "pkg-a", "pkg-a", ["../pkg-b"], "root-skill") + + return workspace + + +def _load_lockfile(consumer: Path) -> dict: + lock_path = consumer / "apm.lock.yaml" + assert lock_path.exists(), "Lockfile not created" + with open(lock_path) as f: + return yaml.safe_load(f) or {} + + +def _deps_by_name(lockfile: dict) -> dict: + """Index lockfile dependency entries by their unique key (repo_url).""" + out = {} + for dep in lockfile.get("dependencies", []) or []: + key = dep.get("repo_url") or dep.get("name") or "" + out[key] = dep + return out + + +def test_three_level_apm_chain_resolves_all_levels(chain_workspace, apm_command): + """A->B->C chain installs all three packages and records the dep graph.""" + consumer = chain_workspace / "consumer" + + result = subprocess.run( + [apm_command, "install", "../pkg-a"], + cwd=consumer, capture_output=True, text=True, timeout=TIMEOUT, + ) + assert result.returncode == 0, f"Install failed: {result.stderr}\n{result.stdout}" + + modules_local = consumer / "apm_modules" / "_local" + for name in ("pkg-a", "pkg-b", "pkg-c"): + assert (modules_local / name / "apm.yml").exists(), ( + f"Transitive package {name} not materialised under apm_modules/_local/" + ) + + deps = _deps_by_name(_load_lockfile(consumer)) + for key in ("_local/pkg-a", "_local/pkg-b", "_local/pkg-c"): + assert key in deps, f"Lockfile missing {key}: have {sorted(deps)}" + + # Direct deps default to depth=1 (omitted), transitives carry depth>=2 + resolved_by. 
+ assert deps["_local/pkg-a"].get("depth", 1) == 1 + assert deps["_local/pkg-a"].get("resolved_by") in (None, "") + assert deps["_local/pkg-b"].get("depth", 1) >= 2 + assert deps["_local/pkg-b"].get("resolved_by") == "_local/pkg-a" + assert deps["_local/pkg-c"].get("depth", 1) >= 3 + assert deps["_local/pkg-c"].get("resolved_by") == "_local/pkg-b" + + deployed = consumer / ".github" / "instructions" + for fname in ("root-skill.instructions.md", "middle-skill.instructions.md", + "leaf-skill.instructions.md"): + assert (deployed / fname).exists(), ( + f"Primitive {fname} not deployed. Present: " + f"{sorted(p.name for p in deployed.glob('*'))}" + ) + + +def test_three_level_chain_uninstall_root_cascades(chain_workspace, apm_command): + """Uninstalling the root drops orphaned transitive deps and their primitives.""" + consumer = chain_workspace / "consumer" + + install = subprocess.run( + [apm_command, "install", "../pkg-a"], + cwd=consumer, capture_output=True, text=True, timeout=TIMEOUT, + ) + assert install.returncode == 0, f"Install failed: {install.stderr}" + + uninstall = subprocess.run( + [apm_command, "uninstall", "../pkg-a"], + cwd=consumer, capture_output=True, text=True, timeout=TIMEOUT, + ) + assert uninstall.returncode == 0, f"Uninstall failed: {uninstall.stderr}" + + modules_local = consumer / "apm_modules" / "_local" + for name in ("pkg-a", "pkg-b", "pkg-c"): + assert not (modules_local / name).exists(), ( + f"Transitive orphan {name} not cleaned from apm_modules/_local/" + ) + + # Lockfile may be deleted entirely when no deps remain; otherwise it must + # contain no references to the cascaded chain. 
+ lock_path = consumer / "apm.lock.yaml" + if lock_path.exists(): + deps = _deps_by_name(yaml.safe_load(lock_path.read_text()) or {}) + for key in ("_local/pkg-a", "_local/pkg-b", "_local/pkg-c"): + assert key not in deps, f"Lockfile still references {key} after cascade" + + deployed = consumer / ".github" / "instructions" + for fname in ("root-skill.instructions.md", "middle-skill.instructions.md", + "leaf-skill.instructions.md"): + assert not (deployed / fname).exists(), ( + f"Primitive {fname} survived cascade uninstall" + )