diff --git a/.gitignore b/.gitignore index c65a9be36..67abd0a02 100644 --- a/.gitignore +++ b/.gitignore @@ -60,6 +60,3 @@ docs/wip/ .env.local .env.*.local AGENTS.md - -# APM dependencies -apm_modules/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 043c813be..9dbf1b83d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,19 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.5.4] + +### Added +- **Agent Integration**: Automatic sync of `.agent.md` files to `.github/agents/` with `-apm` suffix (same pattern as prompt integration) + +### Fixed +- `sync_integration` URL normalization bug that caused ALL integrated files to be removed during uninstall instead of only the uninstalled package's files + - Root cause: Metadata stored full URLs (`https://github.com/owner/repo`) while dependency list used short form (`owner/repo`) + - Impact: Uninstalling one package would incorrectly remove prompts/agents from ALL other packages + - Fix: Normalize both URL formats to `owner/repo` before comparison + - Added comprehensive test coverage for multi-package scenarios +- Uninstall command now correctly removes only `apm_modules/owner/repo/` directory (not `apm_modules/owner/`) + ## [0.5.3] - 2025-11-16 ### Changed diff --git a/README.md b/README.md index ce1f60316..b195caac6 100644 --- a/README.md +++ b/README.md @@ -15,8 +15,9 @@ 📦 **Mix and match what your team needs**: -- **Agents** - Agentic workflows (.prompt.md files) -- **Context** - Company rules, standards, knowledge (.instructions.md files) and domain boundaries (.chatmode.md) +- **Agents** - AI personas (.agent.md files) +- **Prompts** - Executable workflows (.prompt.md files) +- **Context** - Company rules, standards, knowledge (.instructions.md files) ![APM Demo](docs/apm-demo.gif) diff --git a/docs/getting-started.md b/docs/getting-started.md index a2d299bbd..a0285e91c 100644 --- 
a/docs/getting-started.md +++ b/docs/getting-started.md @@ -284,12 +284,14 @@ This creates a complete Context structure: my-first-project/ ├── apm.yml # Project configuration └── .apm/ - ├── chatmodes/ # AI assistant personalities + ├── agents/ # AI assistant personalities ├── instructions/ # Context and coding standards ├── prompts/ # Reusable agent workflows └── context/ # Project knowledge base ``` +> **Note**: Legacy `.apm/chatmodes/` directory with `.chatmode.md` files is still supported. + ### 2. Explore Generated Files Let's look at what was created: @@ -376,7 +378,6 @@ apm install company/templates/chatmodes/qa-assistant.chatmode.md - `.prompt.md` - Agent workflows - `.instructions.md` - Context and rules -- `.chatmode.md` - Chat mode configurations - `.agent.md` - Agent definitions **Installation Structure:** diff --git a/docs/integrations.md b/docs/integrations.md index 17d34d5db..fd2a81c28 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -131,13 +131,15 @@ APM works natively with VSCode's Context implementation: VSCode already implements core Context concepts: -- **Chat Modes**: Domain-specific chat behavior with `.chatmode.md` files in `.github/chatmodes/` +- **Agents**: AI personas and workflows with `.agent.md` files in `.github/agents/` (legacy: `.chatmode.md` in `.github/chatmodes/`) - **Instructions Files**: Modular instructions with `copilot-instructions.md` and `.instructions.md` files - **Prompt Files**: Reusable task templates with `.prompt.md` files in `.github/prompts/` -### Automatic Prompt Integration with APM +> **Note**: APM supports both the new `.agent.md` format and legacy `.chatmode.md` format. VSCode provides Quick Fix actions to migrate from `.chatmode.md` to `.agent.md`. 
-APM automatically integrates prompts from installed packages into VSCode's native structure: +### Automatic Prompt and Agent Integration with APM + +APM automatically integrates prompts and agents from installed packages into VSCode's native structure: ```bash # Install APM packages - integration happens automatically @@ -145,6 +147,9 @@ apm install danielmeppiel/design-guidelines # Prompts are automatically integrated to: # .github/prompts/*-apm.prompt.md (with package metadata header) + +# Agents are automatically integrated to: +# .github/agents/*-apm.agent.md (with package metadata header) ``` **How Auto-Integration Works**: @@ -157,13 +162,14 @@ apm install danielmeppiel/design-guidelines **Integration Flow**: 1. Run `apm install` to fetch APM packages -2. APM automatically creates `.github/prompts/` directory if needed -3. Discovers `.prompt.md` files in each package +2. APM automatically creates `.github/prompts/` and `.github/agents/` directories if needed +3. Discovers `.prompt.md` and `.agent.md` files in each package 4. Copies prompts to `.github/prompts/` with `-apm` suffix (e.g., `accessibility-audit-apm.prompt.md`) -5. Adds metadata headers for version tracking -6. Updates `.gitignore` to exclude integrated prompts -7. VSCode automatically loads all prompts for your coding agents -8. Run `apm uninstall` to automatically remove integrated prompts +5. Copies agents to `.github/agents/` with `-apm` suffix (e.g., `security-apm.agent.md`) +6. Adds metadata headers for version tracking +7. Updates `.gitignore` to exclude integrated prompts and agents +8. VSCode automatically loads all prompts and agents for your coding agents +9. 
Run `apm uninstall` to automatically remove integrated prompts and agents **Intent-First Discovery**: The `-apm` suffix pattern enables natural autocomplete in VSCode: @@ -177,10 +183,15 @@ The `-apm` suffix pattern enables natural autocomplete in VSCode: apm install danielmeppiel/design-guidelines # Result in VSCode: +# Prompts: # .github/prompts/accessibility-audit-apm.prompt.md ✓ Available in chat # .github/prompts/design-review-apm.prompt.md ✓ Available in chat # .github/prompts/style-guide-check-apm.prompt.md ✓ Available in chat +# Agents: +# .github/agents/design-reviewer-apm.agent.md ✓ Available as chat mode +# .github/agents/accessibility-expert-apm.agent.md ✓ Available as chat mode + # Use with natural autocomplete: # Type: /design # VSCode suggests: design-review-apm.prompt.md ✨ @@ -188,10 +199,11 @@ apm install danielmeppiel/design-guidelines **VSCode Native Features**: - All integrated prompts appear in VSCode's prompt picker +- All integrated agents appear in VSCode's chat mode selector - Native chat integration with primitives - Seamless `/prompt` command support - File-pattern based instruction application -- Chatmode support for different personas +- Agent support for different personas and workflows ## Development Tool Integrations @@ -290,7 +302,8 @@ apm install danielmeppiel/design-guidelines # GitHub Copilot automatically picks up: # .github/prompts/*-apm.prompt.md (integrated prompts) -# .github/chatmodes/ (chat personalities) +# .github/agents/*-apm.agent.md (integrated agents) +# .github/agents/ or .github/chatmodes/ (AI personas - both formats supported) # .github/instructions/ (file-pattern rules) ``` diff --git a/docs/primitives.md b/docs/primitives.md index d2135c8fd..f1b4781e6 100644 --- a/docs/primitives.md +++ b/docs/primitives.md @@ -18,9 +18,9 @@ apm init my-project # Creates complete Context scaffolding + apm.yml my-project/ ├── apm.yml # Project configuration and script definitions └── .apm/ - ├── chatmodes/ # Role-based AI 
expertise with tool boundaries - │ ├── backend-dev.chatmode.md # API development specialist - │ └── frontend-dev.chatmode.md # UI development specialist + ├── agents/ # Role-based AI expertise with tool boundaries + │ ├── backend-dev.agent.md # API development specialist + │ └── frontend-dev.agent.md # UI development specialist ├── instructions/ # Targeted guidance by file type and domain │ ├── security.instructions.md # applyTo: "auth/**" │ └── testing.instructions.md # applyTo: "**/*test*" @@ -70,10 +70,12 @@ apm run review-copilot --param files="src/auth/" The APM CLI supports three types of primitives: -- **Chatmodes** (`.chatmode.md`) - Define AI assistant personalities and behaviors +- **Agents** (`.agent.md`) - Define AI assistant personalities and behaviors (legacy: `.chatmode.md`) - **Instructions** (`.instructions.md`) - Provide coding standards and guidelines for specific file types - **Context** (`.context.md`, `.memory.md`) - Supply background information and project context +> **Note**: Both `.agent.md` (new format) and `.chatmode.md` (legacy format) are fully supported. VSCode provides Quick Fix actions to help migrate from `.chatmode.md` to `.agent.md`. 
+ ## File Structure ### Supported Locations @@ -83,7 +85,9 @@ APM discovers primitives in these locations: ``` # APM-native structure .apm/ -├── chatmodes/ # AI assistant definitions +├── agents/ # AI assistant definitions (new format) +│ └── *.agent.md +├── chatmodes/ # AI assistant definitions (legacy format) │ └── *.chatmode.md ├── instructions/ # Coding standards and guidelines │ └── *.instructions.md @@ -94,12 +98,15 @@ APM discovers primitives in these locations: # VSCode-compatible structure .github/ -├── chatmodes/ # VSCode Copilot chatmodes +├── agents/ # VSCode Copilot agents (new format) +│ └── *.agent.md +├── chatmodes/ # VSCode Copilot chatmodes (legacy format) │ └── *.chatmode.md └── instructions/ # VSCode Copilot instructions └── *.instructions.md # Generic files (anywhere in project) +*.agent.md *.chatmode.md *.instructions.md *.context.md @@ -142,10 +149,10 @@ Use ${input:auth_method} with ${input:session_duration} sessions Review [security standards](../context/security.context.md) before implementation ``` -### Chat Modes (.chatmode.md) +### Agents (.agent.md, legacy: .chatmode.md) **Agent Specialization Layer** - AI assistant personalities with tool boundaries -Chat modes create specialized AI assistants focused on specific domains. They define expertise areas, communication styles, and available tools. +Agents create specialized AI assistants focused on specific domains. They define expertise areas, communication styles, and available tools. ```yaml --- @@ -157,6 +164,8 @@ You are a senior backend engineer with 10+ years experience in API development. Focus on security, performance, and maintainable architecture patterns. ``` +> **File Format**: Use `.agent.md` for new files. Legacy `.chatmode.md` files continue to work and can be migrated using VSCode Quick Fix actions. 
+ ### Context (.context.md) **Knowledge Management Layer** - Optimized project information for AI consumption @@ -172,15 +181,14 @@ Context files package project knowledge, architectural decisions, and team stand ## Primitive Types -### Chatmodes +### Agents -Chatmodes define AI assistant personalities and specialized behaviors for different development tasks. +Agents define AI assistant personalities and specialized behaviors for different development tasks. -**Format:** `.chatmode.md` +**Format:** `.agent.md` (new) or `.chatmode.md` (legacy) **Frontmatter:** -- `description` (required) - Clear explanation of the chatmode purpose -- `applyTo` (optional) - Glob pattern for file targeting (e.g., `"**/*.{py,js}"`) +- `description` (required) - Clear explanation of the agent purpose - `author` (optional) - Creator information - `version` (optional) - Version string @@ -189,7 +197,6 @@ Chatmodes define AI assistant personalities and specialized behaviors for differ --- description: AI pair programming assistant for code review author: Development Team -applyTo: "**/*.{py,js,ts}" version: "1.0.0" --- @@ -312,32 +319,13 @@ Team information (`.apm/memory/team-contacts.memory.md`): ## Discovery and Parsing -The APM CLI automatically discovers and parses all primitive files in your project: - -```python -from apm_cli.primitives import discover_primitives - -# Discover all primitives in current directory -collection = discover_primitives() - -print(f"Found {collection.count()} primitives:") -print(f" Chatmodes: {len(collection.chatmodes)}") -print(f" Instructions: {len(collection.instructions)}") -print(f" Contexts: {len(collection.contexts)}") - -# Access individual primitives -for chatmode in collection.chatmodes: - print(f"Chatmode: {chatmode.name}") - print(f" Description: {chatmode.description}") - if chatmode.apply_to: - print(f" Applies to: {chatmode.apply_to}") -``` +The APM CLI automatically discovers and parses all primitive files in your project. 
## Validation All primitives are automatically validated during discovery: -- **Chatmodes**: Must have description and content +- **Agents**: Must have description and content (supports both `.agent.md` and `.chatmode.md`) - **Instructions**: Must have description, applyTo pattern, and content - **Context**: Must have content (description optional) @@ -347,7 +335,7 @@ Invalid files are skipped with warning messages, allowing valid primitives to co ### 1. Clear Naming Use descriptive names that indicate purpose: -- `code-review-assistant.chatmode.md` +- `code-review-assistant.agent.md` - `python-documentation.instructions.md` - `team-contacts.md` @@ -364,9 +352,9 @@ Keep primitives in version control alongside your code. Use semantic versioning Use the structured `.apm/` directories for better organization: ``` .apm/ -├── chatmodes/ -│ ├── code-reviewer.chatmode.md -│ └── documentation-writer.chatmode.md +├── agents/ +│ ├── code-reviewer.agent.md +│ └── documentation-writer.agent.md ├── instructions/ │ ├── python-style.instructions.md │ └── typescript-conventions.instructions.md @@ -385,13 +373,13 @@ Use the structured `.apm/` directories for better organization: For VSCode Copilot compatibility, place files in `.github/` directories: ``` .github/ -├── chatmodes/ -│ └── assistant.chatmode.md +├── agents/ +│ └── assistant.agent.md └── instructions/ └── coding-standards.instructions.md ``` -These files follow the same format and will be discovered alongside APM-specific primitives. +These files follow the same format and will be discovered alongside APM-specific primitives. 
## Error Handling diff --git a/pyproject.toml b/pyproject.toml index b53535c68..456bba55e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "apm-cli" -version = "0.5.3" +version = "0.5.4" description = "MCP configuration tool" readme = "README.md" requires-python = ">=3.9" diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index 9da931449..d50b65b14 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -41,7 +41,7 @@ from apm_cli.deps.apm_resolver import APMDependencyResolver from apm_cli.deps.github_downloader import GitHubPackageDownloader from apm_cli.models.apm_package import APMPackage, DependencyReference - from apm_cli.integration import PromptIntegrator + from apm_cli.integration import PromptIntegrator, AgentIntegrator APM_DEPS_AVAILABLE = True except ImportError as e: @@ -369,20 +369,22 @@ def _validate_and_add_packages_to_apm_yml(packages, dry_run=False): continue # Check if package is already in dependencies - if package in current_deps: - _rich_warning(f"Package {package} already exists in apm.yml") - continue - + already_in_deps = package in current_deps + # Validate package exists and is accessible if _validate_package_exists(package): - validated_packages.append(package) - _rich_info(f"✓ {package} - accessible") + if already_in_deps: + _rich_info(f"✓ {package} - already in apm.yml, ensuring installation...") + else: + validated_packages.append(package) + _rich_info(f"✓ {package} - accessible") else: _rich_error(f"✗ {package} - not accessible or doesn't exist") if not validated_packages: if dry_run: - _rich_warning("No new valid packages to add") + _rich_warning("No new packages to add") + # If all packages already exist in apm.yml, that's OK - we'll reinstall them return [] if dry_run: @@ -546,9 +548,8 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, verbose): validated_packages = _validate_and_add_packages_to_apm_yml( packages, dry_run ) - if not 
validated_packages and not dry_run: - _rich_error("No valid packages to install") - sys.exit(1) + # Note: Empty validated_packages is OK if packages are already in apm.yml + # We'll proceed with installation from apm.yml to ensure everything is synced _rich_info("Installing dependencies from apm.yml...") @@ -593,6 +594,7 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, verbose): # Install APM dependencies first (if requested) apm_count = 0 prompt_count = 0 + agent_count = 0 if should_install_apm and apm_deps: if not APM_DEPS_AVAILABLE: _rich_error("APM dependency system not available") @@ -600,7 +602,7 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, verbose): sys.exit(1) try: - apm_count, prompt_count = _install_apm_dependencies(apm_package, update) + apm_count, prompt_count, agent_count = _install_apm_dependencies(apm_package, update) except Exception as e: _rich_error(f"Failed to install APM dependencies: {e}") sys.exit(1) @@ -619,7 +621,7 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, verbose): if not only: # Load apm.yml config for summary apm_config = _load_apm_config() - _show_install_summary(apm_count, prompt_count, mcp_count, apm_config) + _show_install_summary(apm_count, prompt_count, agent_count, mcp_count, apm_config) elif only == "apm": _rich_success(f"Installed {apm_count} APM dependencies") elif only == "mcp": @@ -862,24 +864,33 @@ def uninstall(ctx, packages, dry_run): if apm_modules_dir.exists(): for package in packages_to_remove: - package_name = package.split("/")[-1] # Extract package name - package_path = apm_modules_dir / package_name + # Parse package correctly using org/repo structure (like install does) + repo_parts = package.split("/") + if len(repo_parts) >= 2: + org_name = repo_parts[0] + repo_name = repo_parts[1] + package_path = apm_modules_dir / org_name / repo_name + else: + # Fallback for invalid format + package_path = apm_modules_dir / package if 
package_path.exists(): try: import shutil shutil.rmtree(package_path) - _rich_info(f"✓ Removed {package_name} from apm_modules/") + _rich_info(f"✓ Removed {package} from apm_modules/") removed_from_modules += 1 except Exception as e: _rich_error( - f"✗ Failed to remove {package_name} from apm_modules/: {e}" + f"✗ Failed to remove {package} from apm_modules/: {e}" ) else: - _rich_warning(f"Package {package_name} not found in apm_modules/") + _rich_warning(f"Package {package} not found in apm_modules/") # Sync prompt integration to remove orphaned prompts + prompts_cleaned = 0 + prompts_failed = 0 if Path(".github/prompts").exists(): try: from apm_cli.models.apm_package import APMPackage @@ -887,9 +898,35 @@ def uninstall(ctx, packages, dry_run): apm_package = APMPackage.from_apm_yml(Path("apm.yml")) integrator = PromptIntegrator() - integrator.sync_integration(apm_package, Path(".")) - except Exception: - pass # Silent cleanup failure OK + cleanup_result = integrator.sync_integration(apm_package, Path(".")) + prompts_cleaned = cleanup_result.get('files_removed', 0) + prompts_failed = cleanup_result.get('errors', 0) + except Exception as e: + prompts_failed += 1 + + # Sync agent integration to remove orphaned agents + agents_cleaned = 0 + agents_failed = 0 + if Path(".github/agents").exists(): + try: + from apm_cli.models.apm_package import APMPackage + from apm_cli.integration.agent_integrator import AgentIntegrator + + apm_package = APMPackage.from_apm_yml(Path("apm.yml")) + integrator = AgentIntegrator() + cleanup_result = integrator.sync_integration(apm_package, Path(".")) + agents_cleaned = cleanup_result.get('files_removed', 0) + agents_failed = cleanup_result.get('errors', 0) + except Exception as e: + agents_failed += 1 + + # Show cleanup feedback + if prompts_cleaned > 0: + _rich_info(f"✓ Cleaned up {prompts_cleaned} integrated prompt(s)") + if agents_cleaned > 0: + _rich_info(f"✓ Cleaned up {agents_cleaned} integrated agent(s)") + if prompts_failed > 0 or 
agents_failed > 0: + _rich_warning(f"⚠ Failed to clean up {prompts_failed + agents_failed} file(s)") # Final summary summary_lines = [] @@ -956,10 +993,12 @@ def _install_apm_dependencies(apm_package: "APMPackage", update_refs: bool = Fal apm_modules_dir = project_root / "apm_modules" apm_modules_dir.mkdir(exist_ok=True) - # Initialize prompt integrator - integrator = PromptIntegrator() - should_integrate = integrator.should_integrate(project_root) - total_integrated = 0 + # Initialize integrators + prompt_integrator = PromptIntegrator() + agent_integrator = AgentIntegrator() + should_integrate = prompt_integrator.should_integrate(project_root) + total_prompts_integrated = 0 + total_agents_integrated = 0 # Install each dependency with Rich progress display from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TaskProgressColumn @@ -1004,10 +1043,29 @@ def _install_apm_dependencies(apm_package: "APMPackage", update_refs: bool = Fal # Fallback for invalid repo URLs install_path = apm_modules_dir / dep_ref.repo_url - # Skip download if already exists and not updating, but still integrate prompts - if install_path.exists() and not update_refs: + # npm-like behavior: Branches always fetch latest, only tags/commits use cache + # Resolve git reference to determine type + from apm_cli.models.apm_package import GitReferenceType + + resolved_ref = None + if dep_ref.reference: + try: + resolved_ref = downloader.resolve_git_reference( + f"{dep_ref.repo_url}@{dep_ref.reference}" + ) + except Exception: + pass # If resolution fails, skip cache (fetch latest) + + # Use cache only for tags and commits (not branches) + is_cacheable = ( + resolved_ref and + resolved_ref.ref_type in [GitReferenceType.TAG, GitReferenceType.COMMIT] + ) + skip_download = install_path.exists() and is_cacheable and not update_refs + + if skip_download: display_name = str(dep_ref) if dep_ref.is_virtual else dep_ref.repo_url - _rich_info(f"✓ {display_name} (cached)") + _rich_info(f"✓ 
{display_name} @{dep_ref.reference} (cached)") # Still need to integrate prompts for cached packages (zero-config behavior) if should_integrate: @@ -1047,22 +1105,38 @@ def _install_apm_dependencies(apm_package: "APMPackage", update_refs: bool = Fal installed_at=datetime.now().isoformat() ) - integration_result = integrator.integrate_package_prompts( + # Integrate prompts + prompt_result = prompt_integrator.integrate_package_prompts( cached_package_info, project_root ) - if integration_result.files_integrated > 0: - total_integrated += integration_result.files_integrated + if prompt_result.files_integrated > 0: + total_prompts_integrated += prompt_result.files_integrated _rich_info( - f" └─ {integration_result.files_integrated} prompts integrated → .github/prompts/" + f" └─ {prompt_result.files_integrated} prompts integrated → .github/prompts/" ) - if integration_result.files_updated > 0: + if prompt_result.files_updated > 0: _rich_info( - f" └─ {integration_result.files_updated} prompts updated" + f" └─ {prompt_result.files_updated} prompts updated" + ) + + # Integrate agents + agent_result = agent_integrator.integrate_package_agents( + cached_package_info, + project_root + ) + if agent_result.files_integrated > 0: + total_agents_integrated += agent_result.files_integrated + _rich_info( + f" └─ {agent_result.files_integrated} agents integrated → .github/agents/" + ) + if agent_result.files_updated > 0: + _rich_info( + f" └─ {agent_result.files_updated} agents updated" ) except Exception as e: # Don't fail installation if integration fails - _rich_warning(f" ⚠ Failed to integrate prompts from cached package: {e}") + _rich_warning(f" ⚠ Failed to integrate primitives from cached package: {e}") continue @@ -1092,25 +1166,41 @@ def _install_apm_dependencies(apm_package: "APMPackage", update_refs: bool = Fal installed_count += 1 _rich_success(f"✓ {display_name}") - # Auto-integrate prompts if enabled + # Auto-integrate prompts and agents if enabled if should_integrate: 
try: - integration_result = integrator.integrate_package_prompts( + # Integrate prompts + prompt_result = prompt_integrator.integrate_package_prompts( + package_info, + project_root + ) + if prompt_result.files_integrated > 0: + total_prompts_integrated += prompt_result.files_integrated + _rich_info( + f" └─ {prompt_result.files_integrated} prompts integrated → .github/prompts/" + ) + if prompt_result.files_updated > 0: + _rich_info( + f" └─ {prompt_result.files_updated} prompts updated" + ) + + # Integrate agents + agent_result = agent_integrator.integrate_package_agents( package_info, project_root ) - if integration_result.files_integrated > 0: - total_integrated += integration_result.files_integrated + if agent_result.files_integrated > 0: + total_agents_integrated += agent_result.files_integrated _rich_info( - f" └─ {integration_result.files_integrated} prompts integrated → .github/prompts/" + f" └─ {agent_result.files_integrated} agents integrated → .github/agents/" ) - if integration_result.files_updated > 0: + if agent_result.files_updated > 0: _rich_info( - f" └─ {integration_result.files_updated} prompts updated" + f" └─ {agent_result.files_updated} agents updated" ) except Exception as e: # Don't fail installation if integration fails - _rich_warning(f" ⚠ Failed to integrate prompts: {e}") + _rich_warning(f" ⚠ Failed to integrate primitives: {e}") except Exception as e: display_name = str(dep_ref) if dep_ref.is_virtual else dep_ref.repo_url @@ -1125,17 +1215,26 @@ def _install_apm_dependencies(apm_package: "APMPackage", update_refs: bool = Fal _update_gitignore_for_apm_modules() # Update .gitignore for integrated prompts if any were integrated - if should_integrate and total_integrated > 0: + if should_integrate and total_prompts_integrated > 0: try: - updated = integrator.update_gitignore_for_integrated_prompts(project_root) + updated = prompt_integrator.update_gitignore_for_integrated_prompts(project_root) if updated: _rich_info("Updated .gitignore for 
integrated prompts (*-apm.prompt.md)") except Exception as e: _rich_warning(f"Could not update .gitignore for prompts: {e}") + + # Update .gitignore for integrated agents if any were integrated + if should_integrate and total_agents_integrated > 0: + try: + updated = agent_integrator.update_gitignore_for_integrated_agents(project_root) + if updated: + _rich_info("Updated .gitignore for integrated agents (*-apm.agent.md, *-apm.chatmode.md)") + except Exception as e: + _rich_warning(f"Could not update .gitignore for agents: {e}") _rich_success(f"Installed {installed_count} APM dependencies") - return installed_count, total_integrated + return installed_count, total_prompts_integrated, total_agents_integrated except Exception as e: raise RuntimeError(f"Failed to resolve APM dependencies: {e}") @@ -1366,12 +1465,13 @@ def _install_mcp_dependencies( raise RuntimeError("Registry operations module required for MCP installation") -def _show_install_summary(apm_count: int, prompt_count: int, mcp_count: int, apm_config): +def _show_install_summary(apm_count: int, prompt_count: int, agent_count: int, mcp_count: int, apm_config): """Show beautiful post-install summary with next steps. 
Args: apm_count: Number of APM packages installed prompt_count: Number of prompts integrated + agent_count: Number of agents integrated mcp_count: Number of MCP servers configured apm_config: The apm.yml configuration dict """ @@ -2006,11 +2106,17 @@ def _recompile(self, changed_file): observer.schedule(event_handler, ".apm", recursive=True) watch_paths.append(".apm/") - # Check for .github/instructions and chatmodes + # Check for .github/instructions and agents/chatmodes if Path(".github/instructions").exists(): observer.schedule(event_handler, ".github/instructions", recursive=True) watch_paths.append(".github/instructions/") + # Watch .github/agents/ (new standard) + if Path(".github/agents").exists(): + observer.schedule(event_handler, ".github/agents", recursive=True) + watch_paths.append(".github/agents/") + + # Watch .github/chatmodes/ (legacy) if Path(".github/chatmodes").exists(): observer.schedule(event_handler, ".github/chatmodes", recursive=True) watch_paths.append(".github/chatmodes/") diff --git a/src/apm_cli/integration/__init__.py b/src/apm_cli/integration/__init__.py index b6bfeeaf2..8107ff7eb 100644 --- a/src/apm_cli/integration/__init__.py +++ b/src/apm_cli/integration/__init__.py @@ -1,5 +1,6 @@ """APM package integration utilities.""" from .prompt_integrator import PromptIntegrator +from .agent_integrator import AgentIntegrator -__all__ = ['PromptIntegrator'] +__all__ = ['PromptIntegrator', 'AgentIntegrator'] diff --git a/src/apm_cli/integration/agent_integrator.py b/src/apm_cli/integration/agent_integrator.py new file mode 100644 index 000000000..df7dd3ae3 --- /dev/null +++ b/src/apm_cli/integration/agent_integrator.py @@ -0,0 +1,495 @@ +"""Agent integration functionality for APM packages.""" + +from pathlib import Path +from typing import List, Dict +from dataclasses import dataclass +from datetime import datetime +import hashlib + +from .utils import normalize_repo_url + + +@dataclass +class IntegrationResult: + """Result of agent 
integration operation.""" + files_integrated: int + files_updated: int # Updated due to version/commit change + files_skipped: int # Unchanged (same version/commit) + target_paths: List[Path] + gitignore_updated: bool + + +class AgentIntegrator: + """Handles integration of APM package agents into .github/agents/.""" + + def __init__(self): + """Initialize the agent integrator.""" + pass + + def should_integrate(self, project_root: Path) -> bool: + """Check if agent integration should be performed. + + Args: + project_root: Root directory of the project + + Returns: + bool: Always True - integration happens automatically + """ + return True + + def find_agent_files(self, package_path: Path) -> List[Path]: + """Find all .agent.md and .chatmode.md files in a package. + + Searches in: + - Package root directory (.agent.md and .chatmode.md) + - .apm/agents/ subdirectory (new standard) + - .apm/chatmodes/ subdirectory (legacy) + + Args: + package_path: Path to the package directory + + Returns: + List[Path]: List of absolute paths to agent files + """ + agent_files = [] + + # Search in package root + if package_path.exists(): + agent_files.extend(package_path.glob("*.agent.md")) + agent_files.extend(package_path.glob("*.chatmode.md")) # Legacy + + # Search in .apm/agents/ (new standard) + apm_agents = package_path / ".apm" / "agents" + if apm_agents.exists(): + agent_files.extend(apm_agents.glob("*.agent.md")) + + # Search in .apm/chatmodes/ (legacy) + apm_chatmodes = package_path / ".apm" / "chatmodes" + if apm_chatmodes.exists(): + agent_files.extend(apm_chatmodes.glob("*.chatmode.md")) + + return agent_files + + def _parse_header_metadata(self, file_path: Path) -> dict: + """Parse APM metadata from YAML frontmatter in an integrated agent file. + + Args: + file_path: Path to the integrated agent file + + Returns: + dict: Metadata extracted from frontmatter (version, commit, source, etc.) 
+ Empty dict if no valid frontmatter found or parsing fails + """ + try: + import frontmatter + + post = frontmatter.load(file_path) + + # Extract APM metadata from nested 'apm' key (new format) + apm_data = post.metadata.get('apm', {}) + if apm_data: + metadata = { + 'Version': apm_data.get('version', ''), + 'Commit': apm_data.get('commit', ''), + 'Source': f"{apm_data.get('source', '')} ({apm_data.get('source_repo', '')})", + 'Original': apm_data.get('original_path', ''), + 'Installed': apm_data.get('installed_at', ''), + 'ContentHash': apm_data.get('content_hash', '') + } + return metadata + + # Fallback: Check for old flat format (backwards compatibility) + if 'apm_version' in post.metadata: + metadata = { + 'Version': post.metadata.get('apm_version', ''), + 'Commit': post.metadata.get('apm_commit', ''), + 'Source': f"{post.metadata.get('apm_source', '')} ({post.metadata.get('apm_source_repo', '')})", + 'Original': post.metadata.get('apm_original_path', ''), + 'Installed': post.metadata.get('apm_installed_at', ''), + 'ContentHash': post.metadata.get('apm_content_hash', '') + } + return metadata + + return {} # Not an APM-integrated file + except Exception: + # If any error occurs during parsing, return empty dict + return {} + + def _calculate_content_hash(self, file_path: Path) -> str: + """Calculate SHA256 hash of file content (excluding frontmatter). + + Args: + file_path: Path to the file + + Returns: + str: Hexadecimal hash of the content + """ + try: + import frontmatter + post = frontmatter.load(file_path) + # Hash only the content, not the frontmatter + return hashlib.sha256(post.content.encode()).hexdigest() + except Exception: + return "" + + def _should_update_agent(self, existing_header: dict, package_info, existing_file: Path = None) -> tuple[bool, bool]: + """Determine if an existing agent file should be updated. 
+ + Args: + existing_header: Metadata from existing file's header + package_info: PackageInfo object with new package metadata + existing_file: Path to existing file for content hash verification + + Returns: + tuple[bool, bool]: (should_update, was_modified) + - should_update: True if file should be updated + - was_modified: True if content was modified by user + """ + # If no valid header exists, update the file + if not existing_header: + return (True, False) + + # Get new version and commit + new_version = package_info.package.version + new_commit = ( + package_info.resolved_reference.resolved_commit + if package_info.resolved_reference + else "unknown" + ) + + # Get existing version and commit from header + existing_version = existing_header.get('Version', '') + existing_commit = existing_header.get('Commit', '') + + # Check for content modifications if we have the file path + was_modified = False + if existing_file and existing_file.exists(): + stored_hash = existing_header.get('ContentHash', '') + if stored_hash: + current_hash = self._calculate_content_hash(existing_file) + was_modified = (current_hash != stored_hash and current_hash != "") + + # Update if version or commit has changed + should_update = (existing_version != new_version or existing_commit != new_commit) + return (should_update, was_modified) + + + + def get_target_filename(self, source_file: Path, package_name: str) -> str: + """Generate target filename with -apm suffix (intent-first naming). 
+ + Args: + source_file: Source file path + package_name: Name of the package (not used in simple naming) + + Returns: + str: Target filename with -apm suffix (e.g., security-apm.agent.md or security-apm.chatmode.md) + """ + # Intent-first naming: insert -apm suffix before extension + # Preserve original extension (.agent.md or .chatmode.md) + # Examples: + # security.agent.md -> security-apm.agent.md + # default.chatmode.md -> default-apm.chatmode.md + + # Determine extension + if source_file.name.endswith('.agent.md'): + stem = source_file.name[:-9] # Remove .agent.md + extension = '.agent.md' + elif source_file.name.endswith('.chatmode.md'): + stem = source_file.name[:-12] # Remove .chatmode.md + extension = '.chatmode.md' + else: + # Fallback for unexpected naming + stem = source_file.stem + extension = ''.join(source_file.suffixes) + + return f"{stem}-apm{extension}" + + def copy_agent_with_metadata(self, source: Path, target: Path, package_info, original_path: Path) -> None: + """Copy agent file with APM metadata embedded in frontmatter. 
+ + Args: + source: Source file path + target: Target file path + package_info: PackageInfo object with package metadata + original_path: Original path to the agent file + """ + import frontmatter + + # Read and parse source file with frontmatter + post = frontmatter.load(source) + + # Calculate content hash for modification detection + content_hash = hashlib.sha256(post.content.encode()).hexdigest() + + # Add APM metadata to frontmatter (nested under 'apm' key for clarity) + post.metadata['apm'] = { + 'source': package_info.package.name, + 'source_repo': package_info.package.source or "unknown", + 'version': package_info.package.version, + 'commit': ( + package_info.resolved_reference.resolved_commit + if package_info.resolved_reference + else "unknown" + ), + 'original_path': ( + str(original_path.relative_to(package_info.install_path)) + if original_path.is_relative_to(package_info.install_path) + else original_path.name + ), + 'installed_at': package_info.installed_at or datetime.now().isoformat(), + 'content_hash': content_hash + } + + # Write to target with modified frontmatter + with open(target, 'w', encoding='utf-8') as f: + f.write(frontmatter.dumps(post)) + + def integrate_package_agents(self, package_info, project_root: Path) -> IntegrationResult: + """Integrate all agents from a package into .github/agents/. 
+ + Implements smart update logic: + - First install: Copy with header and -apm suffix + - Subsequent installs: + - Compare version/commit with existing file + - Update if different (re-copy with new header) + - Skip if unchanged (preserve file timestamps) + + Args: + package_info: PackageInfo object with package metadata + project_root: Root directory of the project + + Returns: + IntegrationResult: Results of the integration operation + """ + # Find all agent files in the package + agent_files = self.find_agent_files(package_info.install_path) + + if not agent_files: + return IntegrationResult( + files_integrated=0, + files_updated=0, + files_skipped=0, + target_paths=[], + gitignore_updated=False + ) + + # Create .github/agents/ if it doesn't exist + agents_dir = project_root / ".github" / "agents" + agents_dir.mkdir(parents=True, exist_ok=True) + + # Process each agent file + files_integrated = 0 + files_updated = 0 + files_skipped = 0 + target_paths = [] + + for source_file in agent_files: + # Generate target filename + target_filename = self.get_target_filename(source_file, package_info.package.name) + target_path = agents_dir / target_filename + + # Check if target already exists + if target_path.exists(): + # Parse existing file's frontmatter metadata + existing_header = self._parse_header_metadata(target_path) + + # Check if update is needed and if content was modified + should_update, was_modified = self._should_update_agent( + existing_header, package_info, target_path + ) + + if should_update: + # Warn if user modified the content + if was_modified: + from apm_cli.cli import _rich_warning + _rich_warning( + f"⚠ Restoring modified file: {target_path.name} " + f"(your changes will be overwritten)" + ) + # Version or commit changed - update the file + self.copy_agent_with_metadata(source_file, target_path, package_info, source_file) + files_updated += 1 + target_paths.append(target_path) + else: + # Unchanged version/commit - skip to preserve file 
timestamp + files_skipped += 1 + else: + # New file - integrate it + self.copy_agent_with_metadata(source_file, target_path, package_info, source_file) + files_integrated += 1 + target_paths.append(target_path) + + return IntegrationResult( + files_integrated=files_integrated, + files_updated=files_updated, + files_skipped=files_skipped, + target_paths=target_paths, + gitignore_updated=False + ) + + def sync_integration(self, apm_package, project_root: Path) -> Dict[str, int]: + """Sync .github/agents/ with currently installed packages. + + - Removes agents from uninstalled packages (orphans) + - Updates agents from updated packages + - Adds agents from new packages + + Idempotent: safe to call anytime. Reuses existing smart update logic. + + Args: + apm_package: APMPackage with current dependencies + project_root: Root directory of the project + + Returns: + Dict with 'files_removed' and 'errors' counts + """ + agents_dir = project_root / ".github" / "agents" + if not agents_dir.exists(): + return {'files_removed': 0, 'errors': 0} + + # Get currently installed package URLs + installed = {dep.repo_url for dep in apm_package.get_apm_dependencies()} + + # Track cleanup statistics + files_removed = 0 + errors = 0 + + # Remove orphaned agents (from uninstalled packages) + for agent_file in agents_dir.glob("*-apm.agent.md"): + metadata = self._parse_header_metadata(agent_file) + + # Skip files without valid metadata - they might be user's custom files + if not metadata: + continue + + source = metadata.get('Source', '') + + # Skip if no source metadata + if not source: + continue + + # Extract package repo URL from source + # Format: "package-name (owner/repo)" or "package-name (host.com/owner/repo)" + # The source_repo field in metadata contains full URL (e.g., https://github.com/owner/repo) + # but dep.repo_url contains short form (e.g., owner/repo) + # We need to normalize both for comparison + package_repo_url = None + if '(' in source and ')' in source: + # Extract 
content within parentheses - this is the full repo identifier + package_repo_url = source.split('(')[1].split(')')[0].strip() + + if not package_repo_url: + continue + + # Normalize the repo URL to owner/repo format for comparison + normalized_package_url = normalize_repo_url(package_repo_url) + + # Check if source package is still installed + # Compare normalized URLs + package_match = any( + pkg == normalized_package_url or + (pkg + '.git') == normalized_package_url or + pkg == package_repo_url # Fallback for exact match + for pkg in installed + ) + + if not package_match: + try: + agent_file.unlink() # Orphaned - remove it + files_removed += 1 + except Exception: + errors += 1 + + # Also remove orphaned legacy chatmode files + for chatmode_file in agents_dir.glob("*-apm.chatmode.md"): + metadata = self._parse_header_metadata(chatmode_file) + + # Skip files without valid metadata + if not metadata: + continue + + source = metadata.get('Source', '') + + if not source: + continue + + # Extract package repo URL from source + # The source_repo field in metadata contains full URL (e.g., https://github.com/owner/repo) + # but dep.repo_url contains short form (e.g., owner/repo) + # We need to normalize both for comparison + package_repo_url = None + if '(' in source and ')' in source: + package_repo_url = source.split('(')[1].split(')')[0].strip() + + if not package_repo_url: + continue + + # Normalize the repo URL to owner/repo format for comparison + normalized_package_url = normalize_repo_url(package_repo_url) + + # Check if source package is still installed + # Compare normalized URLs + package_match = any( + pkg == normalized_package_url or + (pkg + '.git') == normalized_package_url or + pkg == package_repo_url # Fallback for exact match + for pkg in installed + ) + + if not package_match: + try: + chatmode_file.unlink() # Orphaned - remove it + files_removed += 1 + except Exception: + errors += 1 + + return {'files_removed': files_removed, 'errors': errors} + + 
def update_gitignore_for_integrated_agents(self, project_root: Path) -> bool: + """Update .gitignore with pattern for integrated agents. + + Args: + project_root: Root directory of the project + + Returns: + bool: True if .gitignore was updated, False if pattern already exists + """ + gitignore_path = project_root / ".gitignore" + + # Define patterns for both new and legacy formats + patterns = [ + ".github/agents/*-apm.agent.md", + ".github/agents/*-apm.chatmode.md" + ] + + # Read current content + current_content = [] + if gitignore_path.exists(): + try: + with open(gitignore_path, "r", encoding="utf-8") as f: + current_content = [line.rstrip("\n\r") for line in f.readlines()] + except Exception: + return False + + # Check which patterns need to be added + patterns_to_add = [] + for pattern in patterns: + if not any(pattern in line for line in current_content): + patterns_to_add.append(pattern) + + if not patterns_to_add: + return False + + # Add patterns to .gitignore + try: + with open(gitignore_path, "a", encoding="utf-8") as f: + # Add a blank line before our entry if file isn't empty + if current_content and current_content[-1].strip(): + f.write("\n") + f.write("\n# APM integrated agents\n") + for pattern in patterns_to_add: + f.write(f"{pattern}\n") + return True + except Exception: + return False diff --git a/src/apm_cli/integration/prompt_integrator.py b/src/apm_cli/integration/prompt_integrator.py index 3f5f3c53d..71837aab6 100644 --- a/src/apm_cli/integration/prompt_integrator.py +++ b/src/apm_cli/integration/prompt_integrator.py @@ -1,10 +1,14 @@ """Prompt integration functionality for APM packages.""" from pathlib import Path -from typing import List +from typing import List, Dict from dataclasses import dataclass -import shutil +import hashlib from datetime import datetime +import frontmatter + +from .utils import normalize_repo_url +import hashlib @dataclass @@ -62,16 +66,31 @@ def find_prompt_files(self, package_path: Path) -> List[Path]: return 
prompt_files def _parse_header_metadata(self, file_path: Path) -> dict: - """Parse metadata from header comment in an integrated prompt file. + """Parse metadata from frontmatter or legacy header comment in an integrated prompt file. Args: file_path: Path to the integrated prompt file Returns: - dict: Metadata extracted from header (version, commit, source, etc.) - Empty dict if no valid header found or parsing fails + dict: Metadata extracted from frontmatter/header (version, commit, source, etc.) + Empty dict if no valid metadata found or parsing fails """ try: + # Try parsing frontmatter first (new format) + post = frontmatter.load(file_path) + + # Check for nested apm metadata (new format) + apm_data = post.metadata.get('apm', {}) + if apm_data: + metadata = { + 'Version': apm_data.get('version', ''), + 'Commit': apm_data.get('commit', ''), + 'Source': f"{apm_data.get('source', '')} ({apm_data.get('source_repo', '')})", + 'ContentHash': apm_data.get('content_hash', '') + } + return metadata + + # Fallback: Try legacy HTML comment format content = file_path.read_text(encoding='utf-8') # Check if file starts with comment block @@ -98,19 +117,38 @@ def _parse_header_metadata(self, file_path: Path) -> dict: # If any error occurs during parsing, return empty dict return {} - def _should_update_prompt(self, existing_header: dict, package_info) -> bool: + def _calculate_content_hash(self, file_path: Path) -> str: + """Calculate SHA256 hash of file content (excluding frontmatter). + + Args: + file_path: Path to the file + + Returns: + str: Hexadecimal hash of the content + """ + try: + post = frontmatter.load(file_path) + # Hash only the content, not the frontmatter + return hashlib.sha256(post.content.encode()).hexdigest() + except Exception: + return "" + + def _should_update_prompt(self, existing_header: dict, package_info, existing_file: Path = None) -> tuple[bool, bool]: """Determine if an existing prompt file should be updated. 
Args: existing_header: Metadata from existing file's header package_info: PackageInfo object with new package metadata + existing_file: Path to existing file for content hash verification Returns: - bool: True if file should be updated (version or commit changed) + tuple[bool, bool]: (should_update, was_modified) + - should_update: True if file should be updated + - was_modified: True if content was modified by user """ # If no valid header exists, update the file if not existing_header: - return True + return (True, False) # Get new version and commit new_version = package_info.package.version @@ -124,49 +162,58 @@ def _should_update_prompt(self, existing_header: dict, package_info) -> bool: existing_version = existing_header.get('Version', '') existing_commit = existing_header.get('Commit', '') + # Check for content modifications if we have the file path + was_modified = False + if existing_file and existing_file.exists(): + stored_hash = existing_header.get('ContentHash', '') + if stored_hash: + current_hash = self._calculate_content_hash(existing_file) + was_modified = (current_hash != stored_hash and current_hash != "") + # Update if version or commit has changed - return (existing_version != new_version or existing_commit != new_commit) + should_update = (existing_version != new_version or existing_commit != new_commit) + return (should_update, was_modified) - def generate_header_comment(self, package_info, original_path: Path) -> str: - """Generate metadata header comment for integrated prompt. + def copy_prompt_with_metadata(self, source: Path, target: Path, package_info, original_path: Path) -> None: + """Copy prompt file with metadata embedded in frontmatter. + + If source has frontmatter, adds nested apm: metadata. + If source has no frontmatter, creates frontmatter with apm: metadata only. 
Args: + source: Source file path + target: Target file path package_info: PackageInfo object with package metadata - original_path: Original path to the prompt file - - Returns: - str: Header comment with metadata + original_path: Original path to the prompt file (for metadata) """ - package_name = package_info.package.name - version = package_info.package.version - resolved_commit = ( - package_info.resolved_reference.resolved_commit - if package_info.resolved_reference - else "unknown" - ) - - # Get relative path within the package - try: - relative_path = original_path.relative_to(package_info.install_path) - except ValueError: - relative_path = original_path.name - - # Use installed_at from PackageInfo if available - installed_at = package_info.installed_at or datetime.now().isoformat() - - # Determine source repository - source_repo = package_info.package.source or "unknown" - - header = f""" - -""" - return header + # Parse source file + post = frontmatter.load(source) + + # Calculate content hash for modification detection + content_hash = hashlib.sha256(post.content.encode()).hexdigest() + + # Add nested apm metadata + post.metadata['apm'] = { + 'source': package_info.package.name, + 'source_repo': package_info.package.source or "unknown", + 'version': package_info.package.version, + 'commit': ( + package_info.resolved_reference.resolved_commit + if package_info.resolved_reference + else "unknown" + ), + 'original_path': ( + str(original_path.relative_to(package_info.install_path)) + if original_path.is_relative_to(package_info.install_path) + else original_path.name + ), + 'installed_at': package_info.installed_at or datetime.now().isoformat(), + 'content_hash': content_hash + } + + # Write to target with updated frontmatter + with open(target, 'w', encoding='utf-8') as f: + f.write(frontmatter.dumps(post)) def get_target_filename(self, source_file: Path, package_name: str) -> str: """Generate target filename with -apm suffix (intent-first naming). 
@@ -183,19 +230,7 @@ def get_target_filename(self, source_file: Path, package_name: str) -> str: stem = source_file.stem.replace('.prompt', '') # Remove .prompt from stem return f"{stem}-apm.prompt.md" - def copy_prompt_with_header(self, source: Path, target: Path, header: str) -> None: - """Copy prompt file with header comment prepended. - - Args: - source: Source file path - target: Target file path - header: Header comment to prepend - """ - # Read source content - source_content = source.read_text(encoding='utf-8') - - # Write target with header - target.write_text(header + source_content, encoding='utf-8') + def integrate_package_prompts(self, package_info, project_root: Path) -> IntegrationResult: """Integrate all prompts from a package into .github/prompts/. @@ -241,18 +276,26 @@ def integrate_package_prompts(self, package_info, project_root: Path) -> Integra target_filename = self.get_target_filename(source_file, package_info.package.name) target_path = prompts_dir / target_filename - # Generate header comment for new/updated file - header = self.generate_header_comment(package_info, source_file) - # Check if target already exists if target_path.exists(): - # Parse existing file's header + # Parse existing file's metadata existing_header = self._parse_header_metadata(target_path) - # Check if update is needed - if self._should_update_prompt(existing_header, package_info): + # Check if update is needed and if content was modified + should_update, was_modified = self._should_update_prompt( + existing_header, package_info, target_path + ) + + if should_update: + # Warn if user modified the content + if was_modified: + from apm_cli.cli import _rich_warning + _rich_warning( + f"⚠ Restoring modified file: {target_path.name} " + f"(your changes will be overwritten)" + ) # Version or commit changed - update the file - self.copy_prompt_with_header(source_file, target_path, header) + self.copy_prompt_with_metadata(source_file, target_path, package_info, source_file) 
files_updated += 1 target_paths.append(target_path) else: @@ -260,7 +303,7 @@ def integrate_package_prompts(self, package_info, project_root: Path) -> Integra files_skipped += 1 else: # New file - integrate it - self.copy_prompt_with_header(source_file, target_path, header) + self.copy_prompt_with_metadata(source_file, target_path, package_info, source_file) files_integrated += 1 target_paths.append(target_path) @@ -272,7 +315,7 @@ def integrate_package_prompts(self, package_info, project_root: Path) -> Integra gitignore_updated=False ) - def sync_integration(self, apm_package, project_root: Path) -> None: + def sync_integration(self, apm_package, project_root: Path) -> Dict[str, int]: """Sync .github/prompts/ with currently installed packages. - Removes prompts from uninstalled packages (orphans) @@ -284,14 +327,21 @@ def sync_integration(self, apm_package, project_root: Path) -> None: Args: apm_package: APMPackage with current dependencies project_root: Root directory of the project + + Returns: + Dict with 'files_removed' and 'errors' counts """ prompts_dir = project_root / ".github" / "prompts" if not prompts_dir.exists(): - return + return {'files_removed': 0, 'errors': 0} # Get currently installed package URLs installed = {dep.repo_url for dep in apm_package.get_apm_dependencies()} + # Track cleanup statistics + files_removed = 0 + errors = 0 + # Remove orphaned prompts (from uninstalled packages) for prompt_file in prompts_dir.glob("*-apm.prompt.md"): metadata = self._parse_header_metadata(prompt_file) @@ -308,8 +358,9 @@ def sync_integration(self, apm_package, project_root: Path) -> None: # Extract package repo URL from source # Format: "package-name (owner/repo)" or "package-name (host.com/owner/repo)" - # We need to match against the full URL including hostname if present - # Works with any Git host: github.com, gitlab.com, git.company.com, etc. 
+ # The source_repo field in metadata contains full URL (e.g., https://github.com/owner/repo) + # but dep.repo_url contains short form (e.g., owner/repo) + # We need to normalize both for comparison package_repo_url = None if '(' in source and ')' in source: # Extract content within parentheses - this is the full repo identifier @@ -318,14 +369,26 @@ def sync_integration(self, apm_package, project_root: Path) -> None: if not package_repo_url: continue + # Normalize the repo URL to owner/repo format for comparison + normalized_package_url = normalize_repo_url(package_repo_url) + # Check if source package is still installed - package_match = any(pkg == package_repo_url for pkg in installed) + # Compare normalized URLs + package_match = any( + pkg == normalized_package_url or + (pkg + '.git') == normalized_package_url or + pkg == package_repo_url # Fallback for exact match + for pkg in installed + ) if not package_match: try: prompt_file.unlink() # Orphaned - remove it + files_removed += 1 except Exception: - pass # Silent failure OK for cleanup + errors += 1 + + return {'files_removed': files_removed, 'errors': errors} def update_gitignore_for_integrated_prompts(self, project_root: Path) -> bool: """Update .gitignore with pattern for integrated prompts. diff --git a/src/apm_cli/integration/utils.py b/src/apm_cli/integration/utils.py new file mode 100644 index 000000000..4e0dca61e --- /dev/null +++ b/src/apm_cli/integration/utils.py @@ -0,0 +1,46 @@ +"""Shared utility functions for integration modules.""" + + +def normalize_repo_url(package_repo_url: str) -> str: + """Normalize a repo URL to owner/repo format. 
+ + Handles various URL formats: + - Full URLs: https://github.com/owner/repo -> owner/repo + - With .git suffix: owner/repo.git -> owner/repo + - Short form: owner/repo -> owner/repo (unchanged) + + Args: + package_repo_url: Repository URL in any format + + Returns: + str: Normalized owner/repo format + + Examples: + >>> normalize_repo_url("https://github.com/owner/repo") + 'owner/repo' + >>> normalize_repo_url("https://github.com/owner/repo.git") + 'owner/repo' + >>> normalize_repo_url("owner/repo") + 'owner/repo' + """ + if '://' not in package_repo_url: + # Already in short form, just remove .git suffix and trailing slashes + normalized = package_repo_url + if normalized.endswith('.git'): + normalized = normalized[:-4] + return normalized.rstrip('/') + + # Extract owner/repo from full URL: https://github.com/owner/repo -> owner/repo + parts = package_repo_url.split('://', 1)[1] # Remove protocol + if '/' in parts: + path_parts = parts.split('/', 1) # Split host from path + if len(path_parts) > 1: + normalized = path_parts[1] + # Remove trailing slashes first (e.g., "owner/repo.git/" -> "owner/repo.git") + normalized = normalized.rstrip('/') + # Then remove .git suffix if present + if normalized.endswith('.git'): + normalized = normalized[:-4] + return normalized + + return package_repo_url diff --git a/src/apm_cli/primitives/discovery.py b/src/apm_cli/primitives/discovery.py index da77f922b..e66dae2fd 100644 --- a/src/apm_cli/primitives/discovery.py +++ b/src/apm_cli/primitives/discovery.py @@ -13,6 +13,11 @@ # Common primitive patterns for local discovery (with recursive search) LOCAL_PRIMITIVE_PATTERNS: Dict[str, List[str]] = { 'chatmode': [ + # New standard (.agent.md) + "**/.apm/agents/*.agent.md", + "**/.github/agents/*.agent.md", + "**/*.agent.md", # Generic .agent.md files + # Legacy support (.chatmode.md) "**/.apm/chatmodes/*.chatmode.md", "**/.github/chatmodes/*.chatmode.md", "**/*.chatmode.md" # Generic .chatmode.md files @@ -34,7 +39,10 @@ # 
Dependency primitive patterns (for .apm directory within dependencies) DEPENDENCY_PRIMITIVE_PATTERNS: Dict[str, List[str]] = { - 'chatmode': ["chatmodes/*.chatmode.md"], + 'chatmode': [ + "agents/*.agent.md", # New standard + "chatmodes/*.chatmode.md" # Legacy + ], 'instruction': ["instructions/*.instructions.md"], 'context': [ "context/*.context.md", diff --git a/tests/integration/test_auto_integration.py b/tests/integration/test_auto_integration.py index cf72483c6..389e870e6 100644 --- a/tests/integration/test_auto_integration.py +++ b/tests/integration/test_auto_integration.py @@ -69,10 +69,9 @@ def test_full_integration_workflow(self): installed_at=datetime.now().isoformat() ) - # Run integration + # Run integration (auto-integration is always enabled now) integrator = PromptIntegrator() - with patch('apm_cli.integration.prompt_integrator.get_auto_integrate', return_value=True): - result = integrator.integrate_package_prompts(package_info, self.project_root) + result = integrator.integrate_package_prompts(package_info, self.project_root) # Verify results assert result.files_integrated == 2 @@ -82,8 +81,9 @@ def test_full_integration_workflow(self): assert (prompts_dir / "workflow1-apm.prompt.md").exists() assert (prompts_dir / "workflow2-apm.prompt.md").exists() - # Check header comments + # Check YAML frontmatter metadata content1 = (prompts_dir / "workflow1-apm.prompt.md").read_text() - assert "test-package" in content1 - assert "abc123def456" in content1 + assert "apm:" in content1 + assert "source: test-package" in content1 + assert "commit: abc123def456" in content1 assert "# workflow1" in content1 diff --git a/tests/unit/integration/test_agent_integrator.py b/tests/unit/integration/test_agent_integrator.py new file mode 100644 index 000000000..ca0f9d9d5 --- /dev/null +++ b/tests/unit/integration/test_agent_integrator.py @@ -0,0 +1,811 @@ +"""Tests for agent integration functionality.""" + +import tempfile +from pathlib import Path +from unittest.mock 
import Mock +from datetime import datetime + +from apm_cli.integration import AgentIntegrator +from apm_cli.models.apm_package import PackageInfo, APMPackage, ResolvedReference, GitReferenceType + + +class TestAgentIntegrator: + """Test agent integration logic.""" + + def setup_method(self): + """Set up test fixtures.""" + self.temp_dir = tempfile.mkdtemp() + self.project_root = Path(self.temp_dir) + self.integrator = AgentIntegrator() + + def teardown_method(self): + """Clean up after tests.""" + import shutil + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def test_should_integrate_always_returns_true(self): + """Test integration is always enabled (zero-config approach).""" + # No .github/ directory needed + assert self.integrator.should_integrate(self.project_root) == True + + # Even with .github/ present + github_dir = self.project_root / ".github" + github_dir.mkdir() + assert self.integrator.should_integrate(self.project_root) == True + + def test_find_agent_files_in_root_new_format(self): + """Test finding .agent.md files in package root.""" + package_dir = self.project_root / "package" + package_dir.mkdir() + + # Create test agent files + (package_dir / "security.agent.md").write_text("# Security Agent") + (package_dir / "planner.agent.md").write_text("# Planner Agent") + (package_dir / "readme.md").write_text("# Readme") # Should not be found + + agents = self.integrator.find_agent_files(package_dir) + assert len(agents) == 2 + assert all(p.name.endswith('.agent.md') for p in agents) + + def test_find_agent_files_in_root_legacy_format(self): + """Test finding .chatmode.md files in package root (legacy).""" + package_dir = self.project_root / "package" + package_dir.mkdir() + + # Create legacy chatmode files + (package_dir / "default.chatmode.md").write_text("# Default Chatmode") + (package_dir / "backend.chatmode.md").write_text("# Backend Chatmode") + + agents = self.integrator.find_agent_files(package_dir) + assert len(agents) == 2 + assert 
all(p.name.endswith('.chatmode.md') for p in agents) + + def test_find_agent_files_in_apm_agents(self): + """Test finding .agent.md files in .apm/agents/ (new standard).""" + package_dir = self.project_root / "package" + apm_agents = package_dir / ".apm" / "agents" + apm_agents.mkdir(parents=True) + + (apm_agents / "security.agent.md").write_text("# Security Agent") + + agents = self.integrator.find_agent_files(package_dir) + assert len(agents) == 1 + assert agents[0].name == "security.agent.md" + + def test_find_agent_files_in_apm_chatmodes(self): + """Test finding .chatmode.md files in .apm/chatmodes/ (legacy).""" + package_dir = self.project_root / "package" + apm_chatmodes = package_dir / ".apm" / "chatmodes" + apm_chatmodes.mkdir(parents=True) + + (apm_chatmodes / "default.chatmode.md").write_text("# Default Chatmode") + + agents = self.integrator.find_agent_files(package_dir) + assert len(agents) == 1 + assert agents[0].name == "default.chatmode.md" + + def test_find_agent_files_mixed_formats(self): + """Test finding both .agent.md and .chatmode.md files.""" + package_dir = self.project_root / "package" + package_dir.mkdir() + + (package_dir / "new.agent.md").write_text("# New Agent") + (package_dir / "old.chatmode.md").write_text("# Old Chatmode") + + agents = self.integrator.find_agent_files(package_dir) + assert len(agents) == 2 + extensions = {tuple(p.name.split('.')[-2:]) for p in agents} + assert extensions == {('agent', 'md'), ('chatmode', 'md')} + + def test_copy_agent_with_metadata(self): + """Test copying agent file with metadata in frontmatter.""" + source = self.project_root / "source.agent.md" + target = self.project_root / "target.agent.md" + + source_content = "# Security Agent\n\nSome agent content." 
+ source.write_text(source_content) + + package = APMPackage( + name="test-pkg", + version="1.0.0", + package_path=Path("/fake/path"), + source="github.com/test/repo" + ) + resolved_ref = ResolvedReference( + original_ref="main", + ref_type=GitReferenceType.BRANCH, + resolved_commit="abc123", + ref_name="main" + ) + package_info = PackageInfo( + package=package, + install_path=Path("/fake/install"), + resolved_reference=resolved_ref, + installed_at="2024-11-13T10:00:00" + ) + + self.integrator.copy_agent_with_metadata(source, target, package_info, source) + + target_content = target.read_text() + assert "---" in target_content # YAML frontmatter + assert "apm:" in target_content + assert "version: 1.0.0" in target_content + assert "commit: abc123" in target_content + assert "Some agent content" in target_content + + def test_get_target_filename_agent_format(self): + """Test target filename generation with -apm suffix for .agent.md.""" + source = Path("/package/security.agent.md") + package_name = "danielmeppiel/security-standards" + + target = self.integrator.get_target_filename(source, package_name) + # Intent-first naming: -apm suffix before extension + assert target == "security-apm.agent.md" + + def test_get_target_filename_chatmode_format(self): + """Test target filename generation with -apm suffix for .chatmode.md.""" + source = Path("/package/default.chatmode.md") + package_name = "danielmeppiel/design-guidelines" + + target = self.integrator.get_target_filename(source, package_name) + # Preserve original extension + assert target == "default-apm.chatmode.md" + + + + def test_integrate_package_agents_creates_directory(self): + """Test that integration creates .github/agents/ if missing.""" + package_dir = self.project_root / "package" + package_dir.mkdir() + (package_dir / "security.agent.md").write_text("# Security Agent") + + github_dir = self.project_root / ".github" + github_dir.mkdir() + + package = APMPackage( + name="test-pkg", + version="1.0.0", + 
package_path=package_dir + ) + resolved_ref = ResolvedReference( + original_ref="main", + ref_type=GitReferenceType.BRANCH, + resolved_commit="abc123", + ref_name="main" + ) + package_info = PackageInfo( + package=package, + install_path=package_dir, + resolved_reference=resolved_ref, + installed_at=datetime.now().isoformat() + ) + + result = self.integrator.integrate_package_agents(package_info, self.project_root) + + assert result.files_integrated == 1 + assert (self.project_root / ".github" / "agents").exists() + + def test_integrate_package_agents_skips_unchanged_files(self): + """Test that integration skips files with same version and commit.""" + package_dir = self.project_root / "package" + package_dir.mkdir() + (package_dir / "security.agent.md").write_text("# Security Agent") + + github_agents = self.project_root / ".github" / "agents" + github_agents.mkdir(parents=True) + + # Pre-create the target file with matching frontmatter + existing_content = """--- +apm: + source: test-pkg + source_repo: github.com/test/repo + version: 1.0.0 + commit: abc123 + original_path: security.agent.md + installed_at: '2024-01-01T00:00:00' + content_hash: da39a3ee5e6b4b0d3255bfef95601890afd80709 +--- + +# Existing""" + (github_agents / "security-apm.agent.md").write_text(existing_content) + + package = APMPackage( + name="test-pkg", + version="1.0.0", + package_path=package_dir, + source="github.com/test/repo" + ) + resolved_ref = ResolvedReference( + original_ref="main", + ref_type=GitReferenceType.BRANCH, + resolved_commit="abc123", + ref_name="main" + ) + package_info = PackageInfo( + package=package, + install_path=package_dir, + resolved_reference=resolved_ref, + installed_at="2024-01-01T00:00:00" + ) + + result = self.integrator.integrate_package_agents(package_info, self.project_root) + + assert result.files_integrated == 0 + assert result.files_updated == 0 + assert result.files_skipped == 1 + + def test_update_gitignore_adds_patterns(self): + """Test that gitignore 
is updated with integrated agents patterns.""" + gitignore = self.project_root / ".gitignore" + gitignore.write_text("# Existing content\napm_modules/\n") + + updated = self.integrator.update_gitignore_for_integrated_agents(self.project_root) + + assert updated == True + content = gitignore.read_text() + assert ".github/agents/*-apm.agent.md" in content + assert ".github/agents/*-apm.chatmode.md" in content + + def test_update_gitignore_skips_if_exists(self): + """Test that gitignore update is skipped if patterns exist.""" + gitignore = self.project_root / ".gitignore" + gitignore.write_text(".github/agents/*-apm.agent.md\n.github/agents/*-apm.chatmode.md\n") + + updated = self.integrator.update_gitignore_for_integrated_agents(self.project_root) + + assert updated == False + + # ========== Header-based Versioning Tests ========== + + def test_parse_header_metadata_valid(self): + """Test parsing metadata from valid YAML frontmatter.""" + header_content = """--- +apm: + source: security-standards + source_repo: danielmeppiel/security-standards + version: 1.0.0 + commit: abc123def456 + original_path: security.agent.md + installed_at: '2024-11-13T10:30:00Z' +--- + +# Agent content here""" + + test_file = self.project_root / "test.agent.md" + test_file.write_text(header_content) + + metadata = self.integrator._parse_header_metadata(test_file) + + assert metadata['Source'] == 'security-standards (danielmeppiel/security-standards)' + assert metadata['Version'] == '1.0.0' + assert metadata['Commit'] == 'abc123def456' + assert metadata['Original'] == 'security.agent.md' + assert metadata['Installed'] == '2024-11-13T10:30:00Z' + + def test_parse_header_metadata_no_header(self): + """Test parsing file without header returns empty dict.""" + test_file = self.project_root / "test.agent.md" + test_file.write_text("# Just content, no header") + + metadata = self.integrator._parse_header_metadata(test_file) + + assert metadata == {} + + def 
test_parse_header_metadata_malformed(self): + """Test parsing malformed header returns empty dict.""" + test_file = self.project_root / "test.agent.md" + test_file.write_text("\n" - - self.integrator.copy_prompt_with_header(source, target, header) - - target_content = target.read_text() - assert target_content.startswith(header) - assert source_content in target_content + def test_integrate_package_prompts_creates_directory(self): """Test that integration creates .github/prompts/ if missing.""" @@ -280,9 +274,10 @@ def test_should_update_prompt_new_version(self): installed_at=datetime.now().isoformat() ) - should_update = self.integrator._should_update_prompt(existing_header, package_info) + should_update, was_modified = self.integrator._should_update_prompt(existing_header, package_info) assert should_update == True + assert was_modified == False def test_should_update_prompt_new_commit(self): """Test that prompt should be updated when commit changes.""" @@ -309,9 +304,10 @@ def test_should_update_prompt_new_commit(self): installed_at=datetime.now().isoformat() ) - should_update = self.integrator._should_update_prompt(existing_header, package_info) + should_update, was_modified = self.integrator._should_update_prompt(existing_header, package_info) assert should_update == True + assert was_modified == False def test_should_update_prompt_no_change(self): """Test that prompt should not be updated when version and commit match.""" @@ -338,9 +334,10 @@ def test_should_update_prompt_no_change(self): installed_at=datetime.now().isoformat() ) - should_update = self.integrator._should_update_prompt(existing_header, package_info) + should_update, was_modified = self.integrator._should_update_prompt(existing_header, package_info) assert should_update == False + assert was_modified == False def test_should_update_prompt_no_header(self): """Test that prompt should be updated when no valid header exists.""" @@ -364,9 +361,10 @@ def test_should_update_prompt_no_header(self): 
installed_at=datetime.now().isoformat() ) - should_update = self.integrator._should_update_prompt(existing_header, package_info) + should_update, was_modified = self.integrator._should_update_prompt(existing_header, package_info) assert should_update == True + assert was_modified == False def test_integrate_first_time_creates_with_header(self): """Test that first-time integration creates files with proper headers.""" @@ -402,12 +400,13 @@ def test_integrate_first_time_creates_with_header(self): assert result.files_updated == 0 assert result.files_skipped == 0 - # Verify header was added + # Verify frontmatter metadata was added target_file = github_prompts / "test-apm.prompt.md" content = target_file.read_text() - assert content.startswith(' + # Pre-create file with old version in YAML frontmatter + old_content = """--- +apm: + source: test-pkg + source_repo: github.com/test/repo + version: 1.0.0 + commit: abc123 + original_path: test.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: abc123 +--- # Old Content""" (github_prompts / "test-apm.prompt.md").write_text(old_content) @@ -456,10 +458,10 @@ def test_integrate_with_new_version_updates_file(self): assert result.files_updated == 1 assert result.files_skipped == 0 - # Verify content was updated + # Verify content was updated with new YAML frontmatter target_file = github_prompts / "test-apm.prompt.md" content = target_file.read_text() - assert 'Version: 2.0.0' in content + assert 'version: 2.0.0' in content assert '# Updated Content' in content assert '# Old Content' not in content @@ -472,14 +474,17 @@ def test_integrate_with_new_commit_updates_file(self): github_prompts = self.project_root / ".github" / "prompts" github_prompts.mkdir(parents=True) - # Pre-create file with old commit - old_content = """ + # Pre-create file with old commit in YAML frontmatter + old_content = """--- +apm: + source: test-pkg + source_repo: github.com/test/repo + version: 1.0.0 + commit: abc123 + original_path: 
test.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: abc123 +--- # Old Content""" (github_prompts / "test-apm.prompt.md").write_text(old_content) @@ -509,10 +514,10 @@ def test_integrate_with_new_commit_updates_file(self): assert result.files_updated == 1 assert result.files_skipped == 0 - # Verify commit was updated + # Verify commit was updated in YAML frontmatter target_file = github_prompts / "test-apm.prompt.md" content = target_file.read_text() - assert 'Commit: def456' in content + assert 'commit: def456' in content assert '# Updated Content' in content def test_integrate_mixed_operations(self): @@ -528,28 +533,37 @@ def test_integrate_mixed_operations(self): github_prompts = self.project_root / ".github" / "prompts" github_prompts.mkdir(parents=True) - # Pre-create file to be updated (old version) - update_old = """ + # Pre-create file to be updated (old version) in YAML frontmatter + update_old = """--- +apm: + source: test-pkg + source_repo: github.com/test/repo + version: 1.0.0 + commit: abc123 + original_path: update.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: abc123 +--- # Old Content""" (github_prompts / "update-apm.prompt.md").write_text(update_old) - # Pre-create file to be skipped (same version) - skip_same = """ + # Pre-create file to be skipped (same version) - need correct hash + import hashlib + skip_content = "# Unchanged File" + skip_hash = hashlib.sha256(skip_content.encode()).hexdigest() + skip_same = f"""--- +apm: + source: test-pkg + source_repo: github.com/test/repo + version: 2.0.0 + commit: def456 + original_path: skip.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: {skip_hash} +--- -# Unchanged File""" +{skip_content}""" (github_prompts / "skip-apm.prompt.md").write_text(skip_same) package = APMPackage( @@ -580,9 +594,9 @@ def test_integrate_mixed_operations(self): # Verify new file exists assert (github_prompts / "new-apm.prompt.md").exists() - # Verify updated file has new version 
+ # Verify updated file has new version in YAML frontmatter update_content = (github_prompts / "update-apm.prompt.md").read_text() - assert 'Version: 2.0.0' in update_content + assert 'version: 2.0.0' in update_content # Verify skipped file is unchanged skip_content = (github_prompts / "skip-apm.prompt.md").read_text() diff --git a/tests/unit/integration/test_sync_integration_url_normalization.py b/tests/unit/integration/test_sync_integration_url_normalization.py new file mode 100644 index 000000000..0f0a8b6b2 --- /dev/null +++ b/tests/unit/integration/test_sync_integration_url_normalization.py @@ -0,0 +1,338 @@ +"""Tests for sync_integration URL normalization fix. + +This test file specifically covers the critical bug fix where sync_integration +was incorrectly removing ALL integrated files instead of only orphaned ones. + +The bug was caused by URL format mismatch: +- Metadata stored: https://github.com/owner/repo (full URL) +- Dependency list: owner/repo (short form) +- Comparison failed, causing all files to be seen as orphans + +These tests ensure the URL normalization logic works correctly across: +- GitHub repositories +- Virtual packages +- Multiple packages installed simultaneously +- Different URL formats (with/without .git suffix) +""" + +import tempfile +from pathlib import Path +from unittest.mock import Mock + +from apm_cli.integration import PromptIntegrator, AgentIntegrator +from apm_cli.models.apm_package import DependencyReference + + +class TestSyncIntegrationURLNormalization: + """Test sync_integration URL normalization for multiple packages.""" + + def setup_method(self): + """Set up test fixtures.""" + self.temp_dir = tempfile.mkdtemp() + self.project_root = Path(self.temp_dir) + self.prompt_integrator = PromptIntegrator() + self.agent_integrator = AgentIntegrator() + + def teardown_method(self): + """Clean up after tests.""" + import shutil + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def 
test_sync_removes_only_uninstalled_package_prompts(self): + """Test that uninstalling one package only removes its prompts, not others.""" + github_prompts = self.project_root / ".github" / "prompts" + github_prompts.mkdir(parents=True) + + # Create integrated prompts from multiple packages with YAML frontmatter + compliance_prompt = """--- +apm: + source: compliance-rules + source_repo: https://github.com/danielmeppiel/compliance-rules + version: 1.0.0 + commit: abc123 + original_path: compliance-audit.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash1 +--- + +# Compliance Audit""" + + design_prompt = """--- +apm: + source: design-guidelines + source_repo: https://github.com/danielmeppiel/design-guidelines + version: 1.0.0 + commit: def456 + original_path: design-review.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash2 +--- + +# Design Review""" + + virtual_prompt = """--- +apm: + source: awesome-copilot-breakdown-plan + source_repo: https://github.com/github/awesome-copilot + version: 1.0.0 + commit: unknown + original_path: .apm/prompts/breakdown-plan.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash3 +--- + +# Breakdown Plan""" + + (github_prompts / "compliance-audit-apm.prompt.md").write_text(compliance_prompt) + (github_prompts / "design-review-apm.prompt.md").write_text(design_prompt) + (github_prompts / "breakdown-plan-apm.prompt.md").write_text(virtual_prompt) + + # Simulate uninstalling design-guidelines (keeping compliance-rules and virtual package) + apm_package = Mock() + apm_package.get_apm_dependencies.return_value = [ + DependencyReference( + repo_url="danielmeppiel/compliance-rules", + reference="main" + ), + DependencyReference( + repo_url="github/awesome-copilot", + reference="main" + ) + ] + + # Run sync + result = self.prompt_integrator.sync_integration(apm_package, self.project_root) + + # Verify only design-guidelines prompt was removed + assert not (github_prompts / 
"design-review-apm.prompt.md").exists(), "design-guidelines prompt should be removed" + assert (github_prompts / "compliance-audit-apm.prompt.md").exists(), "compliance-rules prompt should remain" + assert (github_prompts / "breakdown-plan-apm.prompt.md").exists(), "virtual package prompt should remain" + assert result['files_removed'] == 1, "Should remove exactly 1 file" + assert result['errors'] == 0, "Should have no errors" + + def test_sync_handles_github_url_formats(self): + """Test that sync correctly normalizes URL formats from different Git hosts (GitHub, GitLab, enterprise).""" + github_prompts = self.project_root / ".github" / "prompts" + github_prompts.mkdir(parents=True) + + # Test various URL formats in metadata + test_cases = [ + ("https://github.com/owner/repo", "owner/repo"), + ("https://github.com/owner/repo.git", "owner/repo"), + ("https://gitlab.com/owner/repo", "owner/repo"), + ("https://git.company.com/owner/repo", "owner/repo"), + ] + + for idx, (source_repo_url, expected_match) in enumerate(test_cases): + prompt_content = f"""--- +apm: + source: test-package-{idx} + source_repo: {source_repo_url} + version: 1.0.0 + commit: abc123 + original_path: test.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash{idx} +--- + +# Test Prompt {idx}""" + + (github_prompts / f"test-{idx}-apm.prompt.md").write_text(prompt_content) + + # Simulate package still installed (short form) + apm_package = Mock() + apm_package.get_apm_dependencies.return_value = [ + DependencyReference(repo_url="owner/repo", reference="main") + ] + + # Run sync + result = self.prompt_integrator.sync_integration(apm_package, self.project_root) + + # All prompts should remain (they all normalize to owner/repo) + assert result['files_removed'] == 0, "No files should be removed - all should match" + for idx in range(len(test_cases)): + assert (github_prompts / f"test-{idx}-apm.prompt.md").exists(), f"Prompt {idx} should still exist" + + def test_sync_removes_only_uninstalled_package_agents(self): + """Test
that uninstalling one package only removes its agents, not others.""" + github_agents = self.project_root / ".github" / "agents" + github_agents.mkdir(parents=True) + + # Create integrated agents from multiple packages with YAML frontmatter + compliance_agent = """--- +apm: + source: compliance-rules + source_repo: https://github.com/danielmeppiel/compliance-rules + version: 1.0.0 + commit: abc123 + original_path: compliance-agent.agent.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash1 +--- + +# Compliance Agent""" + + design_agent = """--- +apm: + source: design-guidelines + source_repo: https://github.com/danielmeppiel/design-guidelines + version: 1.0.0 + commit: def456 + original_path: design-agent.agent.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash2 +--- + +# Design Agent""" + + (github_agents / "compliance-agent-apm.agent.md").write_text(compliance_agent) + (github_agents / "design-agent-apm.agent.md").write_text(design_agent) + + # Simulate uninstalling design-guidelines (keeping compliance-rules) + apm_package = Mock() + apm_package.get_apm_dependencies.return_value = [ + DependencyReference( + repo_url="danielmeppiel/compliance-rules", + reference="main" + ) + ] + + # Run sync + result = self.agent_integrator.sync_integration(apm_package, self.project_root) + + # Verify only design-guidelines agent was removed + assert not (github_agents / "design-agent-apm.agent.md").exists(), "design-guidelines agent should be removed" + assert (github_agents / "compliance-agent-apm.agent.md").exists(), "compliance-rules agent should remain" + assert result['files_removed'] == 1, "Should remove exactly 1 file" + assert result['errors'] == 0, "Should have no errors" + + def test_sync_with_three_packages_removes_one(self): + """Test realistic scenario: 3 packages installed, uninstall 1, verify 2 remain.""" + github_prompts = self.project_root / ".github" / "prompts" + github_prompts.mkdir(parents=True) + + # Create prompts from 3 packages + 
packages = [ + ("pkg-a", "https://github.com/owner/pkg-a"), + ("pkg-b", "https://github.com/owner/pkg-b"), + ("pkg-c", "https://github.com/owner/pkg-c"), + ] + + for pkg_name, repo_url in packages: + prompt = f"""--- +apm: + source: {pkg_name} + source_repo: {repo_url} + version: 1.0.0 + commit: abc123 + original_path: test.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash +--- + +# Prompt from {pkg_name}""" + (github_prompts / f"{pkg_name}-apm.prompt.md").write_text(prompt) + + # Uninstall pkg-b (keep pkg-a and pkg-c) + apm_package = Mock() + apm_package.get_apm_dependencies.return_value = [ + DependencyReference(repo_url="owner/pkg-a", reference="main"), + DependencyReference(repo_url="owner/pkg-c", reference="main") + ] + + # Run sync + result = self.prompt_integrator.sync_integration(apm_package, self.project_root) + + # Verify correct removal + assert (github_prompts / "pkg-a-apm.prompt.md").exists(), "pkg-a should remain" + assert not (github_prompts / "pkg-b-apm.prompt.md").exists(), "pkg-b should be removed" + assert (github_prompts / "pkg-c-apm.prompt.md").exists(), "pkg-c should remain" + assert result['files_removed'] == 1, "Should remove exactly pkg-b" + + def test_sync_preserves_files_without_metadata(self): + """Test that sync doesn't remove user's custom files without APM metadata.""" + github_prompts = self.project_root / ".github" / "prompts" + github_prompts.mkdir(parents=True) + + # Create a user's custom prompt without APM metadata + custom_prompt = """# Custom User Prompt + +This is a custom prompt without APM metadata.""" + (github_prompts / "my-custom-apm.prompt.md").write_text(custom_prompt) + + # Create an APM-integrated prompt + apm_prompt = """--- +apm: + source: test-package + source_repo: https://github.com/owner/test-package + version: 1.0.0 + commit: abc123 + original_path: test.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash +--- + +# APM Prompt""" + (github_prompts / 
"test-apm.prompt.md").write_text(apm_prompt) + + # Uninstall the package (no packages remain) + apm_package = Mock() + apm_package.get_apm_dependencies.return_value = [] + + # Run sync + result = self.prompt_integrator.sync_integration(apm_package, self.project_root) + + # User's custom file should remain, APM file should be removed + assert (github_prompts / "my-custom-apm.prompt.md").exists(), "Custom file should remain" + assert not (github_prompts / "test-apm.prompt.md").exists(), "APM file should be removed" + assert result['files_removed'] == 1, "Should only remove the APM file" + + def test_sync_handles_virtual_packages_correctly(self): + """Test that virtual packages (single file imports) are handled correctly.""" + github_prompts = self.project_root / ".github" / "prompts" + github_prompts.mkdir(parents=True) + + # Virtual package: github/awesome-copilot/prompts/breakdown-plan.prompt.md + # The source_repo will be the repo root, not the file path + virtual_prompt = """--- +apm: + source: awesome-copilot-breakdown-plan + source_repo: https://github.com/github/awesome-copilot + version: 1.0.0 + commit: unknown + original_path: .apm/prompts/breakdown-plan.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash +--- + +# Breakdown Plan""" + (github_prompts / "breakdown-plan-apm.prompt.md").write_text(virtual_prompt) + + # Regular package + regular_prompt = """--- +apm: + source: test-package + source_repo: https://github.com/owner/test-package + version: 1.0.0 + commit: abc123 + original_path: test.prompt.md + installed_at: '2024-11-13T10:00:00' + content_hash: hash +--- + +# Regular Prompt""" + (github_prompts / "test-apm.prompt.md").write_text(regular_prompt) + + # Keep only the virtual package + apm_package = Mock() + apm_package.get_apm_dependencies.return_value = [ + DependencyReference(repo_url="github/awesome-copilot", reference="main") + ] + + # Run sync + result = self.prompt_integrator.sync_integration(apm_package, self.project_root) + + 
# Virtual package should remain, regular should be removed + assert (github_prompts / "breakdown-plan-apm.prompt.md").exists(), "Virtual package prompt should remain" + assert not (github_prompts / "test-apm.prompt.md").exists(), "Regular package prompt should be removed" + assert result['files_removed'] == 1, "Should remove only the regular package" diff --git a/tests/unit/integration/test_utils.py b/tests/unit/integration/test_utils.py new file mode 100644 index 000000000..57a1b632e --- /dev/null +++ b/tests/unit/integration/test_utils.py @@ -0,0 +1,83 @@ +"""Tests for integration utility functions.""" + +from apm_cli.integration.utils import normalize_repo_url + + +class TestNormalizeRepoUrl: + """Tests for normalize_repo_url utility function.""" + + def test_normalize_short_form_unchanged(self): + """Short form URLs should remain unchanged.""" + assert normalize_repo_url("owner/repo") == "owner/repo" + + def test_normalize_short_form_with_git_suffix(self): + """Short form with .git suffix should have it removed.""" + assert normalize_repo_url("owner/repo.git") == "owner/repo" + + def test_normalize_github_https_url(self): + """Full GitHub HTTPS URL should be normalized to owner/repo.""" + assert normalize_repo_url("https://github.com/owner/repo") == "owner/repo" + + def test_normalize_github_https_url_with_git_suffix(self): + """Full GitHub HTTPS URL with .git should be normalized.""" + assert normalize_repo_url("https://github.com/owner/repo.git") == "owner/repo" + + def test_normalize_gitlab_url(self): + """GitLab URLs should be normalized to owner/repo.""" + assert normalize_repo_url("https://gitlab.com/owner/repo") == "owner/repo" + + def test_normalize_enterprise_github_url(self): + """Enterprise GitHub URLs should be normalized.""" + assert normalize_repo_url("https://github.enterprise.com/owner/repo") == "owner/repo" + + def test_normalize_enterprise_github_url_with_git(self): + """Enterprise GitHub URLs with .git should be normalized.""" + assert 
normalize_repo_url("https://github.enterprise.com/owner/repo.git") == "owner/repo" + + def test_normalize_http_url(self): + """HTTP URLs (not HTTPS) should also be normalized.""" + assert normalize_repo_url("http://github.com/owner/repo") == "owner/repo" + + def test_normalize_nested_org_path(self): + """URLs with nested paths should extract the full path after host.""" + assert normalize_repo_url("https://gitlab.com/group/subgroup/repo") == "group/subgroup/repo" + + def test_normalize_complex_enterprise_url(self): + """Complex enterprise URLs should be handled correctly.""" + url = "https://git.enterprise.internal/organization/team/project" + assert normalize_repo_url(url) == "organization/team/project" + + def test_normalize_url_without_path(self): + """URLs without a path component should be returned as-is.""" + assert normalize_repo_url("https://github.com") == "https://github.com" + + def test_normalize_empty_string(self): + """Empty string should be returned unchanged.""" + assert normalize_repo_url("") == "" + + def test_normalize_multiple_git_suffixes(self): + """Only the trailing .git should be removed.""" + # This is an edge case - repo name contains 'git' + assert normalize_repo_url("owner/mygit-repo.git") == "owner/mygit-repo" + + def test_normalize_preserves_case(self): + """Case should be preserved in the normalized URL.""" + assert normalize_repo_url("https://github.com/Owner/Repo") == "Owner/Repo" + + def test_normalize_handles_trailing_slash(self): + """Trailing slashes should be removed for consistent matching.""" + assert normalize_repo_url("https://github.com/owner/repo/") == "owner/repo" + + def test_normalize_handles_trailing_slash_short_form(self): + """Trailing slashes should be removed from short form URLs too.""" + assert normalize_repo_url("owner/repo/") == "owner/repo" + + def test_normalize_handles_trailing_slash_with_git(self): + """Trailing slashes and .git suffix should both be removed.""" + assert 
normalize_repo_url("https://github.com/owner/repo.git/") == "owner/repo" + + def test_normalize_ssh_url_unchanged(self): + """SSH URLs without :// are treated as short form, so only the trailing .git suffix is removed (edge case).""" + # SSH URLs like git@github.com:owner/repo.git don't have :// + # so they're treated as short form + assert normalize_repo_url("git@github.com:owner/repo.git") == "git@github.com:owner/repo" diff --git a/tests/unit/test_install_command.py b/tests/unit/test_install_command.py index 6ff3bdf5f..4e8b50f61 100644 --- a/tests/unit/test_install_command.py +++ b/tests/unit/test_install_command.py @@ -66,7 +66,7 @@ def test_install_no_apm_yml_with_packages_creates_minimal_apm_yml( mock_apm_package.from_apm_yml.return_value = mock_pkg_instance # Mock the install function to avoid actual installation - mock_install_apm.return_value = (0, 0) # Return tuple (installed_count, total_integrated) + mock_install_apm.return_value = (0, 0, 0) # Return tuple (installed_count, prompts_integrated, agents_integrated) result = self.runner.invoke(cli, ["install", "test/package"]) @@ -106,7 +106,7 @@ def test_install_no_apm_yml_with_multiple_packages( mock_pkg_instance.get_mcp_dependencies.return_value = [] mock_apm_package.from_apm_yml.return_value = mock_pkg_instance - mock_install_apm.return_value = (0, 0) # Return tuple (installed_count, total_integrated) + mock_install_apm.return_value = (0, 0, 0) # Return tuple (installed_count, prompts_integrated, agents_integrated) result = self.runner.invoke(cli, ["install", "org1/pkg1", "org2/pkg2"]) @@ -148,6 +148,8 @@ def test_install_existing_apm_yml_preserves_behavior( mock_pkg_instance.get_mcp_dependencies.return_value = [] mock_apm_package.from_apm_yml.return_value = mock_pkg_instance + mock_install_apm.return_value = (0, 0, 0) # Return tuple (installed_count, prompts_integrated, agents_integrated) + result = self.runner.invoke(cli, ["install"]) # Should succeed and NOT show "Created apm.yml" @@ -184,7 +186,7 @@ def 
test_install_auto_created_apm_yml_has_correct_metadata( mock_pkg_instance.get_mcp_dependencies.return_value = [] mock_apm_package.from_apm_yml.return_value = mock_pkg_instance - mock_install_apm.return_value = (0, 0) # Return tuple (installed_count, total_integrated) + mock_install_apm.return_value = (0, 0, 0) # Return tuple (installed_count, prompts_integrated, agents_integrated) result = self.runner.invoke(cli, ["install", "test/package"])