From dd076fdbe23d0cec4ddf83432573b49ae13fc578 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 5 Mar 2026 16:28:28 -0500 Subject: [PATCH 01/88] chore: provde better examples for PluginPackageInfo constructor Signed-off-by: habeck --- cpex/framework/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index efda1d5..947bcfb 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -1850,8 +1850,8 @@ class PluginPackageInfo(BaseModel): Examples: >>> pkg = PluginPackageInfo(git_repository="https://github.com/user/repo.git", - ... git_branch_tag_commit="v1.0.0", - ... version_constraint=">=1.0.0") + git_branch_tag_commit="v1.0.0", + version_constraint=">=1.0.0") >>> pkg2 = PluginPackageInfo(pypi_package="my-package", version_constraint=">=1.0.0") """ From aa371bcbf8012e05aaf655e112a0b1b6972ea0d6 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 5 Mar 2026 18:05:36 -0500 Subject: [PATCH 02/88] enh: add PluginVersionInfo and PluginVersionRegistry models w/unit tests Signed-off-by: habeck --- cpex/framework/models.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 947bcfb..744b381 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -33,6 +33,8 @@ field_validator, model_validator, ) +from packaging.version import Version, InvalidVersion + # First-Party from cpex.framework.constants import ( From e63a3895c0bda4c3d2afc5a940f0eee19592532f Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 6 Mar 2026 11:26:07 -0500 Subject: [PATCH 03/88] chore: lint fix Signed-off-by: habeck --- cpex/framework/models.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 744b381..9ba5548 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -35,7 +35,6 @@ ) from packaging.version import Version, InvalidVersion - # First-Party from cpex.framework.constants import ( CMD, From bf837165f58fd58fe3384f950cce5ba4f953561f Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 6 Mar 2026 16:32:56 -0500 Subject: [PATCH 04/88] chore: lint fix Signed-off-by: habeck --- cpex/framework/models.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 9ba5548..947bcfb 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -33,7 +33,6 @@ field_validator, model_validator, ) -from packaging.version import Version, InvalidVersion # First-Party from cpex.framework.constants import ( From 5211afcec78e304bab2bb05155cefb4f52a94e9d Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 9 Mar 2026 17:02:04 -0400 Subject: [PATCH 05/88] chore: unit tests and fixtures for plugin isolation via venv. Signed-off-by: habeck --- .../unit/cpex/framework/isolated/conftest.py | 54 +++++++++++-------- 1 file changed, 32 insertions(+), 22 deletions(-) diff --git a/tests/unit/cpex/framework/isolated/conftest.py b/tests/unit/cpex/framework/isolated/conftest.py index 26c941d..9b79a78 100644 --- a/tests/unit/cpex/framework/isolated/conftest.py +++ b/tests/unit/cpex/framework/isolated/conftest.py @@ -8,6 +8,7 @@ """ import sys +from pathlib import Path from unittest.mock import MagicMock import pytest @@ -19,16 +20,16 @@ @pytest.fixture def mock_venv_structure(tmp_path): """Create a mock virtual environment directory structure. - + Args: tmp_path: pytest tmp_path fixture - + Returns: Path to the mock venv directory """ venv_path = tmp_path / ".venv" venv_path.mkdir() - + # Create appropriate bin/Scripts directory based on platform if sys.platform == "win32": scripts_dir = venv_path / "Scripts" @@ -38,28 +39,28 @@ def mock_venv_structure(tmp_path): bin_dir = venv_path / "bin" bin_dir.mkdir() python_exe = bin_dir / "python" - + # Create a dummy python executable python_exe.touch() python_exe.chmod(0o755) - + return venv_path @pytest.fixture def sample_plugin_config(tmp_path): """Create a sample plugin configuration for testing. - + Args: tmp_path: pytest tmp_path fixture - + Returns: PluginConfig instance """ venv_path = tmp_path / ".venv" script_path = tmp_path / "plugin" requirements_file = tmp_path / "requirements.txt" - + config_dict = { "name": "test_isolated_plugin", "kind": "isolated_venv", @@ -71,8 +72,8 @@ def sample_plugin_config(tmp_path): "venv_path": str(venv_path), "script_path": str(script_path), "requirements_file": str(requirements_file), - "class_name": "test_plugin.TestPlugin", - }, + "class_name": "test_plugin.TestPlugin" + } } return PluginConfig(**config_dict) @@ -80,50 +81,60 @@ def sample_plugin_config(tmp_path): @pytest.fixture def sample_global_context(): """Create a sample GlobalContext for testing. - + Returns: GlobalContext instance """ - return GlobalContext(request_id="test-req-123", user="test_user", tenant_id="test-tenant", server_id="test-server") + return GlobalContext( + request_id="test-req-123", + user="test_user", + tenant_id="test-tenant", + server_id="test-server" + ) @pytest.fixture def sample_plugin_context(sample_global_context): """Create a sample PluginContext for testing. - + Args: sample_global_context: GlobalContext fixture - + Returns: PluginContext instance """ return PluginContext( - global_context=sample_global_context, state={"test_key": "test_value"}, metadata={"test_meta": "test_data"} + global_context=sample_global_context, + state={"test_key": "test_value"}, + metadata={"test_meta": "test_data"} ) @pytest.fixture def mock_communicator(): """Create a mock VenvProcessCommunicator. - + Returns: MagicMock instance configured as a communicator """ mock_comm = MagicMock() mock_comm.install_requirements = MagicMock() - mock_comm.send_task = MagicMock( - return_value={"continue_processing": True, "modified_payload": None, "violation": None, "metadata": {}} - ) + mock_comm.send_task = MagicMock(return_value={ + "continue_processing": True, + "modified_payload": None, + "violation": None, + "metadata": {} + }) return mock_comm @pytest.fixture def sample_requirements_file(tmp_path): """Create a sample requirements.txt file. - + Args: tmp_path: pytest tmp_path fixture - + Returns: Path to the requirements file """ @@ -131,5 +142,4 @@ def sample_requirements_file(tmp_path): requirements_file.write_text("pytest>=7.0.0\nrequests>=2.28.0\n") return requirements_file - # Made with Bob From cced07d5bf0d045b596f0ae001bf3f263a0532d6 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 10 Mar 2026 15:43:27 -0400 Subject: [PATCH 06/88] enh: refactored the invoke_hook method in cpex/framework/isolated/client.py to run async Signed-off-by: habeck --- cpex/framework/isolated/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index d49dd6d..78e3c34 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -272,7 +272,8 @@ def _validate_hook_invocation(self, hook_type: str) -> type[PluginResult]: if not result_type: raise PluginError( error=PluginErrorModel( - message=f"Hook type '{hook_type}' not registered in hook registry", plugin_name=self.name + message=f"Hook type '{hook_type}' not registered in hook registry", + plugin_name=self.name ) ) From 4d2573d48834a96eb16955d6e9963888fed87ca7 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 10 Mar 2026 16:00:04 -0400 Subject: [PATCH 07/88] chore: lint fix Signed-off-by: habeck --- cpex/framework/isolated/client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index 78e3c34..d49dd6d 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -272,8 +272,7 @@ def _validate_hook_invocation(self, hook_type: str) -> type[PluginResult]: if not result_type: raise PluginError( error=PluginErrorModel( - message=f"Hook type '{hook_type}' not registered in hook registry", - plugin_name=self.name + message=f"Hook type '{hook_type}' not registered in hook registry", plugin_name=self.name ) ) From d9f5aad6f6133fb4ae8584f4175871afc9004c89 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 10 Mar 2026 16:12:14 -0400 Subject: [PATCH 08/88] chore: updated unit test test_worker to get coverage to 97%. Signed-off-by: habeck --- tests/unit/cpex/framework/isolated/test_worker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/cpex/framework/isolated/test_worker.py b/tests/unit/cpex/framework/isolated/test_worker.py index 7626535..06b81b7 100644 --- a/tests/unit/cpex/framework/isolated/test_worker.py +++ b/tests/unit/cpex/framework/isolated/test_worker.py @@ -11,6 +11,7 @@ import os import shutil import sys +from io import StringIO from pathlib import Path from unittest.mock import AsyncMock, MagicMock, patch From 73d0fb6e0540382c3b51203b016a82a51077c393 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 12 Mar 2026 11:11:34 -0400 Subject: [PATCH 09/88] enh: The optimization eliminates the overhead of: Forking a new Python process (~1.2ms per fork_exec) Initializing the Python interpreter Loading modules and dependencies Setting up the subprocess communication pipes Signed-off-by: habeck --- .../cpex/framework/isolated/test_client.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/unit/cpex/framework/isolated/test_client.py b/tests/unit/cpex/framework/isolated/test_client.py index 516512c..d60a308 100644 --- a/tests/unit/cpex/framework/isolated/test_client.py +++ b/tests/unit/cpex/framework/isolated/test_client.py @@ -657,6 +657,25 @@ async def test_initialize_with_invalid_cache( # Should install requirements when cache is invalid mock_comm.install_requirements.assert_called_once() mock_save_metadata.assert_called_once() + @pytest.mark.asyncio + async def test_cleanup(self, plugin): + """Test cleanup method stops worker process.""" + mock_comm = MagicMock() + plugin.comm = mock_comm + + await plugin.cleanup() + + mock_comm.stop_worker.assert_called_once() + assert plugin.comm is None + + @pytest.mark.asyncio + async def test_cleanup_no_comm(self, plugin): + """Test cleanup when comm is None.""" + plugin.comm = None + + # Should not raise error + await plugin.cleanup() + @pytest.mark.asyncio async def test_cleanup(self, plugin): From f2e70a657df16a1140392701e7a7620e4579867a Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 12 Mar 2026 18:44:16 -0400 Subject: [PATCH 10/88] fix: fail early plugin_path do not exist, computer .venv path automatically, update cli to support creating an isolated plugin. Signed-off-by: habeck --- cpex/framework/isolated/client.py | 2 + .../cpex/framework/isolated/test_venv_comm.py | 296 ++++++++++++++++++ 2 files changed, 298 insertions(+) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index d49dd6d..773fe22 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -248,6 +248,8 @@ async def initialize(self) -> None: else: logger.info("Using cached venv, skipping requirements installation") + + async def cleanup(self) -> None: """Cleanup resources, including stopping the worker process.""" if self.comm: diff --git a/tests/unit/cpex/framework/isolated/test_venv_comm.py b/tests/unit/cpex/framework/isolated/test_venv_comm.py index 4b02d09..58f931e 100644 --- a/tests/unit/cpex/framework/isolated/test_venv_comm.py +++ b/tests/unit/cpex/framework/isolated/test_venv_comm.py @@ -894,4 +894,300 @@ def test_send_task_very_large_data_exceeds_default_limit(self, mock_thread, mock assert len(communicator.response_queues) == 0 + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_stderr_with_output(self, mock_thread, mock_popen, communicator): + """Test _read_stderr method reads and logs stderr output.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stdout = MagicMock() + + # Mock stderr with some output + mock_stderr = MagicMock() + mock_stderr.readline.side_effect = [ + "Error line 1\n", + "Error line 2\n", + "", # Empty string signals end + ] + mock_process.stderr = mock_stderr + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + # Start worker to trigger stderr thread + communicator.start_worker("test_script.py") + + # Manually call _read_stderr to test it + communicator._read_stderr() + + # Verify readline was called + assert mock_stderr.readline.call_count >= 1 + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_stderr_with_exception(self, mock_thread, mock_popen, communicator): + """Test _read_stderr handles exceptions gracefully.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stdout = MagicMock() + + # Mock stderr that raises exception + mock_stderr = MagicMock() + mock_stderr.readline.side_effect = Exception("Read error") + mock_process.stderr = mock_stderr + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Should not raise exception + communicator._read_stderr() + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_stderr_no_process(self, mock_thread, mock_popen, communicator): + """Test _read_stderr returns early when no process.""" + # Don't start worker, just call _read_stderr + communicator._read_stderr() + # Should return without error + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_responses_with_valid_json(self, mock_thread, mock_popen, communicator): + """Test _read_responses processes valid JSON responses.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stderr = MagicMock() + + # Mock stdout with valid JSON responses + mock_stdout = MagicMock() + mock_stdout.readline.side_effect = [ + '{"status": "ok", "request_id": "test-123"}\n', + "", # Empty string signals end + ] + mock_process.stdout = mock_stdout + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + # Create a response queue for the request + communicator.response_queues["test-123"] = Queue() + + communicator.start_worker("test_script.py") + + # Manually call _read_responses + communicator._read_responses() + + # Verify the response was queued + assert not communicator.response_queues["test-123"].empty() + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_responses_with_empty_lines(self, mock_thread, mock_popen, communicator): + """Test _read_responses skips empty lines.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stderr = MagicMock() + + # Mock stdout with empty lines + mock_stdout = MagicMock() + mock_stdout.readline.side_effect = [ + "\n", + " \n", + '{"status": "ok", "request_id": "test-456"}\n', + "", + ] + mock_process.stdout = mock_stdout + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.response_queues["test-456"] = Queue() + communicator.start_worker("test_script.py") + communicator._read_responses() + + assert not communicator.response_queues["test-456"].empty() + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_responses_with_invalid_json(self, mock_thread, mock_popen, communicator): + """Test _read_responses handles invalid JSON gracefully.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stderr = MagicMock() + + # Mock stdout with invalid JSON + mock_stdout = MagicMock() + mock_stdout.readline.side_effect = [ + "not valid json\n", + '{"incomplete": \n', + "", + ] + mock_process.stdout = mock_stdout + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Should not raise exception + communicator._read_responses() + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_responses_without_request_id(self, mock_thread, mock_popen, communicator): + """Test _read_responses handles responses without request_id.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stderr = MagicMock() + + # Mock stdout with response missing request_id + mock_stdout = MagicMock() + mock_stdout.readline.side_effect = [ + '{"status": "ok", "data": "test"}\n', + "", + ] + mock_process.stdout = mock_stdout + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Should log warning but not crash + communicator._read_responses() + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_responses_unknown_request_id(self, mock_thread, mock_popen, communicator): + """Test _read_responses handles unknown request_id.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stderr = MagicMock() + + # Mock stdout with unknown request_id + mock_stdout = MagicMock() + mock_stdout.readline.side_effect = [ + '{"status": "ok", "request_id": "unknown-999"}\n', + "", + ] + mock_process.stdout = mock_stdout + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Should log warning but not crash + communicator._read_responses() + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_read_responses_with_exception(self, mock_thread, mock_popen, communicator): + """Test _read_responses handles exceptions during reading.""" + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stderr = MagicMock() + + # Mock stdout that raises exception + mock_stdout = MagicMock() + mock_stdout.readline.side_effect = Exception("Read error") + mock_process.stdout = mock_stdout + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Should handle exception and set running to False + communicator._read_responses() + assert communicator.running is False + + @patch("subprocess.Popen") + @patch("threading.Thread") + @patch("cpex.framework.isolated.venv_comm.Queue") + def test_send_task_stdin_not_available(self, mock_queue_class, mock_thread, mock_popen, communicator): + """Test send_task when stdin is not available.""" + task_data = {"task_type": "test"} + + mock_process = MagicMock() + mock_process.stdin = None # stdin not available + mock_process.stdout = MagicMock() + mock_process.stderr = MagicMock() + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + mock_queue_instance = MagicMock() + mock_queue_class.return_value = mock_queue_instance + + communicator.start_worker("test_script.py") + + with pytest.raises(RuntimeError, match="Worker process stdin not available"): + communicator.send_task("test_script.py", task_data) + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_stop_worker_send_shutdown_exception(self, mock_thread, mock_popen, communicator): + """Test stop_worker handles exception when sending shutdown signal.""" + mock_process = MagicMock() + mock_stdin = MagicMock() + mock_stdin.write.side_effect = Exception("Write failed") + mock_process.stdin = mock_stdin + mock_process.stdout = MagicMock() + mock_process.stderr = MagicMock() + mock_process.wait.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread_instance.is_alive.return_value = False + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Should handle exception gracefully + communicator.stop_worker() + + assert communicator.running is False + assert communicator.process is None + + def test_del_method(self, communicator): + """Test __del__ method calls stop_worker.""" + communicator.running = True + communicator.process = MagicMock() + + # Call __del__ directly + communicator.__del__() + + # Should have stopped the worker + assert communicator.running is False + + def test_del_method_no_running_attribute(self): + """Test __del__ handles missing running attribute.""" + # Create instance without proper initialization + comm = object.__new__(VenvProcessCommunicator) + + # Should not raise exception + comm.__del__() + + # Made with Bob From fe76e4939ecacd40f4a12354608026e1fe311aa1 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 12 Mar 2026 21:01:37 -0400 Subject: [PATCH 11/88] chore: lint fix Signed-off-by: habeck --- cpex/framework/isolated/client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index 773fe22..d49dd6d 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -248,8 +248,6 @@ async def initialize(self) -> None: else: logger.info("Using cached venv, skipping requirements installation") - - async def cleanup(self) -> None: """Cleanup resources, including stopping the worker process.""" if self.comm: From ba509629c45ee518b4ee5b1c4509b9af9fb6da0f Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 27 Mar 2026 18:28:49 -0400 Subject: [PATCH 12/88] fix: use the system config file (PLUGINS_CONFIG_FILE) for syspath update (Consistent with how the PluginManager works). Signed-off-by: habeck --- cpex/framework/isolated/client.py | 2 ++ cpex/framework/isolated/worker.py | 1 + 2 files changed, 3 insertions(+) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index d49dd6d..0e941c2 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -17,6 +17,7 @@ import shutil import sys import venv +import yaml from pathlib import Path from typing_extensions import Any, Optional @@ -26,6 +27,7 @@ from cpex.framework.errors import PluginError, convert_exception_to_error from cpex.framework.hooks.registry import get_hook_registry from cpex.framework.isolated.venv_comm import VenvProcessCommunicator +from cpex.framework.loader.config import ConfigLoader from cpex.framework.models import PluginConfig, PluginContext, PluginErrorModel, PluginPayload, PluginResult logger = logging.getLogger(__name__) diff --git a/cpex/framework/isolated/worker.py b/cpex/framework/isolated/worker.py index 426b660..b11f823 100644 --- a/cpex/framework/isolated/worker.py +++ b/cpex/framework/isolated/worker.py @@ -13,6 +13,7 @@ import importlib.metadata import json import logging +import os import platform import sys from pathlib import Path From 85d0fa2057676cbd3f96329961301c62961781fd Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 27 Mar 2026 18:33:07 -0400 Subject: [PATCH 13/88] chore: lint fix Signed-off-by: habeck --- cpex/framework/isolated/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index 0e941c2..51ec22c 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -17,7 +17,6 @@ import shutil import sys import venv -import yaml from pathlib import Path from typing_extensions import Any, Optional From e003ebed26c3a0632a0eefd3667b6891d800098a Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 7 Apr 2026 16:45:16 -0400 Subject: [PATCH 14/88] chore: Validate plugin_dirs entries against an allowlist Signed-off-by: habeck --- tests/unit/cpex/framework/isolated/test_worker.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/unit/cpex/framework/isolated/test_worker.py b/tests/unit/cpex/framework/isolated/test_worker.py index 06b81b7..7626535 100644 --- a/tests/unit/cpex/framework/isolated/test_worker.py +++ b/tests/unit/cpex/framework/isolated/test_worker.py @@ -11,7 +11,6 @@ import os import shutil import sys -from io import StringIO from pathlib import Path from unittest.mock import AsyncMock, MagicMock, patch From 5059d352e5da22e4f7e56a582f7a400abf9585e9 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 9 Apr 2026 14:20:19 -0400 Subject: [PATCH 15/88] fix: remove hardcoded reference to plugins/config in the cpex/framework/isolated/client.py and update tests. remove methods_to_exclude from validator. Signed-off-by: habeck --- cpex/framework/isolated/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index 51ec22c..d49dd6d 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -26,7 +26,6 @@ from cpex.framework.errors import PluginError, convert_exception_to_error from cpex.framework.hooks.registry import get_hook_registry from cpex.framework.isolated.venv_comm import VenvProcessCommunicator -from cpex.framework.loader.config import ConfigLoader from cpex.framework.models import PluginConfig, PluginContext, PluginErrorModel, PluginPayload, PluginResult logger = logging.getLogger(__name__) From f104b202aa25b799b44daa4e34bc4900468ee1b0 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 9 Apr 2026 15:45:56 -0400 Subject: [PATCH 16/88] chore: lint fix Signed-off-by: habeck --- cpex/framework/isolated/worker.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cpex/framework/isolated/worker.py b/cpex/framework/isolated/worker.py index b11f823..426b660 100644 --- a/cpex/framework/isolated/worker.py +++ b/cpex/framework/isolated/worker.py @@ -13,7 +13,6 @@ import importlib.metadata import json import logging -import os import platform import sys from pathlib import Path From 87f685e73843df4946ac61dc20a244460485a2f4 Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 10 Apr 2026 12:18:50 -0400 Subject: [PATCH 17/88] enh: Add a maximum line length check before parsing. Add model tests for PluginPackageInfo and PluginVersionRegistry Signed-off-by: habeck --- .../cpex/framework/isolated/test_venv_comm.py | 178 ++++++++++++++++++ 1 file changed, 178 insertions(+) diff --git a/tests/unit/cpex/framework/isolated/test_venv_comm.py b/tests/unit/cpex/framework/isolated/test_venv_comm.py index 58f931e..b6dcdd6 100644 --- a/tests/unit/cpex/framework/isolated/test_venv_comm.py +++ b/tests/unit/cpex/framework/isolated/test_venv_comm.py @@ -1189,5 +1189,183 @@ def test_del_method_no_running_attribute(self): # Should not raise exception comm.__del__() + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_send_task_exceeds_max_content_size(self, mock_thread, mock_popen, communicator): + """Test send_task raises error when data exceeds max_content_size.""" + # Create a large task that will exceed the limit + large_data = "x" * 5000 + task_data = { + "task_type": "test", + "data": large_data + } + + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stdout = MagicMock() + mock_process.stderr = MagicMock() + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Set a very small max_content_size to trigger the error + with pytest.raises(RuntimeError, match="task_data exceeds max_content_size"): + communicator.send_task("test_script.py", task_data, max_content_size=100) + + # Verify the request_id was cleaned up from response_queues + assert len(communicator.response_queues) == 0 + + @patch("subprocess.Popen") + @patch("threading.Thread") + @patch("uuid.uuid4") + def test_send_task_at_max_content_size_boundary(self, mock_uuid, mock_thread, mock_popen, communicator): + """Test send_task works when data is exactly at the limit.""" + # Use a fixed UUID to make size calculation predictable + mock_uuid.return_value = Mock(hex="12345678123456781234567812345678") + mock_uuid.return_value.__str__ = Mock(return_value="12345678-1234-5678-1234-567812345678") + + task_data = {"task_type": "test", "data": "small"} + + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stdout = MagicMock() + mock_process.stderr = MagicMock() + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + # Mock the Queue to return response + with patch("cpex.framework.isolated.venv_comm.Queue") as mock_queue_class: + mock_queue_instance = MagicMock() + mock_queue_instance.get.return_value = { + "status": "success", + "result": "ok", + "request_id": "test-id" + } + mock_queue_class.return_value = mock_queue_instance + + communicator.start_worker("test_script.py") + + # Calculate the exact size of the serialized data with the mocked UUID + import orjson + test_data_copy = task_data.copy() + test_data_copy["request_id"] = "12345678-1234-5678-1234-567812345678" + serialized_size = len(orjson.dumps(test_data_copy).decode()) + + # Set max_content_size to exactly the serialized size + result = communicator.send_task("test_script.py", task_data, max_content_size=serialized_size) + + assert result == {"status": "success", "result": "ok"} + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_send_task_with_custom_max_content_size(self, mock_thread, mock_popen, communicator): + """Test send_task respects custom max_content_size parameter.""" + # Create task data that's moderately sized + task_data = { + "task_type": "test", + "data": "x" * 1000, + "metadata": {"key": "value"} + } + + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stdout = MagicMock() + mock_process.stderr = MagicMock() + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + with patch("cpex.framework.isolated.venv_comm.Queue") as mock_queue_class: + mock_queue_instance = MagicMock() + mock_queue_instance.get.return_value = { + "status": "success", + "result": "processed", + "request_id": "test-id" + } + mock_queue_class.return_value = mock_queue_instance + + communicator.start_worker("test_script.py") + + # Should succeed with large max_content_size + result = communicator.send_task("test_script.py", task_data, max_content_size=50000) + assert result == {"status": "success", "result": "processed"} + + # Should fail with small max_content_size + with pytest.raises(RuntimeError, match="task_data exceeds max_content_size"): + communicator.send_task("test_script.py", task_data, max_content_size=500) + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_send_task_default_max_content_size(self, mock_thread, mock_popen, communicator): + """Test send_task uses default max_content_size of 10MB.""" + # Create a task that's under 10MB + task_data = { + "task_type": "test", + "data": "x" * 100000 # 100KB + } + + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stdout = MagicMock() + mock_process.stderr = MagicMock() + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + with patch("cpex.framework.isolated.venv_comm.Queue") as mock_queue_class: + mock_queue_instance = MagicMock() + mock_queue_instance.get.return_value = { + "status": "success", + "result": "ok", + "request_id": "test-id" + } + mock_queue_class.return_value = mock_queue_instance + + communicator.start_worker("test_script.py") + + # Should succeed with default max_content_size (10MB) + result = communicator.send_task("test_script.py", task_data) + assert result == {"status": "success", "result": "ok"} + + @patch("subprocess.Popen") + @patch("threading.Thread") + def test_send_task_very_large_data_exceeds_default_limit(self, mock_thread, mock_popen, communicator): + """Test send_task fails when data exceeds default 10MB limit.""" + # Create a task that exceeds 10MB + task_data = { + "task_type": "test", + "data": "x" * 11000000 # ~11MB + } + + mock_process = MagicMock() + mock_process.stdin = MagicMock() + mock_process.stdout = MagicMock() + mock_process.stderr = MagicMock() + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + + mock_thread_instance = MagicMock() + mock_thread.return_value = mock_thread_instance + + communicator.start_worker("test_script.py") + + # Should fail with default max_content_size + with pytest.raises(RuntimeError, match="task_data exceeds max_content_size"): + communicator.send_task("test_script.py", task_data) + + # Verify cleanup happened + assert len(communicator.response_queues) == 0 + # Made with Bob From 1aea657cf86aea413f6e0505e4dcf488cb668f54 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 15 Apr 2026 16:32:08 -0400 Subject: [PATCH 18/88] enh: model updates for PluginManifest, InstalledPluginInfo, and InstalledPluginRegistry Signed-off-by: habeck --- cpex/framework/models.py | 177 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 176 insertions(+), 1 deletion(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 947bcfb..47de1f9 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -10,7 +10,7 @@ """ # Standard -import asyncio +import json import logging import os import re @@ -1393,13 +1393,118 @@ def to_json(self) -> dict[str, Any]: """ # Get the base serialization from Pydantic data = self.model_dump(mode="json", exclude_none=False, exclude_unset=False) + return data +class Monorepo(BaseModel): + """Monorepo model. + Attributes: + repo_url (str): The URL of the git monorepo. e.g. https://github.ibm.com/habeck/contextforge-plugins-python + package_source (str): The URL of a specifc plugin folder in the git monorepo + e.g. pii_filter + The cpex cli injects the value when it scans the repo. + """ + + repo_url: str + package_source: str + package_folder: str + + +class PiPyRepo(BaseModel): + """PyPi model. + Attributes: + name (str): The name of the pypi package. + """ + + pypi_package: str + version_constraint: Optional[str] + + @field_validator("pypi_package", mode="after") + @classmethod + def validate_pypi_package(cls, pypi_package: str | None) -> str | None: + """Validate PyPI package name format. + + Args: + pypi_package: The PyPI package name to validate. + + Returns: + The validated package name or None if none is set. + + Raises: + ValueError: If the package name is invalid. + """ + if pypi_package is not None and pypi_package != "": + # PyPI package names must contain only ASCII letters, numbers, hyphens, underscores, and periods + # They cannot start or end with hyphens or periods + if not pypi_package.strip(): + raise ValueError("PyPI package name cannot be empty or whitespace") + + # Check for valid characters + import re + + if not re.match(r"^[a-zA-Z0-9]([a-zA-Z0-9._-]*[a-zA-Z0-9])?$", pypi_package): + raise ValueError( + f"Invalid PyPI package name '{pypi_package}'. " + "Package names must start and end with a letter or number, " + "and can only contain ASCII letters, numbers, hyphens, underscores, and periods." + ) + + # Check length (PyPI has a 214 character limit for package names) + if len(pypi_package) > 214: + raise ValueError(f"PyPI package name '{pypi_package}' exceeds maximum length of 214 characters") + + return pypi_package if pypi_package != "" else None + + @field_validator("version_constraint", mode="after") + @classmethod + def validate_version_constraint(cls, version_constraint: str | None) -> str | None: + """Validate semantic version constraint format. + + Args: + version_constraint: The version constraint to validate. + + Returns: + The validated version constraint or None if none is set. + + Raises: + ValueError: If the version constraint is invalid. + """ + if version_constraint is not None and version_constraint != "": + if not version_constraint.strip(): + raise ValueError("Version constraint cannot be empty or whitespace") + + # Validate semantic version constraint format (e.g., ">=1.0.0,<2.0.0", "~=1.2.3", "==1.0.0") + import re + + # Pattern for version specifiers: operator + optional space + version number + version_pattern = re.compile(r"^(==|!=|<=|>=|<|>|~=|===)\s*" r"\d+(\.\d+)*" r"([a-zA-Z0-9._-]*)?$") + + # Split by comma for multiple constraints + constraints = [c.strip() for c in version_constraint.split(",")] + + for constraint in constraints: + if not constraint: + raise ValueError("Version constraint cannot contain empty parts") + + if not version_pattern.match(constraint): + raise ValueError( + f"Invalid version constraint '{constraint}'. " + "Must follow PEP 440 format (e.g., '>=1.0.0', '~=1.2.3', '==1.0.0,<2.0.0')" + ) + + if len(version_constraint) > 255: + raise ValueError(f"Version constraint '{version_constraint}' exceeds maximum length of 255 characters") + + return version_constraint if version_constraint != "" else None + + class PluginManifest(BaseModel): """Plugin manifest. Attributes: + name (str): The name of the plugin. + kind (str): The class name (for native plugins) | external | isolated_venv description (str): A description of the plugin. author (str): The author of the plugin. version (str): version of the plugin. @@ -1408,12 +1513,46 @@ class PluginManifest(BaseModel): default_config (dict[str, Any]): the default configurations. """ + name: str + kind: str description: str author: str version: str tags: list[str] available_hooks: list[str] default_config: dict[str, Any] + monorepo: Optional[Monorepo] = None + package_info: Optional[PiPyRepo] = None + + def suggest_instance_name(self) -> str: + """Suggest a name for the plugin instance. + Returns: + str: A suggested name for the plugin instance. + """ + return self.name.lower().replace(" ", "-") + + def create_instance_config( + self, instance_name: str, mode: PluginMode, priority: int = 100, config: Optional[dict[str, Any]] = None + ) -> PluginConfig: + """Create a plugin instance config. + Returns: + PluginConfig: A plugin instance config. + """ + new_config = self.default_config.copy() + if config is not None: + new_config.update(config) + return PluginConfig( + name=instance_name, + kind=self.kind, + mode=mode, + priority=priority, + description=self.description, + author=self.author, + version=self.version, + tags=self.tags, + hooks=self.available_hooks, + config=new_config, + ) class PluginErrorModel(BaseModel): @@ -2126,4 +2265,40 @@ class PluginInstallationType(StrEnum): BUNDLED = "bundled" # Pre-installed with framework PYPI = "pypi" # Installed from PyPI GIT = "git" # Installed from Git repo + MONOREPO = "monorepo" # Installed from git monorepo LOCAL = "local" # Installed from local path + + +class InstalledPluginInfo(BaseModel): + """Plugin installation information.""" + + name: str + kind: str + version: Optional[str] = None + installation_type: PluginInstallationType + installation_path: str + installed_at: str + installed_by: str + package_source: Optional[str] = None + editable: bool = False + + +class InstalledPluginRegistry(BaseModel): + """Installed plugin registry.""" + + plugins: List[InstalledPluginInfo] = [] + + def register_plugin(self, plugin: InstalledPluginInfo) -> None: + """Register a new plugin in the registry""" + # load the registry + self.plugins.append(plugin) + self.save() + + def save(self) -> None: + """Serialize the registry to disk.""" + DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) + DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" + + ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE + with open(ipr_file, "w", encoding="utf-8") as ipr: + json.dump(self.model_dump(), ipr, indent=2) From ca260dc1b9fb7c19df8ea55c039c13228d065a28 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 15 Apr 2026 16:38:29 -0400 Subject: [PATCH 19/88] enh: example values for git monorepo installation Signed-off-by: habeck --- .env.example | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.env.example b/.env.example index b235ca3..205d3fe 100644 --- a/.env.example +++ b/.env.example @@ -9,6 +9,20 @@ # Path to main plugins configuration file # PLUGINS_CONFIG_FILE=plugins/config.yaml +### Plugin installation +# Comma Separated Values used by install with --type monorepo +# PLUGINS_REPO_URLS="https://github.com/ibm/cpex-plugins" + +# registry path +# PLUGIN_REGISTRY_FOLDER=data + +# Github API +# PLUGINS_GITHUB_API=api.github.com + +# PLUGINS_GITHUB_TOKEN= +### end Plugin installation + + # Logging level for plugin framework components # PLUGINS_LOG_LEVEL=INFO From e4d568f464129bf33e1d7580037277c98bbb1c13 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 15 Apr 2026 16:39:35 -0400 Subject: [PATCH 20/88] enh: add ConfigSaver class to ConfigLoader Signed-off-by: habeck --- cpex/framework/loader/config.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/cpex/framework/loader/config.py b/cpex/framework/loader/config.py index 93a99bf..a7b1eb7 100644 --- a/cpex/framework/loader/config.py +++ b/cpex/framework/loader/config.py @@ -79,3 +79,22 @@ def load_config(config: str, use_jinja: bool = True) -> Config: except FileNotFoundError: # Graceful fallback for tests and minimal environments without plugin config return Config(plugins=[], plugin_dirs=[]) + + +class ConfigSaver: + """ + A configuration saver + """ + + @staticmethod + def save_config(config: Config, config_path: str) -> None: + """ + Save the supplied configuration data to the filesystem + """ + try: + updated_content = yaml.safe_dump(config.model_dump(mode="json"), default_flow_style=False) + with open(os.path.normpath(config_path), "w", encoding="utf-8") as file: + file.write(updated_content) + file.flush() + except OSError as ose: + raise RuntimeError(f"Error saving PluginConfig to {config_path}") from ose From 34aa40bec7eb823fe55db04b77583377393b8801 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 15 Apr 2026 16:40:45 -0400 Subject: [PATCH 21/88] enh: plugin installation catalog Signed-off-by: habeck --- cpex/tools/catalog.py | 519 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 519 insertions(+) create mode 100644 cpex/tools/catalog.py diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py new file mode 100644 index 0000000..be73d71 --- /dev/null +++ b/cpex/tools/catalog.py @@ -0,0 +1,519 @@ +# -*- coding: utf-8 -*- +"""Location: ./cpex/tools/catalog.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Ted Habeck + +This module implements the plugin catalog object. +""" + +import base64 +import importlib.metadata +import importlib.util +import logging +import os +import subprocess +import sys +import tomllib +from pathlib import Path +from typing import Any, Optional + +import httpx +import yaml +from github import Auth, Github + +from cpex.framework.models import PiPyRepo, PluginManifest, PluginPackageInfo +from cpex.tools.settings import get_catalog_settings + +logger = logging.getLogger(__name__) + + +class PluginCatalog: + """ + Utility class to initialize the plugin catalog from configured monorepos + """ + + def __init__(self) -> None: + """Utility for creating the catalog from one or more monorepos.""" + settings = get_catalog_settings() + self.github_api = os.environ.get("PLUGINS_GITHUB_API", settings.PLUGINS_GITHUB_API) + self.github_token = os.environ.get("PLUGINS_GITHUB_TOKEN", None) + self.monorepos = os.environ.get("PLUGINS_REPO_URLS", settings.PLUGINS_REPO_URLS or "").split(",") + self.plugin_folder = os.environ.get("PLUGINS_FOLDER", settings.PLUGINS_FOLDER) + self.catalog_folder = os.environ.get("PLUGINS_CATALOG_FOLDER", settings.PLUGINS_CATALOG_FOLDER) + self.manifests: list[PluginManifest] = [] + self.auth = Auth.Token(self.github_token) + self.gh = Github(auth=self.auth, base_url=f"https://{self.github_api}", per_page=100) + self.python_executable = self._get_python_executable() + + def _get_python_executable(self) -> str: + """Get the Python executable path for the current environment.""" + return sys.executable + + def create_output_folder(self) -> None: + """Create the plugin catalog output folder.""" + os.makedirs(self.catalog_folder, exist_ok=True) + + def create_folder(self, base_path, rel_path): + """ + Creates the base_path / rel_path folder to store data in. + """ + # elements = rel_path.split("/") + # new_path = Path() + # for i in range(len(elements)): + # new_path = new_path / elements[i] + relpath = Path(base_path) / rel_path + # logger.info("relpath: %s", relpath) + os.makedirs(relpath, exist_ok=True) + + def create_plugin_folder(self, path: str): + """ + Creates the self.plugin_folder/path folder to store the plugin source in. + """ + self.create_folder(self.plugin_folder, path) + + def create_catalog_folder(self, path: str): + """ + Creates the OUTPUT_FOLDER/path folder to store the plugin-manifest.yaml file in. + """ + self.create_folder(self.catalog_folder, path) + # elements = path.split("/") + # new_path = Path() + # for i in range(len(elements) - 1): + # new_path = new_path / elements[i] + # relpath = Path(OUTPUT_FOLDER / new_path) + # # logger.info("relpath: %s", relpath) + # os.makedirs(relpath, exist_ok=True) + + def save_manifest(self, manifest: PluginManifest, path): + """Save a pypi installed manifest to the plugin catalog. + args: + manifest: The plugin manifest to be stored in the catalog + path: the name of the plugin package that was installed + """ + relpath = Path(self.catalog_folder) + relpath = relpath / path + updated_content = yaml.safe_dump(manifest.model_dump(), default_flow_style=False) + with open(relpath, "w", encoding="utf-8") as output: + output.write(updated_content) + output.flush() + + def save_manifest_content(self, content: str, path, repo_url: httpx.URL): + """ + write the manifest content to the supplied path relative to the ouptut folder, + injecting the monorepo.package_source value before saving the file. + """ + relpath = Path(self.catalog_folder) + relpath = relpath / path + repo_path = path.removesuffix(f"/{relpath.name}") + manifest_data = yaml.safe_load(content) + package_source = f"{repo_url}#subdirectory={repo_path}" + manifest_data["monorepo"] = { + "package_source": f"{package_source}", + "repo_url": f"{str(repo_url)}", + "package_folder": f"{repo_path}", + } + if "tags" not in manifest_data: + manifest_data["tags"] = [] + if "name" not in manifest_data: + manifest_data["name"] = repo_path + if "default_configs" in manifest_data: + manifest_data["default_config"] = manifest_data["default_configs"] + del manifest_data["default_configs"] + if manifest_data["default_config"] is None: + manifest_data["default_config"] = {} + updated_content = yaml.safe_dump(manifest_data, default_flow_style=False) + with open(relpath, "w", encoding="utf-8") as output: + output.write(updated_content) + output.flush() + + def save_content(self, base_path, content: str, path): + """ + write the content to the supplied path relative to the ouptut folder. + """ + relpath = Path(base_path) + relpath = relpath / path + with open(relpath, "w", encoding="utf-8") as output: + output.write(content) + output.flush() + + def save_plugin_content(self, content: str, path): + """ + write the content to the supplied path relative to the plugin folder. + """ + self.save_content(self.plugin_folder, content, path) + + def save_catalog_content(self, content: str, path): + """ + write the content to the supplied path relative to the ouptut folder. + """ + self.save_content(self.catalog_folder, content, path) + + def download_contents(self, git_url: str, headers, path: str, repo_url: httpx.URL): + """ + Download the contents of the file using the github REST API. + """ + result = httpx.get(git_url, headers=headers) + if result.status_code == 200: + js = result.json() + b64_content = js["content"] + content = str(base64.b64decode(b64_content).decode("utf-8")) + # logger.info("decoded contents:\n%s", content) + # Extract directory path from full path (remove filename) + dir_path = str(Path(path).parent) if "/" in path else "" + if dir_path: + self.create_catalog_folder(dir_path) + self.save_manifest_content(content, path, repo_url) + else: + logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) + + def download_file(self, git_url: str, headers) -> str: + """Download the content of a github file""" + result = httpx.get(git_url, headers=headers) + if result.status_code == 200: + js = result.json() + b64_content = js["content"] + content = base64.b64decode(b64_content).decode("utf-8") + return content + else: + logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) + + def find_and_save_plugin_manifest(self, member: str, name: str, repo_url: httpx.URL, headers) -> PluginManifest: + """Find the plugin-manifest.yaml relative to the supplied member folder, + download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes + """ + self.create_output_folder() + repo_path = repo_url.path.removeprefix("/") + relpath = Path(self.catalog_folder) + relpath = relpath / name / "plugin-manifest.yaml" + self.create_catalog_folder(name) + params = f"q=repo:{repo_path}+path:{member}+filename:plugin-manifest+extension:yaml&per_page=100" + r = httpx.get(f"https://{self.github_api}/search/code", params=params, headers=headers) + logger.info("status code: %d ", r.status_code) + if r.status_code == 200: + result = r.json() + for item in result["items"]: + # only download yaml files, not the README.md which may also contain references to available_hooks + if item["name"].endswith(".yaml") and item["name"].startswith("plugin-manifest"): + manifest_content = yaml.safe_load(self.download_file(item["git_url"], headers=headers)) + package_source = f"{repo_url}#subdirectory={member}" + manifest_content["name"] = name + manifest_content["monorepo"] = { + "package_source": f"{package_source}", + "repo_url": f"{str(repo_url)}", + "package_folder": f"{member}", + } + if "tags" not in manifest_content: + manifest_content["tags"] = [] + if "default_configs" in manifest_content: + manifest_content["default_config"] = manifest_content["default_configs"] + del manifest_content["default_configs"] + if manifest_content["default_config"] is None: + manifest_content["default_config"] = {} + updated_content = yaml.safe_dump(manifest_content, default_flow_style=False) + with open(relpath, "w", encoding="utf-8") as output: + output.write(updated_content) + output.flush() + else: + logger.warning("ignoring item[name]=%s. Not a yaml file.", item["name"]) + else: + logger.error("Catalog update failed with error code: %d", r.status_code) + + def update_catalog_with_cargo(self) -> None: + """Update the plugin catalog with the latest available plugins.""" + # Get the list of available plugins from the Cargo registry + headers = {"accept": "application/vnd.github+json", "authorization": f"Bearer {self.github_token}"} + self.create_output_folder() + for repo in self.monorepos: + repo_url = httpx.URL(repo) + repo_path = repo_url.path.removeprefix("/") + cargo_data = tomllib.loads( + self.download_file( + git_url=f"https://{self.github_api}/repos/{repo_path}/contents/Cargo.toml", headers=headers + ) + ) + for member in cargo_data["workspace"]["members"]: + project_data = tomllib.loads( + self.download_file( + git_url=f"https://{self.github_api}/repos/{repo_path}/contents/{member}/pyproject.toml", + headers=headers, + ) + ) + # project_data.project.name + self.find_and_save_plugin_manifest( + member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers + ) + + def load(self) -> None: + """Load plugin-manifest.yaml files from self.catalog_folder into self.manifests.""" + self.manifests = [] + output_path = Path(self.catalog_folder) + + if not output_path.exists(): + logger.warning("Output folder '%s' does not exist. No manifests to load.", self.catalog_folder) + return + + # Find all plugin-manifest.yaml files recursively + manifest_files = list(output_path.rglob("plugin-manifest.yaml")) + + if not manifest_files: + logger.warning("No plugin-manifest.yaml files found in '%s'.", self.catalog_folder) + return + + logger.info("Found %d plugin-manifest.yaml file(s) in '%s'.", len(manifest_files), self.catalog_folder) + + for manifest_file in manifest_files: + try: + with open(manifest_file, "r", encoding="utf-8") as f: + manifest_data = yaml.safe_load(f) + + # Create PluginManifest object from the loaded data + manifest = PluginManifest(**manifest_data) + self.manifests.append(manifest) + logger.info("Loaded manifest from '%s'.", manifest_file) + + except Exception as e: + logger.error("Failed to load manifest from '%s': %s", manifest_file, str(e)) + + logger.info("Successfully loaded %d manifest(s).", len(self.manifests)) + + def search(self, plugin_name: str | None) -> Optional[list[PluginManifest]]: + """Search for a plugin in the catalog""" + matching: list[PluginManifest] = [] + # lookup the plugin from the catalog's plugin-manifest.yaml + if (self.manifests is not None) and (len(self.manifests) == 0): + self.load() + for manifest in self.manifests: + if plugin_name is not None: + if manifest.name.lower().count(plugin_name) > 0: + matching.append(manifest) + elif plugin_name.lower() in manifest.tags: + matching.append(manifest) + else: + matching.append(manifest) + return matching if len(matching) > 0 else None + + def install_folder_via_pip(self, manifest: PluginManifest) -> None: + """ + Runs a pip install using subfolder syntax + e.g. "git+https://github.com[extra]&subdirectory=folder_name" + + Args: + manifest: The PluginManifest of the plugin to be installed + + Raises: + RuntimeError: If package installation fails. + """ + if manifest.monorepo is None: + raise RuntimeError("PluginManifest.monorepo can not be None.") + try: + # safe_path = package_source.path.strip("/") + # org = safe_path.split("/")[0] + # safe_path = safe_path.replace(org, "", 1).lstrip("/") + repo_url = f"git+{manifest.monorepo.package_source}" + subprocess.run( + [self.python_executable, "-m", "pip", "install", repo_url], check=True, capture_output=True, text=True + ) + logger.info("Successfully installed package: %s", manifest.name) + + except subprocess.CalledProcessError as e: + raise RuntimeError(f"Failed to install {manifest.name}: {e.stderr}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error installing {manifest.name}: {str(e)}") from e + + def _install_package(self, package_name: str, version_constraint: str | None) -> None: + """Install package from PyPI with proper error handling. + + Args: + package_name: The PyPI package name to install. + version_constraint: Optional version constraint (e.g., ">=1.0.0,<2.0.0"). + + Raises: + RuntimeError: If package installation fails. + """ + try: + # Validate package name and constraint format + ppi = PluginPackageInfo(pypi_package=package_name, version_constraint=version_constraint) + tgt = ppi.pypi_package + if ppi.version_constraint is not None: + tgt = f"{tgt}@{ppi.version_constraint}" + + # Use subprocess.run for better error handling + subprocess.run( + [self.python_executable, "-m", "pip", "install", tgt], check=True, capture_output=True, text=True + ) + logger.info("Successfully installed package: %s", package_name) + + except subprocess.CalledProcessError as e: + raise RuntimeError(f"Failed to install {package_name}: {e.stderr}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error installing {package_name}: {str(e)}") from e + + def find_package_path(self, package_name: str) -> Path: + """Locate installed package directory using importlib.metadata. + + Args: + package_name: The name of the installed package. + + Returns: + Path to the package directory. + + Raises: + RuntimeError: If package cannot be found. + """ + try: + # Use importlib.metadata for more reliable package discovery + for dist in importlib.metadata.distributions(): + if dist.name == package_name or dist.metadata.get("Name") == package_name: + if dist.files: + # Get the package root from the plugin-manifest.yaml file + for afile in dist.files: + if afile.name == "plugin-manifest.yaml": + located_path = dist.locate_file(afile) + package_path = Path(str(located_path)).parent + logger.debug("Found package %s at %s", package_name, package_path) + return package_path + + # Fallback to importlib.util.find_spec if metadata approach fails + spec = importlib.util.find_spec(package_name) + if spec is not None and spec.origin is not None: + package_path = Path(spec.origin).parent + logger.debug("Found package %s at %s (via find_spec)", package_name, package_path) + return package_path + + raise RuntimeError(f"Could not find installed package: {package_name}") + + except Exception as e: + if isinstance(e, RuntimeError): + raise + raise RuntimeError(f"Error locating package {package_name}: {str(e)}") from e + + def _load_manifest_file(self, manifest_path: Path) -> dict[str, Any]: + """Load and parse plugin-manifest.yaml with validation. + + Args: + manifest_path: Path to the plugin-manifest.yaml file. + + Returns: + Parsed manifest data as a dictionary. + + Raises: + FileNotFoundError: If manifest file doesn't exist. + RuntimeError: If manifest file cannot be parsed. + """ + if not manifest_path.exists(): + raise FileNotFoundError(f"plugin-manifest.yaml not found at {manifest_path}") + + try: + with open(manifest_path, "r", encoding="utf-8") as f: + manifest_data = yaml.safe_load(f) + + if not isinstance(manifest_data, dict): + raise RuntimeError(f"Invalid manifest format: expected dictionary, got {type(manifest_data).__name__}") + + logger.debug("Successfully loaded manifest from %s", manifest_path) + return manifest_data + + except yaml.YAMLError as e: + raise RuntimeError(f"Failed to parse manifest YAML: {str(e)}") from e + except Exception as e: + raise RuntimeError(f"Error reading manifest file: {str(e)}") from e + + def _normalize_manifest_data( + self, manifest_data: dict[str, Any], package_name: str, version_constraint: str | None + ) -> PluginManifest: + """Transform raw manifest dict into validated PluginManifest model. + + Args: + manifest_data: Raw manifest dictionary from YAML. + package_name: The PyPI package name. + version_constraint: Optional version constraint. + + Returns: + Validated PluginManifest instance. + + Raises: + RuntimeError: If manifest validation fails. + """ + try: + # Set defaults for optional fields + manifest_data.setdefault("tags", []) + manifest_data.setdefault("name", package_name) + + # Handle legacy default_configs field + if "default_config" not in manifest_data and "default_configs" in manifest_data: + manifest_data["default_config"] = manifest_data.pop("default_configs") or {} + + # Validate and create manifest + manifest = PluginManifest(**manifest_data) + + # Ensure package_info is properly set + if manifest.package_info is None: + manifest.package_info = PiPyRepo(pypi_package=package_name, version_constraint=version_constraint) + else: + manifest.package_info.pypi_package = package_name + if version_constraint is not None: + manifest.package_info.version_constraint = version_constraint + + logger.debug("Successfully normalized manifest for %s", package_name) + return manifest + + except Exception as e: + raise RuntimeError(f"Failed to validate manifest for {package_name}: {str(e)}") from e + + def _persist_manifest(self, manifest: PluginManifest, package_name: str) -> None: + """Save manifest to catalog folder. + + Args: + manifest: The validated plugin manifest. + package_name: The package name (used for folder/file naming). + + Raises: + RuntimeError: If manifest cannot be saved. + """ + try: + self.create_catalog_folder(package_name) + self.save_manifest(manifest, f"{package_name}/plugin-manifest.yaml") + logger.info("Successfully saved %s package manifest to plugin catalog", package_name) + except Exception as e: + raise RuntimeError(f"Failed to save manifest for {package_name}: {str(e)}") from e + + def install_from_pypi(self, plugin_package_name: str, version_constraint: str | None = None) -> PluginManifest: + """Install Python package from PyPI and load its plugin-manifest.yaml. + + This method performs the following steps: + 1. Installs the package from PyPI + 2. Locates the installed package directory + 3. Loads and parses the plugin-manifest.yaml + 4. Normalizes and validates the manifest data + 5. Persists the manifest to the plugin catalog + + Args: + plugin_package_name: The name of the package hosted on PyPI. + version_constraint: Optional version constraint (e.g., ">=1.0.0,<2.0.0"). + + Returns: + The loaded and validated plugin manifest. + + Raises: + RuntimeError: If any step of the installation process fails. + FileNotFoundError: If plugin-manifest.yaml is not found in the package. + """ + # Step 1: Install the package + self._install_package(plugin_package_name, version_constraint) + + # Step 2: Find the package location where plugin-manifest.yaml resides + package_path = self.find_package_path(plugin_package_name) + + # Step 3: Load the manifest file + manifest_path = package_path / "plugin-manifest.yaml" + manifest_data = self._load_manifest_file(manifest_path) + + # Step 4: Normalize and validate the manifest + manifest = self._normalize_manifest_data(manifest_data, plugin_package_name, version_constraint) + + # Step 5: Persist to catalog + self._persist_manifest(manifest, plugin_package_name) + + logger.info("Successfully installed and cataloged %s", plugin_package_name) + return manifest From d827cf1859a5c928c1eca537df1e2a0e0dd02d44 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 15 Apr 2026 16:46:48 -0400 Subject: [PATCH 22/88] enh: add support to enable installation of a plugin using the cli from a git monorepo, or pypi. Signed-off-by: habeck --- cpex/tools/cli.py | 414 +++++++++++- cpex/tools/plugin_registry.py | 67 ++ cpex/tools/settings.py | 46 ++ pyproject.toml | 5 +- tests/unit/cpex/tools/test_catalog.py | 888 ++++++++++++++++++++++++++ tests/unit/cpex/tools/test_cli.py | 467 +++++++++++++- 6 files changed, 1852 insertions(+), 35 deletions(-) create mode 100644 cpex/tools/plugin_registry.py create mode 100644 cpex/tools/settings.py create mode 100644 tests/unit/cpex/tools/test_catalog.py diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 071ac01..721bd03 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -26,19 +26,36 @@ """ # Standard +import json import logging +import os import shutil import subprocess # nosec B404 # Safe: Used only for git commands with hardcoded args from pathlib import Path +from typing import List, Optional -# Third-Party +import inquirer import typer +from rich.console import Console from typing_extensions import Annotated # First-Party +from cpex.framework.loader.config import ConfigLoader, ConfigSaver +from cpex.framework.models import ( + Config, + InstalledPluginRegistry, + PluginManifest, + PluginMode, +) from cpex.framework.settings import settings +from cpex.tools.catalog import PluginCatalog + +# Third-Party +from cpex.tools.plugin_registry import PluginRegistry logger = logging.getLogger(__name__) +console = Console() + # --------------------------------------------------------------------------- # Configuration defaults @@ -49,6 +66,8 @@ DEFAULT_AUTHOR_EMAIL = "" DEFAULT_PROJECT_DIR = Path("./.") DEFAULT_INSTALL_MANIFEST = Path("plugins/install.yaml") +DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) +DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" DEFAULT_IMAGE_TAG = "contextforge-plugin:latest" # TBD: add plugin name and version DEFAULT_IMAGE_BUILDER = "docker" DEFAULT_BUILD_CONTEXT = "." @@ -224,43 +243,367 @@ def bootstrap( logger.exception("An error was caught while copying template.") +def list(type: str) -> None: + """List the installed plugins + Args: + type (str): The type of plugins to list. Can be "native" or "external". + + Raises: + typer.Exit: If the type is not "native" or "external". + """ + DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) + os.makedirs(DEFAULT_PLUGIN_REGISTRY_FOLDER, exist_ok=True) + DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" + registered_plugins = None + ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE + if ipr_file.exists(): + with open(ipr_file, "r", encoding="utf-8") as ipr: + registered_plugins = json.load(ipr) + + if registered_plugins: + for plug_in in registered_plugins["plugins"]: + logger.info( + "name: %s version: %s installation type: %s", + plug_in["name"], + plug_in["version"], + plug_in["installation_type"], + ) + else: + logger.info("No plugins registered.") + + +def instance_name_is_unique(config: Config, suggested_instance_name) -> bool: + """See if the instance name already exists in the plugins/config.yaml""" + if config.plugins is not None: + for a_plugin in config.plugins: + if a_plugin.name == suggested_instance_name: + return False + return True + + +def update_plugins_config_yaml(manifest: PluginManifest): + """ + Update the plugins/config.yaml file with the new plugin manifest. + + Args: + manifest (PluginManifest): The plugin manifest to be added to the config.yaml file. + Returns: + bool: True if the update was successful, False otherwise. + """ + plugin_configs: Config = ConfigLoader.load_config(settings.config_file) + suggested_name = manifest.suggest_instance_name() + ctr = 1 + while not instance_name_is_unique(plugin_configs, suggested_instance_name=suggested_name): + suggested_name = manifest.suggest_instance_name() + "_" + str(ctr) + + accepted_name = suggested_name + # TODO: prompt to confirm mode, priority etc and accepted name? + plugin_config = manifest.create_instance_config( + instance_name=accepted_name, mode=PluginMode.SEQUENTIAL, priority=100 + ) + if plugin_configs.plugins is None: + plugin_configs.plugins = [] + plugin_configs.plugins.append(plugin_config) + # now serialize the config + ConfigSaver.save_config(plugin_configs, settings.config_file) + + +def install_from_manifest(manifest: PluginManifest, installation_type: str, catalog: PluginCatalog): + """ + Given a plugin manifest, download the plugin and register it in the plugin registry. + + Args: + manifest (PluginManifest): The plugin manifest to be installed. + installation_type (str): The type of installation, either "monorepo" or "pypi". + catalog (PluginCatalog): The plugin catalog to be used for installation. + Returns: + None: This function does not return anything. + """ + + # download the plugin to the plugins folder + if installation_type == "monorepo": + logger.info("installation type: %s", installation_type) + catalog.install_folder_via_pip(manifest) + plugin_registry: PluginRegistry = PluginRegistry() + # add the newly downloaded plugin to the registry + plugin_registry.update( + manifest=manifest, installation_type=installation_type, catalog=catalog, git_user_name=git_user_name() + ) + update_plugins_config_yaml(manifest) + + +def select_plugin_from_catalog(available_plugins: List[PluginManifest]) -> Optional[PluginManifest]: + """Select a plugin from a list of available plugins using an interactive prompt. + + Args: + available_plugins: List of available plugin manifests to choose from. + + Returns: + The selected PluginManifest, or None if no selection was made. + """ + if not available_plugins: + return None + + # Build choices list with plugin information + choices = [] + for index, plug_in in enumerate(available_plugins): + installation_type = ( + "monorepo" if plug_in.monorepo is not None else "pypi" if plug_in.package_info is not None else "local" + ) + choice = f"{index} name: {plug_in.name} version: {plug_in.version} installation type: {installation_type}" + choices.append((choice, index)) + + # Prompt user to select a plugin + questions = [ + inquirer.List( + "plugins", + message="Which plugin would you like to install?", + choices=choices, + ), + ] + answers = inquirer.prompt(questions) + + if not answers: + return None + + logger.info(json.dumps(answers)) + selected_index = int(answers["plugins"]) + selected_plugin = available_plugins[selected_index] + + # Display selected plugin information + installation_type = ( + "monorepo" + if selected_plugin.monorepo is not None + else "pypi" + if selected_plugin.package_info is not None + else "local" + ) + console.print( + "name: ", + selected_plugin.name, + "Version: ", + selected_plugin.version, + "type: ", + installation_type, + ) + + return selected_plugin + + +def _parse_pypi_source(source: str) -> tuple[str, Optional[str]]: + """Parse PyPI source string to extract package name and version constraint. + + Args: + source: PyPI package source string, optionally with version (e.g., "package@>=1.0.0"). + + Returns: + Tuple of (package_name, version_constraint). + """ + parts = source.split("@", 1) + package_name = parts[0] + version_constraint = parts[1] if len(parts) > 1 else None + return package_name, version_constraint + + +def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: PluginCatalog): + """Common finalization steps for plugin installation. + + Args: + manifest: The plugin manifest to finalize. + install_type: The type of installation (e.g., "pypi", "monorepo"). + catalog: The plugin catalog. + """ + plugin_registry = PluginRegistry() + plugin_registry.update( + manifest=manifest, + installation_type=install_type, + catalog=catalog, + git_user_name=git_user_name() + ) + update_plugins_config_yaml(manifest=manifest) + + +def _install_from_git(source: str, catalog: PluginCatalog): + """Handle git-based installation (not yet implemented). + + Args: + source: Git repository URL or path. + catalog: The plugin catalog. + + Raises: + NotImplementedError: Git installation is not yet supported. + """ + raise NotImplementedError("Git installation is not yet implemented") + + +def _install_from_monorepo(source: str, catalog: PluginCatalog): + """Handle monorepo-based installation. + + Args: + source: Plugin name or search term in the monorepo. + catalog: The plugin catalog. + """ + logger.info("Trying to install from git monorepo: %s", source) + available_plugins = catalog.search(source) + + if not available_plugins: + console.print("No matching plugins found.") + return + + selected_plugin = select_plugin_from_catalog(available_plugins) + if not selected_plugin: + return + + with console.status(f"Installing plugin {selected_plugin.name}...", spinner="dots"): + install_from_manifest(selected_plugin, "monorepo", catalog=catalog) + + console.print(f"✅ {selected_plugin.name} installation complete.") + + +def _install_from_pypi(source: str, catalog: PluginCatalog): + """Handle PyPI-based installation. + + Args: + source: PyPI package name, optionally with version constraint (e.g., "package@>=1.0.0"). + catalog: The plugin catalog. + """ + logger.info("Trying to install from pypi package %s", source) + + # Parse version constraint + package_name, version_constraint = _parse_pypi_source(source) + + with console.status(f"Installing plugin {package_name} via pypi", spinner="dots"): + manifest = catalog.install_from_pypi( + plugin_package_name=package_name, + version_constraint=version_constraint + ) + + if manifest is None: + console.print(f"❌ Failed to install {package_name}") + return + + _finalize_installation(manifest, "pypi", catalog) + console.print(f"✅ {package_name} installation complete.") + + +def install(source: str, install_type: str, catalog: PluginCatalog): + """Install a plugin from its associated source. + + Args: + source: The source of the plugin (package name, repo URL, or search term). + install_type: The type of installation ("git", "monorepo", or "pypi"). + catalog: The catalog of plugins. + + Raises: + ValueError: If install_type is not supported. + NotImplementedError: If the installation type is not yet implemented. + """ + handlers = { + "git": _install_from_git, + "monorepo": _install_from_monorepo, + "pypi": _install_from_pypi, + } + + handler = handlers.get(install_type) + if handler is None: + raise ValueError(f"Unsupported installation type: {install_type}. Must be one of: {', '.join(handlers.keys())}") + + handler(source, catalog) + + +def search(plugin_name: str | None, catalog: PluginCatalog): + """Search for a plugin in the catalog + Args: + plugin_name (str | None): The name of the plugin to search for. + catalog (PluginCatalog): The catalog to search in. + Returns: + list[Plugin]: A list of plugins that match the search criteria. + """ + # lookup the plugin from the catalog's plugin-manifest.yaml + with console.status("Searching for available plugins ...", spinner="dots"): + available_plugins = catalog.search(plugin_name) + if available_plugins: + console.log("Available plugins:") + for plug_in in available_plugins: + msg = f"name: {plug_in.name} version: {plug_in.version} installation type: {'monorepo' if plug_in.monorepo is not None else 'pypi' if plug_in.package_info is not None else 'local'}" + console.log(msg) + else: + console.log("No plugins found.") + + +def info(plugin_name: str | None): + """Search for or list all installed plugins + + Args: + plugin_name (str | None): The name of the plugin to search for. + If None, list all installed plugins. + + Returns: + list[Plugin]: A list of plugins that match the search criteria. + """ + ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE + if ipr_file.exists(): + with open(ipr_file, "r", encoding="utf-8") as ipr: + registry = InstalledPluginRegistry(**json.load(ipr)) + else: + registry = InstalledPluginRegistry() + found = 0 + for plug_in in registry.plugins: + if plugin_name is None: + console.print_json(json.dumps(plug_in.model_dump())) + # console.print(yaml.dump(plug_in.model_dump(), default_flow_style=False)) + found += 1 + else: + if ( + plug_in.name.lower().count(plugin_name.lower()) > 0 + or plug_in.kind.lower().count(plugin_name.lower()) > 0 + ): + console.print_json(json.dumps(plug_in.model_dump())) + # console.print(yaml.dump(plug_in.model_dump())) + found += 1 + if found == 0: + console.print("No plugins found") + + +@app.command( + help="List, search or install plugins.\n\n" + "Examples:\n" + "python cpex/tools/cli.py plugin info pii\n" + "python cpex/tools/cli.py plugin --type monorepo search pii\n" + "python cpex/tools/cli.py plugin --type monorepo install PIIFilterPlugin\n" + "python cpex/tools/cli.py plugin --type pypi install ExamplePlugin@>=0.1.0" +) +def plugin( + cmd_action: str = typer.Argument(None, help="One of: list|info|install|search"), + source: str | None = typer.Argument(None, help="The pypi, git, or local folder where the plugin resides"), + install_type: Annotated[ + str, typer.Option("--type", "-t", help="The types of plugins to list. One of: bundled|pypi|git|local|monorepo") + ] = None, +) -> None: + """Lists installed plugins""" + if cmd_action == "info": + return info(source) + # update the catalog before proceeding with install etc. + pc = PluginCatalog() + # optimized github search REST api takes ~14s to search & download all manifests + console.log("Update catalog") + with console.status("Updating catalog...", spinner="dots"): + pc.update_catalog_with_cargo() + console.log("Catalog update completed.") + + if cmd_action == "list": + return list(install_type) + if cmd_action == "install" and source is not None: + return install(source, install_type, catalog=pc) + if cmd_action == "search": + return search(source, catalog=pc) + + @app.callback() def callback() -> None: # pragma: no cover """This function exists to force 'bootstrap' to be a subcommand.""" -# @app.command(help="Installs plugins into a Python environment.") -# def install( -# install_manifest: Annotated[typer.FileText, typer.Option("--install_manifest", "-i", help="The install manifest describing which plugins to install.")] = DEFAULT_INSTALL_MANIFEST, -# installer: Annotated[str, typer.Option("--installer", "-c", help="The install command to install plugins.")] = DEFAULT_INSTALLER, -# ): -# typer.echo(f"Installing plugin packages from {install_manifest.name}") -# data = yaml.safe_load(install_manifest) -# manifest = InstallManifest.model_validate(data) -# for pkg in manifest.packages: -# typer.echo(f"Installing plugin package {pkg.package} from {pkg.repository}") -# repository = os.path.expandvars(pkg.repository) -# cmd = installer.split(" ") -# if pkg.extras: -# cmd.append(f"{pkg.package}[{','.join(pkg.extras)}]@{repository}") -# else: -# cmd.append(f"{pkg.package}@{repository}") -# subprocess.run(cmd) - - -# @app.command(help="Builds an MCP server to serve plugins as tools.") -# def package( -# image_tag: Annotated[str, typer.Option("--image_tag", "-t", help="The container image tag to generated container.")] = DEFAULT_IMAGE_TAG, -# containerfile: Annotated[Path, typer.Option("--containerfile", "-c", help="The Dockerfile used to build the container.")] = DEFAULT_CONTAINERFILE_PATH, -# builder: Annotated[str, typer.Option("--builder", "-b", help="The container builder, compatible with docker build.")] = DEFAULT_IMAGE_BUILDER, -# build_context: Annotated[Path, typer.Option("--build_context", "-p", help="The container builder context, specified as a path.")] = DEFAULT_BUILD_CONTEXT, -# ): -# typer.echo("Building MCP server image") -# cmd = builder.split(" ") -# cmd.extend(["-f", containerfile, "-t", image_tag, build_context]) -# subprocess.run(cmd) - - def main() -> None: # noqa: D401 - imperative mood is fine here """Entry point for the *mcpplugins* console script. @@ -280,4 +623,9 @@ def main() -> None: # noqa: D401 - imperative mood is fine here if __name__ == "__main__": # pragma: no cover - executed only when run directly + # logging.basicConfig( + # level=logging.INFO, + # format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + # stream=sys.stderr, # Log to stderr to keep stdout clean for coordination + # ) main() diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py new file mode 100644 index 0000000..e4248f2 --- /dev/null +++ b/cpex/tools/plugin_registry.py @@ -0,0 +1,67 @@ +import datetime +import json +import os +from pathlib import Path + +from cpex.framework.models import InstalledPluginInfo, InstalledPluginRegistry, PluginInstallationType, PluginManifest +from cpex.tools.catalog import PluginCatalog + + +class PluginRegistry: + """Plugin registry. + Plugin registry is responsible for storing information about installed plugins. + """ + registry: InstalledPluginRegistry = InstalledPluginRegistry() + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) + os.makedirs(DEFAULT_PLUGIN_REGISTRY_FOLDER, exist_ok=True) + DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" + ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE + if ipr_file.exists(): + with open(ipr_file, "r", encoding="utf-8") as ipr: + self.registry = InstalledPluginRegistry(**json.load(ipr)) + + def update( + self, manifest: PluginManifest, installation_type: str, catalog: PluginCatalog, git_user_name: str + ) -> None: + """ + Given a plugin manifest, register it in the plugin registry. + + Args: + manifest: PluginManifest: The manifest of the plugin to be registered. + installation_type: str: The type of installation (e.g., "local", "global"). + catalog: PluginCatalog: The catalog containing the plugin. + git_user_name: str: The name of the user who installed the plugin. + + Raises: + RuntimeError: If the plugin manifest is invalid or the installation type is not recognized. + """ + package_source = "" + if installation_type == "monorepo": + if manifest.monorepo is None: + raise RuntimeError("PluginManifest.monorepo can not be None.") + package_source = manifest.monorepo.package_source + elif installation_type == "pypi": + if manifest.package_info is None: + raise RuntimeError("PluginManifest.package_info can not be None.") + package_source = manifest.package_info.pypi_package + else: + raise ValueError(f"Invalid installation type: {installation_type}") + + installation_path = catalog.find_package_path(manifest.name) + + ipi: InstalledPluginInfo = InstalledPluginInfo( + name=manifest.name, + kind=manifest.kind, + version=manifest.version, + installation_type=PluginInstallationType(installation_type), + installation_path=str(installation_path.resolve()), + installed_at=datetime.datetime.now(datetime.timezone.utc).isoformat() + "Z", + installed_by=git_user_name, + package_source=package_source, + editable=False, + ) + # add the newly downloaded plugin to the registry + self.registry.register_plugin(ipi) diff --git a/cpex/tools/settings.py b/cpex/tools/settings.py new file mode 100644 index 0000000..16abad6 --- /dev/null +++ b/cpex/tools/settings.py @@ -0,0 +1,46 @@ +"""Location: ./cpex/tools/settings.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Ted Habeck + +This module implements the plugin catalog object. +""" + +import logging + +from dotenv import find_dotenv, load_dotenv +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict + +logger = logging.getLogger(__name__) + + +load_dotenv(find_dotenv("../../.env")) + + +class CatalogSettings(BaseSettings): + """Catalog settings.""" + + PLUGINS_GITHUB_TOKEN: str | None = Field( + default=None, description="The github token for accessing the plugins repositories" + ) + PLUGINS_GITHUB_API: str | None = Field(default="api.github.com", description="api.github.com") + PLUGINS_REPO_URLS: str = Field( + default="https://github.com/ibm/cpex-plugins", description="The url of the plugins repositories comma separated" + ) + PLUGINS_REGISTRY_FOLDER: str | None = Field( + default="data", description="The folder where the plugin registry is located (r/w)" + ) + PLUGINS_CATALOG_FOLDER: str = Field( + default="plugin-catalog", description="The folder where the plugin catalog is located (r/w)" + ) + PLUGINS_FOLDER: str = Field(default="plugins", description="The folder where the plugins are located (r/w)") + model_config = SettingsConfigDict(env_prefix="PLUGINS_", env_file=".env", env_file_encoding="utf-8", extra="ignore") + + +def get_catalog_settings() -> CatalogSettings: + """Get catalog settings. + Returns: + CatalogSettings: Catalog settings. + """ + return CatalogSettings() diff --git a/pyproject.toml b/pyproject.toml index d540f41..554ab38 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,10 @@ dependencies = [ "pydantic-settings>=2.13.1", "pydantic>=2.12.5", "pyyaml>=6.0.3", - "packaging>=26.0" + "packaging>=26.0", + "inquirer>=3.4.1", + "rich>=14.3.3", + "pygithub>=2.9.0" ] [project.scripts] diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py new file mode 100644 index 0000000..c5261c5 --- /dev/null +++ b/tests/unit/cpex/tools/test_catalog.py @@ -0,0 +1,888 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/cpex/tools/test_catalog.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Ted Habeck + +Tests for the cpex.tools.catalog module. +""" + +# Standard +import base64 +import json +import subprocess +import sys +from pathlib import Path +from unittest.mock import MagicMock, Mock, patch, mock_open + +# Third-Party +import httpx +import pytest +import yaml + +# First-Party +from cpex.tools.catalog import PluginCatalog +from cpex.framework.models import PluginManifest, Monorepo + + +# Helper function to create test manifests +def create_test_manifest(**kwargs): + """Create a test PluginManifest with default values.""" + defaults = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin description", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + "monorepo": Monorepo(package_source="https://github.com/org/repo#subdirectory=plugin", repo_url="https://github.com/org/repo", package_folder="plugin"), + } + defaults.update(kwargs) + return PluginManifest(**defaults) + + +@pytest.fixture +def mock_github_env(): + """Fixture to provide a mocked GitHub environment.""" + with ( + patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), + patch("cpex.tools.catalog.Github"), + ): + yield + + +class TestPluginCatalogInit: + """Tests for PluginCatalog initialization.""" + + def test_init_with_defaults(self): + """Test initialization with default environment variables.""" + with ( + patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}, clear=True), + patch("cpex.tools.catalog.Github"), + ): + catalog = PluginCatalog() + assert catalog.github_api == "api.github.com" + assert catalog.github_token == "test_token" + assert catalog.monorepos == ["https://github.com/ibm/cpex-plugins"] + assert catalog.plugin_folder == "plugins" + assert catalog.manifests == [] + assert catalog.python_executable == sys.executable + + def test_init_with_custom_env_vars(self): + """Test initialization with custom environment variables.""" + with ( + patch.dict( + "os.environ", + { + "PLUGINS_GITHUB_API": "api.github.example.com", + "PLUGINS_GITHUB_TOKEN": "test_token", + "PLUGINS_REPO_URLS": "https://github.com/org/repo1,https://github.com/org/repo2", + "PLUGINS_FOLDER": "custom_plugins", + }, + ), + patch("cpex.tools.catalog.Github"), + ): + catalog = PluginCatalog() + assert catalog.github_api == "api.github.example.com" + assert catalog.github_token == "test_token" + assert catalog.monorepos == ["https://github.com/org/repo1", "https://github.com/org/repo2"] + assert catalog.plugin_folder == "custom_plugins" + + def test_get_python_executable(self): + """Test _get_python_executable returns sys.executable.""" + with ( + patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), + patch("cpex.tools.catalog.Github"), + ): + catalog = PluginCatalog() + assert catalog._get_python_executable() == sys.executable + + +class TestPluginCatalogFolderOperations: + """Tests for folder creation operations.""" + + def test_create_output_folder(self, tmp_path, mock_github_env): + """Test creating the output folder.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "test-catalog") + catalog.create_output_folder() + assert (tmp_path / "test-catalog").exists() + + def test_create_folder(self, tmp_path, mock_github_env): + """Test creating a folder with relative path.""" + catalog = PluginCatalog() + catalog.create_folder(tmp_path, "subdir/file.txt") + assert (tmp_path / "subdir").exists() + + def test_create_plugin_folder(self, tmp_path, mock_github_env): + """Test creating a plugin folder.""" + catalog = PluginCatalog() + catalog.plugin_folder = str(tmp_path / "plugins") + catalog.create_plugin_folder("test_plugin/plugin.py") + assert (tmp_path / "plugins" / "test_plugin").exists() + + def test_create_catalog_folder(self, tmp_path, mock_github_env): + """Test creating a catalog folder.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.create_catalog_folder("test_plugin/plugin-manifest.yaml") + assert (tmp_path / "catalog" / "test_plugin").exists() + + +class TestPluginCatalogSaveOperations: + """Tests for save operations.""" + + def test_save_content(self, tmp_path, mock_github_env): + """Test saving content to a file.""" + catalog = PluginCatalog() + test_content = "test content" + catalog.save_content(tmp_path, test_content, "test.txt") + assert (tmp_path / "test.txt").read_text() == test_content + + def test_save_plugin_content(self, tmp_path, mock_github_env): + """Test saving plugin content.""" + catalog = PluginCatalog() + catalog.plugin_folder = str(tmp_path / "plugins") + (tmp_path / "plugins").mkdir() + test_content = "plugin code" + catalog.save_plugin_content(test_content, "plugin.py") + assert (tmp_path / "plugins" / "plugin.py").read_text() == test_content + + def test_save_catalog_content(self, tmp_path, mock_github_env): + """Test saving catalog content.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + (tmp_path / "catalog").mkdir() + test_content = "catalog data" + catalog.save_catalog_content(test_content, "manifest.yaml") + assert (tmp_path / "catalog" / "manifest.yaml").read_text() == test_content + + def test_save_manifest_content(self, tmp_path, mock_github_env): + """Test saving manifest content with transformations.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog_dir = tmp_path / "catalog" / "test_plugin" + catalog_dir.mkdir(parents=True) + + manifest_yaml = """ +name: test_plugin +version: 1.0.0 +kind: native +description: Test +author: Test Author +available_hooks: [tools] +default_configs: + key: value +""" + repo_url = httpx.URL("https://github.com/org/repo") + catalog.save_manifest_content(manifest_yaml, "test_plugin/plugin-manifest.yaml", repo_url) + + saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" + assert saved_file.exists() + + saved_data = yaml.safe_load(saved_file.read_text()) + assert saved_data["monorepo"]["package_source"] == "https://github.com/org/repo#subdirectory=test_plugin" + assert "tags" in saved_data + assert "default_config" in saved_data + assert "default_configs" not in saved_data # Should be renamed to default_config + + def test_save_manifest_content_without_name(self, tmp_path, mock_github_env): + """Test saving manifest content without name field.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog_dir = tmp_path / "catalog" / "test_plugin" + catalog_dir.mkdir(parents=True) + + manifest_yaml = """ +version: 1.0.0 +kind: native +description: Test +author: Test Author +available_hooks: [tools] +default_config: + key: value +""" + repo_url = httpx.URL("https://github.com/org/repo") + catalog.save_manifest_content(manifest_yaml, "test_plugin/plugin-manifest.yaml", repo_url) + + saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" + assert saved_file.exists() + + saved_data = yaml.safe_load(saved_file.read_text()) + assert saved_data["name"] == "test_plugin" # Should be set from path + + def test_save_manifest_content_with_null_default_configs(self, tmp_path, mock_github_env): + """Test saving manifest content with null default_configs.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog_dir = tmp_path / "catalog" / "test_plugin" + catalog_dir.mkdir(parents=True) + + manifest_yaml = """ +name: test_plugin +version: 1.0.0 +kind: native +description: Test +author: Test Author +available_hooks: [tools] +default_configs: null +""" + repo_url = httpx.URL("https://github.com/org/repo") + catalog.save_manifest_content(manifest_yaml, "test_plugin/plugin-manifest.yaml", repo_url) + + saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" + assert saved_file.exists() + + saved_data = yaml.safe_load(saved_file.read_text()) + assert saved_data["default_config"] == {} # Should be empty dict + + +class TestPluginCatalogDownloadOperations: + """Tests for download operations.""" + + def test_download_contents_success(self, tmp_path, mock_github_env): + """Test successful download of contents.""" + with patch("cpex.tools.catalog.httpx.get") as mock_get: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Mock the HTTP response + manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]" + b64_content = base64.b64encode(manifest_content.encode()).decode() + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"content": b64_content} + mock_get.return_value = mock_response + + repo_url = httpx.URL("https://github.com/org/repo") + # download_contents calls create_catalog_folder which creates the directory + # then save_manifest_content writes the file + catalog.download_contents("https://api.github.com/file", {}, "test_plugin/plugin-manifest.yaml", repo_url) + + assert (tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml").exists() + + def test_download_contents_failure(self, tmp_path, mock_github_env): + """Test failed download of contents.""" + with ( + patch("cpex.tools.catalog.httpx.get") as mock_get, + patch("cpex.tools.catalog.logger") as mock_logger, + ): + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + mock_response = Mock() + mock_response.status_code = 404 + mock_get.return_value = mock_response + + repo_url = httpx.URL("https://github.com/org/repo") + catalog.download_contents("https://api.github.com/file", {}, "test/plugin-manifest.yaml", repo_url) + + mock_logger.error.assert_called_once() + + +class TestPluginCatalogLoadOperations: + """Tests for load operations.""" + + def test_load_no_output_folder(self, tmp_path, mock_github_env): + """Test load when output folder doesn't exist.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "nonexistent") + catalog.load() + assert catalog.manifests == [] + mock_logger.warning.assert_called() + + def test_load_no_manifest_files(self, tmp_path, mock_github_env): + """Test load when no manifest files exist.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + (tmp_path / "catalog").mkdir() + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.load() + assert catalog.manifests == [] + assert mock_logger.warning.call_count >= 1 + + def test_load_with_manifest_files(self, tmp_path, mock_github_env): + """Test load with valid manifest files.""" + catalog_dir = tmp_path / "catalog" / "test_plugin" + catalog_dir.mkdir(parents=True) + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = catalog_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.load() + + assert len(catalog.manifests) == 1 + assert catalog.manifests[0].name == "test_plugin" + + def test_load_with_invalid_manifest(self, tmp_path, mock_github_env): + """Test load with invalid manifest file.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog_dir = tmp_path / "catalog" + catalog_dir.mkdir() + + manifest_file = catalog_dir / "plugin-manifest.yaml" + manifest_file.write_text("invalid: yaml: content:") + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.load() + + assert len(catalog.manifests) == 0 + mock_logger.error.assert_called() + + +class TestPluginCatalogSearchOperations: + """Tests for search operations.""" + + def test_search_empty_catalog(self, mock_github_env): + """Test search with empty catalog.""" + catalog = PluginCatalog() + catalog.manifests = [] + result = catalog.search("test") + assert result is None + + def test_search_by_name(self, mock_github_env): + """Test search by plugin name.""" + catalog = PluginCatalog() + catalog.manifests = [ + create_test_manifest(name="test_plugin", tags=["plugin"]), + create_test_manifest(name="another_plugin", tags=["other"]), + ] + result = catalog.search("test") + assert result is not None + assert len(result) == 1 + assert result[0].name == "test_plugin" + + def test_search_by_tag(self, mock_github_env): + """Test search by tag.""" + catalog = PluginCatalog() + catalog.manifests = [ + create_test_manifest(name="plugin1", tags=["security"]), + create_test_manifest(name="plugin2", tags=["data"]), + ] + result = catalog.search("security") + assert result is not None + assert len(result) == 1 + assert result[0].name == "plugin1" + + def test_search_no_match(self, mock_github_env): + """Test search with no matches.""" + catalog = PluginCatalog() + catalog.manifests = [create_test_manifest(name="test_plugin")] + result = catalog.search("nonexistent") + assert result is None + + def test_search_loads_manifests_if_empty(self, tmp_path, mock_github_env): + """Test search loads manifests if catalog is empty.""" + catalog_dir = tmp_path / "catalog" / "test_plugin" + catalog_dir.mkdir(parents=True) + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = catalog_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + result = catalog.search("test") + + assert result is not None + assert len(result) == 1 + + +class TestPluginCatalogInstallFromPypi: + """Tests for install_from_pypi method.""" + + def test_install_from_pypi_success(self, tmp_path, mock_github_env): + """Test successful installation from PyPI.""" + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + ): + # Create manifest file + package_dir = tmp_path / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Setup mock distribution with plugin-manifest.yaml file + mock_dist = Mock() + mock_dist.name = "test_package" + mock_manifest_file = Mock() + mock_manifest_file.name = "plugin-manifest.yaml" + mock_dist.files = [mock_manifest_file] + mock_dist.locate_file.return_value = manifest_file + mock_distributions.return_value = [mock_dist] + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + result = catalog.install_from_pypi("test_package") + + mock_subprocess.assert_called_once() + assert result.name == "test_package" + + def test_install_from_pypi_install_failure(self, mock_github_env): + """Test installation failure from PyPI.""" + with patch("cpex.tools.catalog.subprocess.run", side_effect=Exception("Install failed")): + catalog = PluginCatalog() + with pytest.raises(RuntimeError, match="Unexpected error installing"): + catalog.install_from_pypi("test_package") + + def test_install_from_pypi_package_not_found(self, mock_github_env): + """Test when package is not found after installation.""" + with ( + patch("cpex.tools.catalog.subprocess.run"), + patch("cpex.tools.catalog.importlib.metadata.distributions", return_value=[]), + patch("cpex.tools.catalog.importlib.util.find_spec", return_value=None), + ): + catalog = PluginCatalog() + with pytest.raises(RuntimeError, match="Could not find installed package"): + catalog.install_from_pypi("test_package") + + def test_install_from_pypi_manifest_not_found(self, tmp_path, mock_github_env): + """Test when manifest file is not found in package.""" + with ( + patch("cpex.tools.catalog.subprocess.run"), + patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + ): + # Setup mock distribution without plugin-manifest.yaml file + mock_dist = Mock() + mock_dist.name = "test_package" + mock_file = Mock() + mock_file.name = "__init__.py" + mock_dist.files = [mock_file] + mock_dist.locate_file.return_value = tmp_path / "test_package" / "__init__.py" + mock_distributions.return_value = [mock_dist] + + package_dir = tmp_path / "test_package" + package_dir.mkdir() + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + with pytest.raises(RuntimeError, match="Could not find installed package"): + catalog.install_from_pypi("test_package") + + def test_install_from_pypi_invalid_manifest(self, tmp_path, mock_github_env): + """Test when manifest file is invalid.""" + with ( + patch("cpex.tools.catalog.subprocess.run"), + patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + ): + package_dir = tmp_path / "test_package" + package_dir.mkdir() + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text("invalid: yaml: content:") + + # Setup mock distribution with plugin-manifest.yaml file + mock_dist = Mock() + mock_dist.name = "test_package" + mock_manifest_file = Mock() + mock_manifest_file.name = "plugin-manifest.yaml" + mock_dist.files = [mock_manifest_file] + mock_dist.locate_file.return_value = manifest_file + mock_distributions.return_value = [mock_dist] + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + with pytest.raises(RuntimeError, match="Failed to parse manifest YAML"): + catalog.install_from_pypi("test_package") + + +class TestPluginCatalogInstallFolderViaPip: + """Tests for install_folder_via_pip method.""" + + def test_install_folder_via_pip_success(self, tmp_path, mock_github_env): + """Test successful installation from monorepo.""" + with patch("cpex.tools.catalog.subprocess.run") as mock_subprocess: + manifest = create_test_manifest( + monorepo=Monorepo( + package_source="https://github.com/org/repo#subdirectory=plugin", + repo_url="https://github.com/org/repo", + package_folder="plugin" + ) + ) + + catalog = PluginCatalog() + catalog.install_folder_via_pip(manifest) + mock_subprocess.assert_called_once() + + def test_install_folder_via_pip_no_monorepo(self, mock_github_env): + """Test installation fails when monorepo is None.""" + manifest = create_test_manifest(monorepo=None) + + catalog = PluginCatalog() + with pytest.raises(RuntimeError, match="PluginManifest.monorepo can not be None"): + catalog.install_folder_via_pip(manifest) + + def test_install_folder_via_pip_subprocess_error(self, mock_github_env): + """Test installation fails on subprocess error.""" + # Create a CalledProcessError with proper arguments + error = subprocess.CalledProcessError(1, ["pip"], stderr="Install failed") + with patch("cpex.tools.catalog.subprocess.run", side_effect=error): + manifest = create_test_manifest( + monorepo=Monorepo( + package_source="https://github.com/org/repo#subdirectory=plugin", + repo_url="https://github.com/org/repo", + package_folder="plugin" + ) + ) + + catalog = PluginCatalog() + with pytest.raises(RuntimeError, match="Failed to install"): + catalog.install_folder_via_pip(manifest) + + +class TestPluginCatalogSaveManifest: + """Tests for save_manifest method.""" + + def test_save_manifest(self, tmp_path, mock_github_env): + """Test saving a manifest to the catalog.""" + catalog_dir = tmp_path / "catalog" / "test_plugin" + catalog_dir.mkdir(parents=True) + + manifest = create_test_manifest(name="test_plugin") + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.save_manifest(manifest, "test_plugin/plugin-manifest.yaml") + + saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" + assert saved_file.exists() + + saved_data = yaml.safe_load(saved_file.read_text()) + assert saved_data["name"] == "test_plugin" + + +class TestPluginCatalogDownloadFile: + """Tests for download_file method.""" + + def test_download_file_success(self, mock_github_env): + """Test successful file download.""" + with patch("cpex.tools.catalog.httpx.get") as mock_get: + catalog = PluginCatalog() + + # Mock the HTTP response + manifest_content = "name: test\nversion: 1.0.0" + b64_content = base64.b64encode(manifest_content.encode()).decode() + mock_response = Mock() + mock_response.status_code = 200 + mock_response.json.return_value = {"content": b64_content} + mock_get.return_value = mock_response + + result = catalog.download_file("https://api.github.com/file", {}) + + assert result == manifest_content + + def test_download_file_failure(self, mock_github_env): + """Test failed file download.""" + with ( + patch("cpex.tools.catalog.httpx.get") as mock_get, + patch("cpex.tools.catalog.logger") as mock_logger, + ): + catalog = PluginCatalog() + + mock_response = Mock() + mock_response.status_code = 404 + mock_get.return_value = mock_response + + result = catalog.download_file("https://api.github.com/file", {}) + + assert result is None + mock_logger.error.assert_called_once() + + +class TestPluginCatalogFindAndSavePluginManifest: + """Tests for find_and_save_plugin_manifest method.""" + + def test_find_and_save_plugin_manifest_success(self, tmp_path, mock_github_env): + """Test successful finding and saving of plugin manifest.""" + with patch("cpex.tools.catalog.httpx.get") as mock_get: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Mock search response + search_response = Mock() + search_response.status_code = 200 + search_response.json.return_value = { + "items": [ + { + "name": "plugin-manifest.yaml", + "path": "test_plugin/plugin-manifest.yaml", + "git_url": "https://api.github.com/repos/org/repo/git/blobs/abc123" + } + ] + } + + # Mock file content response + manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]" + b64_content = base64.b64encode(manifest_content.encode()).decode() + file_response = Mock() + file_response.status_code = 200 + file_response.json.return_value = {"content": b64_content} + + mock_get.side_effect = [search_response, file_response] + + repo_url = httpx.URL("https://github.com/org/repo") + catalog.find_and_save_plugin_manifest("test_plugin", "test_plugin", repo_url, {}) + + saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" + assert saved_file.exists() + + +class TestPluginCatalogUpdateCatalogWithCargo: + """Tests for update_catalog_with_cargo method.""" + + def test_update_catalog_with_cargo_success(self, tmp_path, mock_github_env): + """Test successful catalog update with Cargo workspace.""" + with patch("cpex.tools.catalog.httpx.get") as mock_get: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.monorepos = ["https://github.com/org/repo"] + + # Mock Cargo.toml response + cargo_content = '[workspace]\nmembers = ["plugin1"]' + b64_cargo = base64.b64encode(cargo_content.encode()).decode() + cargo_response = Mock() + cargo_response.status_code = 200 + cargo_response.json.return_value = {"content": b64_cargo} + + # Mock pyproject.toml response + pyproject_content = '[project]\nname = "test_plugin"' + b64_pyproject = base64.b64encode(pyproject_content.encode()).decode() + pyproject_response = Mock() + pyproject_response.status_code = 200 + pyproject_response.json.return_value = {"content": b64_pyproject} + + # Mock search response for manifest + search_response = Mock() + search_response.status_code = 200 + search_response.json.return_value = { + "items": [ + { + "name": "plugin-manifest.yaml", + "path": "plugin1/plugin-manifest.yaml", + "git_url": "https://api.github.com/repos/org/repo/git/blobs/abc123" + } + ] + } + + # Mock manifest content response + manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]" + b64_manifest = base64.b64encode(manifest_content.encode()).decode() + manifest_response = Mock() + manifest_response.status_code = 200 + manifest_response.json.return_value = {"content": b64_manifest} + + mock_get.side_effect = [cargo_response, pyproject_response, search_response, manifest_response] + + catalog.update_catalog_with_cargo() + + assert (tmp_path / "catalog").exists() + + +class TestPluginCatalogSearchEdgeCases: + """Tests for search method edge cases.""" + + def test_search_with_none_plugin_name(self, mock_github_env): + """Test search with None as plugin name returns all manifests.""" + catalog = PluginCatalog() + catalog.manifests = [ + create_test_manifest(name="plugin1"), + create_test_manifest(name="plugin2"), + ] + result = catalog.search(None) + assert result is not None + assert len(result) == 2 + + +class TestPluginCatalogInstallFromPypiExtended: + """Extended tests for install_from_pypi method.""" + + def test_install_from_pypi_with_version_constraint(self, tmp_path, mock_github_env): + """Test installation with version constraint.""" + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + ): + package_dir = tmp_path / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Setup mock distribution with plugin-manifest.yaml file + mock_dist = Mock() + mock_dist.name = "test_package" + mock_manifest_file = Mock() + mock_manifest_file.name = "plugin-manifest.yaml" + mock_dist.files = [mock_manifest_file] + mock_dist.locate_file.return_value = manifest_file + mock_distributions.return_value = [mock_dist] + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + result = catalog.install_from_pypi("test_package", ">=1.0.0") + + mock_subprocess.assert_called_once() + assert result.name == "test_package" + assert result.package_info is not None + assert result.package_info.version_constraint == ">=1.0.0" + + def test_install_from_pypi_with_default_configs(self, tmp_path, mock_github_env): + """Test installation with default_configs field.""" + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + ): + package_dir = tmp_path / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_configs": {"key": "value"}, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Setup mock distribution with plugin-manifest.yaml file + mock_dist = Mock() + mock_dist.name = "test_package" + mock_manifest_file = Mock() + mock_manifest_file.name = "plugin-manifest.yaml" + mock_dist.files = [mock_manifest_file] + mock_dist.locate_file.return_value = manifest_file + mock_distributions.return_value = [mock_dist] + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + result = catalog.install_from_pypi("test_package") + + assert result.default_config == {"key": "value"} + + def test_install_from_pypi_with_existing_package_info(self, tmp_path, mock_github_env): + """Test installation with existing package_info.""" + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + ): + package_dir = tmp_path / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + "package_info": { + "pypi_package": "old_name", + "version_constraint": ">=0.1.0" + } + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Setup mock distribution with plugin-manifest.yaml file + mock_dist = Mock() + mock_dist.name = "test_package" + mock_manifest_file = Mock() + mock_manifest_file.name = "plugin-manifest.yaml" + mock_dist.files = [mock_manifest_file] + mock_dist.locate_file.return_value = manifest_file + mock_distributions.return_value = [mock_dist] + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + result = catalog.install_from_pypi("test_package", ">=2.0.0") + + assert result.package_info is not None + assert result.package_info.pypi_package == "test_package" + assert result.package_info.version_constraint == ">=2.0.0" + + def test_install_from_pypi_with_null_default_configs_in_manifest(self, tmp_path, mock_github_env): + """Test installation with null default_configs in manifest.""" + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + ): + package_dir = tmp_path / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_configs": None, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Setup mock distribution with plugin-manifest.yaml file + mock_dist = Mock() + mock_dist.name = "test_package" + mock_manifest_file = Mock() + mock_manifest_file.name = "plugin-manifest.yaml" + mock_dist.files = [mock_manifest_file] + mock_dist.locate_file.return_value = manifest_file + mock_distributions.return_value = [mock_dist] + + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + result = catalog.install_from_pypi("test_package") + + # default_config should be empty dict when default_configs is None + assert result.default_config == {} + + +# Made with Bob diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index b9cef5f..2dc9f97 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -8,9 +8,11 @@ """ # Standard +import json from pathlib import Path -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, Mock, patch, mock_open +import pytest # We use typer's CliRunner for testing typer apps import click from typer.testing import CliRunner @@ -26,7 +28,16 @@ command_exists, git_user_email, git_user_name, + list, + install_from_manifest, + install, + search, + info, + instance_name_is_unique, + update_plugins_config_yaml, ) +from cpex.tools.plugin_registry import PluginRegistry +from cpex.framework.models import PluginManifest, Monorepo, Config, PluginConfig, PluginMode runner = CliRunner() @@ -34,6 +45,24 @@ _CC_PATCH_TARGET = "cookiecutter.main.cookiecutter" +# Helper function to create test manifests +def create_test_manifest(**kwargs): + """Create a test PluginManifest with default values.""" + defaults = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin description", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + "monorepo": Monorepo(package_source="https://example.com/repo#subdirectory=plugin", repo_url="https://example.com/repo", package_folder="plugin"), + } + defaults.update(kwargs) + return PluginManifest(**defaults) + + # --------------------------------------------------------------------------- # Utility function tests # --------------------------------------------------------------------------- @@ -412,3 +441,439 @@ def test_main_invokes_app(self): main() mock_app.assert_called_once() + + + +# --------------------------------------------------------------------------- +# Plugin management function tests +# --------------------------------------------------------------------------- + + +class TestListFunction: + """Tests for the list() function.""" + + def test_list_with_no_registry_file(self, tmp_path): + """Test list when registry file doesn't exist.""" + with ( + patch("cpex.tools.cli.os.environ.get", return_value=str(tmp_path)), + patch("cpex.tools.cli.logger") as mock_logger, + ): + list("all") + mock_logger.info.assert_called_with("No plugins registered.") + + def test_list_with_existing_plugins(self, tmp_path): + """Test list with existing plugins in registry.""" + registry_file = tmp_path / "installed-plugins.json" + registry_data = { + "plugins": [ + {"name": "test_plugin", "version": "1.0.0", "installation_type": "monorepo"}, + {"name": "another_plugin", "version": "2.0.0", "installation_type": "pypi"}, + ] + } + registry_file.write_text(json.dumps(registry_data)) + + with ( + patch("cpex.tools.cli.os.environ.get", return_value=str(tmp_path)), + patch("cpex.tools.cli.logger") as mock_logger, + ): + list("all") + assert mock_logger.info.call_count == 2 + + +class TestUpdatePluginRegistry: + """Tests for update_plugin_registry() function.""" + + def test_creates_new_registry_if_not_exists(self, tmp_path): + """Test creating a new registry when file doesn't exist.""" + manifest = create_test_manifest() + + mock_catalog = Mock() + mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) + + with ( + patch.dict("os.environ", {"PLUGIN_REGISTRY_FILE": str(tmp_path)}), + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + ): + plugin_registry = PluginRegistry() + plugin_registry.update(manifest, "monorepo", mock_catalog, "test_user") + registry_file = tmp_path / "installed-plugins.json" + assert registry_file.exists() + + def test_updates_existing_registry(self, tmp_path): + """Test updating an existing registry.""" + registry_file = tmp_path / "installed-plugins.json" + registry_data = {"plugins": []} + registry_file.write_text(json.dumps(registry_data)) + + manifest = create_test_manifest( + name="new_plugin", + version="2.0.0", + kind="external", + monorepo=Monorepo(package_source="https://example.com/repo#subdirectory=new_plugin", repo_url="https://example.com/repo", package_folder="new_plugin"), + ) + + mock_catalog = Mock() + mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/new_plugin")) + + with ( + patch.dict("os.environ", {"PLUGIN_REGISTRY_FILE": str(tmp_path)}), + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + ): + plugin_registry = PluginRegistry() + plugin_registry.update(manifest, "monorepo", mock_catalog, "test_user") + updated_data = json.loads(registry_file.read_text()) + assert len(updated_data["plugins"]) == 1 + assert updated_data["plugins"][0]["name"] == "new_plugin" + + +class TestInstanceNameIsUnique: + """Tests for instance_name_is_unique() function.""" + + def test_returns_true_for_unique_name(self): + """Test that unique names return True.""" + existing_plugin = PluginConfig( + name="existing_plugin", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + ) + + config = Config(plugins=[existing_plugin]) + assert instance_name_is_unique(config, "new_plugin") is True + + def test_returns_false_for_duplicate_name(self): + """Test that duplicate names return False.""" + existing_plugin = PluginConfig( + name="existing_plugin", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + ) + + config = Config(plugins=[existing_plugin]) + assert instance_name_is_unique(config, "existing_plugin") is False + + def test_returns_true_for_empty_config(self): + """Test that any name is unique in empty config.""" + config = Config(plugins=[]) + assert instance_name_is_unique(config, "any_plugin") is True + + +class TestUpdatePluginsConfigYaml: + """Tests for update_plugins_config_yaml() function.""" + + def test_updates_config_with_unique_name(self, tmp_path): + """Test updating config with a unique plugin name.""" + manifest = create_test_manifest() + config_file = tmp_path / "config.yaml" + + mock_config = Config(plugins=[]) + + with ( + patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config), + patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, + patch.object(type(manifest), "suggest_instance_name", return_value="test_plugin"), + patch.object(type(manifest), "create_instance_config", return_value=PluginConfig( + name="test_plugin", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + )), + ): + update_plugins_config_yaml(manifest) + mock_save.assert_called_once() + # Verify a plugin was added to the config + assert mock_config.plugins is not None + assert len(mock_config.plugins) == 1 + + def test_generates_unique_name_when_duplicate(self, tmp_path): + """Test that duplicate names get suffixed with counter.""" + manifest = create_test_manifest(name="test_plugin") + config_file = tmp_path / "config.yaml" + + # Create existing plugin with same suggested name + existing_plugin = PluginConfig( + name="test_plugin", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + ) + mock_config = Config(plugins=[existing_plugin]) + + with ( + patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config), + patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, + patch.object(type(manifest), "suggest_instance_name", return_value="test_plugin"), + patch.object(type(manifest), "create_instance_config", return_value=PluginConfig( + name="test_plugin_1", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + )), + ): + update_plugins_config_yaml(manifest) + mock_save.assert_called_once() + # Verify a new plugin was added + assert mock_config.plugins is not None + assert len(mock_config.plugins) == 2 + # The new plugin should have a different name (with suffix) + assert mock_config.plugins[1].name != "test_plugin" + + +class TestInstallFromManifest: + """Tests for install_from_manifest() function.""" + + def test_install_from_monorepo(self, tmp_path): + """Test installing from monorepo.""" + manifest = create_test_manifest() + + mock_catalog = Mock() + mock_catalog.install_folder_via_pip = Mock() + mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) + + with ( + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + patch("cpex.tools.cli.update_plugins_config_yaml"), + ): + install_from_manifest(manifest, "monorepo", mock_catalog) + mock_catalog.install_folder_via_pip.assert_called_once_with(manifest) + + +class TestInstallFunction: + """Tests for install() function.""" + + def test_install_git_not_implemented(self): + """Test that git installation raises NotImplementedError.""" + mock_catalog = Mock() + with pytest.raises(NotImplementedError, match="Git installation is not yet implemented"): + install("source", "git", mock_catalog) + + def test_install_monorepo_no_plugins_found(self): + """Test monorepo install when no plugins found.""" + mock_catalog = Mock() + mock_catalog.search = Mock(return_value=None) + + with patch("cpex.tools.cli.console") as mock_logger: + install("test_plugin", "monorepo", mock_catalog) + mock_logger.print.assert_called_with("No matching plugins found.") + + def test_install_monorepo_with_available_plugins(self, tmp_path): + """Test monorepo install with available plugins.""" + manifest = create_test_manifest() + + mock_catalog = Mock() + mock_catalog.search = Mock(return_value=[manifest]) + mock_catalog.install_folder_via_pip = Mock() + mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) + + with ( + patch("cpex.tools.cli.inquirer.prompt", return_value={"plugins": 0}), + patch("cpex.tools.cli.Console"), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + patch("cpex.tools.cli.update_plugins_config_yaml"), + ): + install("test_plugin", "monorepo", mock_catalog) + mock_catalog.install_folder_via_pip.assert_called_once() + + def test_install_requires_type_parameter(self): + """Test that install raises ValueError for unsupported type.""" + mock_catalog = Mock() + with pytest.raises(ValueError, match="Unsupported installation type"): + install("source", "", mock_catalog) + + +class TestSearchFunction: + """Tests for search() function.""" + + def test_search_with_results(self): + """Test search with matching plugins.""" + manifest = create_test_manifest() + + mock_catalog = Mock() + mock_catalog.search = Mock(return_value=[manifest]) + + with patch("cpex.tools.cli.console") as mock_console: + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + search("test", mock_catalog) + mock_console.log.assert_called() + + def test_search_with_no_results(self): + """Test search with no matching plugins.""" + mock_catalog = Mock() + mock_catalog.search = Mock(return_value=None) + + with patch("cpex.tools.cli.console") as mock_console: + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + search("nonexistent", mock_catalog) + mock_console.log.assert_called_with("No plugins found.") + + +class TestInfoFunction: + """Tests for info() function.""" + + def test_info_with_no_registry(self, tmp_path): + """Test info when registry doesn't exist.""" + with ( + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), + patch("cpex.tools.cli.console") as mock_console, + ): + info(None) + mock_console.print.assert_called_with("No plugins found") + + def test_info_list_all_plugins(self, tmp_path): + """Test info listing all plugins.""" + registry_file = tmp_path / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "plugins", + "installed_at": "2024-01-01T00:00:00Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + with ( + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), + patch("cpex.tools.cli.console") as mock_console, + ): + info(None) + mock_console.print_json.assert_called_once() + + def test_info_search_specific_plugin(self, tmp_path): + """Test info searching for specific plugin.""" + registry_file = tmp_path / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "plugins", + "installed_at": "2024-01-01T00:00:00Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + }, + { + "name": "another_plugin", + "version": "2.0.0", + "kind": "external", + "installation_type": "pypi", + "installation_path": "plugins", + "installed_at": "2024-01-01T00:00:00Z", + "installed_by": "test_user", + "package_source": "https://pypi.org/project/another_plugin", + "editable": False, + }, + ] + } + registry_file.write_text(json.dumps(registry_data)) + + with ( + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), + patch("cpex.tools.cli.console") as mock_console, + ): + info("test") + mock_console.print_json.assert_called_once() + + +class TestPluginCommand: + """Tests for the plugin() command.""" + + def test_plugin_info_command(self, tmp_path): + """Test plugin info command.""" + with ( + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), + patch("cpex.tools.cli.Console"), + ): + result = runner.invoke(app, ["plugin", "info"]) + assert result.exit_code == 0 + + def test_plugin_list_command(self, tmp_path): + """Test plugin list command.""" + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.Console"), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + ): + mock_catalog = Mock() + mock_catalog.update_catalog_with_cargo = Mock() + mock_catalog_class.return_value = mock_catalog + + result = runner.invoke(app, ["plugin", "list"]) + assert result.exit_code == 0 + mock_catalog.update_catalog_with_cargo.assert_called_once() + + def test_plugin_search_command(self): + """Test plugin search command.""" + manifest = create_test_manifest() + + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.Console"), + ): + mock_catalog = Mock() + mock_catalog.update_catalog_with_cargo = Mock() + mock_catalog.search = Mock(return_value=[manifest]) + mock_catalog_class.return_value = mock_catalog + + result = runner.invoke(app, ["plugin", "search", "test"]) + assert result.exit_code == 0 + mock_catalog.search.assert_called_once_with("test") + + def test_plugin_install_command(self, tmp_path): + """Test plugin install command.""" + manifest = create_test_manifest() + + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.Console"), + patch("cpex.tools.cli.inquirer.prompt", return_value={"plugins": 0}), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), + patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + patch("cpex.tools.cli.update_plugins_config_yaml"), + ): + mock_catalog = Mock() + mock_catalog.update_catalog_with_cargo = Mock() + mock_catalog.search = Mock(return_value=[manifest]) + mock_catalog.install_folder_via_pip = Mock() + mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) + mock_catalog_class.return_value = mock_catalog + + result = runner.invoke(app, ["plugin", "install", "test_plugin", "--type", "monorepo"]) + assert result.exit_code == 0 + + +class TestCallbackFunction: + """Tests for the callback() function.""" + + def test_callback_exists(self): + """Test that callback function exists.""" + from cpex.tools.cli import callback + + # callback should be callable and do nothing + callback() From 93c69820757576083c19a859be77782c251dd674 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 16 Apr 2026 10:15:33 -0400 Subject: [PATCH 23/88] chore: doc string fix Signed-off-by: habeck --- cpex/tools/plugin_registry.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index e4248f2..288671a 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -9,11 +9,13 @@ class PluginRegistry: """Plugin registry. - Plugin registry is responsible for storing information about installed plugins. + Plugin registry is responsible for storing information about installed plugins. """ + registry: InstalledPluginRegistry = InstalledPluginRegistry() def __init__(self, *args, **kwargs): + """Initialize the plugin registry.""" super().__init__(*args, **kwargs) DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) os.makedirs(DEFAULT_PLUGIN_REGISTRY_FOLDER, exist_ok=True) From e34aef408440836741f4cb1d046a4b595848e9d8 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 16 Apr 2026 10:38:11 -0400 Subject: [PATCH 24/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/cli.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 721bd03..1a5706c 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -415,10 +415,7 @@ def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: """ plugin_registry = PluginRegistry() plugin_registry.update( - manifest=manifest, - installation_type=install_type, - catalog=catalog, - git_user_name=git_user_name() + manifest=manifest, installation_type=install_type, catalog=catalog, git_user_name=git_user_name() ) update_plugins_config_yaml(manifest=manifest) @@ -473,10 +470,7 @@ def _install_from_pypi(source: str, catalog: PluginCatalog): package_name, version_constraint = _parse_pypi_source(source) with console.status(f"Installing plugin {package_name} via pypi", spinner="dots"): - manifest = catalog.install_from_pypi( - plugin_package_name=package_name, - version_constraint=version_constraint - ) + manifest = catalog.install_from_pypi(plugin_package_name=package_name, version_constraint=version_constraint) if manifest is None: console.print(f"❌ Failed to install {package_name}") From 7b04f8358e6c4398139fc4f307f40a121e8cc8f9 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 16 Apr 2026 12:03:19 -0400 Subject: [PATCH 25/88] chore: remove duplicate code Signed-off-by: habeck --- cpex/tools/cli.py | 19 +++++++------------ tests/unit/cpex/tools/test_cli.py | 20 ++++++++++++++++++-- 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 1a5706c..5cf4a46 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -251,22 +251,17 @@ def list(type: str) -> None: Raises: typer.Exit: If the type is not "native" or "external". """ - DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) - os.makedirs(DEFAULT_PLUGIN_REGISTRY_FOLDER, exist_ok=True) - DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" - registered_plugins = None - ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE - if ipr_file.exists(): - with open(ipr_file, "r", encoding="utf-8") as ipr: - registered_plugins = json.load(ipr) + pr = PluginRegistry() + + registered_plugins = pr.registry.plugins if registered_plugins: - for plug_in in registered_plugins["plugins"]: + for plug_in in registered_plugins: logger.info( "name: %s version: %s installation type: %s", - plug_in["name"], - plug_in["version"], - plug_in["installation_type"], + plug_in.name, + plug_in.version, + plug_in.installation_type, ) else: logger.info("No plugins registered.") diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 2dc9f97..a41177e 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -466,8 +466,24 @@ def test_list_with_existing_plugins(self, tmp_path): registry_file = tmp_path / "installed-plugins.json" registry_data = { "plugins": [ - {"name": "test_plugin", "version": "1.0.0", "installation_type": "monorepo"}, - {"name": "another_plugin", "version": "2.0.0", "installation_type": "pypi"}, + { + "name": "test_plugin", + "kind": "native", + "version": "1.0.0", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + }, + { + "name": "another_plugin", + "kind": "external", + "version": "2.0.0", + "installation_type": "pypi", + "installation_path": "/path/to/another_plugin", + "installed_at": "2024-01-02T00:00:00.000000Z", + "installed_by": "test_user", + }, ] } registry_file.write_text(json.dumps(registry_data)) From 01f65b19c7e9c760d823573c44e3413f3e0f9f11 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 16 Apr 2026 13:21:20 -0400 Subject: [PATCH 26/88] chore: test coverage improvements, remove duplicate code Signed-off-by: habeck --- cpex/tools/cli.py | 8 +- cpex/tools/plugin_registry.py | 2 + tests/unit/cpex/tools/test_cli.py | 155 ++++++++++++++++++------------ 3 files changed, 97 insertions(+), 68 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 5cf4a46..80ed101 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -530,12 +530,8 @@ def info(plugin_name: str | None): Returns: list[Plugin]: A list of plugins that match the search criteria. """ - ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE - if ipr_file.exists(): - with open(ipr_file, "r", encoding="utf-8") as ipr: - registry = InstalledPluginRegistry(**json.load(ipr)) - else: - registry = InstalledPluginRegistry() + registry = PluginRegistry().registry + found = 0 for plug_in in registry.plugins: if plugin_name is None: diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 288671a..26d3b2c 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -24,6 +24,8 @@ def __init__(self, *args, **kwargs): if ipr_file.exists(): with open(ipr_file, "r", encoding="utf-8") as ipr: self.registry = InstalledPluginRegistry(**json.load(ipr)) + else: + self.registry = InstalledPluginRegistry() def update( self, manifest: PluginManifest, installation_type: str, catalog: PluginCatalog, git_user_name: str diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index a41177e..a9397fe 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -9,6 +9,7 @@ # Standard import json +import tempfile from pathlib import Path from unittest.mock import MagicMock, Mock, patch, mock_open @@ -37,7 +38,7 @@ update_plugins_config_yaml, ) from cpex.tools.plugin_registry import PluginRegistry -from cpex.framework.models import PluginManifest, Monorepo, Config, PluginConfig, PluginMode +from cpex.framework.models import PluginManifest, Monorepo, Config, PluginConfig, PluginMode, PiPyRepo runner = CliRunner() @@ -45,6 +46,19 @@ _CC_PATCH_TARGET = "cookiecutter.main.cookiecutter" +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +@pytest.fixture +def temp_registry_dir(tmp_path, monkeypatch): + """Fixture to ensure all tests use a temporary directory for the plugin registry.""" + registry_dir = tmp_path / "test_registry" + registry_dir.mkdir(exist_ok=True) + monkeypatch.setenv("PLUGIN_REGISTRY_FILE", str(registry_dir)) + return registry_dir + + # Helper function to create test manifests def create_test_manifest(**kwargs): """Create a test PluginManifest with default values.""" @@ -452,18 +466,15 @@ def test_main_invokes_app(self): class TestListFunction: """Tests for the list() function.""" - def test_list_with_no_registry_file(self, tmp_path): + def test_list_with_no_registry_file(self, temp_registry_dir): """Test list when registry file doesn't exist.""" - with ( - patch("cpex.tools.cli.os.environ.get", return_value=str(tmp_path)), - patch("cpex.tools.cli.logger") as mock_logger, - ): + with patch("cpex.tools.cli.logger") as mock_logger: list("all") mock_logger.info.assert_called_with("No plugins registered.") - def test_list_with_existing_plugins(self, tmp_path): + def test_list_with_existing_plugins(self, temp_registry_dir): """Test list with existing plugins in registry.""" - registry_file = tmp_path / "installed-plugins.json" + registry_file = temp_registry_dir / "installed-plugins.json" registry_data = { "plugins": [ { @@ -488,10 +499,7 @@ def test_list_with_existing_plugins(self, tmp_path): } registry_file.write_text(json.dumps(registry_data)) - with ( - patch("cpex.tools.cli.os.environ.get", return_value=str(tmp_path)), - patch("cpex.tools.cli.logger") as mock_logger, - ): + with patch("cpex.tools.cli.logger") as mock_logger: list("all") assert mock_logger.info.call_count == 2 @@ -499,25 +507,22 @@ def test_list_with_existing_plugins(self, tmp_path): class TestUpdatePluginRegistry: """Tests for update_plugin_registry() function.""" - def test_creates_new_registry_if_not_exists(self, tmp_path): + def test_creates_new_registry_if_not_exists(self, temp_registry_dir): """Test creating a new registry when file doesn't exist.""" manifest = create_test_manifest() mock_catalog = Mock() mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) - with ( - patch.dict("os.environ", {"PLUGIN_REGISTRY_FILE": str(tmp_path)}), - patch("cpex.tools.cli.git_user_name", return_value="test_user"), - ): + with patch("cpex.tools.cli.git_user_name", return_value="test_user"): plugin_registry = PluginRegistry() plugin_registry.update(manifest, "monorepo", mock_catalog, "test_user") - registry_file = tmp_path / "installed-plugins.json" + registry_file = temp_registry_dir / "installed-plugins.json" assert registry_file.exists() - def test_updates_existing_registry(self, tmp_path): + def test_updates_existing_registry(self, temp_registry_dir): """Test updating an existing registry.""" - registry_file = tmp_path / "installed-plugins.json" + registry_file = temp_registry_dir / "installed-plugins.json" registry_data = {"plugins": []} registry_file.write_text(json.dumps(registry_data)) @@ -531,10 +536,7 @@ def test_updates_existing_registry(self, tmp_path): mock_catalog = Mock() mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/new_plugin")) - with ( - patch.dict("os.environ", {"PLUGIN_REGISTRY_FILE": str(tmp_path)}), - patch("cpex.tools.cli.git_user_name", return_value="test_user"), - ): + with patch("cpex.tools.cli.git_user_name", return_value="test_user"): plugin_registry = PluginRegistry() plugin_registry.update(manifest, "monorepo", mock_catalog, "test_user") updated_data = json.loads(registry_file.read_text()) @@ -542,6 +544,58 @@ def test_updates_existing_registry(self, tmp_path): assert updated_data["plugins"][0]["name"] == "new_plugin" +class TestPluginRegistryCoverage: + """Additional tests to increase coverage for PluginRegistry.""" + + def test_update_with_pypi_installation(self, temp_registry_dir): + """Test registry update for the PyPI installation path.""" + manifest = create_test_manifest( + name="pypi_plugin", + monorepo=None, + package_info=PiPyRepo(pypi_package="pypi-plugin", version_constraint=None), + ) + + mock_catalog = Mock() + mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/pypi_plugin")) + + plugin_registry = PluginRegistry() + plugin_registry.update(manifest, "pypi", mock_catalog, "test_user") + + registry_file = temp_registry_dir / "installed-plugins.json" + updated_data = json.loads(registry_file.read_text()) + assert len(updated_data["plugins"]) == 1 + assert updated_data["plugins"][0]["name"] == "pypi_plugin" + assert updated_data["plugins"][0]["package_source"] == "pypi-plugin" + assert updated_data["plugins"][0]["installation_type"] == "pypi" + + def test_update_raises_for_monorepo_without_monorepo_metadata(self, temp_registry_dir): + """Test monorepo update fails when manifest.monorepo is missing.""" + manifest = create_test_manifest(monorepo=None) + + plugin_registry = PluginRegistry() + + with pytest.raises(RuntimeError, match="PluginManifest.monorepo can not be None."): + plugin_registry.update(manifest, "monorepo", Mock(), "test_user") + + def test_update_raises_for_pypi_without_package_info(self, temp_registry_dir): + """Test PyPI update fails when manifest.package_info is missing.""" + manifest = create_test_manifest(monorepo=None) + + plugin_registry = PluginRegistry() + + with pytest.raises(RuntimeError, match="PluginManifest.package_info can not be None."): + plugin_registry.update(manifest, "pypi", Mock(), "test_user") + + def test_update_raises_for_invalid_installation_type(self, temp_registry_dir): + """Test invalid installation types are rejected.""" + manifest = create_test_manifest() + + plugin_registry = PluginRegistry() + + with pytest.raises(ValueError, match="Invalid installation type: invalid"): + plugin_registry.update(manifest, "invalid", Mock(), "test_user") + + class TestInstanceNameIsUnique: """Tests for instance_name_is_unique() function.""" @@ -639,7 +693,7 @@ def test_generates_unique_name_when_duplicate(self, tmp_path): class TestInstallFromManifest: """Tests for install_from_manifest() function.""" - def test_install_from_monorepo(self, tmp_path): + def test_install_from_monorepo(self, temp_registry_dir): """Test installing from monorepo.""" manifest = create_test_manifest() @@ -648,8 +702,6 @@ def test_install_from_monorepo(self, tmp_path): mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) with ( - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), patch("cpex.tools.cli.git_user_name", return_value="test_user"), patch("cpex.tools.cli.update_plugins_config_yaml"), ): @@ -675,7 +727,7 @@ def test_install_monorepo_no_plugins_found(self): install("test_plugin", "monorepo", mock_catalog) mock_logger.print.assert_called_with("No matching plugins found.") - def test_install_monorepo_with_available_plugins(self, tmp_path): + def test_install_monorepo_with_available_plugins(self, temp_registry_dir): """Test monorepo install with available plugins.""" manifest = create_test_manifest() @@ -687,8 +739,6 @@ def test_install_monorepo_with_available_plugins(self, tmp_path): with ( patch("cpex.tools.cli.inquirer.prompt", return_value={"plugins": 0}), patch("cpex.tools.cli.Console"), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), patch("cpex.tools.cli.git_user_name", return_value="test_user"), patch("cpex.tools.cli.update_plugins_config_yaml"), ): @@ -737,19 +787,15 @@ def test_search_with_no_results(self): class TestInfoFunction: """Tests for info() function.""" - def test_info_with_no_registry(self, tmp_path): + def test_info_with_no_registry(self, temp_registry_dir): """Test info when registry doesn't exist.""" - with ( - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), - patch("cpex.tools.cli.console") as mock_console, - ): + with patch("cpex.tools.cli.console") as mock_console: info(None) mock_console.print.assert_called_with("No plugins found") - def test_info_list_all_plugins(self, tmp_path): + def test_info_list_all_plugins(self, temp_registry_dir): """Test info listing all plugins.""" - registry_file = tmp_path / "installed-plugins.json" + registry_file = temp_registry_dir / "installed-plugins.json" registry_data = { "plugins": [ { @@ -767,17 +813,13 @@ def test_info_list_all_plugins(self, tmp_path): } registry_file.write_text(json.dumps(registry_data)) - with ( - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), - patch("cpex.tools.cli.console") as mock_console, - ): + with patch("cpex.tools.cli.console") as mock_console: info(None) mock_console.print_json.assert_called_once() - def test_info_search_specific_plugin(self, tmp_path): + def test_info_search_specific_plugin(self, temp_registry_dir): """Test info searching for specific plugin.""" - registry_file = tmp_path / "installed-plugins.json" + registry_file = temp_registry_dir / "installed-plugins.json" registry_data = { "plugins": [ { @@ -806,11 +848,7 @@ def test_info_search_specific_plugin(self, tmp_path): } registry_file.write_text(json.dumps(registry_data)) - with ( - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), - patch("cpex.tools.cli.console") as mock_console, - ): + with patch("cpex.tools.cli.console") as mock_console: info("test") mock_console.print_json.assert_called_once() @@ -818,22 +856,17 @@ def test_info_search_specific_plugin(self, tmp_path): class TestPluginCommand: """Tests for the plugin() command.""" - def test_plugin_info_command(self, tmp_path): + def test_plugin_info_command(self, temp_registry_dir): """Test plugin info command.""" - with ( - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), - patch("cpex.tools.cli.Console"), - ): + with patch("cpex.tools.cli.Console"): result = runner.invoke(app, ["plugin", "info"]) assert result.exit_code == 0 - def test_plugin_list_command(self, tmp_path): + def test_plugin_list_command(self, temp_registry_dir): """Test plugin list command.""" with ( patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, patch("cpex.tools.cli.Console"), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), ): mock_catalog = Mock() mock_catalog.update_catalog_with_cargo = Mock() @@ -843,7 +876,7 @@ def test_plugin_list_command(self, tmp_path): assert result.exit_code == 0 mock_catalog.update_catalog_with_cargo.assert_called_once() - def test_plugin_search_command(self): + def test_plugin_search_command(self, temp_registry_dir): """Test plugin search command.""" manifest = create_test_manifest() @@ -860,7 +893,7 @@ def test_plugin_search_command(self): assert result.exit_code == 0 mock_catalog.search.assert_called_once_with("test") - def test_plugin_install_command(self, tmp_path): + def test_plugin_install_command(self, temp_registry_dir): """Test plugin install command.""" manifest = create_test_manifest() @@ -868,8 +901,6 @@ def test_plugin_install_command(self, tmp_path): patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, patch("cpex.tools.cli.Console"), patch("cpex.tools.cli.inquirer.prompt", return_value={"plugins": 0}), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FOLDER", tmp_path), - patch("cpex.tools.cli.DEFAULT_PLUGIN_REGISTRY_FILE", "installed-plugins.json"), patch("cpex.tools.cli.git_user_name", return_value="test_user"), patch("cpex.tools.cli.update_plugins_config_yaml"), ): From a8ff432bf02969d1ffc0bcfc72391c69a8f9b6c4 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 16 Apr 2026 15:42:14 -0400 Subject: [PATCH 27/88] chore: replace cargo with search for pyproject. Signed-off-by: habeck --- cpex/tools/catalog.py | 50 +++++++++++++++------------ cpex/tools/cli.py | 2 +- tests/unit/cpex/tools/test_catalog.py | 37 +++++++++++--------- tests/unit/cpex/tools/test_cli.py | 8 ++--- 4 files changed, 54 insertions(+), 43 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index be73d71..0b9ef8d 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -167,7 +167,7 @@ def download_contents(self, git_url: str, headers, path: str, repo_url: httpx.UR else: logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) - def download_file(self, git_url: str, headers) -> str: + def download_file(self, git_url: str, headers) -> str | None: """Download the content of a github file""" result = httpx.get(git_url, headers=headers) if result.status_code == 200: @@ -178,7 +178,7 @@ def download_file(self, git_url: str, headers) -> str: else: logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) - def find_and_save_plugin_manifest(self, member: str, name: str, repo_url: httpx.URL, headers) -> PluginManifest: + def find_and_save_plugin_manifest(self, member: str, name: str, repo_url: httpx.URL, headers) -> PluginManifest | None: """Find the plugin-manifest.yaml relative to the supplied member folder, download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes """ @@ -195,7 +195,11 @@ def find_and_save_plugin_manifest(self, member: str, name: str, repo_url: httpx. for item in result["items"]: # only download yaml files, not the README.md which may also contain references to available_hooks if item["name"].endswith(".yaml") and item["name"].startswith("plugin-manifest"): - manifest_content = yaml.safe_load(self.download_file(item["git_url"], headers=headers)) + manifest_data = self.download_file(item["git_url"], headers=headers) + if manifest_data is None: + logger.error("Failed to download plugin-manifest from %s", member) + continue + manifest_content = yaml.safe_load(manifest_data) package_source = f"{repo_url}#subdirectory={member}" manifest_content["name"] = name manifest_content["monorepo"] = { @@ -219,30 +223,32 @@ def find_and_save_plugin_manifest(self, member: str, name: str, repo_url: httpx. else: logger.error("Catalog update failed with error code: %d", r.status_code) - def update_catalog_with_cargo(self) -> None: - """Update the plugin catalog with the latest available plugins.""" - # Get the list of available plugins from the Cargo registry + def update_catalog_with_pyproject(self) -> None: + """Update the catalog with the pyproject.toml file.""" headers = {"accept": "application/vnd.github+json", "authorization": f"Bearer {self.github_token}"} self.create_output_folder() for repo in self.monorepos: repo_url = httpx.URL(repo) repo_path = repo_url.path.removeprefix("/") - cargo_data = tomllib.loads( - self.download_file( - git_url=f"https://{self.github_api}/repos/{repo_path}/contents/Cargo.toml", headers=headers - ) - ) - for member in cargo_data["workspace"]["members"]: - project_data = tomllib.loads( - self.download_file( - git_url=f"https://{self.github_api}/repos/{repo_path}/contents/{member}/pyproject.toml", - headers=headers, - ) - ) - # project_data.project.name - self.find_and_save_plugin_manifest( - member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers - ) + params = f"q=repo:{repo_path}+filename:pyproject+extension:toml&per_page=100" + r = httpx.get(f"https://{self.github_api}/search/code", params=params, headers=headers) + logger.info("status code: %d ", r.status_code) + if r.status_code == 200: + project_data = r.json() + for item in project_data["items"]: + if "pyproject.toml" in item["name"]: + member = item['path'].removesuffix('/' + item['name']) + pyproject_data = self.download_file( + git_url=f"https://{self.github_api}/repos/{repo_path}/contents/{member}/pyproject.toml", + headers=headers, + ) + if pyproject_data is None: + logger.warning("Failed to download pyproject.toml from %s", repo) + continue + project_data = tomllib.loads(pyproject_data) + self.find_and_save_plugin_manifest( + member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers + ) def load(self) -> None: """Load plugin-manifest.yaml files from self.catalog_folder into self.manifests.""" diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 80ed101..3bd948d 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -573,7 +573,7 @@ def plugin( # optimized github search REST api takes ~14s to search & download all manifests console.log("Update catalog") with console.status("Updating catalog...", spinner="dots"): - pc.update_catalog_with_cargo() + pc.update_catalog_with_pyproject() console.log("Catalog update completed.") if cmd_action == "list": diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index c5261c5..3353cbb 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -661,24 +661,29 @@ def test_find_and_save_plugin_manifest_success(self, tmp_path, mock_github_env): assert saved_file.exists() -class TestPluginCatalogUpdateCatalogWithCargo: - """Tests for update_catalog_with_cargo method.""" +class TestPluginCatalogUpdateCatalogWithPyproject: + """Tests for update_catalog_with_pyproject method.""" - def test_update_catalog_with_cargo_success(self, tmp_path, mock_github_env): - """Test successful catalog update with Cargo workspace.""" + def test_update_catalog_with_pyproject_success(self, tmp_path, mock_github_env): + """Test successful catalog update with pyproject.toml files.""" with patch("cpex.tools.catalog.httpx.get") as mock_get: catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") catalog.monorepos = ["https://github.com/org/repo"] - # Mock Cargo.toml response - cargo_content = '[workspace]\nmembers = ["plugin1"]' - b64_cargo = base64.b64encode(cargo_content.encode()).decode() - cargo_response = Mock() - cargo_response.status_code = 200 - cargo_response.json.return_value = {"content": b64_cargo} + # Mock search response for pyproject.toml files + search_response = Mock() + search_response.status_code = 200 + search_response.json.return_value = { + "items": [ + { + "name": "pyproject.toml", + "path": "plugin1/pyproject.toml" + } + ] + } - # Mock pyproject.toml response + # Mock pyproject.toml content response pyproject_content = '[project]\nname = "test_plugin"' b64_pyproject = base64.b64encode(pyproject_content.encode()).decode() pyproject_response = Mock() @@ -686,9 +691,9 @@ def test_update_catalog_with_cargo_success(self, tmp_path, mock_github_env): pyproject_response.json.return_value = {"content": b64_pyproject} # Mock search response for manifest - search_response = Mock() - search_response.status_code = 200 - search_response.json.return_value = { + manifest_search_response = Mock() + manifest_search_response.status_code = 200 + manifest_search_response.json.return_value = { "items": [ { "name": "plugin-manifest.yaml", @@ -705,9 +710,9 @@ def test_update_catalog_with_cargo_success(self, tmp_path, mock_github_env): manifest_response.status_code = 200 manifest_response.json.return_value = {"content": b64_manifest} - mock_get.side_effect = [cargo_response, pyproject_response, search_response, manifest_response] + mock_get.side_effect = [search_response, pyproject_response, manifest_search_response, manifest_response] - catalog.update_catalog_with_cargo() + catalog.update_catalog_with_pyproject() assert (tmp_path / "catalog").exists() diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index a9397fe..3e683d2 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -869,12 +869,12 @@ def test_plugin_list_command(self, temp_registry_dir): patch("cpex.tools.cli.Console"), ): mock_catalog = Mock() - mock_catalog.update_catalog_with_cargo = Mock() + mock_catalog.update_catalog_with_pyproject = Mock() mock_catalog_class.return_value = mock_catalog result = runner.invoke(app, ["plugin", "list"]) assert result.exit_code == 0 - mock_catalog.update_catalog_with_cargo.assert_called_once() + mock_catalog.update_catalog_with_pyproject.assert_called_once() def test_plugin_search_command(self, temp_registry_dir): """Test plugin search command.""" @@ -885,7 +885,7 @@ def test_plugin_search_command(self, temp_registry_dir): patch("cpex.tools.cli.Console"), ): mock_catalog = Mock() - mock_catalog.update_catalog_with_cargo = Mock() + mock_catalog.update_catalog_with_pyproject = Mock() mock_catalog.search = Mock(return_value=[manifest]) mock_catalog_class.return_value = mock_catalog @@ -905,7 +905,7 @@ def test_plugin_install_command(self, temp_registry_dir): patch("cpex.tools.cli.update_plugins_config_yaml"), ): mock_catalog = Mock() - mock_catalog.update_catalog_with_cargo = Mock() + mock_catalog.update_catalog_with_pyproject = Mock() mock_catalog.search = Mock(return_value=[manifest]) mock_catalog.install_folder_via_pip = Mock() mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) From f3d3c1bfaaf691c57588a5e40d5e77f9c6ba01a2 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 16 Apr 2026 15:45:06 -0400 Subject: [PATCH 28/88] chore: lint fixes Signed-off-by: habeck --- cpex/tools/catalog.py | 12 +++++++----- cpex/tools/cli.py | 1 - cpex/tools/plugin_registry.py | 9 +++++++++ 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 0b9ef8d..457777b 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -178,7 +178,9 @@ def download_file(self, git_url: str, headers) -> str | None: else: logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) - def find_and_save_plugin_manifest(self, member: str, name: str, repo_url: httpx.URL, headers) -> PluginManifest | None: + def find_and_save_plugin_manifest( + self, member: str, name: str, repo_url: httpx.URL, headers + ) -> PluginManifest | None: """Find the plugin-manifest.yaml relative to the supplied member folder, download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes """ @@ -237,11 +239,11 @@ def update_catalog_with_pyproject(self) -> None: project_data = r.json() for item in project_data["items"]: if "pyproject.toml" in item["name"]: - member = item['path'].removesuffix('/' + item['name']) + member = item["path"].removesuffix("/" + item["name"]) pyproject_data = self.download_file( - git_url=f"https://{self.github_api}/repos/{repo_path}/contents/{member}/pyproject.toml", - headers=headers, - ) + git_url=f"https://{self.github_api}/repos/{repo_path}/contents/{member}/pyproject.toml", + headers=headers, + ) if pyproject_data is None: logger.warning("Failed to download pyproject.toml from %s", repo) continue diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 3bd948d..638ccb6 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -43,7 +43,6 @@ from cpex.framework.loader.config import ConfigLoader, ConfigSaver from cpex.framework.models import ( Config, - InstalledPluginRegistry, PluginManifest, PluginMode, ) diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 26d3b2c..2552dd5 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -1,3 +1,12 @@ +# -*- coding: utf-8 -*- +"""Location: ./cpex/tools/plugin_registry.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Ted Habeck + +This module implements the plugin registry object. +""" + import datetime import json import os From 60f3db0f3aebfac0675ad714d21ab9c19425aaa7 Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 17 Apr 2026 13:33:11 -0400 Subject: [PATCH 29/88] enh: use pygithub apis rather than github rest apis, as they provide automatic backoff when github response with too many requests. Signed-off-by: habeck --- cpex/tools/catalog.py | 316 ++++++++++++++++++-------- cpex/tools/cli.py | 16 +- tests/unit/cpex/tools/test_catalog.py | 101 ++++---- 3 files changed, 282 insertions(+), 151 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 457777b..a046231 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -91,51 +91,36 @@ def save_manifest(self, manifest: PluginManifest, path): manifest: The plugin manifest to be stored in the catalog path: the name of the plugin package that was installed """ - relpath = Path(self.catalog_folder) - relpath = relpath / path + relpath = Path(self.catalog_folder) / path updated_content = yaml.safe_dump(manifest.model_dump(), default_flow_style=False) - with open(relpath, "w", encoding="utf-8") as output: - output.write(updated_content) - output.flush() + relpath.write_text(updated_content, encoding="utf-8") def save_manifest_content(self, content: str, path, repo_url: httpx.URL): """ write the manifest content to the supplied path relative to the ouptut folder, injecting the monorepo.package_source value before saving the file. """ - relpath = Path(self.catalog_folder) - relpath = relpath / path + relpath = Path(self.catalog_folder) / path repo_path = path.removesuffix(f"/{relpath.name}") + manifest_data = yaml.safe_load(content) - package_source = f"{repo_url}#subdirectory={repo_path}" - manifest_data["monorepo"] = { - "package_source": f"{package_source}", - "repo_url": f"{str(repo_url)}", - "package_folder": f"{repo_path}", - } - if "tags" not in manifest_data: - manifest_data["tags"] = [] + + # Set name if not present (different from find_and_save_plugin_manifest which always sets it) if "name" not in manifest_data: manifest_data["name"] = repo_path - if "default_configs" in manifest_data: - manifest_data["default_config"] = manifest_data["default_configs"] - del manifest_data["default_configs"] - if manifest_data["default_config"] is None: - manifest_data["default_config"] = {} + + # Use shared transformation logic + manifest_data = self._transform_manifest_data(manifest_data, manifest_data["name"], repo_path, repo_url) + updated_content = yaml.safe_dump(manifest_data, default_flow_style=False) - with open(relpath, "w", encoding="utf-8") as output: - output.write(updated_content) - output.flush() + relpath.write_text(updated_content, encoding="utf-8") def save_content(self, base_path, content: str, path): """ write the content to the supplied path relative to the ouptut folder. """ - relpath = Path(base_path) - relpath = relpath / path - with open(relpath, "w", encoding="utf-8") as output: - output.write(content) - output.flush() + relpath = Path(base_path) / path + relpath.write_text(content, encoding="utf-8") def save_plugin_content(self, content: str, path): """ @@ -167,90 +152,231 @@ def download_contents(self, git_url: str, headers, path: str, repo_url: httpx.UR else: logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) - def download_file(self, git_url: str, headers) -> str | None: - """Download the content of a github file""" - result = httpx.get(git_url, headers=headers) - if result.status_code == 200: - js = result.json() - b64_content = js["content"] - content = base64.b64decode(b64_content).decode("utf-8") + def download_file(self,repo_path: str, item: dict, headers) -> str | None: + """Download the content of a github file + + Args: + repo_path: Repository path (e.g., 'owner/repo') + item: Dictionary containing the path of the file to download + headers: GitHub API headers + Returns: + Content of the file as a string or None if the file could not be downloaded + """ + # Get the repository using PyGithub + try: + gh_repo = self.gh.get_repo(repo_path) + file_content = gh_repo.get_contents(item["path"]) + content = file_content.decoded_content.decode("utf-8") return content - else: - logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) + except Exception as e: + logger.error("Failed to download file: %s status_code: %d", item["path"], str(e)) + + def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict] | None: + """Search GitHub for plugin-manifest.yaml files in a specific path using PyGithub API. + + Args: + repo_path: Repository path (e.g., 'owner/repo') + member: Directory path within the repository + headers: HTTP headers for authentication (kept for compatibility but not used) + + Returns: + List of search result items as dicts with 'name' and 'git_url' keys, or None if request failed + """ + try: + # Build search query for PyGithub + query = f"repo:{repo_path} path:{member} filename:plugin-manifest extension:yaml" + + # Use PyGithub's search_code method + search_results = self.gh.search_code(query=query) + + logger.info("Found %d plugin-manifest files in %s/%s", search_results.totalCount, repo_path, member) + + # Convert PyGithub ContentFile objects to dict format compatible with existing code + items = [] + for content_file in search_results: + items.append({ + "name": content_file.name, + "path": content_file.path, + "git_url": content_file.git_url, + "html_url": content_file.html_url, + }) + + return items + + except Exception as e: + logger.error("Catalog update failed with error: %s", str(e)) + return None + + def _transform_manifest_data(self, manifest_content: dict, name: str, member: str, repo_url: httpx.URL) -> dict: + """Apply standard transformations to manifest data. + + Args: + manifest_content: Raw manifest data from YAML + name: Plugin name + member: Directory path within the repository + repo_url: Repository URL + + Returns: + Transformed manifest data with monorepo metadata + """ + package_source = f"{repo_url}#subdirectory={member}" + + manifest_content["name"] = name + manifest_content.setdefault("tags", []) + manifest_content["monorepo"] = { + "package_source": package_source, + "repo_url": str(repo_url), + "package_folder": member, + } + + # Normalize default_configs -> default_config + if "default_configs" in manifest_content: + manifest_content["default_config"] = manifest_content.pop("default_configs") or {} + + return manifest_content + + def _process_manifest_item( + self, item: dict, name: str, member: str, repo_url: httpx.URL, headers, relpath: Path, repo_path: str, + ) -> bool: + """Process a single manifest search result item. + + Args: + item: Search result item from GitHub API + name: Plugin name + member: Directory path within the repository + repo_url: Repository URL + headers: HTTP headers for authentication + relpath: Path where manifest should be saved + + Returns: + True if manifest was successfully processed and saved, False otherwise + """ + # Only download yaml files, not the README.md which may also contain references to available_hooks + if not (item["name"].endswith(".yaml") and item["name"].startswith("plugin-manifest")): + logger.warning("ignoring item[name]=%s. Not a yaml file.", item["name"]) + return False + + # manifest_data = self.download_file(repo_path=repo_path, git_url=item["git_url"], headers=headers) + manifest_data = self.download_file(repo_path=repo_path, item=item, headers=headers) + if manifest_data is None: + logger.error("Failed to download plugin-manifest from %s", member) + return False + + manifest_content = yaml.safe_load(manifest_data) + manifest_content = self._transform_manifest_data(manifest_content, name, member, repo_url) + + updated_content = yaml.safe_dump(manifest_content, default_flow_style=False) + relpath.write_text(updated_content, encoding="utf-8") + return True def find_and_save_plugin_manifest( self, member: str, name: str, repo_url: httpx.URL, headers ) -> PluginManifest | None: """Find the plugin-manifest.yaml relative to the supplied member folder, download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes + + Args: + member: Directory path within the repository + name: Plugin name + repo_url: Repository URL + headers: HTTP headers for authentication + + Returns: + None (could be extended to return PluginManifest if needed) """ self.create_output_folder() - repo_path = repo_url.path.removeprefix("/") - relpath = Path(self.catalog_folder) - relpath = relpath / name / "plugin-manifest.yaml" self.create_catalog_folder(name) - params = f"q=repo:{repo_path}+path:{member}+filename:plugin-manifest+extension:yaml&per_page=100" - r = httpx.get(f"https://{self.github_api}/search/code", params=params, headers=headers) - logger.info("status code: %d ", r.status_code) - if r.status_code == 200: - result = r.json() - for item in result["items"]: - # only download yaml files, not the README.md which may also contain references to available_hooks - if item["name"].endswith(".yaml") and item["name"].startswith("plugin-manifest"): - manifest_data = self.download_file(item["git_url"], headers=headers) - if manifest_data is None: - logger.error("Failed to download plugin-manifest from %s", member) - continue - manifest_content = yaml.safe_load(manifest_data) - package_source = f"{repo_url}#subdirectory={member}" - manifest_content["name"] = name - manifest_content["monorepo"] = { - "package_source": f"{package_source}", - "repo_url": f"{str(repo_url)}", - "package_folder": f"{member}", - } - if "tags" not in manifest_content: - manifest_content["tags"] = [] - if "default_configs" in manifest_content: - manifest_content["default_config"] = manifest_content["default_configs"] - del manifest_content["default_configs"] - if manifest_content["default_config"] is None: - manifest_content["default_config"] = {} - updated_content = yaml.safe_dump(manifest_content, default_flow_style=False) - with open(relpath, "w", encoding="utf-8") as output: - output.write(updated_content) - output.flush() - else: - logger.warning("ignoring item[name]=%s. Not a yaml file.", item["name"]) - else: - logger.error("Catalog update failed with error code: %d", r.status_code) - def update_catalog_with_pyproject(self) -> None: - """Update the catalog with the pyproject.toml file.""" + repo_path = repo_url.path.removeprefix("/") + relpath = Path(self.catalog_folder) / name / "plugin-manifest.yaml" + + items = self._search_github_code(repo_path, member, headers) + if items is None: + return None + + for item in items: + if self._process_manifest_item(item, name, member, repo_url, headers, relpath, repo_path): + break # Successfully processed first valid manifest + + return None + + def _process_pyproject( + self, gh_repo, item, repo_url: httpx.URL, headers + ) -> None: + """Process a single pyproject.toml file. + + Args: + gh_repo: PyGithub Repository object + item: Search result item containing pyproject.toml path + repo_url: Repository URL + headers: HTTP headers for authentication + + Raises: + Exception: If processing fails (caller should handle) + """ + # Get the directory path (remove filename) + member = item.path.removesuffix("/" + item.name) + + # Download pyproject.toml content using PyGithub + file_content = gh_repo.get_contents(item.path) + pyproject_data = file_content.decoded_content.decode("utf-8") + + if pyproject_data is None: + logger.warning("Failed to download pyproject.toml from %s", item.path) + return + + # Parse the pyproject.toml + project_data = tomllib.loads(pyproject_data) + + # Find and save the plugin manifest + self.find_and_save_plugin_manifest( + member=member, + name=project_data["project"]["name"], + repo_url=repo_url, + headers=headers + ) + + def update_catalog_with_pyproject(self) -> bool: + """Update the catalog with the pyproject.toml file using PyGithub API.""" + if self.github_token is None: + logger.error("No GitHub token set") + return True + headers = {"accept": "application/vnd.github+json", "authorization": f"Bearer {self.github_token}"} self.create_output_folder() + + # Cache repositories to avoid repeated API calls + repo_cache: dict[str, Any] = {} + for repo in self.monorepos: repo_url = httpx.URL(repo) repo_path = repo_url.path.removeprefix("/") - params = f"q=repo:{repo_path}+filename:pyproject+extension:toml&per_page=100" - r = httpx.get(f"https://{self.github_api}/search/code", params=params, headers=headers) - logger.info("status code: %d ", r.status_code) - if r.status_code == 200: - project_data = r.json() - for item in project_data["items"]: - if "pyproject.toml" in item["name"]: - member = item["path"].removesuffix("/" + item["name"]) - pyproject_data = self.download_file( - git_url=f"https://{self.github_api}/repos/{repo_path}/contents/{member}/pyproject.toml", - headers=headers, - ) - if pyproject_data is None: - logger.warning("Failed to download pyproject.toml from %s", repo) + + try: + # Get repository using PyGithub (with caching) + if repo_path not in repo_cache: + repo_cache[repo_path] = self.gh.get_repo(repo_path) + gh_repo = repo_cache[repo_path] + + # Search for pyproject.toml files using PyGithub search + query = f"repo:{repo_path} filename:pyproject extension:toml" + search_results = self.gh.search_code(query=query) + + logger.info("Found %d pyproject.toml files in %s", search_results.totalCount, repo_path) + + for item in search_results: + if "pyproject.toml" in item.name: + try: + self._process_pyproject(gh_repo, item, repo_url, headers) + except Exception as e: + logger.error("Error processing pyproject.toml at %s: %s", item.path, str(e)) continue - project_data = tomllib.loads(pyproject_data) - self.find_and_save_plugin_manifest( - member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers - ) + + except Exception as e: + logger.error("Error accessing repository %s: %s", repo_path, str(e)) + continue + + return False def load(self) -> None: """Load plugin-manifest.yaml files from self.catalog_folder into self.manifests.""" diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 638ccb6..0979c29 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -31,6 +31,7 @@ import os import shutil import subprocess # nosec B404 # Safe: Used only for git commands with hardcoded args +#import sys from pathlib import Path from typing import List, Optional @@ -289,6 +290,7 @@ def update_plugins_config_yaml(manifest: PluginManifest): ctr = 1 while not instance_name_is_unique(plugin_configs, suggested_instance_name=suggested_name): suggested_name = manifest.suggest_instance_name() + "_" + str(ctr) + ctr += 1 accepted_name = suggested_name # TODO: prompt to confirm mode, priority etc and accepted name? @@ -474,7 +476,7 @@ def _install_from_pypi(source: str, catalog: PluginCatalog): console.print(f"✅ {package_name} installation complete.") -def install(source: str, install_type: str, catalog: PluginCatalog): +def install(source: str, install_type: str | None, catalog: PluginCatalog): """Install a plugin from its associated source. Args: @@ -486,6 +488,9 @@ def install(source: str, install_type: str, catalog: PluginCatalog): ValueError: If install_type is not supported. NotImplementedError: If the installation type is not yet implemented. """ + if install_type is None: + install_type = "monorepo" + handlers = { "git": _install_from_git, "monorepo": _install_from_monorepo, @@ -551,8 +556,10 @@ def info(plugin_name: str | None): @app.command( help="List, search or install plugins.\n\n" + "\ndefault install type is monorepo\n" "Examples:\n" "python cpex/tools/cli.py plugin info pii\n" + "python cpex/tools/cli.py plugin search pii\n" "python cpex/tools/cli.py plugin --type monorepo search pii\n" "python cpex/tools/cli.py plugin --type monorepo install PIIFilterPlugin\n" "python cpex/tools/cli.py plugin --type pypi install ExamplePlugin@>=0.1.0" @@ -572,8 +579,11 @@ def plugin( # optimized github search REST api takes ~14s to search & download all manifests console.log("Update catalog") with console.status("Updating catalog...", spinner="dots"): - pc.update_catalog_with_pyproject() - console.log("Catalog update completed.") + rc = pc.update_catalog_with_pyproject() + if rc == 0: + console.log("Catalog update completed.") + else: + console.log("❌ Catalog update failed.") if cmd_action == "list": return list(install_type) diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 3353cbb..91f6e92 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -590,34 +590,31 @@ class TestPluginCatalogDownloadFile: def test_download_file_success(self, mock_github_env): """Test successful file download.""" - with patch("cpex.tools.catalog.httpx.get") as mock_get: - catalog = PluginCatalog() - - # Mock the HTTP response - manifest_content = "name: test\nversion: 1.0.0" - b64_content = base64.b64encode(manifest_content.encode()).decode() - mock_response = Mock() - mock_response.status_code = 200 - mock_response.json.return_value = {"content": b64_content} - mock_get.return_value = mock_response - - result = catalog.download_file("https://api.github.com/file", {}) - - assert result == manifest_content + catalog = PluginCatalog() + + # Mock the GitHub repository and file content + mock_repo = Mock() + mock_file_content = Mock() + manifest_content = "name: test\nversion: 1.0.0" + mock_file_content.decoded_content = manifest_content.encode() + mock_repo.get_contents.return_value = mock_file_content + catalog.gh.get_repo = Mock(return_value=mock_repo) + + item = {"path": "test_plugin/plugin-manifest.yaml"} + result = catalog.download_file("org/repo", item, {}) + + assert result == manifest_content def test_download_file_failure(self, mock_github_env): """Test failed file download.""" - with ( - patch("cpex.tools.catalog.httpx.get") as mock_get, - patch("cpex.tools.catalog.logger") as mock_logger, - ): + with patch("cpex.tools.catalog.logger") as mock_logger: catalog = PluginCatalog() - mock_response = Mock() - mock_response.status_code = 404 - mock_get.return_value = mock_response + # Mock the GitHub repository to raise an exception + catalog.gh.get_repo = Mock(side_effect=Exception("Not found")) - result = catalog.download_file("https://api.github.com/file", {}) + item = {"path": "test_plugin/plugin-manifest.yaml"} + result = catalog.download_file("org/repo", item, {}) assert result is None mock_logger.error.assert_called_once() @@ -628,37 +625,35 @@ class TestPluginCatalogFindAndSavePluginManifest: def test_find_and_save_plugin_manifest_success(self, tmp_path, mock_github_env): """Test successful finding and saving of plugin manifest.""" - with patch("cpex.tools.catalog.httpx.get") as mock_get: - catalog = PluginCatalog() - catalog.catalog_folder = str(tmp_path / "catalog") - - # Mock search response - search_response = Mock() - search_response.status_code = 200 - search_response.json.return_value = { - "items": [ - { - "name": "plugin-manifest.yaml", - "path": "test_plugin/plugin-manifest.yaml", - "git_url": "https://api.github.com/repos/org/repo/git/blobs/abc123" - } - ] - } - - # Mock file content response - manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]" - b64_content = base64.b64encode(manifest_content.encode()).decode() - file_response = Mock() - file_response.status_code = 200 - file_response.json.return_value = {"content": b64_content} - - mock_get.side_effect = [search_response, file_response] - - repo_url = httpx.URL("https://github.com/org/repo") - catalog.find_and_save_plugin_manifest("test_plugin", "test_plugin", repo_url, {}) - - saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" - assert saved_file.exists() + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Mock the search results + mock_search_result = Mock() + mock_content_file = Mock() + mock_content_file.name = "plugin-manifest.yaml" + mock_content_file.path = "test_plugin/plugin-manifest.yaml" + mock_content_file.git_url = "https://api.github.com/repos/org/repo/git/blobs/abc123" + mock_content_file.html_url = "https://github.com/org/repo/blob/main/test_plugin/plugin-manifest.yaml" + + mock_search_result.totalCount = 1 + mock_search_result.__iter__ = Mock(return_value=iter([mock_content_file])) + + catalog.gh.search_code = Mock(return_value=mock_search_result) + + # Mock the repository and file content + mock_repo = Mock() + manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]" + mock_file_content = Mock() + mock_file_content.decoded_content = manifest_content.encode() + mock_repo.get_contents.return_value = mock_file_content + catalog.gh.get_repo = Mock(return_value=mock_repo) + + repo_url = httpx.URL("https://github.com/org/repo") + catalog.find_and_save_plugin_manifest("test_plugin", "test_plugin", repo_url, {}) + + saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" + assert saved_file.exists() class TestPluginCatalogUpdateCatalogWithPyproject: From a727e1de0a6cb4ec4c19288f0acb2037f8dd4fbf Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 17 Apr 2026 13:52:06 -0400 Subject: [PATCH 30/88] enh: add support for uninstall of plugin Signed-off-by: habeck --- cpex/framework/models.py | 17 + cpex/tools/catalog.py | 27 ++ cpex/tools/cli.py | 102 +++++- cpex/tools/plugin_registry.py | 12 + tests/unit/cpex/tools/test_catalog.py | 252 +++++++++++++++ tests/unit/cpex/tools/test_cli.py | 432 ++++++++++++++++++++++++++ 6 files changed, 839 insertions(+), 3 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 47de1f9..84a051c 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2294,6 +2294,23 @@ def register_plugin(self, plugin: InstalledPluginInfo) -> None: self.plugins.append(plugin) self.save() + def unregister_plugin(self, plugin_name: str) -> bool: + """Unregister a plugin from the registry. + + Args: + plugin_name: The name of the plugin to unregister. + + Returns: + True if the plugin was found and removed, False otherwise. + """ + initial_count = len(self.plugins) + self.plugins = [p for p in self.plugins if p.name != plugin_name] + + if len(self.plugins) < initial_count: + self.save() + return True + return False + def save(self) -> None: """Serialize the registry to disk.""" DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index a046231..55bb9e6 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -651,3 +651,30 @@ def install_from_pypi(self, plugin_package_name: str, version_constraint: str | logger.info("Successfully installed and cataloged %s", plugin_package_name) return manifest + + def uninstall_package(self, package_name: str) -> bool: + """Uninstall a Python package using pip. + + Args: + package_name: The name of the package to uninstall. + + Returns: + True if uninstallation was successful, False otherwise. + + Raises: + RuntimeError: If the uninstallation process fails. + """ + try: + subprocess.run( + [self.python_executable, "-m", "pip", "uninstall", "-y", package_name], + check=True, + capture_output=True, + text=True + ) + logger.info("Successfully uninstalled package: %s", package_name) + return True + + except subprocess.CalledProcessError as e: + raise RuntimeError(f"Failed to uninstall {package_name}: {e.stderr}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error uninstalling {package_name}: {str(e)}") from e diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 0979c29..26bd1d1 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -304,6 +304,35 @@ def update_plugins_config_yaml(manifest: PluginManifest): ConfigSaver.save_config(plugin_configs, settings.config_file) +def remove_from_plugins_config_yaml(plugin_name: str) -> bool: + """ + Remove a plugin from the plugins/config.yaml file. + + Args: + plugin_name: The name of the plugin to remove from the config. + + Returns: + bool: True if the plugin was found and removed, False otherwise. + """ + try: + plugin_configs: Config = ConfigLoader.load_config(settings.config_file) + + if plugin_configs.plugins is None: + return False + + initial_count = len(plugin_configs.plugins) + plugin_configs.plugins = [p for p in plugin_configs.plugins if p.name != plugin_name] + + if len(plugin_configs.plugins) < initial_count: + ConfigSaver.save_config(plugin_configs, settings.config_file) + return True + + return False + except Exception as e: + logger.error("Error removing plugin from config: %s", str(e)) + return False + + def install_from_manifest(manifest: PluginManifest, installation_type: str, catalog: PluginCatalog): """ Given a plugin manifest, download the plugin and register it in the plugin registry. @@ -554,18 +583,76 @@ def info(plugin_name: str | None): console.print("No plugins found") +def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: + """Uninstall a plugin. + + Args: + plugin_name: The name of the plugin to uninstall. + catalog: The plugin catalog. + """ + # Get plugin registry to find the installed plugin + plugin_registry = PluginRegistry() + + # Find the plugin in the registry + installed_plugin = None + for plugin in plugin_registry.registry.plugins: + if plugin.name == plugin_name: + installed_plugin = plugin + break + + if installed_plugin is None: + console.print(f"❌ Plugin '{plugin_name}' is not installed.") + return + + # Confirm uninstallation + console.print(f"Found plugin: {installed_plugin.name} (version {installed_plugin.version})") + console.print(f"Installation type: {installed_plugin.installation_type}") + console.print(f"Installation path: {installed_plugin.installation_path}") + + questions = [ + inquirer.Confirm( + "confirm", + message=f"Are you sure you want to uninstall '{plugin_name}'?", + default=False, + ), + ] + answers = inquirer.prompt(questions) + + if not answers or not answers["confirm"]: + console.print("Uninstall cancelled.") + return + + try: + with console.status(f"Uninstalling plugin {plugin_name}...", spinner="dots"): + # Uninstall the package using pip + catalog.uninstall_package(plugin_name) + + # Remove from plugin registry + plugin_registry.remove(plugin_name) + + # Remove from plugins/config.yaml + remove_from_plugins_config_yaml(plugin_name) + + console.print(f"✅ {plugin_name} uninstalled successfully.") + + except Exception as e: + console.print(f"❌ Failed to uninstall {plugin_name}: {str(e)}") + logger.error("Uninstall error: %s", str(e), exc_info=True) + + @app.command( - help="List, search or install plugins.\n\n" + help="List, search, install or uninstall plugins.\n\n" "\ndefault install type is monorepo\n" "Examples:\n" "python cpex/tools/cli.py plugin info pii\n" "python cpex/tools/cli.py plugin search pii\n" "python cpex/tools/cli.py plugin --type monorepo search pii\n" "python cpex/tools/cli.py plugin --type monorepo install PIIFilterPlugin\n" - "python cpex/tools/cli.py plugin --type pypi install ExamplePlugin@>=0.1.0" + "python cpex/tools/cli.py plugin --type pypi install ExamplePlugin@>=0.1.0\n" + "python cpex/tools/cli.py plugin uninstall PIIFilterPlugin" ) def plugin( - cmd_action: str = typer.Argument(None, help="One of: list|info|install|search"), + cmd_action: str = typer.Argument(None, help="One of: list|info|install|search|uninstall"), source: str | None = typer.Argument(None, help="The pypi, git, or local folder where the plugin resides"), install_type: Annotated[ str, typer.Option("--type", "-t", help="The types of plugins to list. One of: bundled|pypi|git|local|monorepo") @@ -574,6 +661,15 @@ def plugin( """Lists installed plugins""" if cmd_action == "info": return info(source) + + # For uninstall, we don't need to update the catalog + if cmd_action == "uninstall": + if source is None: + console.print("❌ Please specify a plugin name to uninstall.") + return + pc = PluginCatalog() + return uninstall(source, catalog=pc) + # update the catalog before proceeding with install etc. pc = PluginCatalog() # optimized github search REST api takes ~14s to search & download all manifests diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 2552dd5..26c2c8a 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -78,3 +78,15 @@ def update( ) # add the newly downloaded plugin to the registry self.registry.register_plugin(ipi) + + def remove(self, plugin_name: str) -> bool: + """ + Remove a plugin from the registry. + + Args: + plugin_name: The name of the plugin to remove. + + Returns: + True if the plugin was found and removed, False otherwise. + """ + return self.registry.unregister_plugin(plugin_name) diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 91f6e92..deb8a4b 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -886,3 +886,255 @@ def test_install_from_pypi_with_null_default_configs_in_manifest(self, tmp_path, # Made with Bob + + + +class TestPluginCatalogProcessPyproject: + """Tests for _process_pyproject helper method.""" + + def test_process_pyproject_with_download_failure(self, tmp_path, mock_github_env): + """Test _process_pyproject when download fails.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Mock the repository to raise exception + mock_repo = Mock() + mock_repo.get_contents = Mock(side_effect=Exception("Download failed")) + + item = Mock() + item.name = "pyproject.toml" + item.path = "plugin1/pyproject.toml" + + repo_url = httpx.URL("https://github.com/org/repo") + headers = {} + + # Should raise exception + with pytest.raises(Exception, match="Download failed"): + catalog._process_pyproject(mock_repo, item, repo_url, headers) + + +class TestPluginCatalogUpdateCatalogWithPyprojectExtended: + """Extended tests for update_catalog_with_pyproject method.""" + + def test_update_catalog_with_pyproject_no_token(self, tmp_path, mock_github_env): + """Test update_catalog_with_pyproject when no GitHub token is set.""" + with ( + patch("cpex.tools.catalog.logger") as mock_logger, + ): + catalog = PluginCatalog() + catalog.github_token = None + result = catalog.update_catalog_with_pyproject() + + assert result is True + mock_logger.error.assert_called_with("No GitHub token set") + + def test_update_catalog_with_pyproject_repo_access_error(self, tmp_path, mock_github_env): + """Test update_catalog_with_pyproject when repository access fails.""" + with ( + patch("cpex.tools.catalog.logger") as mock_logger, + ): + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.monorepos = ["https://github.com/org/repo"] + + # Mock get_repo to raise exception + catalog.gh.get_repo = Mock(side_effect=Exception("Access denied")) + + result = catalog.update_catalog_with_pyproject() + + assert result is False + mock_logger.error.assert_called() + + def test_update_catalog_with_pyproject_search_error(self, tmp_path, mock_github_env): + """Test update_catalog_with_pyproject when search fails.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.monorepos = ["https://github.com/org/repo"] + + # Mock successful get_repo but failing search + mock_repo = Mock() + catalog.gh.get_repo = Mock(return_value=mock_repo) + catalog.gh.search_code = Mock(side_effect=Exception("Search failed")) + + result = catalog.update_catalog_with_pyproject() + + assert result is False + mock_logger.error.assert_called() + + +class TestPluginCatalogSearchGithubCode: + """Tests for _search_github_code method.""" + + def test_search_github_code_exception(self, mock_github_env): + """Test _search_github_code when exception occurs.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + + # Mock search_code to raise exception + catalog.gh.search_code = Mock(side_effect=Exception("Search error")) + + result = catalog._search_github_code("org/repo", "plugins", {}) + + assert result is None + mock_logger.error.assert_called() + + +class TestPluginCatalogProcessManifestItem: + """Tests for _process_manifest_item method.""" + + def test_process_manifest_item_not_yaml(self, tmp_path, mock_github_env): + """Test _process_manifest_item with non-YAML file.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + item = { + "name": "README.md", + "path": "plugin1/README.md", + "git_url": "https://api.github.com/file" + } + + repo_url = httpx.URL("https://github.com/org/repo") + relpath = tmp_path / "catalog" / "plugin1" / "plugin-manifest.yaml" + + result = catalog._process_manifest_item(item, "plugin1", "plugin1", repo_url, {}, relpath, "org/repo") + + assert result is False + mock_logger.warning.assert_called() + + def test_process_manifest_item_download_failure(self, tmp_path, mock_github_env): + """Test _process_manifest_item when download fails.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Mock download_file to return None + catalog.download_file = Mock(return_value=None) + + item = { + "name": "plugin-manifest.yaml", + "path": "plugin1/plugin-manifest.yaml", + "git_url": "https://api.github.com/file" + } + + repo_url = httpx.URL("https://github.com/org/repo") + relpath = tmp_path / "catalog" / "plugin1" / "plugin-manifest.yaml" + + result = catalog._process_manifest_item(item, "plugin1", "plugin1", repo_url, {}, relpath, "org/repo") + + assert result is False + mock_logger.error.assert_called() + + +class TestPluginCatalogFindAndSavePluginManifestExtended: + """Extended tests for find_and_save_plugin_manifest method.""" + + def test_find_and_save_plugin_manifest_search_returns_none(self, tmp_path, mock_github_env): + """Test find_and_save_plugin_manifest when search returns None.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Mock _search_github_code to return None + catalog._search_github_code = Mock(return_value=None) + + repo_url = httpx.URL("https://github.com/org/repo") + result = catalog.find_and_save_plugin_manifest("plugin1", "plugin1", repo_url, {}) + + assert result is None + + +class TestPluginCatalogLoadManifestFile: + """Tests for _load_manifest_file method.""" + + def test_load_manifest_file_not_found(self, tmp_path, mock_github_env): + """Test _load_manifest_file when file doesn't exist.""" + catalog = PluginCatalog() + manifest_path = tmp_path / "nonexistent" / "plugin-manifest.yaml" + + with pytest.raises(FileNotFoundError, match="plugin-manifest.yaml not found"): + catalog._load_manifest_file(manifest_path) + + def test_load_manifest_file_invalid_yaml(self, tmp_path, mock_github_env): + """Test _load_manifest_file with invalid YAML.""" + catalog = PluginCatalog() + manifest_path = tmp_path / "plugin-manifest.yaml" + manifest_path.write_text("invalid: yaml: content:") + + with pytest.raises(RuntimeError, match="Failed to parse manifest YAML"): + catalog._load_manifest_file(manifest_path) + + def test_load_manifest_file_not_dict(self, tmp_path, mock_github_env): + """Test _load_manifest_file when YAML is not a dictionary.""" + catalog = PluginCatalog() + manifest_path = tmp_path / "plugin-manifest.yaml" + manifest_path.write_text("- item1\n- item2") + + with pytest.raises(RuntimeError, match="Invalid manifest format"): + catalog._load_manifest_file(manifest_path) + + +class TestPluginCatalogNormalizeManifestData: + """Tests for _normalize_manifest_data method.""" + + def test_normalize_manifest_data_validation_error(self, mock_github_env): + """Test _normalize_manifest_data with validation error.""" + catalog = PluginCatalog() + + # Invalid manifest data (missing required fields) + manifest_data = {"name": "test"} + + with pytest.raises(RuntimeError, match="Failed to validate manifest"): + catalog._normalize_manifest_data(manifest_data, "test_package", None) + + +class TestPluginCatalogPersistManifest: + """Tests for _persist_manifest method.""" + + def test_persist_manifest_error(self, tmp_path, mock_github_env): + """Test _persist_manifest when save fails.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "nonexistent" / "catalog") + + manifest = create_test_manifest() + + # Make directory read-only to cause save failure + with patch("cpex.tools.catalog.PluginCatalog.save_manifest", side_effect=Exception("Save failed")): + with pytest.raises(RuntimeError, match="Failed to save manifest"): + catalog._persist_manifest(manifest, "test_plugin") + + +class TestPluginCatalogInstallPackage: + """Tests for _install_package method.""" + + def test_install_package_with_version_constraint(self, mock_github_env): + """Test _install_package with version constraint.""" + with patch("cpex.tools.catalog.subprocess.run") as mock_subprocess: + catalog = PluginCatalog() + catalog._install_package("test_package", ">=1.0.0") + + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args[0][0] + assert "test_package@>=1.0.0" in " ".join(call_args) + + +class TestPluginCatalogDownloadFileExtended: + """Extended tests for download_file method.""" + + def test_download_file_with_exception_message(self, mock_github_env): + """Test download_file logs proper error message.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + + # Mock to raise exception + catalog.gh.get_repo = Mock(side_effect=Exception("API error")) + + item = {"path": "test/file.yaml"} + result = catalog.download_file("org/repo", item, {}) + + assert result is None + # Check that error was logged with the item path + assert mock_logger.error.called + + +# Made with Bob diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 3e683d2..4298512 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -36,6 +36,8 @@ info, instance_name_is_unique, update_plugins_config_yaml, + remove_from_plugins_config_yaml, + uninstall, ) from cpex.tools.plugin_registry import PluginRegistry from cpex.framework.models import PluginManifest, Monorepo, Config, PluginConfig, PluginMode, PiPyRepo @@ -924,3 +926,433 @@ def test_callback_exists(self): # callback should be callable and do nothing callback() + + + +class TestRemoveFromPluginsConfigYaml: + """Tests for remove_from_plugins_config_yaml() function.""" + + def test_removes_plugin_from_config(self, tmp_path): + """Test removing a plugin from config.""" + config_file = tmp_path / "config.yaml" + + plugin1 = PluginConfig( + name="plugin_to_remove", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + ) + plugin2 = PluginConfig( + name="plugin_to_keep", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + ) + mock_config = Config(plugins=[plugin1, plugin2]) + + with ( + patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config), + patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, + ): + result = remove_from_plugins_config_yaml("plugin_to_remove") + assert result is True + mock_save.assert_called_once() + assert len(mock_config.plugins) == 1 + assert mock_config.plugins[0].name == "plugin_to_keep" + + def test_returns_false_when_plugin_not_found(self, tmp_path): + """Test that function returns False when plugin not found.""" + plugin1 = PluginConfig( + name="existing_plugin", + kind="test.plugin", + mode=PluginMode.SEQUENTIAL, + priority=100 + ) + mock_config = Config(plugins=[plugin1]) + + with ( + patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config), + patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, + ): + result = remove_from_plugins_config_yaml("nonexistent_plugin") + assert result is False + mock_save.assert_not_called() + + def test_returns_false_when_no_plugins_in_config(self, tmp_path): + """Test that function returns False when config has no plugins.""" + mock_config = Config(plugins=None) + + with patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config): + result = remove_from_plugins_config_yaml("any_plugin") + assert result is False + + def test_handles_exception_gracefully(self, tmp_path): + """Test that function handles exceptions gracefully.""" + with ( + patch("cpex.tools.cli.ConfigLoader.load_config", side_effect=Exception("Config error")), + patch("cpex.tools.cli.logger") as mock_logger, + ): + result = remove_from_plugins_config_yaml("any_plugin") + assert result is False + mock_logger.error.assert_called_once() + + +class TestUninstallFunction: + """Tests for uninstall() function.""" + + def test_uninstall_plugin_not_found(self, temp_registry_dir): + """Test uninstalling a plugin that is not installed.""" + mock_catalog = Mock() + + with patch("cpex.tools.cli.console") as mock_console: + uninstall("nonexistent_plugin", mock_catalog) + mock_console.print.assert_called_with("❌ Plugin 'nonexistent_plugin' is not installed.") + + def test_uninstall_cancelled_by_user(self, temp_registry_dir): + """Test uninstall cancelled by user.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + mock_catalog = Mock() + + with ( + patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": False}), + patch("cpex.tools.cli.console") as mock_console, + ): + uninstall("test_plugin", mock_catalog) + mock_console.print.assert_any_call("Uninstall cancelled.") + + def test_uninstall_success(self, temp_registry_dir): + """Test successful plugin uninstallation.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + mock_catalog = Mock() + mock_catalog.uninstall_package = Mock() + + with ( + patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), + patch("cpex.tools.cli.console") as mock_console, + patch("cpex.tools.cli.remove_from_plugins_config_yaml", return_value=True), + ): + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + uninstall("test_plugin", mock_catalog) + + mock_catalog.uninstall_package.assert_called_once_with("test_plugin") + mock_console.print.assert_any_call("✅ test_plugin uninstalled successfully.") + + def test_uninstall_handles_exception(self, temp_registry_dir): + """Test uninstall handles exceptions gracefully.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + mock_catalog = Mock() + mock_catalog.uninstall_package = Mock(side_effect=RuntimeError("Uninstall failed")) + + with ( + patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), + patch("cpex.tools.cli.console") as mock_console, + patch("cpex.tools.cli.logger") as mock_logger, + ): + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + uninstall("test_plugin", mock_catalog) + + mock_console.print.assert_any_call("❌ Failed to uninstall test_plugin: Uninstall failed") + mock_logger.error.assert_called_once() + + +class TestPluginUninstallCommand: + """Tests for the plugin uninstall command.""" + + def test_plugin_uninstall_command_without_plugin_name(self, temp_registry_dir): + """Test plugin uninstall command without specifying plugin name.""" + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.console") as mock_console, + ): + mock_catalog = Mock() + mock_catalog_class.return_value = mock_catalog + + result = runner.invoke(app, ["plugin", "uninstall"]) + assert result.exit_code == 0 + mock_console.print.assert_called_with("❌ Please specify a plugin name to uninstall.") + + def test_plugin_uninstall_command_success(self, temp_registry_dir): + """Test successful plugin uninstall command.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), + patch("cpex.tools.cli.console") as mock_console, + patch("cpex.tools.cli.remove_from_plugins_config_yaml", return_value=True), + ): + mock_catalog = Mock() + mock_catalog.uninstall_package = Mock() + mock_catalog_class.return_value = mock_catalog + + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + result = runner.invoke(app, ["plugin", "uninstall", "test_plugin"]) + assert result.exit_code == 0 + mock_catalog.uninstall_package.assert_called_once_with("test_plugin") + + def test_plugin_uninstall_command_not_found(self, temp_registry_dir): + """Test plugin uninstall command when plugin not found.""" + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.console") as mock_console, + ): + mock_catalog = Mock() + mock_catalog_class.return_value = mock_catalog + + result = runner.invoke(app, ["plugin", "uninstall", "nonexistent_plugin"]) + assert result.exit_code == 0 + mock_console.print.assert_called_with("❌ Plugin 'nonexistent_plugin' is not installed.") + + +class TestCatalogUninstallPackage: + """Tests for PluginCatalog.uninstall_package() method.""" + + def test_uninstall_package_success(self, temp_registry_dir): + """Test successful package uninstallation.""" + from cpex.tools.catalog import PluginCatalog + + with ( + patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), + patch("cpex.tools.catalog.Github"), + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + ): + catalog = PluginCatalog() + result = catalog.uninstall_package("test_package") + + assert result is True + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args + assert "pip" in call_args[0][0] + assert "uninstall" in call_args[0][0] + assert "-y" in call_args[0][0] + assert "test_package" in call_args[0][0] + + def test_uninstall_package_subprocess_error(self, temp_registry_dir): + """Test package uninstallation with subprocess error.""" + from cpex.tools.catalog import PluginCatalog + import subprocess + + with ( + patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), + patch("cpex.tools.catalog.Github"), + patch("cpex.tools.catalog.subprocess.run", side_effect=subprocess.CalledProcessError(1, ["pip"], stderr="Error")), + ): + catalog = PluginCatalog() + + with pytest.raises(RuntimeError, match="Failed to uninstall"): + catalog.uninstall_package("test_package") + + def test_uninstall_package_unexpected_error(self, temp_registry_dir): + """Test package uninstallation with unexpected error.""" + from cpex.tools.catalog import PluginCatalog + + with ( + patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), + patch("cpex.tools.catalog.Github"), + patch("cpex.tools.catalog.subprocess.run", side_effect=Exception("Unexpected error")), + ): + catalog = PluginCatalog() + + with pytest.raises(RuntimeError, match="Unexpected error uninstalling"): + catalog.uninstall_package("test_package") + + +class TestPluginRegistryRemove: + """Tests for PluginRegistry.remove() method.""" + + def test_remove_existing_plugin(self, temp_registry_dir): + """Test removing an existing plugin from registry.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + plugin_registry = PluginRegistry() + result = plugin_registry.remove("test_plugin") + + assert result is True + updated_data = json.loads(registry_file.read_text()) + assert len(updated_data["plugins"]) == 0 + + def test_remove_nonexistent_plugin(self, temp_registry_dir): + """Test removing a plugin that doesn't exist.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + plugin_registry = PluginRegistry() + result = plugin_registry.remove("nonexistent_plugin") + + assert result is False + updated_data = json.loads(registry_file.read_text()) + assert len(updated_data["plugins"]) == 1 + + +class TestInstalledPluginRegistryUnregister: + """Tests for InstalledPluginRegistry.unregister_plugin() method.""" + + def test_unregister_existing_plugin(self, temp_registry_dir): + """Test unregistering an existing plugin.""" + from cpex.framework.models import InstalledPluginRegistry, InstalledPluginInfo, PluginInstallationType + + registry_file = temp_registry_dir / "installed-plugins.json" + plugin1 = InstalledPluginInfo( + name="plugin1", + kind="native", + version="1.0.0", + installation_type=PluginInstallationType.MONOREPO, + installation_path="/path/to/plugin1", + installed_at="2024-01-01T00:00:00.000000Z", + installed_by="test_user", + package_source="https://example.com/repo/plugin1", + editable=False, + ) + plugin2 = InstalledPluginInfo( + name="plugin2", + kind="native", + version="1.0.0", + installation_type=PluginInstallationType.MONOREPO, + installation_path="/path/to/plugin2", + installed_at="2024-01-01T00:00:00.000000Z", + installed_by="test_user", + package_source="https://example.com/repo/plugin2", + editable=False, + ) + + registry = InstalledPluginRegistry(plugins=[plugin1, plugin2]) + result = registry.unregister_plugin("plugin1") + + assert result is True + assert len(registry.plugins) == 1 + assert registry.plugins[0].name == "plugin2" + + def test_unregister_nonexistent_plugin(self, temp_registry_dir): + """Test unregistering a plugin that doesn't exist.""" + from cpex.framework.models import InstalledPluginRegistry, InstalledPluginInfo, PluginInstallationType + + plugin1 = InstalledPluginInfo( + name="plugin1", + kind="native", + version="1.0.0", + installation_type=PluginInstallationType.MONOREPO, + installation_path="/path/to/plugin1", + installed_at="2024-01-01T00:00:00.000000Z", + installed_by="test_user", + package_source="https://example.com/repo/plugin1", + editable=False, + ) + + registry = InstalledPluginRegistry(plugins=[plugin1]) + result = registry.unregister_plugin("nonexistent") + + assert result is False + assert len(registry.plugins) == 1 + + +# Made with Bob From a5a53428c721fe3f2f3309864d831ce2c4cf4e8e Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 17 Apr 2026 13:52:49 -0400 Subject: [PATCH 31/88] chore: lint-fix Signed-off-by: habeck --- cpex/framework/models.py | 6 ++-- cpex/tools/catalog.py | 74 +++++++++++++++++++++------------------- cpex/tools/cli.py | 35 ++++++++++--------- 3 files changed, 60 insertions(+), 55 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 84a051c..3a2881b 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2296,16 +2296,16 @@ def register_plugin(self, plugin: InstalledPluginInfo) -> None: def unregister_plugin(self, plugin_name: str) -> bool: """Unregister a plugin from the registry. - + Args: plugin_name: The name of the plugin to unregister. - + Returns: True if the plugin was found and removed, False otherwise. """ initial_count = len(self.plugins) self.plugins = [p for p in self.plugins if p.name != plugin_name] - + if len(self.plugins) < initial_count: self.save() return True diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 55bb9e6..bd5b0ca 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -152,9 +152,9 @@ def download_contents(self, git_url: str, headers, path: str, repo_url: httpx.UR else: logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) - def download_file(self,repo_path: str, item: dict, headers) -> str | None: + def download_file(self, repo_path: str, item: dict, headers) -> str | None: """Download the content of a github file - + Args: repo_path: Repository path (e.g., 'owner/repo') item: Dictionary containing the path of the file to download @@ -185,24 +185,26 @@ def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict try: # Build search query for PyGithub query = f"repo:{repo_path} path:{member} filename:plugin-manifest extension:yaml" - + # Use PyGithub's search_code method search_results = self.gh.search_code(query=query) - + logger.info("Found %d plugin-manifest files in %s/%s", search_results.totalCount, repo_path, member) - + # Convert PyGithub ContentFile objects to dict format compatible with existing code items = [] for content_file in search_results: - items.append({ - "name": content_file.name, - "path": content_file.path, - "git_url": content_file.git_url, - "html_url": content_file.html_url, - }) - + items.append( + { + "name": content_file.name, + "path": content_file.path, + "git_url": content_file.git_url, + "html_url": content_file.html_url, + } + ) + return items - + except Exception as e: logger.error("Catalog update failed with error: %s", str(e)) return None @@ -236,7 +238,14 @@ def _transform_manifest_data(self, manifest_content: dict, name: str, member: st return manifest_content def _process_manifest_item( - self, item: dict, name: str, member: str, repo_url: httpx.URL, headers, relpath: Path, repo_path: str, + self, + item: dict, + name: str, + member: str, + repo_url: httpx.URL, + headers, + relpath: Path, + repo_path: str, ) -> bool: """Process a single manifest search result item. @@ -300,9 +309,7 @@ def find_and_save_plugin_manifest( return None - def _process_pyproject( - self, gh_repo, item, repo_url: httpx.URL, headers - ) -> None: + def _process_pyproject(self, gh_repo, item, repo_url: httpx.URL, headers) -> None: """Process a single pyproject.toml file. Args: @@ -316,24 +323,21 @@ def _process_pyproject( """ # Get the directory path (remove filename) member = item.path.removesuffix("/" + item.name) - + # Download pyproject.toml content using PyGithub file_content = gh_repo.get_contents(item.path) pyproject_data = file_content.decoded_content.decode("utf-8") - + if pyproject_data is None: logger.warning("Failed to download pyproject.toml from %s", item.path) return - + # Parse the pyproject.toml project_data = tomllib.loads(pyproject_data) - + # Find and save the plugin manifest self.find_and_save_plugin_manifest( - member=member, - name=project_data["project"]["name"], - repo_url=repo_url, - headers=headers + member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers ) def update_catalog_with_pyproject(self) -> bool: @@ -341,29 +345,29 @@ def update_catalog_with_pyproject(self) -> bool: if self.github_token is None: logger.error("No GitHub token set") return True - + headers = {"accept": "application/vnd.github+json", "authorization": f"Bearer {self.github_token}"} self.create_output_folder() - + # Cache repositories to avoid repeated API calls repo_cache: dict[str, Any] = {} - + for repo in self.monorepos: repo_url = httpx.URL(repo) repo_path = repo_url.path.removeprefix("/") - + try: # Get repository using PyGithub (with caching) if repo_path not in repo_cache: repo_cache[repo_path] = self.gh.get_repo(repo_path) gh_repo = repo_cache[repo_path] - + # Search for pyproject.toml files using PyGithub search query = f"repo:{repo_path} filename:pyproject extension:toml" search_results = self.gh.search_code(query=query) - + logger.info("Found %d pyproject.toml files in %s", search_results.totalCount, repo_path) - + for item in search_results: if "pyproject.toml" in item.name: try: @@ -371,11 +375,11 @@ def update_catalog_with_pyproject(self) -> bool: except Exception as e: logger.error("Error processing pyproject.toml at %s: %s", item.path, str(e)) continue - + except Exception as e: logger.error("Error accessing repository %s: %s", repo_path, str(e)) continue - + return False def load(self) -> None: @@ -669,7 +673,7 @@ def uninstall_package(self, package_name: str) -> bool: [self.python_executable, "-m", "pip", "uninstall", "-y", package_name], check=True, capture_output=True, - text=True + text=True, ) logger.info("Successfully uninstalled package: %s", package_name) return True diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 26bd1d1..97f18de 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -31,7 +31,8 @@ import os import shutil import subprocess # nosec B404 # Safe: Used only for git commands with hardcoded args -#import sys + +# import sys from pathlib import Path from typing import List, Optional @@ -316,17 +317,17 @@ def remove_from_plugins_config_yaml(plugin_name: str) -> bool: """ try: plugin_configs: Config = ConfigLoader.load_config(settings.config_file) - + if plugin_configs.plugins is None: return False - + initial_count = len(plugin_configs.plugins) plugin_configs.plugins = [p for p in plugin_configs.plugins if p.name != plugin_name] - + if len(plugin_configs.plugins) < initial_count: ConfigSaver.save_config(plugin_configs, settings.config_file) return True - + return False except Exception as e: logger.error("Error removing plugin from config: %s", str(e)) @@ -592,23 +593,23 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: """ # Get plugin registry to find the installed plugin plugin_registry = PluginRegistry() - + # Find the plugin in the registry installed_plugin = None for plugin in plugin_registry.registry.plugins: if plugin.name == plugin_name: installed_plugin = plugin break - + if installed_plugin is None: console.print(f"❌ Plugin '{plugin_name}' is not installed.") return - + # Confirm uninstallation console.print(f"Found plugin: {installed_plugin.name} (version {installed_plugin.version})") console.print(f"Installation type: {installed_plugin.installation_type}") console.print(f"Installation path: {installed_plugin.installation_path}") - + questions = [ inquirer.Confirm( "confirm", @@ -617,24 +618,24 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: ), ] answers = inquirer.prompt(questions) - + if not answers or not answers["confirm"]: console.print("Uninstall cancelled.") return - + try: with console.status(f"Uninstalling plugin {plugin_name}...", spinner="dots"): # Uninstall the package using pip catalog.uninstall_package(plugin_name) - + # Remove from plugin registry plugin_registry.remove(plugin_name) - + # Remove from plugins/config.yaml remove_from_plugins_config_yaml(plugin_name) - + console.print(f"✅ {plugin_name} uninstalled successfully.") - + except Exception as e: console.print(f"❌ Failed to uninstall {plugin_name}: {str(e)}") logger.error("Uninstall error: %s", str(e), exc_info=True) @@ -661,7 +662,7 @@ def plugin( """Lists installed plugins""" if cmd_action == "info": return info(source) - + # For uninstall, we don't need to update the catalog if cmd_action == "uninstall": if source is None: @@ -669,7 +670,7 @@ def plugin( return pc = PluginCatalog() return uninstall(source, catalog=pc) - + # update the catalog before proceeding with install etc. pc = PluginCatalog() # optimized github search REST api takes ~14s to search & download all manifests From a74b3e4d47d8fe9ac491f0ec340a442a1cbb2aac Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 17 Apr 2026 15:55:27 -0400 Subject: [PATCH 32/88] fix: use the manifest from the local catalog to pull the kind value of the package to be removed and use that to remove all matching kind entries from plugins/config.yaml unless kind is external or isolated_venv in which case check if the plugin name is a substring of the plugin name. Signed-off-by: habeck --- .gitignore | 2 +- cpex/tools/catalog.py | 15 +++++++++++ cpex/tools/cli.py | 22 +++++++++++++---- cpex/tools/plugin_registry.py | 13 ++++++++++ tests/unit/cpex/tools/test_cli.py | 41 ++++++++++++++++++++++++------- 5 files changed, 78 insertions(+), 15 deletions(-) diff --git a/.gitignore b/.gitignore index beb8608..59e7258 100644 --- a/.gitignore +++ b/.gitignore @@ -250,4 +250,4 @@ db_path/ tmp/ .continue - +plugin-catalog \ No newline at end of file diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index bd5b0ca..d360934 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -431,6 +431,21 @@ def search(self, plugin_name: str | None) -> Optional[list[PluginManifest]]: matching.append(manifest) return matching if len(matching) > 0 else None + def find(self, plugin_name: str) -> Optional[PluginManifest]: + """Find a plugin in the catalog + Args: + plugin_name: The name of the plugin to find + Returns: + The manifest of the plugin if found, None otherwise + """ + # lookup the plugin from the catalog's plugin-manifest.yaml + if (self.manifests is not None) and (len(self.manifests) == 0): + self.load() + for manifest in self.manifests: + if manifest.name.lower() == plugin_name.lower(): + return manifest + return None + def install_folder_via_pip(self, manifest: PluginManifest) -> None: """ Runs a pip install using subfolder syntax diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 97f18de..91c5637 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -305,7 +305,7 @@ def update_plugins_config_yaml(manifest: PluginManifest): ConfigSaver.save_config(plugin_configs, settings.config_file) -def remove_from_plugins_config_yaml(plugin_name: str) -> bool: +def remove_from_plugins_config_yaml(manifest: PluginManifest) -> bool: """ Remove a plugin from the plugins/config.yaml file. @@ -322,8 +322,9 @@ def remove_from_plugins_config_yaml(plugin_name: str) -> bool: return False initial_count = len(plugin_configs.plugins) - plugin_configs.plugins = [p for p in plugin_configs.plugins if p.name != plugin_name] - + # Need to match by manifest.kind or if plugin.name starts with manifest.name + # e.g. if it is an external plugin or a venv plugin then kind will match many plugin configurations in config.yaml + plugin_configs.plugins = [p for p in plugin_configs.plugins if p.kind != manifest.kind and p.name.count(manifest.name) == 0] if len(plugin_configs.plugins) < initial_count: ConfigSaver.save_config(plugin_configs, settings.config_file) return True @@ -630,9 +631,14 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: # Remove from plugin registry plugin_registry.remove(plugin_name) - + # retrieve the manifest so we can match on kind value + catalog = PluginCatalog() + manifest = catalog.find(plugin_name) # Remove from plugins/config.yaml - remove_from_plugins_config_yaml(plugin_name) + if manifest: + remove_from_plugins_config_yaml(manifest) + else: + console.print(f"Plugin {plugin_name} not found in plugins config.yaml.") console.print(f"✅ {plugin_name} uninstalled successfully.") @@ -670,6 +676,12 @@ def plugin( return pc = PluginCatalog() return uninstall(source, catalog=pc) + if cmd_action == "install" and source is not None: + registry = PluginRegistry() + if registry.has(source): + console.print(f"Plugin {source} is already installed.") + return + # update the catalog before proceeding with install etc. pc = PluginCatalog() diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 26c2c8a..242f4bc 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -79,6 +79,19 @@ def update( # add the newly downloaded plugin to the registry self.registry.register_plugin(ipi) + def has(self, plugin_name: str) -> bool: + """ + Check if a plugin is installed. + Args: + plugin_name: The name of the plugin to check. + Returns: + True if the plugin is installed, False otherwise. + """ + for plugin in self.registry.plugins: + if plugin.name == plugin_name: + return True + return False + def remove(self, plugin_name: str) -> bool: """ Remove a plugin from the registry. diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 4298512..95a9fea 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -938,61 +938,70 @@ def test_removes_plugin_from_config(self, tmp_path): plugin1 = PluginConfig( name="plugin_to_remove", - kind="test.plugin", + kind="test.plugin.remove", mode=PluginMode.SEQUENTIAL, priority=100 ) plugin2 = PluginConfig( name="plugin_to_keep", - kind="test.plugin", + kind="test.plugin.keep", mode=PluginMode.SEQUENTIAL, priority=100 ) mock_config = Config(plugins=[plugin1, plugin2]) + # Create a manifest with matching kind + manifest = create_test_manifest(name="plugin_to_remove", kind="test.plugin.remove") + with ( patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config), patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, ): - result = remove_from_plugins_config_yaml("plugin_to_remove") + result = remove_from_plugins_config_yaml(manifest) assert result is True mock_save.assert_called_once() assert len(mock_config.plugins) == 1 - assert mock_config.plugins[0].name == "plugin_to_keep" + assert mock_config.plugins[0].kind == "test.plugin.keep" def test_returns_false_when_plugin_not_found(self, tmp_path): """Test that function returns False when plugin not found.""" plugin1 = PluginConfig( name="existing_plugin", - kind="test.plugin", + kind="test.plugin.existing", mode=PluginMode.SEQUENTIAL, priority=100 ) mock_config = Config(plugins=[plugin1]) + # Create a manifest with non-matching kind + manifest = create_test_manifest(name="nonexistent_plugin", kind="test.plugin.nonexistent") + with ( patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config), patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, ): - result = remove_from_plugins_config_yaml("nonexistent_plugin") + result = remove_from_plugins_config_yaml(manifest) assert result is False mock_save.assert_not_called() def test_returns_false_when_no_plugins_in_config(self, tmp_path): """Test that function returns False when config has no plugins.""" mock_config = Config(plugins=None) + manifest = create_test_manifest(name="any_plugin") with patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config): - result = remove_from_plugins_config_yaml("any_plugin") + result = remove_from_plugins_config_yaml(manifest) assert result is False def test_handles_exception_gracefully(self, tmp_path): """Test that function handles exceptions gracefully.""" + manifest = create_test_manifest(name="any_plugin") + with ( patch("cpex.tools.cli.ConfigLoader.load_config", side_effect=Exception("Config error")), patch("cpex.tools.cli.logger") as mock_logger, ): - result = remove_from_plugins_config_yaml("any_plugin") + result = remove_from_plugins_config_yaml(manifest) assert result is False mock_logger.error.assert_called_once() @@ -1060,11 +1069,20 @@ def test_uninstall_success(self, temp_registry_dir): mock_catalog = Mock() mock_catalog.uninstall_package = Mock() + # Create a manifest to return from find + test_manifest = create_test_manifest(name="test_plugin", kind="native") + with ( patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), patch("cpex.tools.cli.console") as mock_console, - patch("cpex.tools.cli.remove_from_plugins_config_yaml", return_value=True), + patch("cpex.tools.cli.remove_from_plugins_config_yaml", return_value=True) as mock_remove, + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, ): + # Mock the catalog.find method + mock_catalog_instance = Mock() + mock_catalog_instance.find = Mock(return_value=test_manifest) + mock_catalog_class.return_value = mock_catalog_instance + mock_status = Mock() mock_status.__enter__ = Mock(return_value=mock_status) mock_status.__exit__ = Mock(return_value=False) @@ -1073,6 +1091,7 @@ def test_uninstall_success(self, temp_registry_dir): uninstall("test_plugin", mock_catalog) mock_catalog.uninstall_package.assert_called_once_with("test_plugin") + mock_remove.assert_called_once_with(test_manifest) mock_console.print.assert_any_call("✅ test_plugin uninstalled successfully.") def test_uninstall_handles_exception(self, temp_registry_dir): @@ -1150,6 +1169,9 @@ def test_plugin_uninstall_command_success(self, temp_registry_dir): } registry_file.write_text(json.dumps(registry_data)) + # Create a manifest to return from find + test_manifest = create_test_manifest(name="test_plugin", kind="native") + with ( patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), @@ -1158,6 +1180,7 @@ def test_plugin_uninstall_command_success(self, temp_registry_dir): ): mock_catalog = Mock() mock_catalog.uninstall_package = Mock() + mock_catalog.find = Mock(return_value=test_manifest) mock_catalog_class.return_value = mock_catalog mock_status = Mock() From dc893d984c20f8b7292837bee5c27e1cb47f8936 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 20 Apr 2026 15:45:57 -0400 Subject: [PATCH 33/88] fix: when installing a plugin via mono-repo or pipi, the cache_root will not exist under the plugins dir, create the directory if it does not exist on plugin startup for venv plugins. Signed-off-by: habeck --- cpex/framework/isolated/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index d49dd6d..b546628 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -46,7 +46,7 @@ def __init__(self, config: PluginConfig, plugin_dirs) -> None: cache_root = path / class_root self.plugin_path = cache_root if not cache_root.exists(): - raise RuntimeError(f"plugin path does not exist: {str(cache_root)}") + cache_root.mkdir(parents=True, exist_ok=True) self.cache_dir: Path = cache_root / ".cpex" / "venv_cache" self.cache_dir.mkdir(parents=True, exist_ok=True) From 66b97d5ec3c0df646b3a0b7c72a8d431b993fed0 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 20 Apr 2026 18:37:22 -0400 Subject: [PATCH 34/88] enh: enable package install from both pypi and test-pypi, fix: properly resolve location of requirements.txt while performing pypi installs. Signed-off-by: habeck --- cpex/framework/isolated/client.py | 25 ++++----- cpex/framework/utils.py | 41 ++++++++++++++ cpex/tools/catalog.py | 77 ++++++++++----------------- cpex/tools/cli.py | 34 +++++++----- cpex/tools/plugin_registry.py | 3 +- tests/unit/cpex/tools/test_catalog.py | 14 +++-- tests/unit/cpex/tools/test_cli.py | 36 +++++++------ 7 files changed, 133 insertions(+), 97 deletions(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index b546628..6c1cd70 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -27,6 +27,7 @@ from cpex.framework.hooks.registry import get_hook_registry from cpex.framework.isolated.venv_comm import VenvProcessCommunicator from cpex.framework.models import PluginConfig, PluginContext, PluginErrorModel, PluginPayload, PluginResult +from cpex.framework.utils import find_package_path logger = logging.getLogger(__name__) @@ -217,21 +218,17 @@ async def initialize(self) -> None: else: requirements_file = Path(requirements_file_input) - # If it's a relative path, resolve it relative to plugin_path - if not requirements_file.is_absolute(): - requirements_file = (self.plugin_path / requirements_file).resolve() - else: - # If absolute, resolve it to normalize - requirements_file = requirements_file.resolve() - - # Validate that the resolved path is within plugin_path (security check) + # Try to find the package location where plugin-manifest.yaml resides + # Fall back to self.plugin_path if package is not installed (e.g., in tests) try: - requirements_file.relative_to(self.plugin_path.resolve()) - except ValueError as ve: - raise RuntimeError( - f"Invalid requirements_file path: {requirements_file_input}. " - f"Path must be within plugin directory: {self.plugin_path}" - ) from ve + package_path = find_package_path(self.config.name) + logger.debug("Found installed package %s at %s", self.config.name, package_path) + except RuntimeError: + # Package not installed (e.g., in test environment), use plugin_path + package_path = self.plugin_path + logger.debug("Package %s not installed, using plugin_path: %s", self.config.name, package_path) + + requirements_file = package_path / requirements_file_input # Create venv with caching support new_venv = await self.create_venv(venv_path=venv_path, requirements_file=requirements_file, use_cache=True) diff --git a/cpex/framework/utils.py b/cpex/framework/utils.py index f9eb78e..b8b9c7a 100644 --- a/cpex/framework/utils.py +++ b/cpex/framework/utils.py @@ -13,6 +13,7 @@ import importlib import logging from functools import cache +from pathlib import Path from types import ModuleType from typing import Any, Optional @@ -475,3 +476,43 @@ def render(self, content: Any) -> bytes: content, option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SERIALIZE_NUMPY, ) + + +def find_package_path( package_name: str) -> Path: + """Locate installed package directory using importlib.metadata. + + Args: + package_name: The name of the installed package. + + Returns: + Path to the package directory. + + Raises: + RuntimeError: If package cannot be found. + """ + try: + # Use importlib.metadata for more reliable package discovery + for dist in importlib.metadata.distributions(): + if dist.name == package_name or dist.metadata.get("Name") == package_name: + if dist.files: + # Get the package root from the plugin-manifest.yaml file + for afile in dist.files: + if afile.name == "plugin-manifest.yaml": + located_path = dist.locate_file(afile) + package_path = Path(str(located_path)).parent + logger.debug("Found package %s at %s", package_name, package_path) + return package_path + + # Fallback to importlib.util.find_spec if metadata approach fails + spec = importlib.util.find_spec(package_name) + if spec is not None and spec.origin is not None: + package_path = Path(spec.origin).parent + logger.debug("Found package %s at %s (via find_spec)", package_name, package_path) + return package_path + + raise RuntimeError(f"Could not find installed package: {package_name}") + + except Exception as e: + if isinstance(e, RuntimeError): + raise + raise RuntimeError(f"Error locating package {package_name}: {str(e)}") from e \ No newline at end of file diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index d360934..679dc42 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -23,6 +23,7 @@ from github import Auth, Github from cpex.framework.models import PiPyRepo, PluginManifest, PluginPackageInfo +from cpex.framework.utils import find_package_path from cpex.tools.settings import get_catalog_settings logger = logging.getLogger(__name__) @@ -474,7 +475,7 @@ def install_folder_via_pip(self, manifest: PluginManifest) -> None: except Exception as e: raise RuntimeError(f"Unexpected error installing {manifest.name}: {str(e)}") from e - def _install_package(self, package_name: str, version_constraint: str | None) -> None: + def _install_package(self, package_name: str, version_constraint: str | None, use_test: bool = False) -> None: """Install package from PyPI with proper error handling. Args: @@ -489,12 +490,27 @@ def _install_package(self, package_name: str, version_constraint: str | None) -> ppi = PluginPackageInfo(pypi_package=package_name, version_constraint=version_constraint) tgt = ppi.pypi_package if ppi.version_constraint is not None: - tgt = f"{tgt}@{ppi.version_constraint}" - - # Use subprocess.run for better error handling - subprocess.run( - [self.python_executable, "-m", "pip", "install", tgt], check=True, capture_output=True, text=True - ) + tgt = f"{tgt}{ppi.version_constraint}" + if use_test: + subprocess.run( + [ + self.python_executable, + "-m", + "pip", + "install", + "--index-url", + "https://test.pypi.org/simple/", + tgt, + ], + check=True, + capture_output=True, + text=True, + ) + else: + # Use subprocess.run for better error handling + subprocess.run( + [self.python_executable, "-m", "pip", "install", tgt], check=True, capture_output=True, text=True + ) logger.info("Successfully installed package: %s", package_name) except subprocess.CalledProcessError as e: @@ -502,45 +518,6 @@ def _install_package(self, package_name: str, version_constraint: str | None) -> except Exception as e: raise RuntimeError(f"Unexpected error installing {package_name}: {str(e)}") from e - def find_package_path(self, package_name: str) -> Path: - """Locate installed package directory using importlib.metadata. - - Args: - package_name: The name of the installed package. - - Returns: - Path to the package directory. - - Raises: - RuntimeError: If package cannot be found. - """ - try: - # Use importlib.metadata for more reliable package discovery - for dist in importlib.metadata.distributions(): - if dist.name == package_name or dist.metadata.get("Name") == package_name: - if dist.files: - # Get the package root from the plugin-manifest.yaml file - for afile in dist.files: - if afile.name == "plugin-manifest.yaml": - located_path = dist.locate_file(afile) - package_path = Path(str(located_path)).parent - logger.debug("Found package %s at %s", package_name, package_path) - return package_path - - # Fallback to importlib.util.find_spec if metadata approach fails - spec = importlib.util.find_spec(package_name) - if spec is not None and spec.origin is not None: - package_path = Path(spec.origin).parent - logger.debug("Found package %s at %s (via find_spec)", package_name, package_path) - return package_path - - raise RuntimeError(f"Could not find installed package: {package_name}") - - except Exception as e: - if isinstance(e, RuntimeError): - raise - raise RuntimeError(f"Error locating package {package_name}: {str(e)}") from e - def _load_manifest_file(self, manifest_path: Path) -> dict[str, Any]: """Load and parse plugin-manifest.yaml with validation. @@ -631,7 +608,9 @@ def _persist_manifest(self, manifest: PluginManifest, package_name: str) -> None except Exception as e: raise RuntimeError(f"Failed to save manifest for {package_name}: {str(e)}") from e - def install_from_pypi(self, plugin_package_name: str, version_constraint: str | None = None) -> PluginManifest: + def install_from_pypi( + self, plugin_package_name: str, version_constraint: str | None = None, use_pytest: bool = False + ) -> PluginManifest: """Install Python package from PyPI and load its plugin-manifest.yaml. This method performs the following steps: @@ -653,10 +632,10 @@ def install_from_pypi(self, plugin_package_name: str, version_constraint: str | FileNotFoundError: If plugin-manifest.yaml is not found in the package. """ # Step 1: Install the package - self._install_package(plugin_package_name, version_constraint) + self._install_package(plugin_package_name, version_constraint, use_pytest) # Step 2: Find the package location where plugin-manifest.yaml resides - package_path = self.find_package_path(plugin_package_name) + package_path = find_package_path(plugin_package_name) # Step 3: Load the manifest file manifest_path = package_path / "plugin-manifest.yaml" diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 91c5637..f309aa8 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -324,7 +324,9 @@ def remove_from_plugins_config_yaml(manifest: PluginManifest) -> bool: initial_count = len(plugin_configs.plugins) # Need to match by manifest.kind or if plugin.name starts with manifest.name # e.g. if it is an external plugin or a venv plugin then kind will match many plugin configurations in config.yaml - plugin_configs.plugins = [p for p in plugin_configs.plugins if p.kind != manifest.kind and p.name.count(manifest.name) == 0] + plugin_configs.plugins = [ + p for p in plugin_configs.plugins if p.kind != manifest.kind and p.name.count(manifest.name) == 0 + ] if len(plugin_configs.plugins) < initial_count: ConfigSaver.save_config(plugin_configs, settings.config_file) return True @@ -447,7 +449,7 @@ def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: update_plugins_config_yaml(manifest=manifest) -def _install_from_git(source: str, catalog: PluginCatalog): +def _install_from_git(source: str, catalog: PluginCatalog, use_test: bool = False): """Handle git-based installation (not yet implemented). Args: @@ -460,7 +462,7 @@ def _install_from_git(source: str, catalog: PluginCatalog): raise NotImplementedError("Git installation is not yet implemented") -def _install_from_monorepo(source: str, catalog: PluginCatalog): +def _install_from_monorepo(source: str, catalog: PluginCatalog, use_test: bool = False): """Handle monorepo-based installation. Args: @@ -484,7 +486,7 @@ def _install_from_monorepo(source: str, catalog: PluginCatalog): console.print(f"✅ {selected_plugin.name} installation complete.") -def _install_from_pypi(source: str, catalog: PluginCatalog): +def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = False): """Handle PyPI-based installation. Args: @@ -497,7 +499,9 @@ def _install_from_pypi(source: str, catalog: PluginCatalog): package_name, version_constraint = _parse_pypi_source(source) with console.status(f"Installing plugin {package_name} via pypi", spinner="dots"): - manifest = catalog.install_from_pypi(plugin_package_name=package_name, version_constraint=version_constraint) + manifest = catalog.install_from_pypi( + plugin_package_name=package_name, version_constraint=version_constraint, use_pytest=use_test + ) if manifest is None: console.print(f"❌ Failed to install {package_name}") @@ -526,13 +530,14 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog): "git": _install_from_git, "monorepo": _install_from_monorepo, "pypi": _install_from_pypi, + "test-pypi": _install_from_pypi, } handler = handlers.get(install_type) if handler is None: raise ValueError(f"Unsupported installation type: {install_type}. Must be one of: {', '.join(handlers.keys())}") - handler(source, catalog) + handler(source, catalog, use_test=True if install_type == "test-pypi" else False) def search(plugin_name: str | None, catalog: PluginCatalog): @@ -654,15 +659,21 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: "python cpex/tools/cli.py plugin info pii\n" "python cpex/tools/cli.py plugin search pii\n" "python cpex/tools/cli.py plugin --type monorepo search pii\n" - "python cpex/tools/cli.py plugin --type monorepo install PIIFilterPlugin\n" - "python cpex/tools/cli.py plugin --type pypi install ExamplePlugin@>=0.1.0\n" - "python cpex/tools/cli.py plugin uninstall PIIFilterPlugin" + "python cpex/tools/cli.py plugin --type monorepo install cpex-pii-filter\n" + "python cpex/tools/cli.py plugin --type pypi install \"ExamplePlugin@>=0.1.0\"\n" + "python cpex/tools/cli.py plugin --type test-pypi install \"cpex-plugin-test@>=0.1.1\"\n" + "python cpex/tools/cli.py plugin uninstall cpex-pii-filter" ) def plugin( cmd_action: str = typer.Argument(None, help="One of: list|info|install|search|uninstall"), source: str | None = typer.Argument(None, help="The pypi, git, or local folder where the plugin resides"), install_type: Annotated[ - str, typer.Option("--type", "-t", help="The types of plugins to list. One of: bundled|pypi|git|local|monorepo") + str, + typer.Option( + "--type", + "-t", + help="The types of plugins to list. One of: monorepo|pypi|test-pypi|git|local Defaults to monorepo if unspecified.", + ), ] = None, ) -> None: """Lists installed plugins""" @@ -682,14 +693,13 @@ def plugin( console.print(f"Plugin {source} is already installed.") return - # update the catalog before proceeding with install etc. pc = PluginCatalog() # optimized github search REST api takes ~14s to search & download all manifests console.log("Update catalog") with console.status("Updating catalog...", spinner="dots"): rc = pc.update_catalog_with_pyproject() - if rc == 0: + if rc is False: console.log("Catalog update completed.") else: console.log("❌ Catalog update failed.") diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 242f4bc..f85397b 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -13,6 +13,7 @@ from pathlib import Path from cpex.framework.models import InstalledPluginInfo, InstalledPluginRegistry, PluginInstallationType, PluginManifest +from cpex.framework.utils import find_package_path from cpex.tools.catalog import PluginCatalog @@ -63,7 +64,7 @@ def update( else: raise ValueError(f"Invalid installation type: {installation_type}") - installation_path = catalog.find_package_path(manifest.name) + installation_path = find_package_path(manifest.name) ipi: InstalledPluginInfo = InstalledPluginInfo( name=manifest.name, diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index deb8a4b..3f959e8 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -349,16 +349,18 @@ def test_load_with_invalid_manifest(self, tmp_path, mock_github_env): class TestPluginCatalogSearchOperations: """Tests for search operations.""" - def test_search_empty_catalog(self, mock_github_env): + def test_search_empty_catalog(self, tmp_path, mock_github_env): """Test search with empty catalog.""" catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") catalog.manifests = [] result = catalog.search("test") assert result is None - def test_search_by_name(self, mock_github_env): + def test_search_by_name(self, tmp_path, mock_github_env): """Test search by plugin name.""" catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") catalog.manifests = [ create_test_manifest(name="test_plugin", tags=["plugin"]), create_test_manifest(name="another_plugin", tags=["other"]), @@ -368,9 +370,10 @@ def test_search_by_name(self, mock_github_env): assert len(result) == 1 assert result[0].name == "test_plugin" - def test_search_by_tag(self, mock_github_env): + def test_search_by_tag(self, tmp_path, mock_github_env): """Test search by tag.""" catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") catalog.manifests = [ create_test_manifest(name="plugin1", tags=["security"]), create_test_manifest(name="plugin2", tags=["data"]), @@ -380,9 +383,10 @@ def test_search_by_tag(self, mock_github_env): assert len(result) == 1 assert result[0].name == "plugin1" - def test_search_no_match(self, mock_github_env): + def test_search_no_match(self, tmp_path, mock_github_env): """Test search with no matches.""" catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") catalog.manifests = [create_test_manifest(name="test_plugin")] result = catalog.search("nonexistent") assert result is None @@ -1115,7 +1119,7 @@ def test_install_package_with_version_constraint(self, mock_github_env): mock_subprocess.assert_called_once() call_args = mock_subprocess.call_args[0][0] - assert "test_package@>=1.0.0" in " ".join(call_args) + assert "test_package>=1.0.0" in " ".join(call_args) class TestPluginCatalogDownloadFileExtended: diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 95a9fea..039019e 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -514,9 +514,11 @@ def test_creates_new_registry_if_not_exists(self, temp_registry_dir): manifest = create_test_manifest() mock_catalog = Mock() - mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) - with patch("cpex.tools.cli.git_user_name", return_value="test_user"): + with ( + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + patch("cpex.tools.plugin_registry.find_package_path", return_value=Path("/fake/path/to/plugin")), + ): plugin_registry = PluginRegistry() plugin_registry.update(manifest, "monorepo", mock_catalog, "test_user") registry_file = temp_registry_dir / "installed-plugins.json" @@ -536,9 +538,11 @@ def test_updates_existing_registry(self, temp_registry_dir): ) mock_catalog = Mock() - mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/new_plugin")) - with patch("cpex.tools.cli.git_user_name", return_value="test_user"): + with ( + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + patch("cpex.tools.plugin_registry.find_package_path", return_value=Path("/fake/path/to/new_plugin")), + ): plugin_registry = PluginRegistry() plugin_registry.update(manifest, "monorepo", mock_catalog, "test_user") updated_data = json.loads(registry_file.read_text()) @@ -558,17 +562,17 @@ def test_update_with_pypi_installation(self, temp_registry_dir): ) mock_catalog = Mock() - mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/pypi_plugin")) - plugin_registry = PluginRegistry() - plugin_registry.update(manifest, "pypi", mock_catalog, "test_user") + with patch("cpex.tools.plugin_registry.find_package_path", return_value=Path("/fake/path/to/pypi_plugin")): + plugin_registry = PluginRegistry() + plugin_registry.update(manifest, "pypi", mock_catalog, "test_user") - registry_file = temp_registry_dir / "installed-plugins.json" - updated_data = json.loads(registry_file.read_text()) - assert len(updated_data["plugins"]) == 1 - assert updated_data["plugins"][0]["name"] == "pypi_plugin" - assert updated_data["plugins"][0]["package_source"] == "pypi-plugin" - assert updated_data["plugins"][0]["installation_type"] == "pypi" + registry_file = temp_registry_dir / "installed-plugins.json" + updated_data = json.loads(registry_file.read_text()) + assert len(updated_data["plugins"]) == 1 + assert updated_data["plugins"][0]["name"] == "pypi_plugin" + assert updated_data["plugins"][0]["package_source"] == "pypi-plugin" + assert updated_data["plugins"][0]["installation_type"] == "pypi" def test_update_raises_for_monorepo_without_monorepo_metadata(self, temp_registry_dir): """Test monorepo update fails when manifest.monorepo is missing.""" @@ -701,11 +705,11 @@ def test_install_from_monorepo(self, temp_registry_dir): mock_catalog = Mock() mock_catalog.install_folder_via_pip = Mock() - mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) with ( patch("cpex.tools.cli.git_user_name", return_value="test_user"), patch("cpex.tools.cli.update_plugins_config_yaml"), + patch("cpex.tools.plugin_registry.find_package_path", return_value=Path("/fake/path/to/plugin")), ): install_from_manifest(manifest, "monorepo", mock_catalog) mock_catalog.install_folder_via_pip.assert_called_once_with(manifest) @@ -736,13 +740,13 @@ def test_install_monorepo_with_available_plugins(self, temp_registry_dir): mock_catalog = Mock() mock_catalog.search = Mock(return_value=[manifest]) mock_catalog.install_folder_via_pip = Mock() - mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) with ( patch("cpex.tools.cli.inquirer.prompt", return_value={"plugins": 0}), patch("cpex.tools.cli.Console"), patch("cpex.tools.cli.git_user_name", return_value="test_user"), patch("cpex.tools.cli.update_plugins_config_yaml"), + patch("cpex.tools.plugin_registry.find_package_path", return_value=Path("/fake/path/to/plugin")), ): install("test_plugin", "monorepo", mock_catalog) mock_catalog.install_folder_via_pip.assert_called_once() @@ -905,12 +909,12 @@ def test_plugin_install_command(self, temp_registry_dir): patch("cpex.tools.cli.inquirer.prompt", return_value={"plugins": 0}), patch("cpex.tools.cli.git_user_name", return_value="test_user"), patch("cpex.tools.cli.update_plugins_config_yaml"), + patch("cpex.tools.plugin_registry.find_package_path", return_value=Path("/fake/path/to/plugin")), ): mock_catalog = Mock() mock_catalog.update_catalog_with_pyproject = Mock() mock_catalog.search = Mock(return_value=[manifest]) mock_catalog.install_folder_via_pip = Mock() - mock_catalog.find_package_path = Mock(return_value=Path("/fake/path/to/plugin")) mock_catalog_class.return_value = mock_catalog result = runner.invoke(app, ["plugin", "install", "test_plugin", "--type", "monorepo"]) From 93db6abd896a437f9d05126ac55c493968dac6ee Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 21 Apr 2026 10:04:52 -0400 Subject: [PATCH 35/88] chore: stub for local installation Signed-off-by: habeck --- cpex/tools/cli.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index f309aa8..bafc8e7 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -448,6 +448,17 @@ def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: ) update_plugins_config_yaml(manifest=manifest) +def _install_from_local(source: str, catalog: PluginCatalog, use_test: bool = False): + """Handle local-based installation (not yet implemented). + + Args: + source: local path. + catalog: The plugin catalog. + + Raises: + NotImplementedError: local installation is not yet supported. + """ + raise NotImplementedError("Local installation is not yet implemented") def _install_from_git(source: str, catalog: PluginCatalog, use_test: bool = False): """Handle git-based installation (not yet implemented). @@ -531,6 +542,7 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog): "monorepo": _install_from_monorepo, "pypi": _install_from_pypi, "test-pypi": _install_from_pypi, + "local": _install_from_local, } handler = handlers.get(install_type) From df1296f3d2dcf8a03ddb03a0ca52102f2220d543 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 21 Apr 2026 10:47:26 -0400 Subject: [PATCH 36/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/catalog.py | 2 -- cpex/tools/cli.py | 6 ++++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 679dc42..90d419f 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -8,8 +8,6 @@ """ import base64 -import importlib.metadata -import importlib.util import logging import os import subprocess diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index bafc8e7..b1cc5f9 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -448,6 +448,7 @@ def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: ) update_plugins_config_yaml(manifest=manifest) + def _install_from_local(source: str, catalog: PluginCatalog, use_test: bool = False): """Handle local-based installation (not yet implemented). @@ -460,6 +461,7 @@ def _install_from_local(source: str, catalog: PluginCatalog, use_test: bool = Fa """ raise NotImplementedError("Local installation is not yet implemented") + def _install_from_git(source: str, catalog: PluginCatalog, use_test: bool = False): """Handle git-based installation (not yet implemented). @@ -672,8 +674,8 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: "python cpex/tools/cli.py plugin search pii\n" "python cpex/tools/cli.py plugin --type monorepo search pii\n" "python cpex/tools/cli.py plugin --type monorepo install cpex-pii-filter\n" - "python cpex/tools/cli.py plugin --type pypi install \"ExamplePlugin@>=0.1.0\"\n" - "python cpex/tools/cli.py plugin --type test-pypi install \"cpex-plugin-test@>=0.1.1\"\n" + 'python cpex/tools/cli.py plugin --type pypi install "ExamplePlugin@>=0.1.0"\n' + 'python cpex/tools/cli.py plugin --type test-pypi install "cpex-plugin-test@>=0.1.1"\n' "python cpex/tools/cli.py plugin uninstall cpex-pii-filter" ) def plugin( From 13f775a496f12b2726cfa8127e1a8371e5fd8893 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 21 Apr 2026 10:47:47 -0400 Subject: [PATCH 37/88] doc: add README for tools Signed-off-by: habeck --- cpex/tools/README.md | 173 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 173 insertions(+) create mode 100644 cpex/tools/README.md diff --git a/cpex/tools/README.md b/cpex/tools/README.md new file mode 100644 index 0000000..b6c277d --- /dev/null +++ b/cpex/tools/README.md @@ -0,0 +1,173 @@ +## Plugin installation using the cli + +```bash + python cpex/tools/cli.py plugin --help + + Usage: cli.py plugin [OPTIONS] [CMD_ACTION] [SOURCE] + + List, search, install or uninstall plugins. + + default install type is monorepo + Examples: + python cpex/tools/cli.py plugin info pii + python cpex/tools/cli.py plugin search pii + python cpex/tools/cli.py plugin --type monorepo search pii + python cpex/tools/cli.py plugin --type monorepo install cpex-pii-filter + python cpex/tools/cli.py plugin --type pypi install "ExamplePlugin@>=0.1.0" + python cpex/tools/cli.py plugin --type test-pypi install "cpex-plugin-test@>=0.1.1" + python cpex/tools/cli.py plugin uninstall cpex-pii-filter + +╭─ Arguments ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ cmd_action [CMD_ACTION] One of: list|info|install|search|uninstall │ +│ source [SOURCE] The pypi, git, or local folder where the plugin resides │ +╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ --type -t TEXT The types of plugins to list. One of: monorepo|pypi|test-pypi|git|local Defaults to monorepo if unspecified. │ +│ --help Show this message and exit. │ +╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + +``` + + +## Installation catalog and plugin registry + +### Catalog update sequence diagram + +Catalog update from monorepo IBM/cpex-plugins: + +```mermaid +sequenceDiagram + participant cli + participant dotenv + participant catalog + participant pygithub + cli->>catalog: update + dotenv->>catalog: PLUGINS_REPO_URLS + dotenv->>catalog: PLUGINS_GITHUB_TOKEN + catalog->>pygithub: find pyproject.toml files + catalog->>catalog: for each pyproject.toml + catalog->>catalog: extract [project].name + catalog->>pygithub: find plugin-manifest.yaml + pygithub->>catalog: plugin-manifest.yaml + catalog-->catalog: update plugin-manifest.yaml with monorepo details + catalog->>catalog: save plugin manifest to plugin-catalog + catalog->>cli: catalog update completed +``` + +### Plugin installation sequence diagrams +Installation from git monorepo: + +`python cpex/tools/cli.py plugin --type monorepo install pii` + +```mermaid +sequenceDiagram + participant User + participant cli + participant installed_plugin_registry + participant catalog + participant subprocess + participant python + participant pip + participant git + participant monorepo + User->>cli: python cpex/tools/cli.py plugin --type monorepo install pii + cli->>catalog: update + catalog->>monorepo: get available plugins + monorepo->>catalog: available plugins + catalog->>catalog: add monorepo.package_source to downloaded plugin-manifest.yaml + catalog->>cli: available plugins + cli->>User: select plugin from available plugins + User->>cli: selected plugin + cli->>catalog: install selected plugin + catalog->>subprocess: python -m pip install git+ + subprocess->>python: -m pip install git+ + python->>pip: install git+ + pip->>git: download to site-packages + git->>monorepo: download to site-packages + monorepo->>git: package installed + git->>pip: package installed + pip->>python: package installed + python->>subprocess: rc=0 + subprocess->>catalog: plugin installed + catalog->>cli: PluginManifest + cli->>installed_plugin_registry: register plugin PluginManifest + installed_plugin_registry->>cli: plugin registered + cli->>cli: update PLUGINS_CONFIG_FILE (i.e. plugins/config.yaml) + cli->>User: plugin installed OK +``` + + Installation from pypi: + +`python cpex/tools/cli.py --type pypi install >=` + +```mermaid +sequenceDiagram + participant User + participant cli + participant catalog + participant installed_plugin_registry + participant subprocess + participant python + participant pip + participant pypi (Python Package Index) + User->>cli: python cpex/tools/cli.py plugin --type pypi install + cli->>catalog: install_from_pypi( + catalog->>subprocess: python -m pip install + subprocess->>python: -m pip install + python->>pip: install + pip->>pypi (Python Package Index): download to site-packages + pypi (Python Package Index)->>python: downloaded OK + python->>subprocess: rc=0 + subprocess->>catalog: plugin installed + catalog->>catalog: load plugin manifest + catalog->>catalog: package_info.pypi_package= + catalog->>catalog: package_info.version_constraint= + catalog->>catalog: save updated manifest to plugin-catalog + catalog->>cli: PluginManifest + cli->>installed_plugin_registry: register plugin + installed_plugin_registry->>cli: plugin registered + cli->>cli: update PLUGINS_CONFIG_FILE (i.e. plugins/config.yaml) + cli->>User: plugin installed OK +``` +Note: installation from test.pypi.org is also supported using --type test-pypi. e.g: + +`python cpex/tools/cli.py plugin --type test-pypi install "cpex-plugin-test@>=0.1.1" ` + +### Uninstall + +Example uninstall of plugin: +`python cpex/tools/cli.py plugin uninstall cpex-pii-filter` + + +### Pligin information query sequence diagram + +Query information for installed plugins: + +`python cpex/tools/cli.py plugin info` + +```mermaid +sequenceDiagram + participant User + participant cli + participant installed_plugin_registry + User->>cli: python cpex/tools/cli.py plugin info + cli->>installed_plugin_registry: pii + installed_plugin_registry->>cli: InstalledPluginInfo[] + cli->>User: InstalledPluginInfo[] +``` + +Example output: +```zsh + python cpex/tools/cli.py plugin info +{ + "name": "cpex-test-plugin", + "kind": "isolated_venv", + "version": "0.1.1", + "installation_type": "pypi", + "installation_path": "/Users/habeck/.venv/cpex/lib/python3.13/site-packages/cpex_test_plugin", + "installed_at": "2026-04-20T22:09:52.198619+00:00Z", + "installed_by": "habeck", + "package_source": "cpex-test-plugin", + "editable": false +} +``` \ No newline at end of file From 0513024ed52c6a76c6ed11d97e34b7d323ddcef8 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 21 Apr 2026 15:17:46 -0400 Subject: [PATCH 38/88] enh: use cached repo object Signed-off-by: habeck --- cpex/framework/utils.py | 4 +-- cpex/tools/catalog.py | 12 +++---- tests/unit/cpex/tools/test_catalog.py | 47 ++++++++++++++------------- 3 files changed, 32 insertions(+), 31 deletions(-) diff --git a/cpex/framework/utils.py b/cpex/framework/utils.py index b8b9c7a..7eb4a94 100644 --- a/cpex/framework/utils.py +++ b/cpex/framework/utils.py @@ -478,7 +478,7 @@ def render(self, content: Any) -> bytes: ) -def find_package_path( package_name: str) -> Path: +def find_package_path(package_name: str) -> Path: """Locate installed package directory using importlib.metadata. Args: @@ -515,4 +515,4 @@ def find_package_path( package_name: str) -> Path: except Exception as e: if isinstance(e, RuntimeError): raise - raise RuntimeError(f"Error locating package {package_name}: {str(e)}") from e \ No newline at end of file + raise RuntimeError(f"Error locating package {package_name}: {str(e)}") from e diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 90d419f..b9e1ba4 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -151,7 +151,7 @@ def download_contents(self, git_url: str, headers, path: str, repo_url: httpx.UR else: logger.error("Failed to download file: %s status_code: %d", git_url, result.status_code) - def download_file(self, repo_path: str, item: dict, headers) -> str | None: + def download_file(self, repo_path: str, item: dict, headers, gh_repo) -> str | None: """Download the content of a github file Args: @@ -163,7 +163,6 @@ def download_file(self, repo_path: str, item: dict, headers) -> str | None: """ # Get the repository using PyGithub try: - gh_repo = self.gh.get_repo(repo_path) file_content = gh_repo.get_contents(item["path"]) content = file_content.decoded_content.decode("utf-8") return content @@ -245,6 +244,7 @@ def _process_manifest_item( headers, relpath: Path, repo_path: str, + gh_repo, ) -> bool: """Process a single manifest search result item. @@ -265,7 +265,7 @@ def _process_manifest_item( return False # manifest_data = self.download_file(repo_path=repo_path, git_url=item["git_url"], headers=headers) - manifest_data = self.download_file(repo_path=repo_path, item=item, headers=headers) + manifest_data = self.download_file(repo_path=repo_path, item=item, headers=headers, gh_repo=gh_repo) if manifest_data is None: logger.error("Failed to download plugin-manifest from %s", member) return False @@ -278,7 +278,7 @@ def _process_manifest_item( return True def find_and_save_plugin_manifest( - self, member: str, name: str, repo_url: httpx.URL, headers + self, member: str, name: str, repo_url: httpx.URL, headers, gh_repo ) -> PluginManifest | None: """Find the plugin-manifest.yaml relative to the supplied member folder, download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes @@ -303,7 +303,7 @@ def find_and_save_plugin_manifest( return None for item in items: - if self._process_manifest_item(item, name, member, repo_url, headers, relpath, repo_path): + if self._process_manifest_item(item, name, member, repo_url, headers, relpath, repo_path, gh_repo): break # Successfully processed first valid manifest return None @@ -336,7 +336,7 @@ def _process_pyproject(self, gh_repo, item, repo_url: httpx.URL, headers) -> Non # Find and save the plugin manifest self.find_and_save_plugin_manifest( - member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers + member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers, gh_repo=gh_repo ) def update_catalog_with_pyproject(self) -> bool: diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 3f959e8..01b5f66 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -13,6 +13,7 @@ import subprocess import sys from pathlib import Path +from unittest import mock from unittest.mock import MagicMock, Mock, patch, mock_open # Third-Party @@ -424,7 +425,7 @@ def test_install_from_pypi_success(self, tmp_path, mock_github_env): """Test successful installation from PyPI.""" with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, - patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, ): # Create manifest file package_dir = tmp_path / "test_package" @@ -469,8 +470,8 @@ def test_install_from_pypi_package_not_found(self, mock_github_env): """Test when package is not found after installation.""" with ( patch("cpex.tools.catalog.subprocess.run"), - patch("cpex.tools.catalog.importlib.metadata.distributions", return_value=[]), - patch("cpex.tools.catalog.importlib.util.find_spec", return_value=None), + patch("cpex.framework.utils.importlib.metadata.distributions", return_value=[]), + patch("cpex.framework.utils.importlib.util.find_spec", return_value=None), ): catalog = PluginCatalog() with pytest.raises(RuntimeError, match="Could not find installed package"): @@ -480,7 +481,7 @@ def test_install_from_pypi_manifest_not_found(self, tmp_path, mock_github_env): """Test when manifest file is not found in package.""" with ( patch("cpex.tools.catalog.subprocess.run"), - patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, ): # Setup mock distribution without plugin-manifest.yaml file mock_dist = Mock() @@ -503,7 +504,7 @@ def test_install_from_pypi_invalid_manifest(self, tmp_path, mock_github_env): """Test when manifest file is invalid.""" with ( patch("cpex.tools.catalog.subprocess.run"), - patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, ): package_dir = tmp_path / "test_package" package_dir.mkdir() @@ -605,7 +606,7 @@ def test_download_file_success(self, mock_github_env): catalog.gh.get_repo = Mock(return_value=mock_repo) item = {"path": "test_plugin/plugin-manifest.yaml"} - result = catalog.download_file("org/repo", item, {}) + result = catalog.download_file("org/repo", item, {}, mock_repo) assert result == manifest_content @@ -613,12 +614,11 @@ def test_download_file_failure(self, mock_github_env): """Test failed file download.""" with patch("cpex.tools.catalog.logger") as mock_logger: catalog = PluginCatalog() - # Mock the GitHub repository to raise an exception - catalog.gh.get_repo = Mock(side_effect=Exception("Not found")) - + mock_repo = Mock(side_effect=Exception("Not found")) + mock_repo.get_contents.return_value = Exception("Not found") item = {"path": "test_plugin/plugin-manifest.yaml"} - result = catalog.download_file("org/repo", item, {}) + result = catalog.download_file("org/repo", item, {}, mock_repo) assert result is None mock_logger.error.assert_called_once() @@ -654,7 +654,7 @@ def test_find_and_save_plugin_manifest_success(self, tmp_path, mock_github_env): catalog.gh.get_repo = Mock(return_value=mock_repo) repo_url = httpx.URL("https://github.com/org/repo") - catalog.find_and_save_plugin_manifest("test_plugin", "test_plugin", repo_url, {}) + catalog.find_and_save_plugin_manifest("test_plugin", "test_plugin", repo_url, {}, mock_repo) saved_file = tmp_path / "catalog" / "test_plugin" / "plugin-manifest.yaml" assert saved_file.exists() @@ -738,7 +738,7 @@ def test_install_from_pypi_with_version_constraint(self, tmp_path, mock_github_e """Test installation with version constraint.""" with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, - patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, ): package_dir = tmp_path / "test_package" package_dir.mkdir() @@ -777,7 +777,7 @@ def test_install_from_pypi_with_default_configs(self, tmp_path, mock_github_env) """Test installation with default_configs field.""" with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, - patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, ): package_dir = tmp_path / "test_package" package_dir.mkdir() @@ -813,7 +813,7 @@ def test_install_from_pypi_with_existing_package_info(self, tmp_path, mock_githu """Test installation with existing package_info.""" with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, - patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, ): package_dir = tmp_path / "test_package" package_dir.mkdir() @@ -855,7 +855,7 @@ def test_install_from_pypi_with_null_default_configs_in_manifest(self, tmp_path, """Test installation with null default_configs in manifest.""" with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, - patch("cpex.tools.catalog.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, ): package_dir = tmp_path / "test_package" package_dir.mkdir() @@ -1001,8 +1001,8 @@ def test_process_manifest_item_not_yaml(self, tmp_path, mock_github_env): repo_url = httpx.URL("https://github.com/org/repo") relpath = tmp_path / "catalog" / "plugin1" / "plugin-manifest.yaml" - - result = catalog._process_manifest_item(item, "plugin1", "plugin1", repo_url, {}, relpath, "org/repo") + mock_repo = Mock() + result = catalog._process_manifest_item(item, "plugin1", "plugin1", repo_url, {}, relpath, "org/repo", gh_repo=mock_repo) assert result is False mock_logger.warning.assert_called() @@ -1021,11 +1021,11 @@ def test_process_manifest_item_download_failure(self, tmp_path, mock_github_env) "path": "plugin1/plugin-manifest.yaml", "git_url": "https://api.github.com/file" } - + mock_repo = Mock() repo_url = httpx.URL("https://github.com/org/repo") relpath = tmp_path / "catalog" / "plugin1" / "plugin-manifest.yaml" - result = catalog._process_manifest_item(item, "plugin1", "plugin1", repo_url, {}, relpath, "org/repo") + result = catalog._process_manifest_item(item, "plugin1", "plugin1", repo_url, {}, relpath, "org/repo", gh_repo=mock_repo) assert result is False mock_logger.error.assert_called() @@ -1041,9 +1041,9 @@ def test_find_and_save_plugin_manifest_search_returns_none(self, tmp_path, mock_ # Mock _search_github_code to return None catalog._search_github_code = Mock(return_value=None) - + mock_repo = Mock() repo_url = httpx.URL("https://github.com/org/repo") - result = catalog.find_and_save_plugin_manifest("plugin1", "plugin1", repo_url, {}) + result = catalog.find_and_save_plugin_manifest("plugin1", "plugin1", repo_url, {}, mock_repo) assert result is None @@ -1132,9 +1132,10 @@ def test_download_file_with_exception_message(self, mock_github_env): # Mock to raise exception catalog.gh.get_repo = Mock(side_effect=Exception("API error")) - + mock_repo = Mock() + mock_repo.get_contents = Mock(side_effect=Exception("API error")) item = {"path": "test/file.yaml"} - result = catalog.download_file("org/repo", item, {}) + result = catalog.download_file("org/repo", item, {}, gh_repo=mock_repo) assert result is None # Check that error was logged with the item path From 5a022aec1f7986cca5a4a56af7ef0cd5082a77d0 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 21 Apr 2026 16:52:07 -0400 Subject: [PATCH 39/88] chore: use rich emoji Signed-off-by: habeck --- cpex/tools/cli.py | 14 +++++++------- tests/unit/cpex/tools/test_cli.py | 10 +++++----- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index b1cc5f9..6a8803f 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -496,7 +496,7 @@ def _install_from_monorepo(source: str, catalog: PluginCatalog, use_test: bool = with console.status(f"Installing plugin {selected_plugin.name}...", spinner="dots"): install_from_manifest(selected_plugin, "monorepo", catalog=catalog) - console.print(f"✅ {selected_plugin.name} installation complete.") + console.print(f":white_heavy_check_mark: {selected_plugin.name} installation complete.") def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = False): @@ -517,11 +517,11 @@ def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = Fal ) if manifest is None: - console.print(f"❌ Failed to install {package_name}") + console.print(f":x: Failed to install {package_name}") return _finalize_installation(manifest, "pypi", catalog) - console.print(f"✅ {package_name} installation complete.") + console.print(f":white_heavy_check_mark: {package_name} installation complete.") def install(source: str, install_type: str | None, catalog: PluginCatalog): @@ -622,7 +622,7 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: break if installed_plugin is None: - console.print(f"❌ Plugin '{plugin_name}' is not installed.") + console.print(f":x: Plugin '{plugin_name}' is not installed.") return # Confirm uninstallation @@ -659,10 +659,10 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: else: console.print(f"Plugin {plugin_name} not found in plugins config.yaml.") - console.print(f"✅ {plugin_name} uninstalled successfully.") + console.print(f":white_heavy_check_mark: {plugin_name} uninstalled successfully.") except Exception as e: - console.print(f"❌ Failed to uninstall {plugin_name}: {str(e)}") + console.print(f":x: Failed to uninstall {plugin_name}: {str(e)}") logger.error("Uninstall error: %s", str(e), exc_info=True) @@ -697,7 +697,7 @@ def plugin( # For uninstall, we don't need to update the catalog if cmd_action == "uninstall": if source is None: - console.print("❌ Please specify a plugin name to uninstall.") + console.print(":x: Please specify a plugin name to uninstall.") return pc = PluginCatalog() return uninstall(source, catalog=pc) diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 039019e..2717fc8 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -1019,7 +1019,7 @@ def test_uninstall_plugin_not_found(self, temp_registry_dir): with patch("cpex.tools.cli.console") as mock_console: uninstall("nonexistent_plugin", mock_catalog) - mock_console.print.assert_called_with("❌ Plugin 'nonexistent_plugin' is not installed.") + mock_console.print.assert_called_with(":x: Plugin 'nonexistent_plugin' is not installed.") def test_uninstall_cancelled_by_user(self, temp_registry_dir): """Test uninstall cancelled by user.""" @@ -1096,7 +1096,7 @@ def test_uninstall_success(self, temp_registry_dir): mock_catalog.uninstall_package.assert_called_once_with("test_plugin") mock_remove.assert_called_once_with(test_manifest) - mock_console.print.assert_any_call("✅ test_plugin uninstalled successfully.") + mock_console.print.assert_any_call(":white_heavy_check_mark: test_plugin uninstalled successfully.") def test_uninstall_handles_exception(self, temp_registry_dir): """Test uninstall handles exceptions gracefully.""" @@ -1133,7 +1133,7 @@ def test_uninstall_handles_exception(self, temp_registry_dir): uninstall("test_plugin", mock_catalog) - mock_console.print.assert_any_call("❌ Failed to uninstall test_plugin: Uninstall failed") + mock_console.print.assert_any_call(":x: Failed to uninstall test_plugin: Uninstall failed") mock_logger.error.assert_called_once() @@ -1151,7 +1151,7 @@ def test_plugin_uninstall_command_without_plugin_name(self, temp_registry_dir): result = runner.invoke(app, ["plugin", "uninstall"]) assert result.exit_code == 0 - mock_console.print.assert_called_with("❌ Please specify a plugin name to uninstall.") + mock_console.print.assert_called_with(":x: Please specify a plugin name to uninstall.") def test_plugin_uninstall_command_success(self, temp_registry_dir): """Test successful plugin uninstall command.""" @@ -1207,7 +1207,7 @@ def test_plugin_uninstall_command_not_found(self, temp_registry_dir): result = runner.invoke(app, ["plugin", "uninstall", "nonexistent_plugin"]) assert result.exit_code == 0 - mock_console.print.assert_called_with("❌ Plugin 'nonexistent_plugin' is not installed.") + mock_console.print.assert_called_with(":x: Plugin 'nonexistent_plugin' is not installed.") class TestCatalogUninstallPackage: From ee70ddbe791bfbac4c84b552905bd1194cd0713f Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 12:24:04 -0400 Subject: [PATCH 40/88] ptf: workaround for version mis-match of cpex dependency in plugin Signed-off-by: habeck --- cpex/framework/isolated/worker.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/cpex/framework/isolated/worker.py b/cpex/framework/isolated/worker.py index 426b660..6a6dbe2 100644 --- a/cpex/framework/isolated/worker.py +++ b/cpex/framework/isolated/worker.py @@ -162,7 +162,7 @@ async def main(): while True: try: # Read one line at a time - if tp.plugin_config: + if tp.plugin_config and "max_content_size" in tp.plugin_config: line = sys.stdin.readline(limit=int(tp.plugin_config.max_content_size)) else: # on the first read, the plugin_config has not yet been initialized so just read. @@ -198,14 +198,17 @@ async def main(): serialized_response = json.dumps(serializable_response) # Send response back to parent (one line per response) if tp.plugin_config: - if len(serialized_response) > tp.plugin_config.max_content_size: - logger.error("Serialized response exceeds max content size") - error_response = { - "status": "error", - "message": "Serialized response exceeds max content size", - "request_id": request_id, - } - serialized_response = json.dumps(error_response) + # workaround until cpex is updated beyond dev11 + # cpex is a dependency of the plugin and as such it's PluginConfig does not contain the max_content_size yet. + if "max_content_size" in tp.plugin_config: + if len(serialized_response) > tp.plugin_config.max_content_size: + logger.error("Serialized response exceeds max content size") + error_response = { + "status": "error", + "message": "Serialized response exceeds max content size", + "request_id": request_id, + } + serialized_response = json.dumps(error_response) print(serialized_response, flush=True) except json.JSONDecodeError as e: From 63d2e8d45657aa075ed5fb40f31b9fa4ab9557d1 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 12:25:15 -0400 Subject: [PATCH 41/88] fix: download install targets to a temp folder to avoid installing to incorrect venv. Signed-off-by: habeck --- cpex/tools/catalog.py | 326 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 293 insertions(+), 33 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index b9e1ba4..808056d 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -15,8 +15,14 @@ import tomllib from pathlib import Path from typing import Any, Optional +import uuid import httpx +import shutil +import tarfile +import tempfile +import zipfile + import yaml from github import Auth, Github @@ -182,8 +188,10 @@ def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict """ try: # Build search query for PyGithub - query = f"repo:{repo_path} path:{member} filename:plugin-manifest extension:yaml" - + if member is not None: + query = f"repo:{repo_path} path:{member} filename:plugin-manifest extension:yaml" + else: + query = f"repo:{repo_path} filename:plugin-manifest extension:yaml" # Use PyGithub's search_code method search_results = self.gh.search_code(query=query) @@ -219,14 +227,17 @@ def _transform_manifest_data(self, manifest_content: dict, name: str, member: st Returns: Transformed manifest data with monorepo metadata """ - package_source = f"{repo_url}#subdirectory={member}" + if member is None: + package_source = str(repo_url) + else: + package_source = f"{repo_url}#subdirectory={member}" manifest_content["name"] = name manifest_content.setdefault("tags", []) manifest_content["monorepo"] = { "package_source": package_source, "repo_url": str(repo_url), - "package_folder": member, + "package_folder": member if member is not None else "", } # Normalize default_configs -> default_config @@ -321,7 +332,10 @@ def _process_pyproject(self, gh_repo, item, repo_url: httpx.URL, headers) -> Non Exception: If processing fails (caller should handle) """ # Get the directory path (remove filename) - member = item.path.removesuffix("/" + item.name) + if item.path.find("/") == -1: + member = None + else: + member = item.path.removesuffix("/" + item.name) # Download pyproject.toml content using PyGithub file_content = gh_repo.get_contents(item.path) @@ -352,7 +366,7 @@ def update_catalog_with_pyproject(self) -> bool: repo_cache: dict[str, Any] = {} for repo in self.monorepos: - repo_url = httpx.URL(repo) + repo_url = httpx.URL(repo.strip()) repo_path = repo_url.path.removeprefix("/") try: @@ -445,9 +459,10 @@ def find(self, plugin_name: str) -> Optional[PluginManifest]: return manifest return None - def install_folder_via_pip(self, manifest: PluginManifest) -> None: + def install_folder_via_pip(self, manifest: PluginManifest) -> Path | None: """ - Runs a pip install using subfolder syntax + Runs a pip install using subfolder syntax for monorepo plugins. + For isolated_venv plugins, checks manifest kind BEFORE installing to avoid dependency conflicts. e.g. "git+https://github.com[extra]&subdirectory=folder_name" Args: @@ -459,14 +474,24 @@ def install_folder_via_pip(self, manifest: PluginManifest) -> None: if manifest.monorepo is None: raise RuntimeError("PluginManifest.monorepo can not be None.") try: - # safe_path = package_source.path.strip("/") - # org = safe_path.split("/")[0] - # safe_path = safe_path.replace(org, "", 1).lstrip("/") repo_url = f"git+{manifest.monorepo.package_source}" - subprocess.run( - [self.python_executable, "-m", "pip", "install", repo_url], check=True, capture_output=True, text=True - ) - logger.info("Successfully installed package: %s", manifest.name) + + plugin_path = None + # Check manifest kind BEFORE installing + if manifest.kind == "isolated_venv": + logger.info("Detected isolated_venv plugin from monorepo: %s", manifest.name) + # Install the package to make it available for venv initialization + package_path = self._download_monorepo_folder_to_temp(repo_url, manifest.name) + plugin_path = self._initialize_isolated_venv(manifest, package_path) + logger.info("Isolated venv initialized. Plugin will be auto-installed via requirements.txt") + else: + # For non-isolated plugins, install normally into CLI's venv + logger.info("Installing non-isolated plugin from monorepo: %s", manifest.name) + subprocess.run( + [self.python_executable, "-m", "pip", "install", repo_url], check=True, capture_output=True, text=True + ) + logger.info("Successfully installed package: %s", manifest.name) + return plugin_path except subprocess.CalledProcessError as e: raise RuntimeError(f"Failed to install {manifest.name}: {e.stderr}") from e @@ -606,17 +631,235 @@ def _persist_manifest(self, manifest: PluginManifest, package_name: str) -> None except Exception as e: raise RuntimeError(f"Failed to save manifest for {package_name}: {str(e)}") from e + def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> Path: + """Download monorepo folder to temporary directory. + Args: + repo_url: The URL of the monorepo. + Returns: + Path to the downloaded monorepo folder. + """ + try: + tmpid = uuid.uuid4() + temp_dir = Path(tempfile.mkdtemp(prefix=f"cpex_plugin_{tmpid}_")) + logger.info("Downloading monorepo folder to %s", temp_dir) + + # Download package without installing + download_args = [ + self.python_executable, + "-m", + "pip", + "download", + "--no-deps", # Don't download dependencies + "--dest", + str(temp_dir), + ] + download_args.append(repo_url) + + subprocess.run(download_args, check=True, capture_output=True, text=True) + + # Find the downloaded file + downloaded_files = list(temp_dir.glob("*")) + if not downloaded_files: + raise RuntimeError(f"No files downloaded for {package_name}") + package_file = downloaded_files[0] + extract_dir = temp_dir / "extracted" + extract_dir.mkdir() + + # Extract the package + if package_file.suffix == ".zip" or package_file.name.endswith(".zip"): + with zipfile.ZipFile(package_file, "r") as zip_ref: + zip_ref.extractall(extract_dir) + elif package_file.suffix in [".gz", ".bz2"] or ".tar" in package_file.name: + with tarfile.open(package_file, "r:*") as tar_ref: + tar_ref.extractall(extract_dir) + else: + raise RuntimeError(f"Unsupported package format: {package_file}") + + logger.info("Downloaded and extracted %s to %s", package_name, extract_dir) + return extract_dir + + except subprocess.CalledProcessError as e: + raise RuntimeError(f"Failed to download {package_name}: {e.stderr}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error downloading {package_name}: {str(e)}") from e + + + def _download_package_to_temp( + self, package_name: str, version_constraint: str | None, use_test: bool = False + ) -> Path: + """Download package to a temporary directory without installing it. + + Args: + package_name: The PyPI package name to download. + version_constraint: Optional version constraint. + use_test: Whether to use test.pypi.org. + + Returns: + Path to the downloaded package directory. + + Raises: + RuntimeError: If download fails. + """ + + try: + # Create temporary directory + temp_dir = Path(tempfile.mkdtemp(prefix=f"cpex_plugin_{package_name}_")) + + # Validate package name and constraint format + ppi = PluginPackageInfo(pypi_package=package_name, version_constraint=version_constraint) + tgt = ppi.pypi_package + if ppi.version_constraint is not None: + tgt = f"{tgt}{ppi.version_constraint}" + + # Download package without installing + download_args = [ + self.python_executable, + "-m", + "pip", + "download", + "--no-deps", # Don't download dependencies + "--dest", + str(temp_dir), + ] + + if use_test: + download_args.extend(["--index-url", "https://test.pypi.org/simple/"]) + + download_args.append(tgt) + + subprocess.run(download_args, check=True, capture_output=True, text=True) + + # Find the downloaded file + downloaded_files = list(temp_dir.glob("*")) + if not downloaded_files: + raise RuntimeError(f"No files downloaded for {package_name}") + + package_file = downloaded_files[0] + extract_dir = temp_dir / "extracted" + extract_dir.mkdir() + + # Extract the package + if package_file.suffix == ".whl" or package_file.name.endswith(".whl"): + with zipfile.ZipFile(package_file, "r") as zip_ref: + zip_ref.extractall(extract_dir) + elif package_file.suffix in [".gz", ".bz2"] or ".tar" in package_file.name: + with tarfile.open(package_file, "r:*") as tar_ref: + tar_ref.extractall(extract_dir) + else: + raise RuntimeError(f"Unsupported package format: {package_file}") + + logger.info("Downloaded and extracted %s to %s", package_name, extract_dir) + return extract_dir + + except subprocess.CalledProcessError as e: + raise RuntimeError(f"Failed to download {package_name}: {e.stderr}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error downloading {package_name}: {str(e)}") from e + + def _find_manifest_in_extracted_package(self, extract_dir: Path, package_name: str) -> Path: + """Find plugin-manifest.yaml in extracted package. + + Args: + extract_dir: Directory where package was extracted. + package_name: Name of the package. + + Returns: + Path to plugin-manifest.yaml. + + Raises: + FileNotFoundError: If manifest not found. + """ + # Search for plugin-manifest.yaml in the extracted directory + manifest_files = list(extract_dir.rglob("plugin-manifest.yaml")) + + if not manifest_files: + raise FileNotFoundError(f"plugin-manifest.yaml not found in {package_name} package") + + # Return the first manifest found + return manifest_files[0] + + def _find_requirements_in_extracted_package(self, extract_dir: Path, package_name: str, requirements_file: str) -> Path: + """Find plugin-manifest.yaml in extracted package. + + Args: + extract_dir: Directory where package was extracted. + package_name: Name of the package. + + Returns: + Path to plugin-manifest.yaml. + + Raises: + FileNotFoundError: If manifest not found. + """ + # Search for plugin-manifest.yaml in the extracted directory + manifest_files = list(extract_dir.rglob(requirements_file)) + + if not manifest_files: + raise FileNotFoundError(f"requirements file {requirements_file} not found in {package_name} package") + + # Return the first manifest found + return manifest_files[0] + + + def _initialize_isolated_venv(self, manifest: PluginManifest, package_path: Path) -> Path: + """Initialize isolated venv for a plugin without installing it into the CLI's venv. + + This method creates and initializes the target venv for isolated_venv plugins, + allowing the plugin's requirements.txt to self-reference and auto-install the plugin. + + Args: + manifest: The plugin manifest. + package_path: Path to the installed package directory. + + Raises: + RuntimeError: If venv initialization fails. + """ + try: + # Import here to avoid circular dependency + from cpex.framework.isolated.client import IsolatedVenvPlugin + from cpex.framework.models import PluginMode + + logger.info("Initializing isolated venv for plugin: %s", manifest.name) + + # Create a temporary PluginConfig from the manifest + plugin_config = manifest.create_instance_config( + instance_name=manifest.name, + mode=PluginMode.SEQUENTIAL, # Mode doesn't matter for initialization + priority=100, + ) + + # Create an IsolatedVenvPlugin instance + isolated_plugin = IsolatedVenvPlugin( + config=plugin_config, + plugin_dirs=[str(self.plugin_folder)], + ) + # TODO: sec - prevent path traversal on user supplied requirements file path. + source_path = self._find_requirements_in_extracted_package(package_path, manifest.name, manifest.default_config["requirements_file"]) + shutil.copy(source_path, isolated_plugin.plugin_path / manifest.default_config["requirements_file"]) + # Initialize the venv (this will create venv and install requirements) + import asyncio + + asyncio.run(isolated_plugin.initialize()) + + logger.info("Successfully initialized isolated venv for %s", manifest.name) + + return isolated_plugin.plugin_path + + except Exception as e: + raise RuntimeError(f"Failed to initialize isolated venv for {manifest.name}: {str(e)}") from e + def install_from_pypi( self, plugin_package_name: str, version_constraint: str | None = None, use_pytest: bool = False ) -> PluginManifest: """Install Python package from PyPI and load its plugin-manifest.yaml. This method performs the following steps: - 1. Installs the package from PyPI - 2. Locates the installed package directory - 3. Loads and parses the plugin-manifest.yaml - 4. Normalizes and validates the manifest data - 5. Persists the manifest to the plugin catalog + 1. Downloads package to check manifest (without installing for isolated_venv) + 2. Loads and parses the plugin-manifest.yaml + 3. Normalizes and validates the manifest data + 4. For isolated_venv plugins: initializes the target venv (plugin auto-installs via requirements.txt) + 5. For other plugins: installs normally into CLI's venv + 6. Persists the manifest to the plugin catalog Args: plugin_package_name: The name of the package hosted on PyPI. @@ -629,24 +872,41 @@ def install_from_pypi( RuntimeError: If any step of the installation process fails. FileNotFoundError: If plugin-manifest.yaml is not found in the package. """ - # Step 1: Install the package - self._install_package(plugin_package_name, version_constraint, use_pytest) - # Step 2: Find the package location where plugin-manifest.yaml resides - package_path = find_package_path(plugin_package_name) + # Step 1: Download package to temporary location to read manifest + temp_extract_dir = self._download_package_to_temp(plugin_package_name, version_constraint, use_pytest) + + try: + # Step 2: Find and load the manifest file + manifest_path = self._find_manifest_in_extracted_package(temp_extract_dir, plugin_package_name) + manifest_data = self._load_manifest_file(manifest_path) + + # Step 3: Normalize and validate the manifest + manifest = self._normalize_manifest_data(manifest_data, plugin_package_name, version_constraint) + + package_path = manifest_path.parent - # Step 3: Load the manifest file - manifest_path = package_path / "plugin-manifest.yaml" - manifest_data = self._load_manifest_file(manifest_path) + plugin_path = None + # Step 4: Handle based on plugin kind + if manifest.kind == "isolated_venv": + logger.info("Detected isolated_venv plugin: %s", manifest.name) + plugin_path = self._initialize_isolated_venv(manifest, package_path) + logger.info("Isolated venv initialized. Plugin auto-installed via requirements.txt") + else: + # For non-isolated plugins, install normally into CLI's venv + logger.info("Installing non-isolated plugin: %s", manifest.name) + self._install_package(plugin_package_name, version_constraint, use_pytest) - # Step 4: Normalize and validate the manifest - manifest = self._normalize_manifest_data(manifest_data, plugin_package_name, version_constraint) + # Step 5: Persist to catalog + self._persist_manifest(manifest, plugin_package_name) - # Step 5: Persist to catalog - self._persist_manifest(manifest, plugin_package_name) + logger.info("Successfully installed and cataloged %s", plugin_package_name) + return manifest, plugin_path - logger.info("Successfully installed and cataloged %s", plugin_package_name) - return manifest + finally: + # Clean up temporary directory + if temp_extract_dir.exists(): + shutil.rmtree(temp_extract_dir.parent) def uninstall_package(self, package_name: str) -> bool: """Uninstall a Python package using pip. From af48a1b6c079312566d1c649efa9ce80f90785af Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 12:26:03 -0400 Subject: [PATCH 42/88] enh: pass the plugin install path to the update method, as isolated_venv plugins are not installed in the current venv. Signed-off-by: habeck --- cpex/tools/plugin_registry.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index f85397b..4230c07 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -38,7 +38,7 @@ def __init__(self, *args, **kwargs): self.registry = InstalledPluginRegistry() def update( - self, manifest: PluginManifest, installation_type: str, catalog: PluginCatalog, git_user_name: str + self, manifest: PluginManifest, installation_type: str, catalog: PluginCatalog, git_user_name: str, plugin_path: Path | None = None ) -> None: """ Given a plugin manifest, register it in the plugin registry. @@ -64,7 +64,7 @@ def update( else: raise ValueError(f"Invalid installation type: {installation_type}") - installation_path = find_package_path(manifest.name) + installation_path = plugin_path if plugin_path is not None else find_package_path(manifest.name) ipi: InstalledPluginInfo = InstalledPluginInfo( name=manifest.name, From 5c467f19a9d7ee1548c92011bfecee5e50678207 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 12:27:12 -0400 Subject: [PATCH 43/88] enh: the catalog now returns the install path for isolated_venv plugins Signed-off-by: habeck --- cpex/tools/cli.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 6a8803f..cd55240 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -352,11 +352,11 @@ def install_from_manifest(manifest: PluginManifest, installation_type: str, cata # download the plugin to the plugins folder if installation_type == "monorepo": logger.info("installation type: %s", installation_type) - catalog.install_folder_via_pip(manifest) + plugin_path = catalog.install_folder_via_pip(manifest) plugin_registry: PluginRegistry = PluginRegistry() # add the newly downloaded plugin to the registry plugin_registry.update( - manifest=manifest, installation_type=installation_type, catalog=catalog, git_user_name=git_user_name() + manifest=manifest, installation_type=installation_type, catalog=catalog, git_user_name=git_user_name(), plugin_path=plugin_path ) update_plugins_config_yaml(manifest) @@ -434,7 +434,7 @@ def _parse_pypi_source(source: str) -> tuple[str, Optional[str]]: return package_name, version_constraint -def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: PluginCatalog): +def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: PluginCatalog, plugin_path: Path | None = None): """Common finalization steps for plugin installation. Args: @@ -444,7 +444,7 @@ def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: """ plugin_registry = PluginRegistry() plugin_registry.update( - manifest=manifest, installation_type=install_type, catalog=catalog, git_user_name=git_user_name() + manifest=manifest, installation_type=install_type, catalog=catalog, git_user_name=git_user_name(), plugin_path=plugin_path ) update_plugins_config_yaml(manifest=manifest) @@ -512,7 +512,7 @@ def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = Fal package_name, version_constraint = _parse_pypi_source(source) with console.status(f"Installing plugin {package_name} via pypi", spinner="dots"): - manifest = catalog.install_from_pypi( + manifest, plugin_path = catalog.install_from_pypi( plugin_package_name=package_name, version_constraint=version_constraint, use_pytest=use_test ) @@ -520,7 +520,7 @@ def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = Fal console.print(f":x: Failed to install {package_name}") return - _finalize_installation(manifest, "pypi", catalog) + _finalize_installation(manifest, "pypi", catalog, plugin_path) console.print(f":white_heavy_check_mark: {package_name} installation complete.") From c0421cf0aa08307e8b4e7d73e87cf80a06c01084 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 12:49:51 -0400 Subject: [PATCH 44/88] chore: unit test updates Signed-off-by: habeck --- cpex/tools/catalog.py | 19 +- tests/unit/cpex/tools/test_catalog.py | 322 +++++++++++++------------- 2 files changed, 171 insertions(+), 170 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 808056d..0d7293f 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -63,12 +63,7 @@ def create_folder(self, base_path, rel_path): """ Creates the base_path / rel_path folder to store data in. """ - # elements = rel_path.split("/") - # new_path = Path() - # for i in range(len(elements)): - # new_path = new_path / elements[i] relpath = Path(base_path) / rel_path - # logger.info("relpath: %s", relpath) os.makedirs(relpath, exist_ok=True) def create_plugin_folder(self, path: str): @@ -82,13 +77,6 @@ def create_catalog_folder(self, path: str): Creates the OUTPUT_FOLDER/path folder to store the plugin-manifest.yaml file in. """ self.create_folder(self.catalog_folder, path) - # elements = path.split("/") - # new_path = Path() - # for i in range(len(elements) - 1): - # new_path = new_path / elements[i] - # relpath = Path(OUTPUT_FOLDER / new_path) - # # logger.info("relpath: %s", relpath) - # os.makedirs(relpath, exist_ok=True) def save_manifest(self, manifest: PluginManifest, path): """Save a pypi installed manifest to the plugin catalog. @@ -834,8 +822,9 @@ def _initialize_isolated_venv(self, manifest: PluginManifest, package_path: Path plugin_dirs=[str(self.plugin_folder)], ) # TODO: sec - prevent path traversal on user supplied requirements file path. - source_path = self._find_requirements_in_extracted_package(package_path, manifest.name, manifest.default_config["requirements_file"]) - shutil.copy(source_path, isolated_plugin.plugin_path / manifest.default_config["requirements_file"]) + requirements_file = manifest.default_config.get("requirements_file", "requirements.txt") + source_path = self._find_requirements_in_extracted_package(package_path, manifest.name, requirements_file) + shutil.copy(source_path, isolated_plugin.plugin_path / requirements_file) # Initialize the venv (this will create venv and install requirements) import asyncio @@ -850,7 +839,7 @@ def _initialize_isolated_venv(self, manifest: PluginManifest, package_path: Path def install_from_pypi( self, plugin_package_name: str, version_constraint: str | None = None, use_pytest: bool = False - ) -> PluginManifest: + ) -> tuple[PluginManifest, Path | None]: """Install Python package from PyPI and load its plugin-manifest.yaml. This method performs the following steps: diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 01b5f66..0e56117 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -423,103 +423,115 @@ class TestPluginCatalogInstallFromPypi: def test_install_from_pypi_success(self, tmp_path, mock_github_env): """Test successful installation from PyPI.""" + # Create manifest file in temp directory + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + package_dir = extract_dir / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + with ( + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("shutil.rmtree") as mock_rmtree, ): - # Create manifest file - package_dir = tmp_path / "test_package" - package_dir.mkdir() - manifest_data = { - "name": "test_package", - "version": "1.0.0", - "kind": "native", - "description": "Test", - "author": "Test Author", - "tags": ["test"], - "available_hooks": ["tools"], - "default_config": {}, - } - manifest_file = package_dir / "plugin-manifest.yaml" - manifest_file.write_text(yaml.safe_dump(manifest_data)) - - # Setup mock distribution with plugin-manifest.yaml file + # Setup mock distribution mock_dist = Mock() mock_dist.name = "test_package" - mock_manifest_file = Mock() - mock_manifest_file.name = "plugin-manifest.yaml" - mock_dist.files = [mock_manifest_file] - mock_dist.locate_file.return_value = manifest_file mock_distributions.return_value = [mock_dist] catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") - result = catalog.install_from_pypi("test_package") + manifest, plugin_path = catalog.install_from_pypi("test_package") + # Should call subprocess.run for non-isolated plugin mock_subprocess.assert_called_once() - assert result.name == "test_package" + assert manifest.name == "test_package" + assert plugin_path is None # Non-isolated plugins don't return a path + # Should clean up temp directory + mock_rmtree.assert_called_once() def test_install_from_pypi_install_failure(self, mock_github_env): """Test installation failure from PyPI.""" - with patch("cpex.tools.catalog.subprocess.run", side_effect=Exception("Install failed")): + with patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", side_effect=RuntimeError("Download failed")): catalog = PluginCatalog() - with pytest.raises(RuntimeError, match="Unexpected error installing"): + with pytest.raises(RuntimeError, match="Download failed"): catalog.install_from_pypi("test_package") - def test_install_from_pypi_package_not_found(self, mock_github_env): - """Test when package is not found after installation.""" + def test_install_from_pypi_package_not_found(self, tmp_path, mock_github_env): + """Test when package is not found after installation (for isolated_venv plugins).""" + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + package_dir = extract_dir / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "isolated_venv", # Changed to isolated_venv to trigger find_package_path + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + with ( + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run"), patch("cpex.framework.utils.importlib.metadata.distributions", return_value=[]), patch("cpex.framework.utils.importlib.util.find_spec", return_value=None), + patch("shutil.rmtree"), ): catalog = PluginCatalog() - with pytest.raises(RuntimeError, match="Could not find installed package"): + catalog.catalog_folder = str(tmp_path / "catalog") + with pytest.raises(RuntimeError, match="Failed to initialize isolated venv for test_package"): catalog.install_from_pypi("test_package") def test_install_from_pypi_manifest_not_found(self, tmp_path, mock_github_env): """Test when manifest file is not found in package.""" + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + with ( - patch("cpex.tools.catalog.subprocess.run"), - patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", side_effect=FileNotFoundError("plugin-manifest.yaml not found")), + patch("shutil.rmtree"), ): - # Setup mock distribution without plugin-manifest.yaml file - mock_dist = Mock() - mock_dist.name = "test_package" - mock_file = Mock() - mock_file.name = "__init__.py" - mock_dist.files = [mock_file] - mock_dist.locate_file.return_value = tmp_path / "test_package" / "__init__.py" - mock_distributions.return_value = [mock_dist] - - package_dir = tmp_path / "test_package" - package_dir.mkdir() - catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") - with pytest.raises(RuntimeError, match="Could not find installed package"): + with pytest.raises(FileNotFoundError, match="plugin-manifest.yaml not found"): catalog.install_from_pypi("test_package") def test_install_from_pypi_invalid_manifest(self, tmp_path, mock_github_env): """Test when manifest file is invalid.""" + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + package_dir = extract_dir / "test_package" + package_dir.mkdir() + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text("invalid: yaml: content:") + with ( - patch("cpex.tools.catalog.subprocess.run"), - patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), + patch("shutil.rmtree"), ): - package_dir = tmp_path / "test_package" - package_dir.mkdir() - manifest_file = package_dir / "plugin-manifest.yaml" - manifest_file.write_text("invalid: yaml: content:") - - # Setup mock distribution with plugin-manifest.yaml file - mock_dist = Mock() - mock_dist.name = "test_package" - mock_manifest_file = Mock() - mock_manifest_file.name = "plugin-manifest.yaml" - mock_dist.files = [mock_manifest_file] - mock_dist.locate_file.return_value = manifest_file - mock_distributions.return_value = [mock_dist] - catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") with pytest.raises(RuntimeError, match="Failed to parse manifest YAML"): @@ -736,157 +748,157 @@ class TestPluginCatalogInstallFromPypiExtended: def test_install_from_pypi_with_version_constraint(self, tmp_path, mock_github_env): """Test installation with version constraint.""" + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + package_dir = extract_dir / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + with ( + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("shutil.rmtree"), ): - package_dir = tmp_path / "test_package" - package_dir.mkdir() - manifest_data = { - "name": "test_package", - "version": "1.0.0", - "kind": "native", - "description": "Test", - "author": "Test Author", - "tags": ["test"], - "available_hooks": ["tools"], - "default_config": {}, - } - manifest_file = package_dir / "plugin-manifest.yaml" - manifest_file.write_text(yaml.safe_dump(manifest_data)) - - # Setup mock distribution with plugin-manifest.yaml file mock_dist = Mock() mock_dist.name = "test_package" - mock_manifest_file = Mock() - mock_manifest_file.name = "plugin-manifest.yaml" - mock_dist.files = [mock_manifest_file] - mock_dist.locate_file.return_value = manifest_file mock_distributions.return_value = [mock_dist] catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") - result = catalog.install_from_pypi("test_package", ">=1.0.0") + manifest, plugin_path = catalog.install_from_pypi("test_package", ">=1.0.0") mock_subprocess.assert_called_once() - assert result.name == "test_package" - assert result.package_info is not None - assert result.package_info.version_constraint == ">=1.0.0" + assert manifest.name == "test_package" + assert manifest.package_info is not None + assert manifest.package_info.version_constraint == ">=1.0.0" def test_install_from_pypi_with_default_configs(self, tmp_path, mock_github_env): """Test installation with default_configs field.""" + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + package_dir = extract_dir / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_configs": {"key": "value"}, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + with ( + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("shutil.rmtree"), ): - package_dir = tmp_path / "test_package" - package_dir.mkdir() - manifest_data = { - "name": "test_package", - "version": "1.0.0", - "kind": "native", - "description": "Test", - "author": "Test Author", - "tags": ["test"], - "available_hooks": ["tools"], - "default_configs": {"key": "value"}, - } - manifest_file = package_dir / "plugin-manifest.yaml" - manifest_file.write_text(yaml.safe_dump(manifest_data)) - - # Setup mock distribution with plugin-manifest.yaml file mock_dist = Mock() mock_dist.name = "test_package" - mock_manifest_file = Mock() - mock_manifest_file.name = "plugin-manifest.yaml" - mock_dist.files = [mock_manifest_file] - mock_dist.locate_file.return_value = manifest_file mock_distributions.return_value = [mock_dist] catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") - result = catalog.install_from_pypi("test_package") + manifest, plugin_path = catalog.install_from_pypi("test_package") - assert result.default_config == {"key": "value"} + assert manifest.default_config == {"key": "value"} def test_install_from_pypi_with_existing_package_info(self, tmp_path, mock_github_env): """Test installation with existing package_info.""" + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + package_dir = extract_dir / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + "package_info": { + "pypi_package": "old_name", + "version_constraint": ">=0.1.0" + } + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + with ( + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("shutil.rmtree"), ): - package_dir = tmp_path / "test_package" - package_dir.mkdir() - manifest_data = { - "name": "test_package", - "version": "1.0.0", - "kind": "native", - "description": "Test", - "author": "Test Author", - "tags": ["test"], - "available_hooks": ["tools"], - "default_config": {}, - "package_info": { - "pypi_package": "old_name", - "version_constraint": ">=0.1.0" - } - } - manifest_file = package_dir / "plugin-manifest.yaml" - manifest_file.write_text(yaml.safe_dump(manifest_data)) - - # Setup mock distribution with plugin-manifest.yaml file mock_dist = Mock() mock_dist.name = "test_package" - mock_manifest_file = Mock() - mock_manifest_file.name = "plugin-manifest.yaml" - mock_dist.files = [mock_manifest_file] - mock_dist.locate_file.return_value = manifest_file mock_distributions.return_value = [mock_dist] catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") - result = catalog.install_from_pypi("test_package", ">=2.0.0") + manifest, plugin_path = catalog.install_from_pypi("test_package", ">=2.0.0") - assert result.package_info is not None - assert result.package_info.pypi_package == "test_package" - assert result.package_info.version_constraint == ">=2.0.0" + assert manifest.package_info is not None + assert manifest.package_info.pypi_package == "test_package" + assert manifest.package_info.version_constraint == ">=2.0.0" def test_install_from_pypi_with_null_default_configs_in_manifest(self, tmp_path, mock_github_env): """Test installation with null default_configs in manifest.""" + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + package_dir = extract_dir / "test_package" + package_dir.mkdir() + manifest_data = { + "name": "test_package", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_configs": None, + } + manifest_file = package_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + with ( + patch("cpex.tools.catalog.PluginCatalog._download_package_to_temp", return_value=extract_dir), + patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("shutil.rmtree"), ): - package_dir = tmp_path / "test_package" - package_dir.mkdir() - manifest_data = { - "name": "test_package", - "version": "1.0.0", - "kind": "native", - "description": "Test", - "author": "Test Author", - "tags": ["test"], - "available_hooks": ["tools"], - "default_configs": None, - } - manifest_file = package_dir / "plugin-manifest.yaml" - manifest_file.write_text(yaml.safe_dump(manifest_data)) - - # Setup mock distribution with plugin-manifest.yaml file mock_dist = Mock() mock_dist.name = "test_package" - mock_manifest_file = Mock() - mock_manifest_file.name = "plugin-manifest.yaml" - mock_dist.files = [mock_manifest_file] - mock_dist.locate_file.return_value = manifest_file mock_distributions.return_value = [mock_dist] catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") - result = catalog.install_from_pypi("test_package") + manifest, plugin_path = catalog.install_from_pypi("test_package") # default_config should be empty dict when default_configs is None - assert result.default_config == {} + assert manifest.default_config == {} # Made with Bob From aab87b429ab9f52fcd73a86fe961cf9fa1ae9cad Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 13:18:52 -0400 Subject: [PATCH 45/88] misc: type fix Signed-off-by: habeck --- cpex/framework/isolated/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index 6c1cd70..e5178e0 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -44,8 +44,8 @@ def __init__(self, config: PluginConfig, plugin_dirs) -> None: # use the first plugin dir specified in the plugin configuration file. path = Path(self.plugin_dirs[0]).resolve() class_root = self.config.config.get("class_name").split(".")[0] - cache_root = path / class_root - self.plugin_path = cache_root + cache_root: Path = path / class_root + self.plugin_path: Path = cache_root if not cache_root.exists(): cache_root.mkdir(parents=True, exist_ok=True) self.cache_dir: Path = cache_root / ".cpex" / "venv_cache" From 6d1d342ba4db5b927856194efffd2e8efdd59566 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 14:02:04 -0400 Subject: [PATCH 46/88] enh: the plugin self installs into the isolated_venv via requirements.txt Signed-off-by: habeck --- .../isolated/{{cookiecutter.plugin_slug}}/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt b/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt index d35182a..2644d11 100644 --- a/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt +++ b/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt @@ -1 +1,2 @@ -cpex>=0.1.0.dev10 \ No newline at end of file +cpex>=0.1.0.dev10 +{{ cookiecutter.plugin_name }} \ No newline at end of file From b1f263528c1aaf2bc039528a513ca543b999cb2a Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 23 Apr 2026 16:41:55 -0400 Subject: [PATCH 47/88] chore: increase coverage above 90% Signed-off-by: habeck --- cpex/tools/catalog.py | 46 +- tests/unit/cpex/tools/test_catalog.py | 696 ++++++++++++++++++++++++++ 2 files changed, 734 insertions(+), 8 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 0d7293f..ea0859b 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -766,28 +766,58 @@ def _find_manifest_in_extracted_package(self, extract_dir: Path, package_name: s # Return the first manifest found return manifest_files[0] - def _find_requirements_in_extracted_package(self, extract_dir: Path, package_name: str, requirements_file: str) -> Path: - """Find plugin-manifest.yaml in extracted package. + def _find_requirements_in_extracted_package( + self, extract_dir: Path, package_name: str, requirements_file: str + ) -> Path: + """Find requirements file in extracted package with path traversal protection. Args: extract_dir: Directory where package was extracted. package_name: Name of the package. + requirements_file: Name of the requirements file to find. Returns: - Path to plugin-manifest.yaml. + Path to requirements file. Raises: - FileNotFoundError: If manifest not found. - """ - # Search for plugin-manifest.yaml in the extracted directory + FileNotFoundError: If requirements file not found. + ValueError: If requirements_file contains path traversal attempts. + """ + # Validate requirements_file to prevent path traversal attacks + # Normalize the path and check for suspicious patterns + normalized_file = os.path.normpath(requirements_file) + + # Check for path traversal attempts (../, absolute paths, etc.) + if normalized_file.startswith("..") or os.path.isabs(normalized_file): + raise ValueError( + f"Invalid requirements file path '{requirements_file}': " + "path traversal attempts are not allowed" + ) + + # Additional check: ensure no path separators that could escape the directory + if normalized_file != requirements_file.replace("\\", "/").strip("/"): + raise ValueError( + f"Invalid requirements file path '{requirements_file}': " + "suspicious path components detected" + ) + + # Search for requirements file in the extracted directory manifest_files = list(extract_dir.rglob(requirements_file)) if not manifest_files: raise FileNotFoundError(f"requirements file {requirements_file} not found in {package_name} package") - # Return the first manifest found - return manifest_files[0] + # Verify the found file is actually within extract_dir (defense in depth) + found_file = manifest_files[0] + try: + found_file.resolve().relative_to(extract_dir.resolve()) + except ValueError as e: + raise ValueError( + f"Security violation: requirements file '{found_file}' is outside the package directory" + ) from e + # Return the first manifest found + return found_file def _initialize_isolated_venv(self, manifest: PluginManifest, package_path: Path) -> Path: """Initialize isolated venv for a plugin without installing it into the CLI's venv. diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 0e56117..de0b750 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -12,6 +12,8 @@ import json import subprocess import sys +import tarfile +import zipfile from pathlib import Path from unittest import mock from unittest.mock import MagicMock, Mock, patch, mock_open @@ -1154,4 +1156,698 @@ def test_download_file_with_exception_message(self, mock_github_env): assert mock_logger.error.called +class TestPluginCatalogSearchGithubCodeWithNullMember: + """Tests for _search_github_code with member=None.""" + + def test_search_github_code_with_null_member(self, mock_github_env): + """Test _search_github_code when member is None.""" + catalog = PluginCatalog() + + # Mock the search results + mock_search_results = MagicMock() + mock_search_results.totalCount = 1 + + mock_content_file = MagicMock() + mock_content_file.name = "plugin-manifest.yaml" + mock_content_file.path = "plugin-manifest.yaml" + mock_content_file.git_url = "https://api.github.com/repos/org/repo/git/blobs/abc123" + mock_content_file.html_url = "https://github.com/org/repo/blob/main/plugin-manifest.yaml" + + mock_search_results.__iter__ = Mock(return_value=iter([mock_content_file])) + + with patch.object(catalog.gh, 'search_code', return_value=mock_search_results): + result = catalog._search_github_code("org/repo", None, {}) + + assert result is not None + assert len(result) == 1 + assert result[0]["name"] == "plugin-manifest.yaml" + + +class TestPluginCatalogTransformManifestDataWithNullMember: + """Tests for _transform_manifest_data with member=None.""" + + def test_transform_manifest_data_with_null_member(self, mock_github_env): + """Test _transform_manifest_data when member is None.""" + catalog = PluginCatalog() + + manifest_content = { + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "available_hooks": ["tools"], + } + + repo_url = httpx.URL("https://github.com/org/repo") + result = catalog._transform_manifest_data(manifest_content, "test_plugin", None, repo_url) + + assert result["name"] == "test_plugin" + assert result["monorepo"]["package_source"] == "https://github.com/org/repo" + assert result["monorepo"]["package_folder"] == "" + + +class TestPluginCatalogDownloadMonorepoFolderToTemp: + """Tests for _download_monorepo_folder_to_temp method.""" + + def test_download_monorepo_folder_success(self, tmp_path, mock_github_env): + """Test successful download of monorepo folder.""" + catalog = PluginCatalog() + + # Create a mock tarball + mock_tarball = tmp_path / "package.tar.gz" + with tarfile.open(mock_tarball, "w:gz") as tar: + # Create a temporary file to add to the tarball + temp_file = tmp_path / "test_file.txt" + temp_file.write_text("test content") + tar.add(temp_file, arcname="test_file.txt") + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [mock_tarball] + + result = catalog._download_monorepo_folder_to_temp( + "git+https://github.com/org/repo#subdirectory=plugin", + "test_plugin" + ) + + assert result.exists() + assert result.name == "extracted" + + def test_download_monorepo_folder_no_files(self, tmp_path, mock_github_env): + """Test error when no files are downloaded.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [] + + with pytest.raises(RuntimeError) as exc_info: + catalog._download_monorepo_folder_to_temp( + "git+https://github.com/org/repo#subdirectory=plugin", + "test_plugin" + ) + + assert "No files downloaded" in str(exc_info.value) + + def test_download_monorepo_folder_unsupported_format(self, tmp_path, mock_github_env): + """Test error with unsupported package format.""" + catalog = PluginCatalog() + + # Create a mock file with unsupported extension + mock_file = tmp_path / "package.unknown" + mock_file.write_text("test") + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [mock_file] + + with pytest.raises(RuntimeError) as exc_info: + catalog._download_monorepo_folder_to_temp( + "git+https://github.com/org/repo#subdirectory=plugin", + "test_plugin" + ) + + assert "Unsupported package format" in str(exc_info.value) + + def test_download_monorepo_folder_subprocess_error(self, mock_github_env): + """Test subprocess error handling.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.side_effect = subprocess.CalledProcessError(1, "pip", stderr="Download failed") + + with pytest.raises(RuntimeError) as exc_info: + catalog._download_monorepo_folder_to_temp( + "git+https://github.com/org/repo#subdirectory=plugin", + "test_plugin" + ) + + assert "Failed to download" in str(exc_info.value) + + +class TestPluginCatalogDownloadPackageToTemp: + """Tests for _download_package_to_temp method.""" + + def test_download_package_with_test_pypi(self, tmp_path, mock_github_env): + """Test downloading from test.pypi.org.""" + catalog = PluginCatalog() + + # Create a mock wheel file + mock_wheel = tmp_path / "package-1.0.0-py3-none-any.whl" + with zipfile.ZipFile(mock_wheel, "w") as zf: + zf.writestr("test_file.txt", "test content") + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [mock_wheel] + + result = catalog._download_package_to_temp("test_plugin", None, use_test=True) + + assert result.exists() + assert result.name == "extracted" + + # Verify test.pypi.org was used + call_args = mock_run.call_args[0][0] + assert "--index-url" in call_args + assert "https://test.pypi.org/simple/" in call_args + + def test_download_package_no_files_downloaded(self, mock_github_env): + """Test error when no files are downloaded.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [] + + with pytest.raises(RuntimeError) as exc_info: + catalog._download_package_to_temp("test_plugin", None) + + assert "No files downloaded" in str(exc_info.value) + + def test_download_package_unsupported_format(self, tmp_path, mock_github_env): + """Test error with unsupported package format.""" + catalog = PluginCatalog() + + mock_file = tmp_path / "package.exe" + mock_file.write_text("test") + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [mock_file] + + with pytest.raises(RuntimeError) as exc_info: + catalog._download_package_to_temp("test_plugin", None) + + assert "Unsupported package format" in str(exc_info.value) + + +class TestPluginCatalogFindManifestInExtractedPackage: + """Tests for _find_manifest_in_extracted_package method.""" + + def test_find_manifest_not_found(self, tmp_path, mock_github_env): + """Test FileNotFoundError when manifest is not found.""" + catalog = PluginCatalog() + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + with pytest.raises(FileNotFoundError) as exc_info: + catalog._find_manifest_in_extracted_package(extract_dir, "test_plugin") + + assert "plugin-manifest.yaml not found" in str(exc_info.value) + + +class TestPluginCatalogUninstallPackage: + """Tests for uninstall_package method.""" + + def test_uninstall_package_success(self, mock_github_env): + """Test successful package uninstallation.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + result = catalog.uninstall_package("test_plugin") + + assert result is True + mock_run.assert_called_once() + call_args = mock_run.call_args[0][0] + assert "pip" in call_args + assert "uninstall" in call_args + assert "-y" in call_args + assert "test_plugin" in call_args + + def test_uninstall_package_subprocess_error(self, mock_github_env): + """Test subprocess error during uninstallation.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.side_effect = subprocess.CalledProcessError(1, "pip", stderr="Uninstall failed") + + with pytest.raises(RuntimeError) as exc_info: + catalog.uninstall_package("test_plugin") + + assert "Failed to uninstall" in str(exc_info.value) + + def test_uninstall_package_unexpected_error(self, mock_github_env): + """Test unexpected error during uninstallation.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.side_effect = Exception("Unexpected error") + + with pytest.raises(RuntimeError) as exc_info: + catalog.uninstall_package("test_plugin") + + assert "Unexpected error uninstalling" in str(exc_info.value) + + +class TestPluginCatalogInstallFolderViaPipIsolated: + """Tests for install_folder_via_pip with isolated_venv plugins.""" + + def test_install_folder_via_pip_isolated_venv(self, tmp_path, mock_github_env): + """Test installing an isolated_venv plugin from monorepo.""" + catalog = PluginCatalog() + + manifest = create_test_manifest(kind="isolated_venv") + + # Mock the download and initialization + with patch.object(catalog, '_download_monorepo_folder_to_temp') as mock_download: + mock_download.return_value = tmp_path / "package" + + with patch.object(catalog, '_initialize_isolated_venv') as mock_init: + mock_init.return_value = tmp_path / "venv" + + result = catalog.install_folder_via_pip(manifest) + + assert result == tmp_path / "venv" + mock_download.assert_called_once() + mock_init.assert_called_once() + + + +class TestPluginCatalogProcessPyprojectExtended: + """Extended tests for _process_pyproject method.""" + + def test_process_pyproject_with_member_none(self, mock_github_env): + """Test _process_pyproject when member is None (root directory).""" + catalog = PluginCatalog() + + mock_repo = MagicMock() + mock_item = MagicMock() + mock_item.path = "pyproject.toml" + mock_item.name = "pyproject.toml" + + # Mock file content + pyproject_content = """ +[project] +name = "test_plugin" +version = "1.0.0" +""" + mock_file_content = MagicMock() + mock_file_content.decoded_content = pyproject_content.encode('utf-8') + mock_repo.get_contents.return_value = mock_file_content + + repo_url = httpx.URL("https://github.com/org/repo") + + with patch.object(catalog, 'find_and_save_plugin_manifest') as mock_find: + catalog._process_pyproject(mock_repo, mock_item, repo_url, {}) + + # Verify find_and_save_plugin_manifest was called with member=None + mock_find.assert_called_once() + call_args = mock_find.call_args + assert call_args[1]['member'] is None + + +class TestPluginCatalogInstallFolderViaPipNonIsolated: + """Tests for install_folder_via_pip with non-isolated plugins.""" + + def test_install_folder_via_pip_non_isolated(self, mock_github_env): + """Test installing a non-isolated plugin from monorepo.""" + catalog = PluginCatalog() + + manifest = create_test_manifest(kind="native") + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + result = catalog.install_folder_via_pip(manifest) + + # For non-isolated plugins, should return None + assert result is None + + # Verify pip install was called + mock_run.assert_called_once() + call_args = mock_run.call_args[0][0] + assert "pip" in call_args + assert "install" in call_args + + +class TestPluginCatalogInstallPackageEdgeCases: + """Edge case tests for _install_package method.""" + + def test_install_package_with_null_version_constraint(self, mock_github_env): + """Test installing package with None version constraint.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + catalog._install_package("test_plugin", None, use_test=False) + + mock_run.assert_called_once() + call_args = mock_run.call_args[0][0] + assert "test_plugin" in call_args + assert "--index-url" not in call_args + + def test_install_package_unexpected_error(self, mock_github_env): + """Test unexpected error during package installation.""" + catalog = PluginCatalog() + + with patch('subprocess.run') as mock_run: + mock_run.side_effect = Exception("Unexpected error") + + with pytest.raises(RuntimeError) as exc_info: + catalog._install_package("test_plugin", None) + + assert "Unexpected error installing" in str(exc_info.value) + + +class TestPluginCatalogDownloadPackageEdgeCases: + """Edge case tests for download methods.""" + + def test_download_package_with_version_constraint(self, tmp_path, mock_github_env): + """Test downloading package with version constraint.""" + catalog = PluginCatalog() + + mock_wheel = tmp_path / "package-1.0.0-py3-none-any.whl" + with zipfile.ZipFile(mock_wheel, "w") as zf: + zf.writestr("test_file.txt", "test content") + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [mock_wheel] + + result = catalog._download_package_to_temp("test_plugin", ">=1.0.0", use_test=False) + + assert result.exists() + + # Verify version constraint was included + call_args = mock_run.call_args[0][0] + assert any("test_plugin>=1.0.0" in str(arg) for arg in call_args) + + def test_download_monorepo_zip_format(self, tmp_path, mock_github_env): + """Test downloading monorepo with zip format.""" + catalog = PluginCatalog() + + # Create a mock zip file + mock_zip = tmp_path / "package.zip" + with zipfile.ZipFile(mock_zip, "w") as zf: + zf.writestr("test_file.txt", "test content") + + with patch('subprocess.run') as mock_run: + mock_run.return_value = MagicMock(returncode=0) + + with patch('pathlib.Path.glob') as mock_glob: + mock_glob.return_value = [mock_zip] + + result = catalog._download_monorepo_folder_to_temp( + "git+https://github.com/org/repo#subdirectory=plugin", + "test_plugin" + ) + + assert result.exists() + assert result.name == "extracted" + + +class TestPluginCatalogFindOperations: + """Tests for find method.""" + + def test_find_case_insensitive(self, tmp_path, mock_github_env): + """Test that find is case-insensitive.""" + catalog = PluginCatalog() + + # Create a manifest with uppercase name + manifest_dir = tmp_path / "catalog" / "TEST_PLUGIN" + manifest_dir.mkdir(parents=True) + manifest_file = manifest_dir / "plugin-manifest.yaml" + + manifest_data = { + "name": "TEST_PLUGIN", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "available_hooks": ["tools"], + "tags": [], + "default_config": {}, + } + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.load() + + # Search with lowercase should find it + result = catalog.find("test_plugin") + + assert result is not None + assert result.name == "TEST_PLUGIN" + + +class TestPluginCatalogInstallFromPypiIsolated: + """Tests for install_from_pypi with isolated_venv plugins.""" + + def test_install_from_pypi_isolated_venv(self, tmp_path, mock_github_env): + """Test installing an isolated_venv plugin from PyPI.""" + catalog = PluginCatalog() + + # Create mock extracted package with manifest + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "isolated_venv", + "description": "Test isolated plugin", + "author": "Test Author", + "available_hooks": ["tools"], + "default_config": {"requirements_file": "requirements.txt"}, + } + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + with patch.object(catalog, '_download_package_to_temp') as mock_download: + mock_download.return_value = extract_dir + + with patch.object(catalog, '_initialize_isolated_venv') as mock_init: + mock_init.return_value = tmp_path / "venv" + + with patch.object(catalog, '_persist_manifest'): + manifest, plugin_path = catalog.install_from_pypi("test_plugin") + + assert manifest.kind == "isolated_venv" + assert plugin_path == tmp_path / "venv" + mock_init.assert_called_once() + + + +class TestPluginCatalogFindRequirementsInExtractedPackage: + """Tests for _find_requirements_in_extracted_package method with path traversal protection.""" + + def test_find_requirements_success(self, tmp_path, mock_github_env): + """Test successful finding of requirements file.""" + catalog = PluginCatalog() + + # Create a mock extracted package directory with requirements.txt + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "my_plugin" + plugin_dir.mkdir() + requirements_file = plugin_dir / "requirements.txt" + requirements_file.write_text("pytest>=7.0.0\n") + + # Find the requirements file + result = catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "requirements.txt" + ) + + assert result == requirements_file + assert result.exists() + + def test_find_requirements_not_found(self, tmp_path, mock_github_env): + """Test FileNotFoundError when requirements file doesn't exist.""" + catalog = PluginCatalog() + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + with pytest.raises(FileNotFoundError) as exc_info: + catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "requirements.txt" + ) + + assert "requirements file requirements.txt not found" in str(exc_info.value) + assert "my_plugin" in str(exc_info.value) + + def test_find_requirements_path_traversal_parent_directory(self, tmp_path, mock_github_env): + """Test that path traversal with ../ is blocked.""" + catalog = PluginCatalog() + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + # Try to access parent directory + with pytest.raises(ValueError) as exc_info: + catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "../../../etc/passwd" + ) + + assert "path traversal attempts are not allowed" in str(exc_info.value) + + def test_find_requirements_path_traversal_absolute_path(self, tmp_path, mock_github_env): + """Test that absolute paths are blocked.""" + catalog = PluginCatalog() + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + # Try to use absolute path + with pytest.raises(ValueError) as exc_info: + catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "/etc/passwd" + ) + + assert "path traversal attempts are not allowed" in str(exc_info.value) + + def test_find_requirements_path_traversal_mixed_separators(self, tmp_path, mock_github_env): + """Test that mixed path separators are blocked.""" + catalog = PluginCatalog() + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + # Try to use backslashes (Windows-style) in suspicious way + with pytest.raises(ValueError) as exc_info: + catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "..\\..\\etc\\passwd" + ) + + assert "path traversal attempts are not allowed" in str(exc_info.value) + + def test_find_requirements_path_traversal_encoded(self, tmp_path, mock_github_env): + """Test that URL-encoded path traversal attempts are blocked.""" + catalog = PluginCatalog() + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + # Try various encoded forms + malicious_paths = [ + "..%2F..%2Fetc%2Fpasswd", + "..%5c..%5cetc%5cpasswd", + ] + + for malicious_path in malicious_paths: + with pytest.raises(ValueError) as exc_info: + catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", malicious_path + ) + + assert "path traversal" in str(exc_info.value).lower() or "suspicious" in str(exc_info.value).lower() + + def test_find_requirements_defense_in_depth(self, tmp_path, mock_github_env): + """Test defense-in-depth check that file is within extract_dir.""" + catalog = PluginCatalog() + + # Create extract directory + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + # Create a file outside the extract directory + outside_dir = tmp_path / "outside" + outside_dir.mkdir() + outside_file = outside_dir / "requirements.txt" + outside_file.write_text("malicious\n") + + # Create a symlink inside extract_dir pointing outside + # (This tests the defense-in-depth check) + try: + symlink_path = extract_dir / "requirements.txt" + symlink_path.symlink_to(outside_file) + + # The rglob should find it, but the defense-in-depth check should catch it + with pytest.raises(ValueError) as exc_info: + catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "requirements.txt" + ) + + assert "outside the package directory" in str(exc_info.value) + except OSError: + # Skip test if symlinks aren't supported (e.g., Windows without admin) + pytest.skip("Symlinks not supported on this system") + + def test_find_requirements_nested_directory(self, tmp_path, mock_github_env): + """Test finding requirements file in nested directory structure.""" + catalog = PluginCatalog() + + # Create nested directory structure + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + nested_dir = extract_dir / "plugin" / "subdir" / "config" + nested_dir.mkdir(parents=True) + requirements_file = nested_dir / "requirements.txt" + requirements_file.write_text("pytest>=7.0.0\n") + + # Should find the file in nested structure + result = catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "requirements.txt" + ) + + assert result == requirements_file + assert result.exists() + + def test_find_requirements_multiple_files_returns_first(self, tmp_path, mock_github_env): + """Test that when multiple matching files exist, the first one is returned.""" + catalog = PluginCatalog() + + # Create multiple requirements files + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + + dir1 = extract_dir / "dir1" + dir1.mkdir() + req1 = dir1 / "requirements.txt" + req1.write_text("first\n") + + dir2 = extract_dir / "dir2" + dir2.mkdir() + req2 = dir2 / "requirements.txt" + req2.write_text("second\n") + + # Should return one of them (first found by rglob) + result = catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "requirements.txt" + ) + + assert result in [req1, req2] + assert result.exists() + + def test_find_requirements_custom_filename(self, tmp_path, mock_github_env): + """Test finding a custom requirements filename.""" + catalog = PluginCatalog() + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "my_plugin" + plugin_dir.mkdir() + + # Use a custom requirements filename + custom_req = plugin_dir / "requirements-dev.txt" + custom_req.write_text("pytest>=7.0.0\n") + + result = catalog._find_requirements_in_extracted_package( + extract_dir, "my_plugin", "requirements-dev.txt" + ) + + assert result == custom_req + assert result.exists() + + # Made with Bob From 96e334adf155b032ac53581f107efe3d06603dd2 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 27 Apr 2026 11:48:19 -0400 Subject: [PATCH 48/88] chore: update min_max_framework_version Signed-off-by: habeck --- cpex/framework/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 3a2881b..20dce56 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2189,7 +2189,7 @@ class PluginVersionInfo(BaseModel): deprecated: bool = False manifest_file: str changelog: Optional[str] = None - min_max_framework_version: Optional[str] = "0.1.0.dev4,0.1.0.dev4" + min_max_framework_version: Optional[str] = "0.1.0.dev12,0.1.0.dev12" class PluginVersionRegistry(BaseModel): From edcc95a15674b8d817e37d59b585ab9a70d6593d Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 27 Apr 2026 11:49:33 -0400 Subject: [PATCH 49/88] enh: isolated venv cookiecutter update for install flow Signed-off-by: habeck --- .../isolated/{{cookiecutter.plugin_slug}}/requirements.txt | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt b/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt index 2644d11..0454c5e 100644 --- a/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt +++ b/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt @@ -1,2 +1,7 @@ cpex>=0.1.0.dev10 +# The requirements file is used to install the plugin for the isolated_venv scenario +# The cpex cli tool first creates a venv for the plugin, and then uses pip to install the requirements.txt file into the venv. +# The default package name is provided below, however if monorepo installation is desired use +# a format like this: +# git+https://github.com/tedhabeck/cpex-test-plugin {{ cookiecutter.plugin_name }} \ No newline at end of file From dc20074dd4a05f959a086a801256d4b9a39b1b5c Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 27 Apr 2026 15:48:02 -0400 Subject: [PATCH 50/88] enh: catalog now properly persists all plugin-manifest*.yaml files Signed-off-by: habeck --- cpex/tools/catalog.py | 129 +++++++++++++++++++++----- tests/unit/cpex/tools/test_catalog.py | 41 +++++++- 2 files changed, 142 insertions(+), 28 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index ea0859b..5143b5d 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -8,6 +8,10 @@ """ import base64 +import datetime +import importlib.metadata +import importlib.util +import json import logging import os import subprocess @@ -26,8 +30,7 @@ import yaml from github import Auth, Github -from cpex.framework.models import PiPyRepo, PluginManifest, PluginPackageInfo -from cpex.framework.utils import find_package_path +from cpex.framework.models import PiPyRepo, PluginManifest, PluginPackageInfo, PluginVersionInfo, PluginVersionRegistry from cpex.tools.settings import get_catalog_settings logger = logging.getLogger(__name__) @@ -88,6 +91,73 @@ def save_manifest(self, manifest: PluginManifest, path): updated_content = yaml.safe_dump(manifest.model_dump(), default_flow_style=False) relpath.write_text(updated_content, encoding="utf-8") + def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path): + """ + Update the plugin version registry with the given manifest. + args: + manifest: The plugin manifest to be stored in the catalog + relpath: the relative path of the plugin package that was installed + """ + plugin_version: PluginVersionInfo = PluginVersionInfo( + version=manifest.version, + manifest_file=str(relpath), + released=datetime.datetime.now(datetime.timezone.utc).isoformat() + "Z", + ) + file_path = Path(self.catalog_folder) / manifest.name / "plugin_version_registry.json" + # Ensure the directory exists + file_path.parent.mkdir(parents=True, exist_ok=True) + if file_path.exists(): + with file_path.open("r") as f: + plugin_version_registry = PluginVersionRegistry(**json.load(f)) + else: + plugin_version_registry = PluginVersionRegistry(versions=[]) + if plugin_version not in plugin_version_registry.versions: + plugin_version_registry.versions.append(plugin_version) + plugin_version_registry.latest = plugin_version + file_path.write_text( + json.dumps(plugin_version_registry.model_dump(mode="json"), indent=2), + encoding="utf-8", + ) + + def find_package_path(self, package_name: str) -> Path: + """Locate installed package directory using importlib.metadata. + + Args: + package_name: The name of the installed package. + + Returns: + Path to the package directory. + + Raises: + RuntimeError: If package cannot be found. + """ + try: + # Use importlib.metadata for more reliable package discovery + for dist in importlib.metadata.distributions(): + if dist.name == package_name or dist.metadata.get("Name") == package_name: + if dist.files: + # Get the package root from the plugin-manifest.yaml file + for afile in dist.files: + if afile.name == "plugin-manifest.yaml": + located_path = dist.locate_file(afile) + package_path = Path(str(located_path)).parent + logger.debug("Found package %s at %s", package_name, package_path) + return package_path + + # Fallback to importlib.util.find_spec if metadata approach fails + spec = importlib.util.find_spec(package_name) + if spec is not None and spec.origin is not None: + package_path = Path(spec.origin).parent + logger.debug("Found package %s at %s (via find_spec)", package_name, package_path) + return package_path + + raise RuntimeError(f"Could not find installed package: {package_name}") + + except Exception as e: + if isinstance(e, RuntimeError): + raise + raise RuntimeError(f"Error locating package {package_name}: {str(e)}") from e + def save_manifest_content(self, content: str, path, repo_url: httpx.URL): """ write the manifest content to the supplied path relative to the ouptut folder, @@ -107,6 +177,8 @@ def save_manifest_content(self, content: str, path, repo_url: httpx.URL): updated_content = yaml.safe_dump(manifest_data, default_flow_style=False) relpath.write_text(updated_content, encoding="utf-8") + pm: PluginManifest = PluginManifest(**manifest_data) + self.update_plugin_version_registry(pm, relpath) def save_content(self, base_path, content: str, path): """ @@ -164,7 +236,7 @@ def download_file(self, repo_path: str, item: dict, headers, gh_repo) -> str | N logger.error("Failed to download file: %s status_code: %d", item["path"], str(e)) def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict] | None: - """Search GitHub for plugin-manifest.yaml files in a specific path using PyGithub API. + """Search GitHub for plugin-manifest*.yaml files in a specific path using PyGithub API. Args: repo_path: Repository path (e.g., 'owner/repo') @@ -175,11 +247,12 @@ def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict List of search result items as dicts with 'name' and 'git_url' keys, or None if request failed """ try: - # Build search query for PyGithub + # Build search query for PyGithub - search for files starting with plugin-manifest and ending with .yaml + # Note: GitHub search doesn't support wildcards in filename, so we search broadly and filter results if member is not None: - query = f"repo:{repo_path} path:{member} filename:plugin-manifest extension:yaml" + query = f"repo:{repo_path} path:{member} extension:yaml" else: - query = f"repo:{repo_path} filename:plugin-manifest extension:yaml" + query = f"repo:{repo_path} extension:yaml" # Use PyGithub's search_code method search_results = self.gh.search_code(query=query) @@ -188,14 +261,16 @@ def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict # Convert PyGithub ContentFile objects to dict format compatible with existing code items = [] for content_file in search_results: - items.append( - { - "name": content_file.name, - "path": content_file.path, - "git_url": content_file.git_url, - "html_url": content_file.html_url, - } - ) + # Filter to only include files that start with "plugin-manifest" and end with ".yaml" + if content_file.name.startswith("plugin-manifest") and content_file.name.endswith(".yaml"): + items.append( + { + "name": content_file.name, + "path": content_file.path, + "git_url": content_file.git_url, + "html_url": content_file.html_url, + } + ) return items @@ -274,12 +349,15 @@ def _process_manifest_item( updated_content = yaml.safe_dump(manifest_content, default_flow_style=False) relpath.write_text(updated_content, encoding="utf-8") + pm: PluginManifest = PluginManifest(**manifest_content) + self.update_plugin_version_registry(pm, relpath) + return True def find_and_save_plugin_manifest( self, member: str, name: str, repo_url: httpx.URL, headers, gh_repo ) -> PluginManifest | None: - """Find the plugin-manifest.yaml relative to the supplied member folder, + """Find plugin-manifest*.yaml files relative to the supplied member folder, download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes Args: @@ -295,15 +373,15 @@ def find_and_save_plugin_manifest( self.create_catalog_folder(name) repo_path = repo_url.path.removeprefix("/") - relpath = Path(self.catalog_folder) / name / "plugin-manifest.yaml" items = self._search_github_code(repo_path, member, headers) if items is None: return None for item in items: - if self._process_manifest_item(item, name, member, repo_url, headers, relpath, repo_path, gh_repo): - break # Successfully processed first valid manifest + # Use the actual filename from the search result + relpath = Path(self.catalog_folder) / name / item["name"] + self._process_manifest_item(item, name, member, repo_url, headers, relpath, repo_path, gh_repo) return None @@ -786,21 +864,19 @@ def _find_requirements_in_extracted_package( # Validate requirements_file to prevent path traversal attacks # Normalize the path and check for suspicious patterns normalized_file = os.path.normpath(requirements_file) - + # Check for path traversal attempts (../, absolute paths, etc.) if normalized_file.startswith("..") or os.path.isabs(normalized_file): raise ValueError( - f"Invalid requirements file path '{requirements_file}': " - "path traversal attempts are not allowed" + f"Invalid requirements file path '{requirements_file}': path traversal attempts are not allowed" ) - + # Additional check: ensure no path separators that could escape the directory if normalized_file != requirements_file.replace("\\", "/").strip("/"): raise ValueError( - f"Invalid requirements file path '{requirements_file}': " - "suspicious path components detected" + f"Invalid requirements file path '{requirements_file}': suspicious path components detected" ) - + # Search for requirements file in the extracted directory manifest_files = list(extract_dir.rglob(requirements_file)) @@ -915,9 +991,12 @@ def install_from_pypi( # For non-isolated plugins, install normally into CLI's venv logger.info("Installing non-isolated plugin: %s", manifest.name) self._install_package(plugin_package_name, version_constraint, use_pytest) + plugin_path = self.find_package_path(plugin_package_name) # Step 5: Persist to catalog self._persist_manifest(manifest, plugin_package_name) + # Step 6: Update the plugin version registry + self.update_plugin_version_registry(manifest=manifest, relpath=plugin_path) logger.info("Successfully installed and cataloged %s", plugin_package_name) return manifest, plugin_path diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index de0b750..98ddbcc 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -252,7 +252,7 @@ def test_download_contents_success(self, tmp_path, mock_github_env): catalog.catalog_folder = str(tmp_path / "catalog") # Mock the HTTP response - manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]" + manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]\ndefault_config: {}" b64_content = base64.b64encode(manifest_content.encode()).decode() mock_response = Mock() mock_response.status_code = 200 @@ -448,13 +448,20 @@ def test_install_from_pypi_success(self, tmp_path, mock_github_env): patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.util.find_spec") as mock_find_spec, patch("shutil.rmtree") as mock_rmtree, ): # Setup mock distribution mock_dist = Mock() mock_dist.name = "test_package" + mock_dist.files = None # No files attribute for non-isolated plugins mock_distributions.return_value = [mock_dist] + # Setup mock spec for find_spec fallback + mock_spec = Mock() + mock_spec.origin = str(package_dir / "__init__.py") + mock_find_spec.return_value = mock_spec + catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") manifest, plugin_path = catalog.install_from_pypi("test_package") @@ -462,7 +469,7 @@ def test_install_from_pypi_success(self, tmp_path, mock_github_env): # Should call subprocess.run for non-isolated plugin mock_subprocess.assert_called_once() assert manifest.name == "test_package" - assert plugin_path is None # Non-isolated plugins don't return a path + assert plugin_path == package_dir # Non-isolated plugins return the package path # Should clean up temp directory mock_rmtree.assert_called_once() @@ -661,7 +668,7 @@ def test_find_and_save_plugin_manifest_success(self, tmp_path, mock_github_env): # Mock the repository and file content mock_repo = Mock() - manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]" + manifest_content = "name: test\nversion: 1.0.0\nkind: native\ndescription: Test\nauthor: Test\navailable_hooks: [tools]\ndefault_config: {}" mock_file_content = Mock() mock_file_content.decoded_content = manifest_content.encode() mock_repo.get_contents.return_value = mock_file_content @@ -772,12 +779,19 @@ def test_install_from_pypi_with_version_constraint(self, tmp_path, mock_github_e patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.util.find_spec") as mock_find_spec, patch("shutil.rmtree"), ): mock_dist = Mock() mock_dist.name = "test_package" + mock_dist.files = None # No files attribute for non-isolated plugins mock_distributions.return_value = [mock_dist] + # Setup mock spec for find_spec fallback + mock_spec = Mock() + mock_spec.origin = str(package_dir / "__init__.py") + mock_find_spec.return_value = mock_spec + catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") manifest, plugin_path = catalog.install_from_pypi("test_package", ">=1.0.0") @@ -811,12 +825,19 @@ def test_install_from_pypi_with_default_configs(self, tmp_path, mock_github_env) patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.util.find_spec") as mock_find_spec, patch("shutil.rmtree"), ): mock_dist = Mock() mock_dist.name = "test_package" + mock_dist.files = None # No files attribute for non-isolated plugins mock_distributions.return_value = [mock_dist] + # Setup mock spec for find_spec fallback + mock_spec = Mock() + mock_spec.origin = str(package_dir / "__init__.py") + mock_find_spec.return_value = mock_spec + catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") manifest, plugin_path = catalog.install_from_pypi("test_package") @@ -851,12 +872,19 @@ def test_install_from_pypi_with_existing_package_info(self, tmp_path, mock_githu patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.util.find_spec") as mock_find_spec, patch("shutil.rmtree"), ): mock_dist = Mock() mock_dist.name = "test_package" + mock_dist.files = None # No files attribute for non-isolated plugins mock_distributions.return_value = [mock_dist] + # Setup mock spec for find_spec fallback + mock_spec = Mock() + mock_spec.origin = str(package_dir / "__init__.py") + mock_find_spec.return_value = mock_spec + catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") manifest, plugin_path = catalog.install_from_pypi("test_package", ">=2.0.0") @@ -889,12 +917,19 @@ def test_install_from_pypi_with_null_default_configs_in_manifest(self, tmp_path, patch("cpex.tools.catalog.PluginCatalog._find_manifest_in_extracted_package", return_value=manifest_file), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.framework.utils.importlib.metadata.distributions") as mock_distributions, + patch("cpex.framework.utils.importlib.util.find_spec") as mock_find_spec, patch("shutil.rmtree"), ): mock_dist = Mock() mock_dist.name = "test_package" + mock_dist.files = None # No files attribute for non-isolated plugins mock_distributions.return_value = [mock_dist] + # Setup mock spec for find_spec fallback + mock_spec = Mock() + mock_spec.origin = str(package_dir / "__init__.py") + mock_find_spec.return_value = mock_spec + catalog = PluginCatalog() catalog.catalog_folder = str(tmp_path / "catalog") manifest, plugin_path = catalog.install_from_pypi("test_package") From 2561a5be740a189bf831bc70b4033ab221db9ffc Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 29 Apr 2026 11:16:26 -0400 Subject: [PATCH 51/88] enh: upgrade pip before installing requirements Signed-off-by: habeck --- cpex/framework/isolated/venv_comm.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/cpex/framework/isolated/venv_comm.py b/cpex/framework/isolated/venv_comm.py index 4ef7716..c732523 100644 --- a/cpex/framework/isolated/venv_comm.py +++ b/cpex/framework/isolated/venv_comm.py @@ -53,6 +53,13 @@ def _get_python_executable(self): return str(python_exe) + def upgrade_pip(self) -> None: + """Upgrade pip in the target venv.""" + try: + subprocess.check_call([self.python_executable, "-m", "pip", "install", "--upgrade", "pip"]) + except Exception as e: + raise RuntimeError("Failed to upgrade pip") from e + def install_requirements(self, requirements_file: str) -> None: """ Install Python requirements from a file in the target venv. @@ -62,6 +69,7 @@ def install_requirements(self, requirements_file: str) -> None: requirements_path = Path(requirements_file) if requirements_path.exists(): try: + self.upgrade_pip() subprocess.check_call([self.python_executable, "-m", "pip", "install", "-r", requirements_file]) except Exception as e: raise RuntimeError(f"Failed to install requirements from {requirements_file}") from e From f9a791a147d45e21db324548c966e53de9f0b0da Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 29 Apr 2026 11:28:02 -0400 Subject: [PATCH 52/88] enh: allow the developer provided version registry values to persist, overriding only if they are not present. Improved install path resolution for isolated_venv plugins Signed-off-by: habeck --- cpex/tools/catalog.py | 248 +++++++++++++++++++- tests/unit/cpex/tools/test_catalog.py | 318 ++++++++++++++++++++++++++ 2 files changed, 555 insertions(+), 11 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 5143b5d..cfb00d8 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -103,7 +103,7 @@ def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path manifest_file=str(relpath), released=datetime.datetime.now(datetime.timezone.utc).isoformat() + "Z", ) - file_path = Path(self.catalog_folder) / manifest.name / "plugin_version_registry.json" + file_path = Path(self.catalog_folder) / manifest.name / "versions.json" # Ensure the directory exists file_path.parent.mkdir(parents=True, exist_ok=True) if file_path.exists(): @@ -111,13 +111,37 @@ def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path plugin_version_registry = PluginVersionRegistry(**json.load(f)) else: plugin_version_registry = PluginVersionRegistry(versions=[]) - if plugin_version not in plugin_version_registry.versions: + found = False + latest = None + for pv in plugin_version_registry.versions: + if pv.version == plugin_version.version: + found = True + if latest is None: + latest = pv + else: + if latest.version < plugin_version.version: + latest = plugin_version + else: + if latest is None: + latest = pv + else: + if latest.version < pv.version: + latest = pv + + if not found: plugin_version_registry.versions.append(plugin_version) - plugin_version_registry.latest = plugin_version - file_path.write_text( - json.dumps(plugin_version_registry.model_dump(mode="json"), indent=2), - encoding="utf-8", - ) + if plugin_version_registry.latest is None: + plugin_version_registry.latest = plugin_version + else: + plugin_version_registry.latest = latest + else: + if plugin_version_registry.latest != latest: + plugin_version_registry.latest = plugin_version + # Write the updated version registry to the file + file_path.write_text( + json.dumps(plugin_version_registry.model_dump(mode="json"), indent=2), + encoding="utf-8", + ) def find_package_path(self, package_name: str) -> Path: """Locate installed package directory using importlib.metadata. @@ -235,7 +259,50 @@ def download_file(self, repo_path: str, item: dict, headers, gh_repo) -> str | N except Exception as e: logger.error("Failed to download file: %s status_code: %d", item["path"], str(e)) - def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict] | None: + def _search_github_code_for_versions_json(self, repo_path: str, member: str | None, headers) -> list[dict] | None: + """Search GitHub for plugin-manifest*.yaml files in a specific path using PyGithub API. + + Args: + repo_path: Repository path (e.g., 'owner/repo') + member: Directory path within the repository + headers: HTTP headers for authentication (kept for compatibility but not used) + + Returns: + List of search result items as dicts with 'name' and 'git_url' keys, or None if request failed + """ + try: + # Build search query for PyGithub - search for files starting with plugin-manifest and ending with .yaml + # Note: GitHub search doesn't support wildcards in filename, so we search broadly and filter results + if member is not None: + query = f"repo:{repo_path} path:{member} filename:versions extension:json" + else: + query = f"repo:{repo_path} filename:versions extension:json" + # Use PyGithub's search_code method + search_results = self.gh.search_code(query=query) + + logger.info("Found %d versions.json files in %s/%s", search_results.totalCount, repo_path, member) + + # Convert PyGithub ContentFile objects to dict format compatible with existing code + items = [] + for content_file in search_results: + # Filter to only include files that start with "plugin-manifest" and end with ".yaml" + if content_file.name.startswith("versions") and content_file.name.endswith(".json"): + items.append( + { + "name": content_file.name, + "path": content_file.path, + "git_url": content_file.git_url, + "html_url": content_file.html_url, + } + ) + + return items + + except Exception as e: + logger.error("Catalog update failed with error: %s", str(e)) + return None + + def _search_github_code(self, repo_path: str, member: str | None, headers) -> list[dict] | None: """Search GitHub for plugin-manifest*.yaml files in a specific path using PyGithub API. Args: @@ -278,7 +345,9 @@ def _search_github_code(self, repo_path: str, member: str, headers) -> list[dict logger.error("Catalog update failed with error: %s", str(e)) return None - def _transform_manifest_data(self, manifest_content: dict, name: str, member: str, repo_url: httpx.URL) -> dict: + def _transform_manifest_data( + self, manifest_content: dict, name: str, member: str | None, repo_url: httpx.URL + ) -> dict: """Apply standard transformations to manifest data. Args: @@ -354,6 +423,45 @@ def _process_manifest_item( return True + def _process_version_item( + self, item: dict, member: str, name: str, repo_url: httpx.URL, headers, relpath, repo_path, gh_repo + ) -> None: + """Find plugin-versions.json files relative to the supplied member folder, + download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes + Args: + member: Directory path within the repository + name: Plugin name + repo_url: Repository URL + headers: HTTP headers for authentication + """ + self.create_output_folder() + self.create_catalog_folder(name) + version_data = self.download_file(repo_path=repo_path, item=item, headers=headers, gh_repo=gh_repo) + relpath.write_text(version_data, encoding="utf-8") + + def find_and_save_plugin_versions_json(self, member: str, name: str, repo_url: httpx.URL, headers, gh_repo) -> None: + """Find plugin-versions.json files relative to the supplied member folder, + download and save the manifest, updating the monorepo's package_folder, package_source and repo_url attributes + Args: + member: Directory path within the repository + name: Plugin name + repo_url: Repository URL + headers: HTTP headers for authentication + gh_repo: GitHub repository object + """ + self.create_output_folder() + self.create_catalog_folder(name) + + repo_path = repo_url.path.removeprefix("/") + items: list[dict[Any, Any]] | None = self._search_github_code_for_versions_json( + repo_path=repo_path, member=member, headers=headers + ) + if items is None: + return None + for item in items: + relpath = Path(self.catalog_folder) / name / item["name"] + self._process_version_item(item, member, name, repo_url, headers, relpath, repo_path, gh_repo) + def find_and_save_plugin_manifest( self, member: str, name: str, repo_url: httpx.URL, headers, gh_repo ) -> PluginManifest | None: @@ -414,6 +522,11 @@ def _process_pyproject(self, gh_repo, item, repo_url: httpx.URL, headers) -> Non # Parse the pyproject.toml project_data = tomllib.loads(pyproject_data) + # Find and save the versions.json file + self.find_and_save_plugin_versions_json( + member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers, gh_repo=gh_repo + ) + # Find and save the plugin manifest self.find_and_save_plugin_manifest( member=member, name=project_data["project"]["name"], repo_url=repo_url, headers=headers, gh_repo=gh_repo @@ -471,7 +584,7 @@ def load(self) -> None: return # Find all plugin-manifest.yaml files recursively - manifest_files = list(output_path.rglob("plugin-manifest.yaml")) + manifest_files = list(output_path.rglob("plugin-manifest*.yaml")) if not manifest_files: logger.warning("No plugin-manifest.yaml files found in '%s'.", self.catalog_folder) @@ -943,6 +1056,112 @@ def _initialize_isolated_venv(self, manifest: PluginManifest, package_path: Path except Exception as e: raise RuntimeError(f"Failed to initialize isolated venv for {manifest.name}: {str(e)}") from e + def _find_and_load_versions_json( + self, manifest: PluginManifest, plugin_path: Path | None, plugin_package_name: str + ) -> Path | None: + """Find and load versions.json file from installed package. + + Args: + manifest: The plugin manifest + plugin_path: Path to the installed plugin (None for isolated_venv before installation) + plugin_package_name: The package name + + This method handles two cases: + 1. For non-isolated plugins: Uses the plugin_path directly + 2. For isolated_venv plugins: Runs a subprocess in the venv to find the package path + """ + try: + actual_plugin_path = plugin_path + + # For isolated_venv plugins, we need to find the package path within the venv + if manifest.kind == "isolated_venv" and plugin_path: + # The plugin_path for isolated_venv is the venv directory + # We need to find where the package is actually installed within it + venv_path = plugin_path + python_executable = self._get_venv_python_executable(venv_path / ".venv") + + # Create a simple Python script to find the package path + find_package_script = f""" +import sys +import importlib.metadata +from pathlib import Path + +package_name = "{plugin_package_name}" +try: + for dist in importlib.metadata.distributions(): + if dist.name == package_name or dist.metadata.get("Name") == package_name: + if dist.files: + for afile in dist.files: + if afile.name == "versions.json": + located_path = dist.locate_file(afile) + print(str(Path(located_path).parent)) + sys.exit(0) + print("NOT_FOUND", file=sys.stderr) + sys.exit(1) +except Exception as e: + print(f"ERROR: {{e}}", file=sys.stderr) + sys.exit(1) +""" + + # Execute the script in the isolated venv + result = subprocess.run( + [python_executable, "-c", find_package_script], + check=True, + capture_output=True, + text=True, + timeout=10, + ) + + if result.returncode == 0 and result.stdout.strip(): + actual_plugin_path = Path(result.stdout.strip()) + logger.debug("Found package path in isolated venv: %s", actual_plugin_path) + else: + logger.warning( + "Could not find versions.json in isolated venv for %s: %s", + plugin_package_name, + result.stderr, + ) + return + + # Now load the versions.json file if it exists + if actual_plugin_path: + versions_json_path = actual_plugin_path / "versions.json" + if versions_json_path.exists(): + logger.info("Found versions.json at %s", versions_json_path) + with open(versions_json_path, "r", encoding="utf8") as f: + versions_data = json.load(f) + # Save to catalog + catalog_versions_path = Path(self.catalog_folder) / manifest.name / "versions.json" + catalog_versions_path.parent.mkdir(parents=True, exist_ok=True) + with open(catalog_versions_path, "w", encoding="utf8") as f: + json.dump(versions_data, f, indent=2) + logger.info("Saved versions.json to catalog: %s", catalog_versions_path) + return actual_plugin_path + else: + logger.debug("No versions.json found at %s", versions_json_path) + + except Exception as e: + logger.warning("Failed to find/load versions.json for %s: %s", plugin_package_name, e) + + def _get_venv_python_executable(self, venv_path: Path) -> str: + """Get the Python executable path for a virtual environment. + + Args: + venv_path: Path to the virtual environment directory + + Returns: + Path to the Python executable as a string + """ + if sys.platform == "win32": + python_exe = venv_path / "Scripts" / "python.exe" + else: + python_exe = venv_path / "bin" / "python" + + if not python_exe.exists(): + raise FileNotFoundError(f"Python executable not found at {python_exe}") + + return str(python_exe) + def install_from_pypi( self, plugin_package_name: str, version_constraint: str | None = None, use_pytest: bool = False ) -> tuple[PluginManifest, Path | None]: @@ -955,6 +1174,8 @@ def install_from_pypi( 4. For isolated_venv plugins: initializes the target venv (plugin auto-installs via requirements.txt) 5. For other plugins: installs normally into CLI's venv 6. Persists the manifest to the plugin catalog + 7. Finds and saves versions.json if available + 8. Updates the plugin version registry Args: plugin_package_name: The name of the package hosted on PyPI. @@ -995,10 +1216,15 @@ def install_from_pypi( # Step 5: Persist to catalog self._persist_manifest(manifest, plugin_package_name) - # Step 6: Update the plugin version registry + + # Step 6: Find and save versions.json if available + actual_plugin_path = self._find_and_load_versions_json(manifest, plugin_path, plugin_package_name) + + # Step 7: Update the plugin version registry self.update_plugin_version_registry(manifest=manifest, relpath=plugin_path) logger.info("Successfully installed and cataloged %s", plugin_package_name) + plugin_path = actual_plugin_path if actual_plugin_path is not None else plugin_path return manifest, plugin_path finally: diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 98ddbcc..ea149a5 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -1097,6 +1097,98 @@ def test_find_and_save_plugin_manifest_search_returns_none(self, tmp_path, mock_ assert result is None +class TestPluginCatalogVersionsJson: + """Tests for versions.json discovery and download.""" + + def test_search_github_code_for_versions_json_success(self, mock_github_env): + """Test _search_github_code_for_versions_json filters versions.json files.""" + catalog = PluginCatalog() + + mock_search_result = Mock() + matching_file = Mock() + matching_file.name = "versions.json" + matching_file.path = "plugin1/versions.json" + matching_file.git_url = "https://api.github.com/repos/org/repo/git/blobs/versions" + matching_file.html_url = "https://github.com/org/repo/blob/main/plugin1/versions.json" + + ignored_file = Mock() + ignored_file.name = "plugin-manifest.yaml" + ignored_file.path = "plugin1/plugin-manifest.yaml" + ignored_file.git_url = "https://api.github.com/repos/org/repo/git/blobs/manifest" + ignored_file.html_url = "https://github.com/org/repo/blob/main/plugin1/plugin-manifest.yaml" + + mock_search_result.totalCount = 2 + mock_search_result.__iter__ = Mock(return_value=iter([matching_file, ignored_file])) + catalog.gh.search_code = Mock(return_value=mock_search_result) + + result = catalog._search_github_code_for_versions_json("org/repo", "plugin1", {}) + + assert result == [ + { + "name": "versions.json", + "path": "plugin1/versions.json", + "git_url": "https://api.github.com/repos/org/repo/git/blobs/versions", + "html_url": "https://github.com/org/repo/blob/main/plugin1/versions.json", + } + ] + catalog.gh.search_code.assert_called_once_with( + query="repo:org/repo path:plugin1 filename:versions extension:json" + ) + + def test_search_github_code_for_versions_json_exception(self, mock_github_env): + """Test _search_github_code_for_versions_json when exception occurs.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.gh.search_code = Mock(side_effect=Exception("Search error")) + + result = catalog._search_github_code_for_versions_json("org/repo", "plugin1", {}) + + assert result is None + mock_logger.error.assert_called() + + def test_find_and_save_plugin_versions_json_success(self, tmp_path, mock_github_env): + """Test successful finding and saving of versions.json.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + mock_repo = Mock() + repo_url = httpx.URL("https://github.com/org/repo") + + catalog._search_github_code_for_versions_json = Mock( + return_value=[ + { + "name": "versions.json", + "path": "plugin1/versions.json", + "git_url": "https://api.github.com/repos/org/repo/git/blobs/versions", + "html_url": "https://github.com/org/repo/blob/main/plugin1/versions.json", + } + ] + ) + + versions_content = '{\n "plugin1": [{"version": "1.0.0"}]\n}' + catalog.download_file = Mock(return_value=versions_content) + + catalog.find_and_save_plugin_versions_json("plugin1", "plugin1", repo_url, {}, mock_repo) + + saved_file = tmp_path / "catalog" / "plugin1" / "versions.json" + assert saved_file.exists() + assert saved_file.read_text(encoding="utf-8") == versions_content + + def test_find_and_save_plugin_versions_json_search_returns_none(self, tmp_path, mock_github_env): + """Test find_and_save_plugin_versions_json when search returns None.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog._search_github_code_for_versions_json = Mock(return_value=None) + + mock_repo = Mock() + repo_url = httpx.URL("https://github.com/org/repo") + + result = catalog.find_and_save_plugin_versions_json("plugin1", "plugin1", repo_url, {}, mock_repo) + + assert result is None + assert not (tmp_path / "catalog" / "plugin1" / "versions.json").exists() + + class TestPluginCatalogLoadManifestFile: """Tests for _load_manifest_file method.""" @@ -1885,4 +1977,230 @@ def test_find_requirements_custom_filename(self, tmp_path, mock_github_env): assert result.exists() +class TestPluginCatalogFindAndLoadVersionsJson: + """Tests for _find_and_load_versions_json method.""" + + def test_find_and_load_versions_json_non_isolated_success(self, tmp_path, mock_github_env): + """Test _find_and_load_versions_json for non-isolated plugin with versions.json.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create a plugin path with versions.json + plugin_path = tmp_path / "plugin_package" + plugin_path.mkdir() + versions_json = plugin_path / "versions.json" + versions_data = {"versions": [{"version": "1.0.0", "date": "2024-01-01"}]} + versions_json.write_text(json.dumps(versions_data)) + + manifest = create_test_manifest(name="test_plugin", kind="native") + + catalog._find_and_load_versions_json(manifest, plugin_path, "test_plugin") + + # Check that versions.json was saved to catalog + catalog_versions = Path(catalog.catalog_folder) / "test_plugin" / "versions.json" + assert catalog_versions.exists() + saved_data = json.loads(catalog_versions.read_text()) + assert saved_data == versions_data + + def test_find_and_load_versions_json_non_isolated_no_file(self, tmp_path, mock_github_env): + """Test _find_and_load_versions_json for non-isolated plugin without versions.json.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create a plugin path without versions.json + plugin_path = tmp_path / "plugin_package" + plugin_path.mkdir() + + manifest = create_test_manifest(name="test_plugin", kind="native") + + catalog._find_and_load_versions_json(manifest, plugin_path, "test_plugin") + + # Check that no versions.json was saved to catalog + catalog_versions = Path(catalog.catalog_folder) / "test_plugin" / "versions.json" + assert not catalog_versions.exists() + mock_logger.debug.assert_called() + + def test_find_and_load_versions_json_isolated_success(self, tmp_path, mock_github_env): + """Test _find_and_load_versions_json for isolated_venv plugin.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create a venv path + venv_path = tmp_path / ".venv" + venv_path.mkdir() + + # Mock the subprocess call to find package path + mock_package_path = tmp_path / "venv_package" + mock_package_path.mkdir() + versions_json = mock_package_path / "versions.json" + versions_data = {"versions": [{"version": "2.0.0", "date": "2024-02-01"}]} + versions_json.write_text(json.dumps(versions_data)) + + manifest = create_test_manifest(name="test_plugin", kind="isolated_venv") + + with ( + patch.object(catalog, "_get_venv_python_executable", return_value="/fake/python"), + patch("cpex.tools.catalog.subprocess.run") as mock_run, + ): + mock_result = Mock() + mock_result.returncode = 0 + mock_result.stdout = str(mock_package_path) + mock_result.stderr = "" + mock_run.return_value = mock_result + + catalog._find_and_load_versions_json(manifest, venv_path, "test_plugin") + + # Check that versions.json was saved to catalog + catalog_versions = Path(catalog.catalog_folder) / "test_plugin" / "versions.json" + assert catalog_versions.exists() + saved_data = json.loads(catalog_versions.read_text()) + assert saved_data == versions_data + + def test_find_and_load_versions_json_isolated_subprocess_failure(self, tmp_path, mock_github_env): + """Test _find_and_load_versions_json for isolated_venv when subprocess fails.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + venv_path = tmp_path / ".venv" + venv_path.mkdir() + + manifest = create_test_manifest(name="test_plugin", kind="isolated_venv") + + with patch("cpex.tools.catalog.subprocess.run") as mock_run: + mock_result = Mock() + mock_result.returncode = 1 + mock_result.stdout = "" + mock_result.stderr = "NOT_FOUND" + mock_run.return_value = mock_result + + catalog._find_and_load_versions_json(manifest, venv_path, "test_plugin") + + # Check that no versions.json was saved + catalog_versions = Path(catalog.catalog_folder) / "test_plugin" / "versions.json" + assert not catalog_versions.exists() + mock_logger.warning.assert_called() + + def test_find_and_load_versions_json_none_plugin_path(self, tmp_path, mock_github_env): + """Test _find_and_load_versions_json with None plugin_path.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + manifest = create_test_manifest(name="test_plugin", kind="native") + + # Should handle None gracefully + catalog._find_and_load_versions_json(manifest, None, "test_plugin") + + catalog_versions = Path(catalog.catalog_folder) / "test_plugin" / "versions.json" + assert not catalog_versions.exists() + + def test_find_and_load_versions_json_exception_handling(self, tmp_path, mock_github_env): + """Test _find_and_load_versions_json handles exceptions gracefully.""" + with patch("cpex.tools.catalog.logger") as mock_logger: + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + plugin_path = tmp_path / "plugin_package" + plugin_path.mkdir() + + manifest = create_test_manifest(name="test_plugin", kind="native") + + # Create a versions.json that will cause an error when reading + versions_json = plugin_path / "versions.json" + versions_json.write_text("invalid json {{{") + + catalog._find_and_load_versions_json(manifest, plugin_path, "test_plugin") + + mock_logger.warning.assert_called() + + +class TestPluginCatalogGetVenvPythonExecutable: + """Tests for _get_venv_python_executable method.""" + + def test_get_venv_python_executable_unix(self, tmp_path, mock_github_env): + """Test _get_venv_python_executable on Unix-like systems.""" + catalog = PluginCatalog() + + venv_path = tmp_path / ".venv" + venv_path.mkdir() + bin_dir = venv_path / "bin" + bin_dir.mkdir() + python_exe = bin_dir / "python" + python_exe.touch() + + with patch("sys.platform", "linux"): + result = catalog._get_venv_python_executable(venv_path) + assert result == str(python_exe) + + def test_get_venv_python_executable_windows(self, tmp_path, mock_github_env): + """Test _get_venv_python_executable on Windows.""" + catalog = PluginCatalog() + + venv_path = tmp_path / ".venv" + venv_path.mkdir() + scripts_dir = venv_path / "Scripts" + scripts_dir.mkdir() + python_exe = scripts_dir / "python.exe" + python_exe.touch() + + with patch("sys.platform", "win32"): + result = catalog._get_venv_python_executable(venv_path) + assert result == str(python_exe) + + def test_get_venv_python_executable_not_found(self, tmp_path, mock_github_env): + """Test _get_venv_python_executable when executable doesn't exist.""" + catalog = PluginCatalog() + + venv_path = tmp_path / ".venv" + venv_path.mkdir() + + with pytest.raises(FileNotFoundError, match="Python executable not found"): + catalog._get_venv_python_executable(venv_path) + + +class TestPluginCatalogInstallFromPypiWithVersionsJson: + """Tests for install_from_pypi integration with versions.json.""" + + def test_install_from_pypi_calls_find_and_load_versions_json(self, tmp_path, mock_github_env): + """Test that install_from_pypi calls _find_and_load_versions_json.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create temporary package structure + temp_extract = tmp_path / "temp_extract" + temp_extract.mkdir() + package_dir = temp_extract / "test_plugin" + package_dir.mkdir() + + manifest_path = package_dir / "plugin-manifest.yaml" + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test", + "author": "Test", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {} + } + manifest_path.write_text(yaml.dump(manifest_data)) + + with ( + patch.object(catalog, "_download_package_to_temp", return_value=temp_extract), + patch.object(catalog, "_install_package"), + patch.object(catalog, "find_package_path", return_value=package_dir), + patch.object(catalog, "_find_and_load_versions_json") as mock_find_versions, + patch.object(catalog, "update_plugin_version_registry"), + ): + manifest, plugin_path = catalog.install_from_pypi("test_plugin") + + # Verify _find_and_load_versions_json was called + mock_find_versions.assert_called_once() + call_args = mock_find_versions.call_args + assert call_args[0][0].name == "test_plugin" # manifest + assert call_args[0][1] == package_dir # plugin_path + assert call_args[0][2] == "test_plugin" # package_name + + # Made with Bob From 962d42d3148b56b897f0357771a7e8e2409e146a Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 29 Apr 2026 11:34:09 -0400 Subject: [PATCH 53/88] enh: only update the catalog when not installing from test-pypi or pypi. Correctly determine the install path for the plugin registry for monorepo installs and isolated_venv plugins. Signed-off-by: habeck --- cpex/tools/cli.py | 42 ++++++++++++++----- .../cpex/framework/isolated/test_venv_comm.py | 13 ++++-- 2 files changed, 41 insertions(+), 14 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index cd55240..729e6ca 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -353,10 +353,15 @@ def install_from_manifest(manifest: PluginManifest, installation_type: str, cata if installation_type == "monorepo": logger.info("installation type: %s", installation_type) plugin_path = catalog.install_folder_via_pip(manifest) + actual_plugin_path = catalog._find_and_load_versions_json(manifest, plugin_path, manifest.name) plugin_registry: PluginRegistry = PluginRegistry() # add the newly downloaded plugin to the registry plugin_registry.update( - manifest=manifest, installation_type=installation_type, catalog=catalog, git_user_name=git_user_name(), plugin_path=plugin_path + manifest=manifest, + installation_type=installation_type, + catalog=catalog, + git_user_name=git_user_name(), + plugin_path=actual_plugin_path if actual_plugin_path is not None else plugin_path, ) update_plugins_config_yaml(manifest) @@ -373,6 +378,9 @@ def select_plugin_from_catalog(available_plugins: List[PluginManifest]) -> Optio if not available_plugins: return None + # Sort plugins by name and version + available_plugins = sorted(available_plugins, key=lambda p: (p.name, p.version), reverse=True) + # Build choices list with plugin information choices = [] for index, plug_in in enumerate(available_plugins): @@ -554,6 +562,15 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog): handler(source, catalog, use_test=True if install_type == "test-pypi" else False) +def versions(plugin_name: str | None, catalog: PluginCatalog): + """List available versions of the plugin + Args: + plugin_name (str | None): The name of the plugin to search for. + catalog (PluginCatalog): The catalog to search in. + """ + return search(plugin_name, catalog) + + def search(plugin_name: str | None, catalog: PluginCatalog): """Search for a plugin in the catalog Args: @@ -675,11 +692,12 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: "python cpex/tools/cli.py plugin --type monorepo search pii\n" "python cpex/tools/cli.py plugin --type monorepo install cpex-pii-filter\n" 'python cpex/tools/cli.py plugin --type pypi install "ExamplePlugin@>=0.1.0"\n' - 'python cpex/tools/cli.py plugin --type test-pypi install "cpex-plugin-test@>=0.1.1"\n' + 'python cpex/tools/cli.py plugin --type test-pypi install "cpex-test-plugin@>=0.1.1"\n' + "python cpex/tools/cli.py plugin versions cpex-test-plugin" "python cpex/tools/cli.py plugin uninstall cpex-pii-filter" ) def plugin( - cmd_action: str = typer.Argument(None, help="One of: list|info|install|search|uninstall"), + cmd_action: str = typer.Argument(None, help="One of: list|info|install|search|versions|uninstall"), source: str | None = typer.Argument(None, help="The pypi, git, or local folder where the plugin resides"), install_type: Annotated[ str, @@ -710,13 +728,17 @@ def plugin( # update the catalog before proceeding with install etc. pc = PluginCatalog() # optimized github search REST api takes ~14s to search & download all manifests - console.log("Update catalog") - with console.status("Updating catalog...", spinner="dots"): - rc = pc.update_catalog_with_pyproject() - if rc is False: - console.log("Catalog update completed.") - else: - console.log("❌ Catalog update failed.") + if install_type not in {"test-pypi", "pypi"}: + console.log("Update catalog") + with console.status("Updating catalog...", spinner="dots"): + rc = pc.update_catalog_with_pyproject() + if rc is False: + console.log("Catalog update completed.") + else: + console.log(":x: Catalog update failed.") + + if cmd_action == "versions": + return versions(source, catalog=pc) if cmd_action == "list": return list(install_type) diff --git a/tests/unit/cpex/framework/isolated/test_venv_comm.py b/tests/unit/cpex/framework/isolated/test_venv_comm.py index b6dcdd6..82b2bab 100644 --- a/tests/unit/cpex/framework/isolated/test_venv_comm.py +++ b/tests/unit/cpex/framework/isolated/test_venv_comm.py @@ -96,10 +96,15 @@ def test_install_requirements_success(self, mock_check_call, communicator, tmp_p mock_check_call.return_value = 0 communicator.install_requirements(str(requirements_file)) - - mock_check_call.assert_called_once_with( - [communicator.python_executable, "-m", "pip", "install", "-r", str(requirements_file)] - ) + + mock_check_call.assert_called_with([ + communicator.python_executable, + "-m", + "pip", + "install", + "-r", + str(requirements_file) + ]) @patch("subprocess.check_call") def test_install_requirements_failure(self, mock_check_call, communicator, tmp_path): From d872a2fd134397b91fdfdb419ff36dbae324e0b7 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 30 Apr 2026 20:20:37 -0400 Subject: [PATCH 54/88] enh: refactor to reduce duplicate code, fix uninstall for isolated_venv, add install support for type local, Signed-off-by: habeck --- cpex/framework/models.py | 103 ++- cpex/tools/catalog.py | 536 +++++++++++++-- cpex/tools/cli.py | 38 +- cpex/tools/plugin_registry.py | 20 +- tests/unit/cpex/tools/test_catalog.py | 896 +++++++++++++++++++++++++- tests/unit/cpex/tools/test_cli.py | 439 ++++++++++++- 6 files changed, 1952 insertions(+), 80 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 20dce56..e6bea83 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -1411,7 +1411,7 @@ class Monorepo(BaseModel): package_folder: str -class PiPyRepo(BaseModel): +class PyPiRepo(BaseModel): """PyPi model. Attributes: name (str): The name of the pypi package. @@ -1499,6 +1499,99 @@ def validate_version_constraint(cls, version_constraint: str | None) -> str | No return version_constraint if version_constraint != "" else None +class GitRepo(BaseModel): + """Git repository model. + Attributes: + git_repository: The URL of the git repository. + git_branch_tag_commit: The branch, tag or commit of the git repository. + """ + + git_repository: str = Field( + title="URL", + description='The URL of the git repository. (e.g., "https://github.com/example/plugin.git")', + ) + git_branch_tag_commit: Optional[str] = Field( + title="Branch, tag or commit", + description="The branch, tag or commit of the git repository.", + ) + + @field_validator("git_repository", mode="after") + @classmethod + def validate_git_repository(cls, git_repository: str | None) -> str | None: + """Validate Git repository URL format. + + Args: + git_repository: The Git repository URL to validate. + + Returns: + The validated repository URL or None if none is set. + + Raises: + ValueError: If the repository URL is invalid. + """ + if git_repository is not None and git_repository != "": + if not git_repository.strip(): + raise ValueError("Git repository URL cannot be empty or whitespace") + + # Support common Git URL formats: https://, git://, ssh://, git@ + git_url_pattern = re.compile( + r"^(https?://|git://|git@)" r"[a-zA-Z0-9._-]+" r"(/|:)" r"[a-zA-Z0-9._/-]+" r"(\.git)?$" + ) + + if not git_url_pattern.match(git_repository): + raise ValueError( + f"Invalid Git repository URL '{git_repository}'. " + "Must be a valid Git URL (e.g., https://github.com/user/repo.git, " + "git@github.com:user/repo.git)" + ) + + # Additional validation for https/http URLs using existing validator + if git_repository.startswith(("http://", "https://")): + validate_plugin_url(git_repository, "Git repository URL") + + return git_repository if git_repository != "" else None + + @field_validator("git_branch_tag_commit", mode="after") + @classmethod + def validate_git_branch_tag_commit(cls, git_branch_tag_commit: str | None) -> str | None: + """Validate Git branch, tag, or commit reference. + + Args: + git_branch_tag_commit: The Git reference to validate. + + Returns: + The validated reference or None if none is set. + + Raises: + ValueError: If the reference is invalid. + """ + if git_branch_tag_commit is not None and git_branch_tag_commit != "": + if not git_branch_tag_commit.strip(): + raise ValueError("Git branch/tag/commit cannot be empty or whitespace") + + # Git refs can contain alphanumeric characters, hyphens, underscores, slashes, and periods + # Commit hashes are typically 7-40 hex characters + if not re.match(r"^[a-zA-Z0-9._/-]+$", git_branch_tag_commit): + raise ValueError( + f"Invalid Git branch/tag/commit '{git_branch_tag_commit}'. " + "Must contain only alphanumeric characters, hyphens, underscores, slashes, and periods." + ) + + # Check for common invalid patterns + if git_branch_tag_commit.startswith(("/", ".", "-")) or git_branch_tag_commit.endswith(("/", ".")): + raise ValueError( + f"Invalid Git branch/tag/commit '{git_branch_tag_commit}'. " + "Cannot start with /, ., or - or end with / or ." + ) + + if len(git_branch_tag_commit) > 255: + raise ValueError( + f"Git branch/tag/commit '{git_branch_tag_commit}' exceeds maximum length of 255 characters" + ) + + return git_branch_tag_commit if git_branch_tag_commit != "" else None + + class PluginManifest(BaseModel): """Plugin manifest. @@ -1511,6 +1604,10 @@ class PluginManifest(BaseModel): tags (list[str]): a list of tags for making the plugin searchable. available_hooks (list[str]): a list of the hook points where the plugin is callable. default_config (dict[str, Any]): the default configurations. + monorepo (Monorepo): A git monorepo where the plugin originates (Initialized by cepx cli during plugin installation) + package_info: (PyPiRepo): The package name and version constraint of the package (Initialized by cepx cli during plugin installation) + local: The path to the locally installed plugin (Initialized by cepx cli during plugin installation) + git_repo: GitRepo: The git repo where the plugin originates (Initialized by cepx cli during plugin installation) """ name: str @@ -1522,7 +1619,9 @@ class PluginManifest(BaseModel): available_hooks: list[str] default_config: dict[str, Any] monorepo: Optional[Monorepo] = None - package_info: Optional[PiPyRepo] = None + package_info: Optional[PyPiRepo] = None + local: Optional[str] = None + git_repo: Optional[GitRepo] = None def suggest_instance_name(self) -> str: """Suggest a name for the plugin instance. diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index cfb00d8..ba9f806 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -30,7 +30,14 @@ import yaml from github import Auth, Github -from cpex.framework.models import PiPyRepo, PluginManifest, PluginPackageInfo, PluginVersionInfo, PluginVersionRegistry +from cpex.framework.models import ( + GitRepo, + PluginManifest, + PluginPackageInfo, + PluginVersionInfo, + PluginVersionRegistry, + PyPiRepo, +) from cpex.tools.settings import get_catalog_settings logger = logging.getLogger(__name__) @@ -781,7 +788,7 @@ def _normalize_manifest_data( # Ensure package_info is properly set if manifest.package_info is None: - manifest.package_info = PiPyRepo(pypi_package=package_name, version_constraint=version_constraint) + manifest.package_info = PyPiRepo(pypi_package=package_name, version_constraint=version_constraint) else: manifest.package_info.pypi_package = package_name if version_constraint is not None: @@ -810,6 +817,28 @@ def _persist_manifest(self, manifest: PluginManifest, package_name: str) -> None except Exception as e: raise RuntimeError(f"Failed to save manifest for {package_name}: {str(e)}") from e + def _extract_package_archive(self, package_file: Path, extract_dir: Path) -> None: + """Extract a package archive (zip, tar.gz, wheel, etc.) to a directory. + + Args: + package_file: Path to the archive file. + extract_dir: Directory to extract to. + + Raises: + RuntimeError: If the archive format is unsupported. + """ + if package_file.suffix == ".whl" or package_file.name.endswith(".whl"): + with zipfile.ZipFile(package_file, "r") as zip_ref: + zip_ref.extractall(extract_dir) + elif package_file.suffix == ".zip" or package_file.name.endswith(".zip"): + with zipfile.ZipFile(package_file, "r") as zip_ref: + zip_ref.extractall(extract_dir) + elif package_file.suffix in [".gz", ".bz2"] or ".tar" in package_file.name: + with tarfile.open(package_file, "r:*") as tar_ref: + tar_ref.extractall(extract_dir) + else: + raise RuntimeError(f"Unsupported package format: {package_file}") + def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> Path: """Download monorepo folder to temporary directory. Args: @@ -844,15 +873,8 @@ def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> extract_dir = temp_dir / "extracted" extract_dir.mkdir() - # Extract the package - if package_file.suffix == ".zip" or package_file.name.endswith(".zip"): - with zipfile.ZipFile(package_file, "r") as zip_ref: - zip_ref.extractall(extract_dir) - elif package_file.suffix in [".gz", ".bz2"] or ".tar" in package_file.name: - with tarfile.open(package_file, "r:*") as tar_ref: - tar_ref.extractall(extract_dir) - else: - raise RuntimeError(f"Unsupported package format: {package_file}") + # Extract the package using common helper + self._extract_package_archive(package_file, extract_dir) logger.info("Downloaded and extracted %s to %s", package_name, extract_dir) return extract_dir @@ -917,15 +939,8 @@ def _download_package_to_temp( extract_dir = temp_dir / "extracted" extract_dir.mkdir() - # Extract the package - if package_file.suffix == ".whl" or package_file.name.endswith(".whl"): - with zipfile.ZipFile(package_file, "r") as zip_ref: - zip_ref.extractall(extract_dir) - elif package_file.suffix in [".gz", ".bz2"] or ".tar" in package_file.name: - with tarfile.open(package_file, "r:*") as tar_ref: - tar_ref.extractall(extract_dir) - else: - raise RuntimeError(f"Unsupported package format: {package_file}") + # Extract the package using common helper + self._extract_package_archive(package_file, extract_dir) logger.info("Downloaded and extracted %s to %s", package_name, extract_dir) return extract_dir @@ -1162,6 +1177,76 @@ def _get_venv_python_executable(self, venv_path: Path) -> str: return str(python_exe) + def _handle_plugin_installation( + self, manifest: PluginManifest, package_path: Path, install_command: list[str] | None = None + ) -> Path | None: + """Handle plugin installation based on its kind (isolated_venv or regular). + + Args: + manifest: The plugin manifest. + package_path: Path to the package source. + install_command: Optional custom install command for non-isolated plugins. + If None, no installation is performed for non-isolated plugins. + + Returns: + Path to the installed plugin, or None if not applicable. + + Raises: + RuntimeError: If installation fails. + """ + plugin_path = None + + if manifest.kind == "isolated_venv": + logger.info("Detected isolated_venv plugin: %s", manifest.name) + plugin_path = self._initialize_isolated_venv(manifest, package_path) + logger.info("Isolated venv initialized. Plugin auto-installed via requirements.txt") + else: + # For non-isolated plugins, install if command provided + if install_command: + logger.info("Installing non-isolated plugin: %s", manifest.name) + subprocess.run( + install_command, + check=True, + capture_output=True, + text=True, + ) + logger.info("Successfully installed package: %s", manifest.name) + + return plugin_path + + def _finalize_plugin_installation( + self, manifest: PluginManifest, plugin_path: Path | None, package_name: str + ) -> Path | None: + """Perform post-installation steps: persist manifest, find versions.json, update registry. + + Args: + manifest: The plugin manifest. + plugin_path: Path to the installed plugin (plugins/{manifest.name} directory). + package_name: Name of the package. + + Returns: + The actual plugin path from versions.json (inside .venv for isolated plugins), + or plugin_path if versions.json not found. + """ + # Step 1: Persist to catalog + self._persist_manifest(manifest, package_name) + + # Step 2: Find and save versions.json if available + # This returns the actual package location (inside .venv for isolated plugins) + actual_plugin_path = self._find_and_load_versions_json(manifest, plugin_path, package_name) + + # Step 3: Update the plugin version registry + # IMPORTANT: Use plugin_path (not actual_plugin_path) for the registry + # plugin_path is the plugins/{manifest.name} directory + # actual_plugin_path is the location inside .venv (for isolated plugins) + if plugin_path is not None: + self.update_plugin_version_registry(manifest=manifest, relpath=plugin_path) + + logger.info("Successfully installed and cataloged %s", package_name) + + # Return actual_plugin_path for reference (may be inside .venv) + return actual_plugin_path if actual_plugin_path is not None else plugin_path + def install_from_pypi( self, plugin_package_name: str, version_constraint: str | None = None, use_pytest: bool = False ) -> tuple[PluginManifest, Path | None]: @@ -1202,29 +1287,21 @@ def install_from_pypi( package_path = manifest_path.parent - plugin_path = None - # Step 4: Handle based on plugin kind - if manifest.kind == "isolated_venv": - logger.info("Detected isolated_venv plugin: %s", manifest.name) - plugin_path = self._initialize_isolated_venv(manifest, package_path) - logger.info("Isolated venv initialized. Plugin auto-installed via requirements.txt") - else: - # For non-isolated plugins, install normally into CLI's venv - logger.info("Installing non-isolated plugin: %s", manifest.name) + # Step 4: Handle installation based on plugin kind + plugin_path = self._handle_plugin_installation( + manifest, + package_path, + install_command=None # Will install separately for non-isolated + ) + + # For non-isolated plugins, install via pip and find package path + if manifest.kind != "isolated_venv": self._install_package(plugin_package_name, version_constraint, use_pytest) plugin_path = self.find_package_path(plugin_package_name) - # Step 5: Persist to catalog - self._persist_manifest(manifest, plugin_package_name) - - # Step 6: Find and save versions.json if available - actual_plugin_path = self._find_and_load_versions_json(manifest, plugin_path, plugin_package_name) - - # Step 7: Update the plugin version registry - self.update_plugin_version_registry(manifest=manifest, relpath=plugin_path) - - logger.info("Successfully installed and cataloged %s", plugin_package_name) - plugin_path = actual_plugin_path if actual_plugin_path is not None else plugin_path + # Step 5-7: Finalize installation (persist, versions.json, registry) + plugin_path = self._finalize_plugin_installation(manifest, plugin_path, plugin_package_name) + return manifest, plugin_path finally: @@ -1232,25 +1309,219 @@ def install_from_pypi( if temp_extract_dir.exists(): shutil.rmtree(temp_extract_dir.parent) - def uninstall_package(self, package_name: str) -> bool: - """Uninstall a Python package using pip. + def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: + """Install Python package from Git repository and load its plugin-manifest.yaml. + + This method performs the following steps: + 1. Parses the Git URL to extract package name and repository details + 2. Downloads package to temporary location to read manifest + 3. Loads and parses the plugin-manifest.yaml + 4. Normalizes and validates the manifest data + 5. For isolated_venv plugins: initializes the target venv and installs via pip into isolated venv + 6. For other plugins: installs via pip into current venv + 7. Persists the manifest to the plugin catalog + 8. Finds and saves versions.json if available + 9. Updates the plugin version registry Args: - package_name: The name of the package to uninstall. + url: Git repository URL in one of these formats: + - MyProject @ git+ssh://git@git.example.com/MyProject + - MyProject @ git+https://git.example.com/MyProject + - MyProject @ git+https://git.example.com/MyProject@master Returns: - True if uninstallation was successful, False otherwise. + Tuple of (PluginManifest, Path to plugin or None) Raises: - RuntimeError: If the uninstallation process fails. + ValueError: If URL format is invalid. + RuntimeError: If any step of the installation process fails. + FileNotFoundError: If plugin-manifest.yaml is not found in the package. """ + # Step 1: Parse the Git URL + # Expected format: "PackageName@git+protocol://repo_url[@branch/tag/commit]" + if " @ " not in url: + raise ValueError( + f"Invalid Git URL format: '{url}'. Expected format: 'PackageName @ git+protocol://repo_url'" + ) + + package_name, git_spec = url.split(" @ ", 1) + package_name = package_name.strip() + + # Remove 'git+' prefix and extract protocol + if not git_spec.startswith("git+"): + raise ValueError(f"Git URL must start with 'git+': '{git_spec}'") + + git_url = git_spec[4:] # Remove 'git+' prefix + + # Extract branch/tag/commit if specified (after @) + git_branch_tag_commit = None + if "@" in git_url and not git_url.startswith("git@"): + # Split on the last @ to handle git@host:repo@branch format + parts = git_url.rsplit("@", 1) + if len(parts) == 2: + git_url, git_branch_tag_commit = parts + + # Validate using PluginPackageInfo try: + PluginPackageInfo( + git_repository=git_url, + git_branch_tag_commit=git_branch_tag_commit, + ) + except Exception as e: + raise ValueError(f"Invalid Git repository URL: {str(e)}") from e + + logger.info("Installing package '%s' from Git repository: %s", package_name, git_url) + if git_branch_tag_commit: + logger.info("Using branch/tag/commit: %s", git_branch_tag_commit) + + # Step 2: Download package to temporary location to read manifest + # We'll use pip download to get the package without installing it first + temp_dir = Path(tempfile.mkdtemp(prefix="cpex_git_")) + temp_extract_dir = temp_dir / "extracted" + temp_extract_dir.mkdir(parents=True, exist_ok=True) + + try: + # Construct the full git URL for pip + pip_git_url = f"git+{git_url}" + if git_branch_tag_commit: + pip_git_url = f"{pip_git_url}@{git_branch_tag_commit}" + + # Download the package using pip + logger.info("Downloading package from Git repository...") subprocess.run( - [self.python_executable, "-m", "pip", "uninstall", "-y", package_name], + [ + self.python_executable, + "-m", + "pip", + "download", + "--no-deps", + "--dest", + str(temp_dir), + pip_git_url, + ], check=True, capture_output=True, text=True, ) + + # Find the downloaded archive + archives = list(temp_dir.glob("*.tar.gz")) + list(temp_dir.glob("*.zip")) + list(temp_dir.glob("*.whl")) + if not archives: + raise RuntimeError(f"No package archive found after downloading from {git_url}") + + archive_path = archives[0] + logger.info("Downloaded archive: %s", archive_path.name) + + # Extract the archive using common helper + self._extract_package_archive(archive_path, temp_extract_dir) + + # Step 3: Find and load the manifest file + manifest_path = self._find_manifest_in_extracted_package(temp_extract_dir, package_name) + manifest_data = self._load_manifest_file(manifest_path) + + # Step 4: Normalize and validate the manifest + manifest = self._normalize_manifest_data(manifest_data, package_name, None) + + # Update the manifest with the git repo information + git_repo: GitRepo = GitRepo( + git_repository=git_url, + git_branch_tag_commit=git_branch_tag_commit, + ) + manifest.git_repo = git_repo + + package_path = manifest_path.parent + install_url = f"{package_name} @ {git_spec}" + + # Step 5: Handle installation based on plugin kind + plugin_path = self._handle_plugin_installation( + manifest, + package_path, + install_command=None # Will install separately + ) + + # Install the package from git + if manifest.kind == "isolated_venv": + # Install into isolated venv + if plugin_path is None: + raise RuntimeError(f"Failed to initialize isolated venv for {manifest.name}") + venv_python = self._get_venv_python_executable(plugin_path / ".venv") + logger.info("Installing package into isolated venv: %s", install_url) + subprocess.run( + [venv_python, "-m", "pip", "install", install_url], + check=True, + capture_output=True, + text=True, + ) + logger.info("Successfully installed into isolated venv") + else: + # Install into current venv + subprocess.run( + [self.python_executable, "-m", "pip", "install", install_url], + check=True, + capture_output=True, + text=True, + ) + plugin_path = self.find_package_path(package_name) + + # Step 6-8: Finalize installation (persist, versions.json, registry) + plugin_path = self._finalize_plugin_installation(manifest, plugin_path, package_name) + + return manifest, plugin_path + + except subprocess.CalledProcessError as e: + raise RuntimeError(f"Failed to install {package_name} from Git: {e.stderr}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error installing {package_name} from Git: {str(e)}") from e + finally: + # Clean up temporary directory + if temp_dir.exists(): + shutil.rmtree(temp_dir) + + def uninstall_package(self, package_name: str, manifest: PluginManifest) -> bool: + """Uninstall a Python package using pip. + + Args: + package_name: The name of the package to uninstall. + + Returns: + True if uninstallation was successful, False otherwise. + + Raises: + RuntimeError: If the uninstallation process fails. + """ + try: + if manifest.kind == "isolated_venv": + # Import here to avoid circular dependency + from cpex.framework.isolated.client import IsolatedVenvPlugin + from cpex.framework.models import PluginMode + + # Create a temporary PluginConfig from the manifest + plugin_config = manifest.create_instance_config( + instance_name=manifest.name, + mode=PluginMode.SEQUENTIAL, + priority=100, + ) + + # Create an IsolatedVenvPlugin instance + isolated_plugin = IsolatedVenvPlugin( + config=plugin_config, + plugin_dirs=[str(self.plugin_folder)], + ) + + venv_python = self._get_venv_python_executable(isolated_plugin.plugin_path / ".venv") + subprocess.run( + [venv_python, "-m", "pip", "uninstall", "-y", package_name], + check=True, + capture_output=True, + text=True, + ) + else: + subprocess.run( + [self.python_executable, "-m", "pip", "uninstall", "-y", package_name], + check=True, + capture_output=True, + text=True, + ) logger.info("Successfully uninstalled package: %s", package_name) return True @@ -1258,3 +1529,176 @@ def uninstall_package(self, package_name: str) -> bool: raise RuntimeError(f"Failed to uninstall {package_name}: {e.stderr}") from e except Exception as e: raise RuntimeError(f"Unexpected error uninstalling {package_name}: {str(e)}") from e + + def install_from_local(self, source: Path) -> tuple[PluginManifest, Path]: + """Install a plugin from a local source directory. + + This method performs the following steps: + 1. Locates and loads pyproject.toml from source or subdirectories + 2. Finds and loads the plugin-manifest.yaml from source or subdirectories + 3. Parses and validates the manifest + 4. For isolated_venv plugins: initializes the target venv and installs in editable mode + 5. For other plugins: installs in editable mode into current environment + 6. Persists the manifest to the plugin catalog + 7. Finds and saves versions.json if available + 8. Updates the plugin version registry + + Args: + source: Path to the local plugin source directory. + + Returns: + Tuple of (PluginManifest, installation_path) where installation_path is the + path where the plugin was installed. + + Raises: + FileNotFoundError: If pyproject.toml or plugin-manifest.yaml is not found in source or subdirectories. + RuntimeError: If installation fails. + """ + # Step 1: Find and load pyproject.toml in source or subdirectories + pyproject_path = None + pyproject_data = None + + # Check in the source directory itself + candidate = source / "pyproject.toml" + if candidate.exists(): + pyproject_path = candidate + else: + # Search in subdirectories (one level deep) + for subdir in source.iterdir(): + if subdir.is_dir(): + candidate = subdir / "pyproject.toml" + if candidate.exists(): + pyproject_path = candidate + break + + if pyproject_path is None: + raise FileNotFoundError(f"pyproject.toml not found in {source} or its immediate subdirectories") + + logger.info("Found pyproject.toml at: %s", pyproject_path) + + # Load and parse the pyproject.toml + try: + with open(pyproject_path, "rb") as f: + pyproject_data = tomllib.load(f) + logger.info( + "Successfully loaded pyproject.toml with project name: %s", + pyproject_data.get("project", {}).get("name", "unknown"), + ) + except Exception as e: + raise RuntimeError(f"Failed to parse pyproject.toml at {pyproject_path}: {str(e)}") from e + + # Step 2: Find plugin-manifest.yaml in source or subdirectories + manifest_path = None + + # Check in the source directory itself + candidate = source / "plugin-manifest.yaml" + if candidate.exists(): + manifest_path = candidate + else: + # Search in subdirectories (one level deep) + for subdir in source.iterdir(): + if subdir.is_dir(): + candidate = subdir / "plugin-manifest.yaml" + if candidate.exists(): + manifest_path = candidate + break + + if manifest_path is None: + raise FileNotFoundError(f"plugin-manifest.yaml not found in {source} or its immediate subdirectories") + + logger.info("Found plugin-manifest.yaml at: %s", manifest_path) + + # Step 2: Load and parse the manifest + manifest_data = self._load_manifest_file(manifest_path) + manifest = self._normalize_manifest_data(manifest_data, pyproject_data["project"]["name"], None) + manifest.local = str(source.resolve()) + + logger.info("Loaded manifest for plugin: %s (kind: %s)", manifest.name, manifest.kind) + + plugin_path = None + + # Step 3: Install based on plugin kind + if manifest.kind == "isolated_venv": + logger.info("Installing isolated_venv plugin from local source: %s", source) + + try: + # Import here to avoid circular dependency + from cpex.framework.isolated.client import IsolatedVenvPlugin + from cpex.framework.models import PluginMode + + # Create a temporary PluginConfig from the manifest + plugin_config = manifest.create_instance_config( + instance_name=manifest.name, + mode=PluginMode.SEQUENTIAL, + priority=100, + ) + + # Create an IsolatedVenvPlugin instance + isolated_plugin = IsolatedVenvPlugin( + config=plugin_config, + plugin_dirs=[str(self.plugin_folder)], + ) + + # Initialize the venv (creates venv directory structure) + import asyncio + + asyncio.run(isolated_plugin.initialize()) + + # Get the venv python executable + venv_path = isolated_plugin.plugin_path / ".venv" + venv_python = self._get_venv_python_executable(venv_path) + + # Install the plugin in editable mode into the isolated venv + logger.info("Installing plugin in editable mode into isolated venv: %s", venv_path) + subprocess.run( + [venv_python, "-m", "pip", "install", "-e", str(source)], + check=True, + capture_output=True, + text=True, + ) + + plugin_path = isolated_plugin.plugin_path + logger.info("Successfully installed %s into isolated venv at %s", manifest.name, plugin_path) + + except Exception as e: + raise RuntimeError(f"Failed to install isolated_venv plugin from {source}: {str(e)}") from e + + else: + # Install into current environment for non-isolated plugins + logger.info("Installing plugin from local source into current environment: %s", source) + + try: + subprocess.run( + [self.python_executable, "-m", "pip", "install", "-e", str(source)], + check=True, + capture_output=True, + text=True, + ) + + # For non-isolated plugins, the plugin_path is the same folder that hosts the plugin-manifest.yaml + plugin_path = Path(str(manifest_path).removesuffix(manifest_path.name)) + if plugin_path is None: + # Fallback to source path if package path not found + plugin_path = source + + logger.info("Successfully installed %s into current environment at %s", manifest.name, plugin_path) + + except subprocess.CalledProcessError as e: + raise RuntimeError(f"Failed to install plugin from {source}: {e.stderr}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error installing plugin from {source}: {str(e)}") from e + + # Step 4: Persist to catalog + self._persist_manifest(manifest, manifest.name) + + # Step 5: Find and save versions.json if available + actual_plugin_path = self._find_and_load_versions_json(manifest, plugin_path, manifest.name) + + # Step 6: Update the plugin version registry + self.update_plugin_version_registry(manifest=manifest, relpath=plugin_path) + + logger.info("Successfully installed and cataloged %s from local source", manifest.name) + + # Return the actual plugin path if found, otherwise the original plugin_path + final_path = actual_plugin_path if actual_plugin_path is not None else plugin_path + return manifest, final_path diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 729e6ca..f9e4e4e 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -451,8 +451,14 @@ def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: catalog: The plugin catalog. """ plugin_registry = PluginRegistry() + editable = install_type == "local" plugin_registry.update( - manifest=manifest, installation_type=install_type, catalog=catalog, git_user_name=git_user_name(), plugin_path=plugin_path + manifest=manifest, + installation_type=install_type, + catalog=catalog, + git_user_name=git_user_name(), + plugin_path=plugin_path, + editable=editable, ) update_plugins_config_yaml(manifest=manifest) @@ -465,9 +471,15 @@ def _install_from_local(source: str, catalog: PluginCatalog, use_test: bool = Fa catalog: The plugin catalog. Raises: - NotImplementedError: local installation is not yet supported. + FileNotFoundError: If plugin-manifest.yaml is not found in source or subdirectories. + RuntimeError: If installation fails. """ - raise NotImplementedError("Local installation is not yet implemented") + install_source = Path(source) + with console.status(f"Installing plugin from source {source}...", spinner="dots"): + manifest, installation_path = catalog.install_from_local(install_source) + update_plugins_config_yaml(manifest=manifest) + _finalize_installation(manifest, "local", catalog, installation_path) + console.print(f":white_heavy_check_mark: {manifest.name} installation complete.") def _install_from_git(source: str, catalog: PluginCatalog, use_test: bool = False): @@ -480,7 +492,11 @@ def _install_from_git(source: str, catalog: PluginCatalog, use_test: bool = Fals Raises: NotImplementedError: Git installation is not yet supported. """ - raise NotImplementedError("Git installation is not yet implemented") + with console.status(f"Installing plugin from source {source}...", spinner="dots"): + manifest, installation_path = catalog.install_from_git(source) + update_plugins_config_yaml(manifest=manifest) + _finalize_installation(manifest, "git", catalog, installation_path) + console.print(f":white_heavy_check_mark: {manifest.name} installation complete.") def _install_from_monorepo(source: str, catalog: PluginCatalog, use_test: bool = False): @@ -662,19 +678,18 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: try: with console.status(f"Uninstalling plugin {plugin_name}...", spinner="dots"): - # Uninstall the package using pip - catalog.uninstall_package(plugin_name) - - # Remove from plugin registry - plugin_registry.remove(plugin_name) # retrieve the manifest so we can match on kind value catalog = PluginCatalog() manifest = catalog.find(plugin_name) # Remove from plugins/config.yaml if manifest: remove_from_plugins_config_yaml(manifest) + catalog.uninstall_package(plugin_name, manifest) + # Remove from plugin registry + plugin_registry.remove(plugin_name) else: - console.print(f"Plugin {plugin_name} not found in plugins config.yaml.") + console.print(f":x: Plugin {plugin_name} not found in catalog.") + return console.print(f":white_heavy_check_mark: {plugin_name} uninstalled successfully.") @@ -693,6 +708,7 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: "python cpex/tools/cli.py plugin --type monorepo install cpex-pii-filter\n" 'python cpex/tools/cli.py plugin --type pypi install "ExamplePlugin@>=0.1.0"\n' 'python cpex/tools/cli.py plugin --type test-pypi install "cpex-test-plugin@>=0.1.1"\n' + 'python cpex/tools/cli.py plugin --type git install "cpex-test-plugin @ git+https://github.com/tedhabeck/cpex-test-plugin@main"\n' "python cpex/tools/cli.py plugin versions cpex-test-plugin" "python cpex/tools/cli.py plugin uninstall cpex-pii-filter" ) @@ -728,7 +744,7 @@ def plugin( # update the catalog before proceeding with install etc. pc = PluginCatalog() # optimized github search REST api takes ~14s to search & download all manifests - if install_type not in {"test-pypi", "pypi"}: + if install_type not in {"test-pypi", "pypi", "local"}: console.log("Update catalog") with console.status("Updating catalog...", spinner="dots"): rc = pc.update_catalog_with_pyproject() diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 4230c07..49bfc91 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -38,7 +38,13 @@ def __init__(self, *args, **kwargs): self.registry = InstalledPluginRegistry() def update( - self, manifest: PluginManifest, installation_type: str, catalog: PluginCatalog, git_user_name: str, plugin_path: Path | None = None + self, + manifest: PluginManifest, + installation_type: str, + catalog: PluginCatalog, + git_user_name: str, + plugin_path: Path | None = None, + editable: bool = False, ) -> None: """ Given a plugin manifest, register it in the plugin registry. @@ -61,6 +67,16 @@ def update( if manifest.package_info is None: raise RuntimeError("PluginManifest.package_info can not be None.") package_source = manifest.package_info.pypi_package + elif installation_type == "local": + if manifest.local is None: + raise RuntimeError("PluginManifest local path can not be None.") + package_source = manifest.local + elif installation_type == "git": + if manifest.git_repo is None: + raise RuntimeError("PluginManifest.git_repo can not be None.") + package_source = manifest.name + " @ " + manifest.git_repo.git_repository + if manifest.git_repo.git_branch_tag_commit is not None: + package_source += f"@{manifest.git_repo.git_branch_tag_commit}" else: raise ValueError(f"Invalid installation type: {installation_type}") @@ -75,7 +91,7 @@ def update( installed_at=datetime.datetime.now(datetime.timezone.utc).isoformat() + "Z", installed_by=git_user_name, package_source=package_source, - editable=False, + editable=editable, ) # add the newly downloaded plugin to the registry self.registry.register_plugin(ipi) diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index ea149a5..38a1941 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -1499,14 +1499,57 @@ def test_find_manifest_not_found(self, tmp_path, mock_github_env): class TestPluginCatalogUninstallPackage: """Tests for uninstall_package method.""" - def test_uninstall_package_success(self, mock_github_env): - """Test successful package uninstallation.""" + def test_uninstall_package_success_native(self, mock_github_env): + """Test successful package uninstallation for native plugin.""" catalog = PluginCatalog() + # Create a native plugin manifest + manifest = create_test_manifest(kind="native") + with patch('subprocess.run') as mock_run: mock_run.return_value = MagicMock(returncode=0) - result = catalog.uninstall_package("test_plugin") + result = catalog.uninstall_package("test_plugin", manifest) + + assert result is True + mock_run.assert_called_once() + call_args = mock_run.call_args[0][0] + assert "pip" in call_args + assert "uninstall" in call_args + assert "-y" in call_args + assert "test_plugin" in call_args + # Should use current python executable for native plugins + assert call_args[0] == catalog.python_executable + + def test_uninstall_package_success_isolated_venv(self, tmp_path, mock_github_env): + """Test successful package uninstallation for isolated_venv plugin.""" + catalog = PluginCatalog() + catalog.plugin_folder = str(tmp_path / "plugins") + + # Create an isolated_venv plugin manifest + manifest = create_test_manifest(kind="isolated_venv") + + # Create mock venv structure + plugin_path = tmp_path / "plugins" / "test_plugin" + plugin_path.mkdir(parents=True) + venv_path = plugin_path / ".venv" + venv_bin = venv_path / "bin" + venv_bin.mkdir(parents=True) + venv_python = venv_bin / "python" + venv_python.touch() + + # Mock the IsolatedVenvPlugin + mock_isolated_plugin = MagicMock() + mock_isolated_plugin.plugin_path = plugin_path + + with ( + patch('subprocess.run') as mock_run, + patch('cpex.framework.isolated.client.IsolatedVenvPlugin', return_value=mock_isolated_plugin), + patch.object(catalog, '_get_venv_python_executable', return_value=str(venv_python)), + ): + mock_run.return_value = MagicMock(returncode=0) + + result = catalog.uninstall_package("test_plugin", manifest) assert result is True mock_run.assert_called_once() @@ -1515,31 +1558,66 @@ def test_uninstall_package_success(self, mock_github_env): assert "uninstall" in call_args assert "-y" in call_args assert "test_plugin" in call_args + # Should use venv python executable for isolated plugins + assert call_args[0] == str(venv_python) def test_uninstall_package_subprocess_error(self, mock_github_env): """Test subprocess error during uninstallation.""" catalog = PluginCatalog() + manifest = create_test_manifest(kind="native") with patch('subprocess.run') as mock_run: mock_run.side_effect = subprocess.CalledProcessError(1, "pip", stderr="Uninstall failed") with pytest.raises(RuntimeError) as exc_info: - catalog.uninstall_package("test_plugin") + catalog.uninstall_package("test_plugin", manifest) assert "Failed to uninstall" in str(exc_info.value) def test_uninstall_package_unexpected_error(self, mock_github_env): """Test unexpected error during uninstallation.""" catalog = PluginCatalog() + manifest = create_test_manifest(kind="native") with patch('subprocess.run') as mock_run: mock_run.side_effect = Exception("Unexpected error") with pytest.raises(RuntimeError) as exc_info: - catalog.uninstall_package("test_plugin") + catalog.uninstall_package("test_plugin", manifest) assert "Unexpected error uninstalling" in str(exc_info.value) + def test_uninstall_package_isolated_venv_error(self, tmp_path, mock_github_env): + """Test error during isolated_venv plugin uninstallation.""" + catalog = PluginCatalog() + catalog.plugin_folder = str(tmp_path / "plugins") + manifest = create_test_manifest(kind="isolated_venv") + + # Create mock venv structure + plugin_path = tmp_path / "plugins" / "test_plugin" + plugin_path.mkdir(parents=True) + venv_path = plugin_path / ".venv" + venv_bin = venv_path / "bin" + venv_bin.mkdir(parents=True) + venv_python = venv_bin / "python" + venv_python.touch() + + # Mock the IsolatedVenvPlugin + mock_isolated_plugin = MagicMock() + mock_isolated_plugin.plugin_path = plugin_path + + with ( + patch('subprocess.run') as mock_run, + patch('cpex.framework.isolated.client.IsolatedVenvPlugin', return_value=mock_isolated_plugin), + patch.object(catalog, '_get_venv_python_executable', return_value=str(venv_python)), + ): + mock_run.side_effect = subprocess.CalledProcessError(1, "pip", stderr="Uninstall failed") + + with pytest.raises(RuntimeError) as exc_info: + catalog.uninstall_package("test_plugin", manifest) + + assert "Failed to uninstall" in str(exc_info.value) + class TestPluginCatalogInstallFolderViaPipIsolated: """Tests for install_folder_via_pip with isolated_venv plugins.""" @@ -2204,3 +2282,811 @@ def test_install_from_pypi_calls_find_and_load_versions_json(self, tmp_path, moc # Made with Bob + + +class TestPluginCatalogInstallFromLocal: + """Tests for PluginCatalog.install_from_local method.""" + + def test_install_from_local_manifest_in_root(self, tmp_path, mock_github_env): + """Test installing from local source with manifest in root directory.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create source directory with pyproject and manifest in root + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch.object(catalog, "find_package_path", return_value=source_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=source_dir), + patch.object(catalog, "update_plugin_version_registry"), + ): + manifest, plugin_path = catalog.install_from_local(source_dir) + + # Verify subprocess was called with pip install -e + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args[0][0] + assert "-m" in call_args + assert "pip" in call_args + assert "install" in call_args + assert "-e" in call_args + assert str(source_dir) in call_args + + assert manifest.name == "my_plugin" + assert manifest.kind == "native" + assert plugin_path == source_dir + + def test_install_from_local_manifest_in_subdirectory(self, tmp_path, mock_github_env): + """Test installing from local source with manifest in subdirectory.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create source directory with pyproject and manifest in subdirectory + source_dir = tmp_path / "my_plugin_project" + source_dir.mkdir() + plugin_subdir = source_dir / "my_plugin" + plugin_subdir.mkdir() + (plugin_subdir / "pyproject.toml").write_text('[project]\nname = "my_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = plugin_subdir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch.object(catalog, "find_package_path", return_value=source_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=source_dir), + patch.object(catalog, "update_plugin_version_registry"), + ): + manifest, plugin_path = catalog.install_from_local(source_dir) + + assert manifest.name == "my_plugin" + mock_subprocess.assert_called_once() + + def test_install_from_local_manifest_not_found(self, tmp_path, mock_github_env): + """Test error when manifest is not found in source or subdirectories.""" + catalog = PluginCatalog() + + # Create source directory without pyproject or manifest + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + + with pytest.raises(FileNotFoundError, match="pyproject.toml not found"): + catalog.install_from_local(source_dir) + + def test_install_from_local_isolated_venv(self, tmp_path, mock_github_env): + """Test installing an isolated_venv plugin from local source.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.plugin_folder = str(tmp_path / "plugins") + + # Create source directory with isolated_venv pyproject and manifest + source_dir = tmp_path / "my_isolated_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_isolated_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_isolated_plugin", + "version": "1.0.0", + "kind": "isolated_venv", + "description": "Test isolated plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {"requirements_file": "requirements.txt"}, + } + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Mock IsolatedVenvPlugin + mock_isolated_plugin = Mock() + mock_isolated_plugin.plugin_path = tmp_path / "plugins" / "my_isolated_plugin" + mock_isolated_plugin.plugin_path.mkdir(parents=True, exist_ok=True) + venv_path = mock_isolated_plugin.plugin_path / ".venv" + venv_path.mkdir(parents=True, exist_ok=True) + + # Create mock venv python executable + if sys.platform == "win32": + python_exe = venv_path / "Scripts" / "python.exe" + else: + python_exe = venv_path / "bin" / "python" + python_exe.parent.mkdir(parents=True, exist_ok=True) + python_exe.touch() + + with ( + patch("cpex.framework.isolated.client.IsolatedVenvPlugin", return_value=mock_isolated_plugin), + patch("asyncio.run"), + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=mock_isolated_plugin.plugin_path), + patch.object(catalog, "update_plugin_version_registry"), + ): + manifest, plugin_path = catalog.install_from_local(source_dir) + + # Verify subprocess was called with venv python + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args[0][0] + assert str(python_exe) == call_args[0] + assert "-m" in call_args + assert "pip" in call_args + assert "install" in call_args + assert "-e" in call_args + assert str(source_dir) in call_args + + assert manifest.name == "my_isolated_plugin" + assert manifest.kind == "isolated_venv" + assert plugin_path == mock_isolated_plugin.plugin_path + + def test_install_from_local_subprocess_error(self, tmp_path, mock_github_env): + """Test error handling when pip install fails.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create source directory with pyproject and manifest + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + with patch("cpex.tools.catalog.subprocess.run") as mock_subprocess: + mock_subprocess.side_effect = subprocess.CalledProcessError( + 1, ["pip", "install"], stderr="Installation failed" + ) + + with pytest.raises(RuntimeError, match="Failed to install plugin from"): + catalog.install_from_local(source_dir) + + def test_install_from_local_invalid_manifest(self, tmp_path, mock_github_env): + """Test error handling when manifest is invalid.""" + catalog = PluginCatalog() + + # Create source directory with pyproject and invalid manifest + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_plugin"\nversion = "1.0.0"\n') + + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text("invalid: yaml: content:") + + with pytest.raises(RuntimeError, match="Failed to parse manifest YAML"): + catalog.install_from_local(source_dir) + + def test_install_from_local_calls_persist_and_registry(self, tmp_path, mock_github_env): + """Test that install_from_local calls persist_manifest and update_plugin_version_registry.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create source directory with pyproject and manifest + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + with ( + patch("cpex.tools.catalog.subprocess.run"), + patch.object(catalog, "find_package_path", return_value=source_dir), + patch.object(catalog, "_persist_manifest") as mock_persist, + patch.object(catalog, "_find_and_load_versions_json", return_value=source_dir) as mock_versions, + patch.object(catalog, "update_plugin_version_registry") as mock_registry, + ): + manifest, plugin_path = catalog.install_from_local(source_dir) + + # Verify all post-install steps were called + mock_persist.assert_called_once() + mock_versions.assert_called_once() + mock_registry.assert_called_once() + + # Verify the manifest was passed correctly + persist_call_args = mock_persist.call_args[0] + assert persist_call_args[0].name == "my_plugin" + + def test_install_from_local_isolated_venv_initialization_error(self, tmp_path, mock_github_env): + """Test error handling when isolated venv initialization fails.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + catalog.plugin_folder = str(tmp_path / "plugins") + + # Create source directory with isolated_venv pyproject and manifest + source_dir = tmp_path / "my_isolated_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_isolated_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_isolated_plugin", + "version": "1.0.0", + "kind": "isolated_venv", + "description": "Test isolated plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {"requirements_file": "requirements.txt"}, + } + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + with ( + patch("cpex.framework.isolated.client.IsolatedVenvPlugin") as mock_plugin_class, + patch("asyncio.run") as mock_asyncio_run, + ): + mock_asyncio_run.side_effect = Exception("Venv initialization failed") + + with pytest.raises(RuntimeError, match="Failed to install isolated_venv plugin"): + catalog.install_from_local(source_dir) + + def test_install_from_local_fallback_to_source_path(self, tmp_path, mock_github_env): + """Test that source path is used as fallback when find_package_path returns None.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create source directory with pyproject and manifest + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + with ( + patch("cpex.tools.catalog.subprocess.run"), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=None), + patch.object(catalog, "update_plugin_version_registry"), + ): + manifest, plugin_path = catalog.install_from_local(source_dir) + + # Non-isolated installs now derive plugin_path from manifest location + assert plugin_path == source_dir + + def test_install_from_local_with_versions_json(self, tmp_path, mock_github_env): + """Test that versions.json is found and loaded correctly.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create source directory with pyproject and manifest + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + (source_dir / "pyproject.toml").write_text('[project]\nname = "my_plugin"\nversion = "1.0.0"\n') + + manifest_data = { + "name": "my_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = source_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Create a different path that versions.json returns + actual_path = tmp_path / "actual_plugin_path" + actual_path.mkdir() + + with ( + patch("cpex.tools.catalog.subprocess.run"), + patch.object(catalog, "find_package_path", return_value=source_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=actual_path), + patch.object(catalog, "update_plugin_version_registry"), + ): + manifest, plugin_path = catalog.install_from_local(source_dir) + + # Should return the actual path from versions.json + assert plugin_path == actual_path + + + +class TestPluginCatalogInstallFromGit: + """Tests for PluginCatalog.install_from_git method.""" + + def test_install_from_git_success_https(self, tmp_path, mock_github_env): + """Test successful installation from Git using HTTPS URL.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create mock extracted package with manifest + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Create a mock archive + archive_path = tmp_path / "test_plugin-1.0.0.tar.gz" + with tarfile.open(archive_path, "w:gz") as tar: + tar.add(plugin_dir, arcname="test_plugin") + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), + patch.object(catalog, "update_plugin_version_registry"), + patch("shutil.rmtree"), + ): + # Mock pip download to create the archive + def mock_run(*args, **kwargs): + if "download" in args[0]: + # Simulate pip download creating the archive + pass + return Mock(returncode=0) + + mock_subprocess.side_effect = mock_run + + url = "test_plugin @ git+https://github.com/example/test_plugin.git" + manifest, plugin_path = catalog.install_from_git(url) + + assert manifest.name == "test_plugin" + assert manifest.kind == "native" + assert plugin_path == plugin_dir + # Should call subprocess twice: once for download, once for install + assert mock_subprocess.call_count == 2 + + def test_install_from_git_success_ssh(self, tmp_path, mock_github_env): + """Test successful installation from Git using SSH URL.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + archive_path = tmp_path / "test_plugin-1.0.0.tar.gz" + with tarfile.open(archive_path, "w:gz") as tar: + tar.add(plugin_dir, arcname="test_plugin") + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), + patch.object(catalog, "update_plugin_version_registry"), + patch("shutil.rmtree"), + ): + mock_subprocess.return_value = Mock(returncode=0) + + # Use git@ format which is the standard SSH format + url = "test_plugin @ git+git@github.com:example/test_plugin.git" + manifest, plugin_path = catalog.install_from_git(url) + + assert manifest.name == "test_plugin" + assert plugin_path == plugin_dir + + def test_install_from_git_with_branch(self, tmp_path, mock_github_env): + """Test installation from Git with specific branch.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + archive_path = tmp_path / "test_plugin-1.0.0.tar.gz" + with tarfile.open(archive_path, "w:gz") as tar: + tar.add(plugin_dir, arcname="test_plugin") + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), + patch.object(catalog, "update_plugin_version_registry"), + patch("shutil.rmtree"), + ): + mock_subprocess.return_value = Mock(returncode=0) + + url = "test_plugin @ git+https://github.com/example/test_plugin.git@master" + manifest, plugin_path = catalog.install_from_git(url) + + assert manifest.name == "test_plugin" + # Verify that the branch was included in the pip install command + install_call = [call for call in mock_subprocess.call_args_list if "install" in str(call)] + assert len(install_call) > 0 + + def test_install_from_git_isolated_venv(self, tmp_path, mock_github_env): + """Test installation of isolated_venv plugin from Git.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "isolated_venv", + "description": "Test isolated plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {"requirements_file": "requirements.txt"}, + } + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Create requirements file + requirements_file = plugin_dir / "requirements.txt" + requirements_file.write_text("pytest>=7.0.0\n") + + archive_path = tmp_path / "test_plugin-1.0.0.tar.gz" + with tarfile.open(archive_path, "w:gz") as tar: + tar.add(plugin_dir, arcname="test_plugin") + tar.add(requirements_file, arcname="test_plugin/requirements.txt") + + venv_path = tmp_path / "venv_path" + venv_path.mkdir() + venv_bin = venv_path / "venv" / "bin" + venv_bin.mkdir(parents=True) + venv_python = venv_bin / "python" + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch.object(catalog, "_initialize_isolated_venv", return_value=venv_path), + patch.object(catalog, "_get_venv_python_executable", return_value=str(venv_python)), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=venv_path), + patch.object(catalog, "update_plugin_version_registry"), + patch("shutil.rmtree"), + ): + mock_subprocess.return_value = Mock(returncode=0) + + url = "test_plugin @ git+https://github.com/example/test_plugin.git" + manifest, plugin_path = catalog.install_from_git(url) + + assert manifest.kind == "isolated_venv" + assert plugin_path == venv_path + # Should call subprocess twice: download and install into isolated venv + assert mock_subprocess.call_count == 2 + # Verify install was called with venv python (not download which also contains "install") + install_calls = [call for call in mock_subprocess.call_args_list if "pip', 'install" in str(call)] + assert len(install_calls) == 1 + assert str(venv_python) in str(install_calls[0]) + + def test_install_from_git_invalid_url_format(self, mock_github_env): + """Test error when URL format is invalid (missing @).""" + catalog = PluginCatalog() + + with pytest.raises(ValueError) as exc_info: + catalog.install_from_git("test_plugin") + + assert "Invalid Git URL format" in str(exc_info.value) + assert "Expected format" in str(exc_info.value) + + def test_install_from_git_missing_git_prefix(self, mock_github_env): + """Test error when git+ prefix is missing.""" + catalog = PluginCatalog() + + with pytest.raises(ValueError) as exc_info: + catalog.install_from_git("test_plugin @ https://github.com/example/test_plugin.git") + + assert "Git URL must start with 'git+'" in str(exc_info.value) + + def test_install_from_git_invalid_git_url(self, mock_github_env): + """Test error when Git URL is invalid.""" + catalog = PluginCatalog() + + with pytest.raises(ValueError) as exc_info: + catalog.install_from_git("test_plugin @ git+invalid://not-a-valid-url") + + assert "Invalid Git repository URL" in str(exc_info.value) + + def test_install_from_git_download_failure(self, tmp_path, mock_github_env): + """Test error when pip download fails.""" + catalog = PluginCatalog() + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch("shutil.rmtree"), + ): + mock_subprocess.side_effect = subprocess.CalledProcessError( + 1, ["pip", "download"], stderr="Download failed" + ) + + with pytest.raises(RuntimeError) as exc_info: + catalog.install_from_git("test_plugin @ git+https://github.com/example/test_plugin.git") + + assert "Failed to install test_plugin from Git" in str(exc_info.value) + + def test_install_from_git_no_archive_found(self, tmp_path, mock_github_env): + """Test error when no archive is found after download.""" + catalog = PluginCatalog() + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch("shutil.rmtree"), + ): + mock_subprocess.return_value = Mock(returncode=0) + + with pytest.raises(RuntimeError) as exc_info: + catalog.install_from_git("test_plugin @ git+https://github.com/example/test_plugin.git") + + assert "No package archive found" in str(exc_info.value) + + def test_install_from_git_manifest_not_found(self, tmp_path, mock_github_env): + """Test error when manifest is not found in package.""" + catalog = PluginCatalog() + + # Create archive without manifest + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + archive_path = tmp_path / "test_plugin-1.0.0.tar.gz" + with tarfile.open(archive_path, "w:gz") as tar: + tar.add(plugin_dir, arcname="test_plugin") + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch("shutil.rmtree"), + ): + mock_subprocess.return_value = Mock(returncode=0) + + # The method wraps FileNotFoundError in RuntimeError + with pytest.raises(RuntimeError) as exc_info: + catalog.install_from_git("test_plugin @ git+https://github.com/example/test_plugin.git") + + assert "Unexpected error installing test_plugin from Git" in str(exc_info.value) + assert "plugin-manifest.yaml not found" in str(exc_info.value) + + def test_install_from_git_install_failure(self, tmp_path, mock_github_env): + """Test error when pip install fails.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + archive_path = tmp_path / "test_plugin-1.0.0.tar.gz" + with tarfile.open(archive_path, "w:gz") as tar: + tar.add(plugin_dir, arcname="test_plugin") + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch("shutil.rmtree"), + ): + # First call (download) succeeds, second call (install) fails + mock_subprocess.side_effect = [ + Mock(returncode=0), # download succeeds + subprocess.CalledProcessError(1, ["pip", "install"], stderr="Install failed"), # install fails + ] + + with pytest.raises(RuntimeError) as exc_info: + catalog.install_from_git("test_plugin @ git+https://github.com/example/test_plugin.git") + + assert "Failed to install test_plugin from Git" in str(exc_info.value) + + def test_install_from_git_cleanup_on_error(self, tmp_path, mock_github_env): + """Test that temporary directory is cleaned up even on error.""" + catalog = PluginCatalog() + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch("shutil.rmtree") as mock_rmtree, + ): + mock_subprocess.side_effect = subprocess.CalledProcessError( + 1, ["pip", "download"], stderr="Download failed" + ) + + with pytest.raises(RuntimeError): + catalog.install_from_git("test_plugin @ git+https://github.com/example/test_plugin.git") + + # Verify cleanup was called + mock_rmtree.assert_called_once() + assert str(tmp_path) in str(mock_rmtree.call_args) + + def test_install_from_git_with_zip_archive(self, tmp_path, mock_github_env): + """Test installation from Git with zip archive.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Create a zip archive + archive_path = tmp_path / "test_plugin-1.0.0.zip" + with zipfile.ZipFile(archive_path, "w") as zipf: + zipf.write(manifest_file, arcname="test_plugin/plugin-manifest.yaml") + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), + patch.object(catalog, "update_plugin_version_registry"), + patch("shutil.rmtree"), + ): + mock_subprocess.return_value = Mock(returncode=0) + + url = "test_plugin @ git+https://github.com/example/test_plugin.git" + manifest, plugin_path = catalog.install_from_git(url) + + assert manifest.name == "test_plugin" + + def test_install_from_git_with_wheel(self, tmp_path, mock_github_env): + """Test installation from Git with wheel archive.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + extract_dir = tmp_path / "extracted" + extract_dir.mkdir() + plugin_dir = extract_dir / "test_plugin" + plugin_dir.mkdir() + + manifest_data = { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "description": "Test plugin", + "author": "Test Author", + "tags": ["test"], + "available_hooks": ["tools"], + "default_config": {}, + } + manifest_file = plugin_dir / "plugin-manifest.yaml" + manifest_file.write_text(yaml.safe_dump(manifest_data)) + + # Create a wheel archive (which is a zip file) + archive_path = tmp_path / "test_plugin-1.0.0-py3-none-any.whl" + with zipfile.ZipFile(archive_path, "w") as zipf: + zipf.write(manifest_file, arcname="test_plugin/plugin-manifest.yaml") + + with ( + patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, + patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), + patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch.object(catalog, "_persist_manifest"), + patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), + patch.object(catalog, "update_plugin_version_registry"), + patch("shutil.rmtree"), + ): + mock_subprocess.return_value = Mock(returncode=0) + + url = "test_plugin @ git+https://github.com/example/test_plugin.git" + manifest, plugin_path = catalog.install_from_git(url) + + assert manifest.name == "test_plugin" diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 2717fc8..402f36b 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -40,7 +40,7 @@ uninstall, ) from cpex.tools.plugin_registry import PluginRegistry -from cpex.framework.models import PluginManifest, Monorepo, Config, PluginConfig, PluginMode, PiPyRepo +from cpex.framework.models import PluginManifest, Monorepo, Config, PluginConfig, PluginMode, PyPiRepo runner = CliRunner() @@ -558,7 +558,7 @@ def test_update_with_pypi_installation(self, temp_registry_dir): manifest = create_test_manifest( name="pypi_plugin", monorepo=None, - package_info=PiPyRepo(pypi_package="pypi-plugin", version_constraint=None), + package_info=PyPiRepo(pypi_package="pypi-plugin", version_constraint=None), ) mock_catalog = Mock() @@ -600,6 +600,80 @@ def test_update_raises_for_invalid_installation_type(self, temp_registry_dir): with pytest.raises(ValueError, match="Invalid installation type: invalid"): plugin_registry.update(manifest, "invalid", Mock(), "test_user") + + def test_update_with_local_installation_and_explicit_plugin_path(self, temp_registry_dir): + """Test registry update for local installation with explicit plugin_path.""" + manifest = create_test_manifest(monorepo=None, package_info=None) + manifest.local = "/tmp/local-plugin-source" + explicit_path = temp_registry_dir / "installed" / "local_plugin" + explicit_path.mkdir(parents=True) + + plugin_registry = PluginRegistry() + plugin_registry.update(manifest, "local", Mock(), "test_user", plugin_path=explicit_path, editable=True) + + registry_file = temp_registry_dir / "installed-plugins.json" + updated_data = json.loads(registry_file.read_text()) + assert len(updated_data["plugins"]) == 1 + assert updated_data["plugins"][0]["name"] == manifest.name + assert updated_data["plugins"][0]["package_source"] == "/tmp/local-plugin-source" + assert updated_data["plugins"][0]["installation_type"] == "local" + assert updated_data["plugins"][0]["installation_path"] == str(explicit_path.resolve()) + assert updated_data["plugins"][0]["editable"] is True + + def test_update_with_local_installation_raises_without_local_metadata(self, temp_registry_dir): + """Test local update fails when manifest.local is missing.""" + manifest = create_test_manifest(monorepo=None, package_info=None) + manifest.local = None + + plugin_registry = PluginRegistry() + + with pytest.raises(RuntimeError, match="PluginManifest local path can not be None."): + plugin_registry.update(manifest, "local", Mock(), "test_user") + + def test_update_uses_find_package_path_when_plugin_path_not_provided(self, temp_registry_dir): + """Test registry update falls back to find_package_path when plugin_path is omitted.""" + manifest = create_test_manifest() + + with patch("cpex.tools.plugin_registry.find_package_path", return_value=Path("/fake/path/from/find_package_path")): + plugin_registry = PluginRegistry() + plugin_registry.update(manifest, "monorepo", Mock(), "test_user") + + registry_file = temp_registry_dir / "installed-plugins.json" + updated_data = json.loads(registry_file.read_text()) + assert updated_data["plugins"][0]["installation_path"] == str(Path("/fake/path/from/find_package_path").resolve()) + + def test_has_returns_true_when_plugin_present(self, temp_registry_dir): + """Test has() returns True for an installed plugin.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + plugin_registry = PluginRegistry() + + assert plugin_registry.has("test_plugin") is True + + def test_has_returns_false_when_plugin_absent(self, temp_registry_dir): + """Test has() returns False for a missing plugin.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_file.write_text(json.dumps({"plugins": []})) + + plugin_registry = PluginRegistry() + + assert plugin_registry.has("missing_plugin") is False class TestInstanceNameIsUnique: @@ -718,11 +792,28 @@ def test_install_from_monorepo(self, temp_registry_dir): class TestInstallFunction: """Tests for install() function.""" - def test_install_git_not_implemented(self): - """Test that git installation raises NotImplementedError.""" + def test_install_git_implementation(self): + """Test that git installation works with install_from_git.""" mock_catalog = Mock() - with pytest.raises(NotImplementedError, match="Git installation is not yet implemented"): - install("source", "git", mock_catalog) + test_manifest = create_test_manifest(name="test_plugin", kind="native") + mock_catalog.install_from_git = Mock(return_value=(test_manifest, Path("/path/to/plugin"))) + + with ( + patch("cpex.tools.cli._finalize_installation") as mock_finalize, + patch("cpex.tools.cli.console") as mock_console, + patch("cpex.tools.cli.update_plugins_config_yaml") as mock_update_config, + ): + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + install("test_plugin @ git+https://github.com/example/test_plugin.git", "git", mock_catalog) + + # Verify install_from_git was called + mock_catalog.install_from_git.assert_called_once() + mock_update_config.assert_called_once_with(manifest=test_manifest) + mock_finalize.assert_called_once() def test_install_monorepo_no_plugins_found(self): """Test monorepo install when no plugins found.""" @@ -1071,7 +1162,6 @@ def test_uninstall_success(self, temp_registry_dir): registry_file.write_text(json.dumps(registry_data)) mock_catalog = Mock() - mock_catalog.uninstall_package = Mock() # Create a manifest to return from find test_manifest = create_test_manifest(name="test_plugin", kind="native") @@ -1082,9 +1172,10 @@ def test_uninstall_success(self, temp_registry_dir): patch("cpex.tools.cli.remove_from_plugins_config_yaml", return_value=True) as mock_remove, patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, ): - # Mock the catalog.find method + # Mock the catalog instance created inside uninstall() mock_catalog_instance = Mock() mock_catalog_instance.find = Mock(return_value=test_manifest) + mock_catalog_instance.uninstall_package = Mock() mock_catalog_class.return_value = mock_catalog_instance mock_status = Mock() @@ -1094,7 +1185,8 @@ def test_uninstall_success(self, temp_registry_dir): uninstall("test_plugin", mock_catalog) - mock_catalog.uninstall_package.assert_called_once_with("test_plugin") + # Verify uninstall_package was called with both plugin_name and manifest + mock_catalog_instance.uninstall_package.assert_called_once_with("test_plugin", test_manifest) mock_remove.assert_called_once_with(test_manifest) mock_console.print.assert_any_call(":white_heavy_check_mark: test_plugin uninstalled successfully.") @@ -1119,13 +1211,22 @@ def test_uninstall_handles_exception(self, temp_registry_dir): registry_file.write_text(json.dumps(registry_data)) mock_catalog = Mock() - mock_catalog.uninstall_package = Mock(side_effect=RuntimeError("Uninstall failed")) + + # Create a manifest to return from find + test_manifest = create_test_manifest(name="test_plugin", kind="native") with ( patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), patch("cpex.tools.cli.console") as mock_console, patch("cpex.tools.cli.logger") as mock_logger, + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, ): + # Mock the catalog instance created inside uninstall() + mock_catalog_instance = Mock() + mock_catalog_instance.find = Mock(return_value=test_manifest) + mock_catalog_instance.uninstall_package = Mock(side_effect=RuntimeError("Uninstall failed")) + mock_catalog_class.return_value = mock_catalog_instance + mock_status = Mock() mock_status.__enter__ = Mock(return_value=mock_status) mock_status.__exit__ = Mock(return_value=False) @@ -1194,7 +1295,8 @@ def test_plugin_uninstall_command_success(self, temp_registry_dir): result = runner.invoke(app, ["plugin", "uninstall", "test_plugin"]) assert result.exit_code == 0 - mock_catalog.uninstall_package.assert_called_once_with("test_plugin") + # Verify uninstall_package was called with both plugin_name and manifest + mock_catalog.uninstall_package.assert_called_once_with("test_plugin", test_manifest) def test_plugin_uninstall_command_not_found(self, temp_registry_dir): """Test plugin uninstall command when plugin not found.""" @@ -1217,13 +1319,16 @@ def test_uninstall_package_success(self, temp_registry_dir): """Test successful package uninstallation.""" from cpex.tools.catalog import PluginCatalog + # Create a test manifest + test_manifest = create_test_manifest(name="test_package", kind="native") + with ( patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), patch("cpex.tools.catalog.Github"), patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, ): catalog = PluginCatalog() - result = catalog.uninstall_package("test_package") + result = catalog.uninstall_package("test_package", test_manifest) assert result is True mock_subprocess.assert_called_once() @@ -1238,6 +1343,9 @@ def test_uninstall_package_subprocess_error(self, temp_registry_dir): from cpex.tools.catalog import PluginCatalog import subprocess + # Create a test manifest + test_manifest = create_test_manifest(name="test_package", kind="native") + with ( patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), patch("cpex.tools.catalog.Github"), @@ -1246,12 +1354,15 @@ def test_uninstall_package_subprocess_error(self, temp_registry_dir): catalog = PluginCatalog() with pytest.raises(RuntimeError, match="Failed to uninstall"): - catalog.uninstall_package("test_package") + catalog.uninstall_package("test_package", test_manifest) def test_uninstall_package_unexpected_error(self, temp_registry_dir): """Test package uninstallation with unexpected error.""" from cpex.tools.catalog import PluginCatalog + # Create a test manifest + test_manifest = create_test_manifest(name="test_package", kind="native") + with ( patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}), patch("cpex.tools.catalog.Github"), @@ -1260,7 +1371,7 @@ def test_uninstall_package_unexpected_error(self, temp_registry_dir): catalog = PluginCatalog() with pytest.raises(RuntimeError, match="Unexpected error uninstalling"): - catalog.uninstall_package("test_package") + catalog.uninstall_package("test_package", test_manifest) class TestPluginRegistryRemove: @@ -1382,4 +1493,304 @@ def test_unregister_nonexistent_plugin(self, temp_registry_dir): assert len(registry.plugins) == 1 + +class TestSelectPluginFromCatalog: + """Tests for select_plugin_from_catalog() function.""" + + def test_returns_none_for_empty_list(self): + """Test that function returns None when given empty list.""" + from cpex.tools.cli import select_plugin_from_catalog + + result = select_plugin_from_catalog([]) + assert result is None + + def test_returns_none_when_user_cancels(self): + """Test that function returns None when user cancels selection.""" + from cpex.tools.cli import select_plugin_from_catalog + + manifest = create_test_manifest() + + with patch("cpex.tools.cli.inquirer.prompt", return_value=None): + result = select_plugin_from_catalog([manifest]) + assert result is None + + +class TestParsePypiSource: + """Tests for _parse_pypi_source() function.""" + + def test_parse_package_without_version(self): + """Test parsing package name without version constraint.""" + from cpex.tools.cli import _parse_pypi_source + + package_name, version_constraint = _parse_pypi_source("my-package") + assert package_name == "my-package" + assert version_constraint is None + + def test_parse_package_with_version(self): + """Test parsing package name with version constraint.""" + from cpex.tools.cli import _parse_pypi_source + + package_name, version_constraint = _parse_pypi_source("my-package@>=1.0.0") + assert package_name == "my-package" + assert version_constraint == ">=1.0.0" + + +class TestFinalizeInstallation: + """Tests for _finalize_installation() function.""" + + def test_finalize_installation_updates_registry_and_config(self, temp_registry_dir): + """Test that finalize_installation updates registry and config.""" + from cpex.tools.cli import _finalize_installation + from cpex.tools.catalog import PluginCatalog + + manifest = create_test_manifest() + mock_catalog = Mock(spec=PluginCatalog) + + with ( + patch("cpex.tools.cli.PluginRegistry") as mock_registry_class, + patch("cpex.tools.cli.update_plugins_config_yaml") as mock_update_config, + patch("cpex.tools.cli.git_user_name", return_value="test_user"), + ): + mock_registry = Mock() + mock_registry_class.return_value = mock_registry + + _finalize_installation(manifest, "pypi", mock_catalog, Path("/test/path")) + + mock_registry.update.assert_called_once() + mock_update_config.assert_called_once_with(manifest=manifest) + + +class TestInstallFromLocal: + """Tests for _install_from_local() function.""" + + def test_install_from_local_calls_catalog_method(self, temp_registry_dir, tmp_path): + """Test that _install_from_local calls catalog.install_from_local.""" + from cpex.tools.cli import _install_from_local + from cpex.tools.catalog import PluginCatalog + + source_dir = tmp_path / "my_plugin" + source_dir.mkdir() + + manifest = create_test_manifest() + manifest.local = str(source_dir) + mock_catalog = Mock(spec=PluginCatalog) + mock_catalog.install_from_local = Mock(return_value=(manifest, source_dir)) + + with ( + patch("cpex.tools.cli.console") as mock_console, + patch("cpex.tools.cli.update_plugins_config_yaml") as mock_update_config, + patch("cpex.tools.cli._finalize_installation") as mock_finalize, + ): + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + _install_from_local(str(source_dir), mock_catalog) + + mock_catalog.install_from_local.assert_called_once() + mock_update_config.assert_called_once_with(manifest=manifest) + mock_finalize.assert_called_once_with(manifest, "local", mock_catalog, source_dir) + + +class TestInstallFromMonorepo: + """Tests for _install_from_monorepo() function.""" + + def test_returns_early_when_no_plugin_selected(self): + """Test that function returns early when user doesn't select a plugin.""" + from cpex.tools.cli import _install_from_monorepo + from cpex.tools.catalog import PluginCatalog + + manifest = create_test_manifest() + mock_catalog = Mock(spec=PluginCatalog) + mock_catalog.search = Mock(return_value=[manifest]) + + with ( + patch("cpex.tools.cli.select_plugin_from_catalog", return_value=None), + patch("cpex.tools.cli.console"), + ): + # Should return early without error + _install_from_monorepo("test_plugin", mock_catalog) + + +class TestInstallFromPypi: + """Tests for _install_from_pypi() function.""" + + def test_install_from_pypi_handles_none_manifest(self, temp_registry_dir): + """Test that _install_from_pypi handles None manifest gracefully.""" + from cpex.tools.cli import _install_from_pypi + from cpex.tools.catalog import PluginCatalog + + mock_catalog = Mock(spec=PluginCatalog) + mock_catalog.install_from_pypi = Mock(return_value=(None, None)) + + with patch("cpex.tools.cli.console") as mock_console: + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + _install_from_pypi("test_package", mock_catalog) + + mock_console.print.assert_called_with(":x: Failed to install test_package") + + +class TestInstallFunctionAdditional: + """Additional tests for install() function.""" + + def test_install_with_unsupported_type_raises_error(self): + """Test that install raises ValueError for unsupported installation type.""" + from cpex.tools.cli import install + from cpex.tools.catalog import PluginCatalog + + mock_catalog = Mock(spec=PluginCatalog) + + with pytest.raises(ValueError, match="Unsupported installation type"): + install("test_plugin", "unsupported_type", mock_catalog) + + +class TestVersionsFunction: + """Tests for versions() function.""" + + def test_versions_calls_search(self): + """Test that versions() function calls search().""" + from cpex.tools.cli import versions + from cpex.tools.catalog import PluginCatalog + + mock_catalog = Mock(spec=PluginCatalog) + mock_catalog.search = Mock(return_value=[]) + + with patch("cpex.tools.cli.console"): + versions("test_plugin", mock_catalog) + mock_catalog.search.assert_called_once_with("test_plugin") + + +class TestUpdatePluginsConfigYamlWithNonePlugins: + """Test update_plugins_config_yaml when config.plugins is None.""" + + def test_creates_plugins_list_when_none(self, tmp_path): + """Test that function creates plugins list when it's None.""" + import yaml as yaml_module + + config_file = tmp_path / "config.yaml" + config_data = { + "plugins": None, # Explicitly None + } + config_file.write_text(yaml_module.safe_dump(config_data)) + + manifest = create_test_manifest(name="test_plugin") + + with ( + patch("cpex.tools.cli.settings") as mock_settings, + patch("cpex.tools.cli.ConfigLoader.load_config") as mock_load, + patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, + ): + mock_settings.config_file = str(config_file) + + # Create a Config object with plugins=None + config_obj = Config(plugins=None) + mock_load.return_value = config_obj + + update_plugins_config_yaml(manifest) + + # Verify that plugins list was created + mock_save.assert_called_once() + saved_config = mock_save.call_args[0][0] + assert saved_config.plugins is not None + assert len(saved_config.plugins) == 1 + + +class TestPluginCommandCatalogUpdate: + """Tests for plugin command catalog update paths.""" + + def test_plugin_search_updates_catalog(self, temp_registry_dir): + """Test that plugin search command updates catalog.""" + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.console") as mock_console, + ): + mock_catalog = Mock() + mock_catalog.update_catalog_with_pyproject = Mock(return_value=False) + mock_catalog.search = Mock(return_value=[]) + mock_catalog_class.return_value = mock_catalog + + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + result = runner.invoke(app, ["plugin", "search", "test"]) + assert result.exit_code == 0 + mock_catalog.update_catalog_with_pyproject.assert_called_once() + + def test_plugin_versions_command(self, temp_registry_dir): + """Test plugin versions command.""" + with ( + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + patch("cpex.tools.cli.console") as mock_console, + ): + mock_catalog = Mock() + mock_catalog.update_catalog_with_pyproject = Mock(return_value=False) + mock_catalog.search = Mock(return_value=[]) + mock_catalog_class.return_value = mock_catalog + + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + result = runner.invoke(app, ["plugin", "versions", "test_plugin"]) + assert result.exit_code == 0 + mock_catalog.search.assert_called_once_with("test_plugin") + + +class TestUninstallManifestNotFound: + """Test uninstall when manifest is not found in catalog.""" + + def test_uninstall_when_manifest_not_found(self, temp_registry_dir): + """Test uninstall handles case when manifest is not found.""" + registry_file = temp_registry_dir / "installed-plugins.json" + registry_data = { + "plugins": [ + { + "name": "test_plugin", + "version": "1.0.0", + "kind": "native", + "installation_type": "monorepo", + "installation_path": "/path/to/test_plugin", + "installed_at": "2024-01-01T00:00:00.000000Z", + "installed_by": "test_user", + "package_source": "https://example.com/repo/plugin", + "editable": False, + } + ] + } + registry_file.write_text(json.dumps(registry_data)) + + mock_catalog = Mock() + + with ( + patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), + patch("cpex.tools.cli.console") as mock_console, + patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + ): + # Mock the catalog.find method to return None (manifest not found) + mock_catalog_instance = Mock() + mock_catalog_instance.find = Mock(return_value=None) + mock_catalog_class.return_value = mock_catalog_instance + + mock_status = Mock() + mock_status.__enter__ = Mock(return_value=mock_status) + mock_status.__exit__ = Mock(return_value=False) + mock_console.status = Mock(return_value=mock_status) + + uninstall("test_plugin", mock_catalog) + + # When manifest is not found, uninstall should print error and return early + # So uninstall_package should NOT be called + mock_catalog_instance.uninstall_package.assert_not_called() + mock_console.print.assert_any_call(":x: Plugin test_plugin not found in catalog.") + + + # Made with Bob From 132b9e141ee8dd02ec96ba099d32ed6e8f25ec87 Mon Sep 17 00:00:00 2001 From: habeck Date: Thu, 30 Apr 2026 20:35:52 -0400 Subject: [PATCH 55/88] chore: properly format info Signed-off-by: habeck --- cpex/tools/cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index f9e4e4e..d717187 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -709,8 +709,8 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: 'python cpex/tools/cli.py plugin --type pypi install "ExamplePlugin@>=0.1.0"\n' 'python cpex/tools/cli.py plugin --type test-pypi install "cpex-test-plugin@>=0.1.1"\n' 'python cpex/tools/cli.py plugin --type git install "cpex-test-plugin @ git+https://github.com/tedhabeck/cpex-test-plugin@main"\n' - "python cpex/tools/cli.py plugin versions cpex-test-plugin" - "python cpex/tools/cli.py plugin uninstall cpex-pii-filter" + "python cpex/tools/cli.py plugin versions cpex-test-plugin\n" + "python cpex/tools/cli.py plugin uninstall cpex-pii-filter\n" ) def plugin( cmd_action: str = typer.Argument(None, help="One of: list|info|install|search|versions|uninstall"), From 1e222f19d9a334beb0ed454e26964fcbb6281ad3 Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 1 May 2026 11:20:25 -0400 Subject: [PATCH 56/88] chore: update README.md Signed-off-by: habeck --- cpex/tools/README.md | 42 +++++++++++++++++++++++++++--------------- 1 file changed, 27 insertions(+), 15 deletions(-) diff --git a/cpex/tools/README.md b/cpex/tools/README.md index b6c277d..1f2a078 100644 --- a/cpex/tools/README.md +++ b/cpex/tools/README.md @@ -7,16 +7,18 @@ List, search, install or uninstall plugins. - default install type is monorepo - Examples: - python cpex/tools/cli.py plugin info pii - python cpex/tools/cli.py plugin search pii - python cpex/tools/cli.py plugin --type monorepo search pii - python cpex/tools/cli.py plugin --type monorepo install cpex-pii-filter - python cpex/tools/cli.py plugin --type pypi install "ExamplePlugin@>=0.1.0" - python cpex/tools/cli.py plugin --type test-pypi install "cpex-plugin-test@>=0.1.1" - python cpex/tools/cli.py plugin uninstall cpex-pii-filter - +default install type is monorepo + Examples: + python cpex/tools/cli.py plugin info pii + python cpex/tools/cli.py plugin search pii + python cpex/tools/cli.py plugin --type monorepo search pii + python cpex/tools/cli.py plugin --type monorepo install cpex-pii-filter + python cpex/tools/cli.py plugin --type pypi install "ExamplePlugin@>=0.1.0" + python cpex/tools/cli.py plugin --type test-pypi install "cpex-test-plugin@>=0.1.1" + python cpex/tools/cli.py plugin --type git install "cpex-test-plugin @ git+https://github.com/tedhabeck/cpex-test-plugin@main" + python cpex/tools/cli.py plugin versions cpex-test-plugin + python cpex/tools/cli.py plugin uninstall cpex-pii-filter. + ╭─ Arguments ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ │ cmd_action [CMD_ACTION] One of: list|info|install|search|uninstall │ │ source [SOURCE] The pypi, git, or local folder where the plugin resides │ @@ -112,6 +114,16 @@ sequenceDiagram participant pypi (Python Package Index) User->>cli: python cpex/tools/cli.py plugin --type pypi install cli->>catalog: install_from_pypi( + catalog->>subprocess: python -m pip download to temp + subprocess->>python: -m pip download to temp + python->>pip: download + pip->>pypi (Python Package Index): download to temp + pypi (Python Package Index)->>python: downloaded OK + python->>subprocess: rc=0 + subprocess->>catalog: extracted_folder + catalog->>catalog: Loads and parse the plugin-manifest.yaml + catalog->>catalog: if manifest.kind is isolated_venv initialize isolated venv and STOP here. + catalog->>cli: PluginManifest (isolated_venv) catalog->>subprocess: python -m pip install subprocess->>python: -m pip install python->>pip: install @@ -162,12 +174,12 @@ Example output: { "name": "cpex-test-plugin", "kind": "isolated_venv", - "version": "0.1.1", - "installation_type": "pypi", - "installation_path": "/Users/habeck/.venv/cpex/lib/python3.13/site-packages/cpex_test_plugin", - "installed_at": "2026-04-20T22:09:52.198619+00:00Z", + "version": "0.2.0", + "installation_type": "monorepo", + "installation_path": "/Users/habeck/tedhabeck/contextforge-plugins-framework/plugins/cpex_test_plugin/.venv/lib/python3.13/site-packages/cpex_test_plugin", + "installed_at": "2026-05-01T00:14:26.123924+00:00Z", "installed_by": "habeck", - "package_source": "cpex-test-plugin", + "package_source": "https://github.com/tedhabeck/cpex-test-plugin", "editable": false } ``` \ No newline at end of file From a1aa65a6293582bcca8d4cc4c10f3c2229dd828d Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 1 May 2026 11:47:57 -0400 Subject: [PATCH 57/88] chore: Add a, "before you begin" section detailing the required .env variable. Signed-off-by: habeck --- cpex/tools/README.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/cpex/tools/README.md b/cpex/tools/README.md index 1f2a078..dd66d63 100644 --- a/cpex/tools/README.md +++ b/cpex/tools/README.md @@ -1,3 +1,25 @@ +## Before you begin + +Update the environment variables in .env + +All values except PLUGINS_GITHUB_TOKEN have defaults. + +```dotenv +### Plugin installation +# Comma Separated Values used by install with --type monorepo +# The default value is https://github.com/ibm/cpex-plugins +# PLUGINS_REPO_URLS="https://github.com/ibm/cpex-plugins" + +# registry path (default shown below) +# PLUGIN_REGISTRY_FOLDER=data + +# Github API (default shown below) +# PLUGINS_GITHUB_API=api.github.com + +# PLUGINS_GITHUB_TOKEN= +### end Plugin installation +``` + ## Plugin installation using the cli ```bash From 972e4f3788a00e0f65faad64d581f6b740643edf Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 10:38:52 -0400 Subject: [PATCH 58/88] =?UTF-8?q?fix:=20P0=20fix=20=E2=80=94=20tarfile/zip?= =?UTF-8?q?=20path=20traversal?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: habeck --- cpex/tools/catalog.py | 25 ++++-- tests/unit/cpex/tools/test_catalog.py | 105 ++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 6 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index ba9f806..7238482 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -817,25 +817,38 @@ def _persist_manifest(self, manifest: PluginManifest, package_name: str) -> None except Exception as e: raise RuntimeError(f"Failed to save manifest for {package_name}: {str(e)}") from e + @staticmethod + def _safe_zip_extract(zip_ref: zipfile.ZipFile, extract_dir: Path) -> None: + """Extract a zip archive, rejecting members whose paths escape extract_dir.""" + base = extract_dir.resolve() + for info in zip_ref.infolist(): + name = info.filename + if os.path.isabs(name): + raise RuntimeError(f"Unsafe path in archive: {name}") + target = (base / name).resolve() + if not target.is_relative_to(base): + raise RuntimeError(f"Unsafe path in archive: {name}") + zip_ref.extractall(extract_dir) + def _extract_package_archive(self, package_file: Path, extract_dir: Path) -> None: """Extract a package archive (zip, tar.gz, wheel, etc.) to a directory. - + Args: package_file: Path to the archive file. extract_dir: Directory to extract to. - + Raises: - RuntimeError: If the archive format is unsupported. + RuntimeError: If the archive format is unsupported or contains unsafe paths. """ if package_file.suffix == ".whl" or package_file.name.endswith(".whl"): with zipfile.ZipFile(package_file, "r") as zip_ref: - zip_ref.extractall(extract_dir) + self._safe_zip_extract(zip_ref, extract_dir) elif package_file.suffix == ".zip" or package_file.name.endswith(".zip"): with zipfile.ZipFile(package_file, "r") as zip_ref: - zip_ref.extractall(extract_dir) + self._safe_zip_extract(zip_ref, extract_dir) elif package_file.suffix in [".gz", ".bz2"] or ".tar" in package_file.name: with tarfile.open(package_file, "r:*") as tar_ref: - tar_ref.extractall(extract_dir) + tar_ref.extractall(extract_dir, filter="data") else: raise RuntimeError(f"Unsupported package format: {package_file}") diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 38a1941..a1b0bfc 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -3090,3 +3090,108 @@ def test_install_from_git_with_wheel(self, tmp_path, mock_github_env): manifest, plugin_path = catalog.install_from_git(url) assert manifest.name == "test_plugin" + + +# --------------------------------------------------------------------------- +# _extract_package_archive — path traversal guards +# --------------------------------------------------------------------------- + +class TestExtractPackageArchivePathTraversal: + """Verify that _extract_package_archive rejects archives with unsafe member paths.""" + + @pytest.fixture() + def catalog(self): + with patch("cpex.tools.catalog.PluginCatalog.__init__", return_value=None): + c = PluginCatalog.__new__(PluginCatalog) + c.python_executable = sys.executable + return c + + # --- tar.gz ----------------------------------------------------------- + + def test_tar_traversal_rejected(self, catalog, tmp_path): + """A tar member whose path escapes extract_dir raises and writes nothing.""" + import io + archive = tmp_path / "evil.tar.gz" + with tarfile.open(archive, "w:gz") as tf: + data = b"pwned" + info = tarfile.TarInfo(name="../evil.txt") + info.size = len(data) + tf.addfile(info, io.BytesIO(data)) + + extract_dir = tmp_path / "out" + extract_dir.mkdir() + + with pytest.raises(Exception): + catalog._extract_package_archive(archive, extract_dir) + + assert not (tmp_path / "evil.txt").exists() + + def test_tar_benign_succeeds(self, catalog, tmp_path): + """A well-formed tar.gz extracts correctly.""" + import io + archive = tmp_path / "good.tar.gz" + with tarfile.open(archive, "w:gz") as tf: + data = b"hello" + info = tarfile.TarInfo(name="subdir/hello.txt") + info.size = len(data) + tf.addfile(info, io.BytesIO(data)) + + extract_dir = tmp_path / "out" + extract_dir.mkdir() + catalog._extract_package_archive(archive, extract_dir) + + assert (extract_dir / "subdir" / "hello.txt").read_bytes() == b"hello" + + # --- zip / .whl ------------------------------------------------------- + + def test_zip_traversal_rejected(self, catalog, tmp_path): + """A zip member whose path escapes extract_dir raises and writes nothing.""" + archive = tmp_path / "evil.zip" + with zipfile.ZipFile(archive, "w") as zf: + zf.writestr("../evil.txt", "pwned") + + extract_dir = tmp_path / "out" + extract_dir.mkdir() + + with pytest.raises(RuntimeError, match="Unsafe path"): + catalog._extract_package_archive(archive, extract_dir) + + assert not (tmp_path / "evil.txt").exists() + + def test_whl_traversal_rejected(self, catalog, tmp_path): + """A .whl (zip) member whose path escapes extract_dir raises and writes nothing.""" + archive = tmp_path / "evil-1.0.0-py3-none-any.whl" + with zipfile.ZipFile(archive, "w") as zf: + zf.writestr("../evil.txt", "pwned") + + extract_dir = tmp_path / "out" + extract_dir.mkdir() + + with pytest.raises(RuntimeError, match="Unsafe path"): + catalog._extract_package_archive(archive, extract_dir) + + assert not (tmp_path / "evil.txt").exists() + + def test_zip_absolute_path_rejected(self, catalog, tmp_path): + """A zip member with an absolute path raises before any extraction.""" + archive = tmp_path / "absolute.zip" + with zipfile.ZipFile(archive, "w") as zf: + zf.writestr("/etc/passwd", "root:x:0:0") + + extract_dir = tmp_path / "out" + extract_dir.mkdir() + + with pytest.raises(RuntimeError, match="Unsafe path"): + catalog._extract_package_archive(archive, extract_dir) + + def test_zip_benign_succeeds(self, catalog, tmp_path): + """A well-formed zip extracts correctly.""" + archive = tmp_path / "good.zip" + with zipfile.ZipFile(archive, "w") as zf: + zf.writestr("pkg/hello.txt", "world") + + extract_dir = tmp_path / "out" + extract_dir.mkdir() + catalog._extract_package_archive(archive, extract_dir) + + assert (extract_dir / "pkg" / "hello.txt").read_text() == "world" From bcda708ffb5b55be0c53e4532fa08504d49ad13e Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 11:02:16 -0400 Subject: [PATCH 59/88] enh: add remove_venv method to IsolatedVenvPlugin for uninstall cleanup. Signed-off-by: habeck --- cpex/framework/isolated/client.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index e5178e0..bad8e9e 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -336,3 +336,10 @@ async def invoke_hook(self, hook_type: str, payload: PluginPayload, context: Plu except Exception as e: logger.exception("Unexpected error invoking hook '%s' for plugin '%s'", hook_type, self.name) raise PluginError(error=convert_exception_to_error(e, plugin_name=self.name)) from e + + def remove_venv(self) : + """ + Remove the virtual environment associated with the plugin. + """ + shutil.rmtree(self.plugin_path.joinpath(".cpex")) + shutil.rmtree(self.plugin_path.joinpath(".venv")) \ No newline at end of file From 75396a8e06c07369ae5efe334f03887136a71c18 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 12:41:33 -0400 Subject: [PATCH 60/88] fix: priority 1 items Signed-off-by: habeck --- cpex/framework/isolated/client.py | 4 +- cpex/framework/models.py | 2 +- cpex/tools/catalog.py | 181 +++++++++++++------------ cpex/tools/cli.py | 183 ++++++++++++++++++-------- tests/unit/cpex/tools/test_catalog.py | 20 +-- tests/unit/cpex/tools/test_cli.py | 10 +- 6 files changed, 249 insertions(+), 151 deletions(-) diff --git a/cpex/framework/isolated/client.py b/cpex/framework/isolated/client.py index bad8e9e..c61a398 100644 --- a/cpex/framework/isolated/client.py +++ b/cpex/framework/isolated/client.py @@ -337,9 +337,9 @@ async def invoke_hook(self, hook_type: str, payload: PluginPayload, context: Plu logger.exception("Unexpected error invoking hook '%s' for plugin '%s'", hook_type, self.name) raise PluginError(error=convert_exception_to_error(e, plugin_name=self.name)) from e - def remove_venv(self) : + def remove_venv(self): """ Remove the virtual environment associated with the plugin. """ shutil.rmtree(self.plugin_path.joinpath(".cpex")) - shutil.rmtree(self.plugin_path.joinpath(".venv")) \ No newline at end of file + shutil.rmtree(self.plugin_path.joinpath(".venv")) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index e6bea83..2513562 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -1418,7 +1418,7 @@ class PyPiRepo(BaseModel): """ pypi_package: str - version_constraint: Optional[str] + version_constraint: Optional[str] = None @field_validator("pypi_package", mode="after") @classmethod diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 7238482..e659c4c 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -9,8 +9,6 @@ import base64 import datetime -import importlib.metadata -import importlib.util import json import logging import os @@ -29,6 +27,7 @@ import yaml from github import Auth, Github +from packaging.version import InvalidVersion, Version from cpex.framework.models import ( GitRepo, @@ -38,11 +37,20 @@ PluginVersionRegistry, PyPiRepo, ) +from cpex.framework.utils import find_package_path from cpex.tools.settings import get_catalog_settings logger = logging.getLogger(__name__) +def _ver(version_str: str) -> Version: + try: + return Version(version_str) + except InvalidVersion: + logger.debug("Could not parse version %r as PEP 440; treating as lowest", version_str) + return Version("0") + + class PluginCatalog: """ Utility class to initialize the plugin catalog from configured monorepos @@ -126,13 +134,13 @@ def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path if latest is None: latest = pv else: - if latest.version < plugin_version.version: + if _ver(latest.version) < _ver(plugin_version.version): latest = plugin_version else: if latest is None: latest = pv else: - if latest.version < pv.version: + if _ver(latest.version) < _ver(pv.version): latest = pv if not found: @@ -150,45 +158,6 @@ def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path encoding="utf-8", ) - def find_package_path(self, package_name: str) -> Path: - """Locate installed package directory using importlib.metadata. - - Args: - package_name: The name of the installed package. - - Returns: - Path to the package directory. - - Raises: - RuntimeError: If package cannot be found. - """ - try: - # Use importlib.metadata for more reliable package discovery - for dist in importlib.metadata.distributions(): - if dist.name == package_name or dist.metadata.get("Name") == package_name: - if dist.files: - # Get the package root from the plugin-manifest.yaml file - for afile in dist.files: - if afile.name == "plugin-manifest.yaml": - located_path = dist.locate_file(afile) - package_path = Path(str(located_path)).parent - logger.debug("Found package %s at %s", package_name, package_path) - return package_path - - # Fallback to importlib.util.find_spec if metadata approach fails - spec = importlib.util.find_spec(package_name) - if spec is not None and spec.origin is not None: - package_path = Path(spec.origin).parent - logger.debug("Found package %s at %s (via find_spec)", package_name, package_path) - return package_path - - raise RuntimeError(f"Could not find installed package: {package_name}") - - except Exception as e: - if isinstance(e, RuntimeError): - raise - raise RuntimeError(f"Error locating package {package_name}: {str(e)}") from e - def save_manifest_content(self, content: str, path, repo_url: httpx.URL): """ write the manifest content to the supplied path relative to the ouptut folder, @@ -234,7 +203,7 @@ def download_contents(self, git_url: str, headers, path: str, repo_url: httpx.UR """ Download the contents of the file using the github REST API. """ - result = httpx.get(git_url, headers=headers) + result = httpx.get(git_url, headers=headers, timeout=30.0) if result.status_code == 200: js = result.json() b64_content = js["content"] @@ -264,7 +233,7 @@ def download_file(self, repo_path: str, item: dict, headers, gh_repo) -> str | N content = file_content.decoded_content.decode("utf-8") return content except Exception as e: - logger.error("Failed to download file: %s status_code: %d", item["path"], str(e)) + logger.error("Failed to download file: %s error: %s", item["path"], str(e)) def _search_github_code_for_versions_json(self, repo_path: str, member: str | None, headers) -> list[dict] | None: """Search GitHub for plugin-manifest*.yaml files in a specific path using PyGithub API. @@ -444,6 +413,9 @@ def _process_version_item( self.create_output_folder() self.create_catalog_folder(name) version_data = self.download_file(repo_path=repo_path, item=item, headers=headers, gh_repo=gh_repo) + if version_data is None: + logger.error("Skipping version item for %s (%s) — download failed", name, item.get("path")) + return relpath.write_text(version_data, encoding="utf-8") def find_and_save_plugin_versions_json(self, member: str, name: str, repo_url: httpx.URL, headers, gh_repo) -> None: @@ -674,7 +646,11 @@ def install_folder_via_pip(self, manifest: PluginManifest) -> Path | None: # For non-isolated plugins, install normally into CLI's venv logger.info("Installing non-isolated plugin from monorepo: %s", manifest.name) subprocess.run( - [self.python_executable, "-m", "pip", "install", repo_url], check=True, capture_output=True, text=True + [self.python_executable, "-m", "pip", "install", repo_url], + check=True, + capture_output=True, + text=True, + timeout=600, ) logger.info("Successfully installed package: %s", manifest.name) return plugin_path @@ -714,11 +690,15 @@ def _install_package(self, package_name: str, version_constraint: str | None, us check=True, capture_output=True, text=True, + timeout=600, ) else: - # Use subprocess.run for better error handling subprocess.run( - [self.python_executable, "-m", "pip", "install", tgt], check=True, capture_output=True, text=True + [self.python_executable, "-m", "pip", "install", tgt], + check=True, + capture_output=True, + text=True, + timeout=600, ) logger.info("Successfully installed package: %s", package_name) @@ -854,31 +834,32 @@ def _extract_package_archive(self, package_file: Path, extract_dir: Path) -> Non def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> Path: """Download monorepo folder to temporary directory. + Args: repo_url: The URL of the monorepo. + package_name: Name used in error messages. + Returns: - Path to the downloaded monorepo folder. + Path to the extracted package directory. Caller is responsible for cleanup. """ + tmpid = uuid.uuid4() + temp_dir = Path(tempfile.mkdtemp(prefix=f"cpex_plugin_{tmpid}_")) try: - tmpid = uuid.uuid4() - temp_dir = Path(tempfile.mkdtemp(prefix=f"cpex_plugin_{tmpid}_")) logger.info("Downloading monorepo folder to %s", temp_dir) - # Download package without installing download_args = [ self.python_executable, "-m", "pip", "download", - "--no-deps", # Don't download dependencies + "--no-deps", "--dest", str(temp_dir), ] download_args.append(repo_url) - subprocess.run(download_args, check=True, capture_output=True, text=True) + subprocess.run(download_args, check=True, capture_output=True, text=True, timeout=600) - # Find the downloaded file downloaded_files = list(temp_dir.glob("*")) if not downloaded_files: raise RuntimeError(f"No files downloaded for {package_name}") @@ -886,15 +867,16 @@ def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> extract_dir = temp_dir / "extracted" extract_dir.mkdir() - # Extract the package using common helper self._extract_package_archive(package_file, extract_dir) logger.info("Downloaded and extracted %s to %s", package_name, extract_dir) return extract_dir except subprocess.CalledProcessError as e: + shutil.rmtree(temp_dir, ignore_errors=True) raise RuntimeError(f"Failed to download {package_name}: {e.stderr}") from e except Exception as e: + shutil.rmtree(temp_dir, ignore_errors=True) raise RuntimeError(f"Unexpected error downloading {package_name}: {str(e)}") from e @@ -941,7 +923,7 @@ def _download_package_to_temp( download_args.append(tgt) - subprocess.run(download_args, check=True, capture_output=True, text=True) + subprocess.run(download_args, check=True, capture_output=True, text=True, timeout=600) # Find the downloaded file downloaded_files = list(temp_dir.glob("*")) @@ -1074,8 +1056,20 @@ def _initialize_isolated_venv(self, manifest: PluginManifest, package_path: Path shutil.copy(source_path, isolated_plugin.plugin_path / requirements_file) # Initialize the venv (this will create venv and install requirements) import asyncio + import concurrent.futures + + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None - asyncio.run(isolated_plugin.initialize()) + if loop is None: + asyncio.run(isolated_plugin.initialize()) + else: + # Called from within a running event loop (e.g. Jupyter, async CLI). + # Run in a thread to avoid "asyncio.run cannot be called from a running event loop". + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as ex: + ex.submit(asyncio.run, isolated_plugin.initialize()).result() logger.info("Successfully initialized isolated venv for %s", manifest.name) @@ -1108,13 +1102,13 @@ def _find_and_load_versions_json( venv_path = plugin_path python_executable = self._get_venv_python_executable(venv_path / ".venv") - # Create a simple Python script to find the package path - find_package_script = f""" + # Script receives the package name via sys.argv to avoid f-string injection. + find_package_script = """ import sys import importlib.metadata from pathlib import Path -package_name = "{plugin_package_name}" +package_name = sys.argv[1] try: for dist in importlib.metadata.distributions(): if dist.name == package_name or dist.metadata.get("Name") == package_name: @@ -1127,17 +1121,17 @@ def _find_and_load_versions_json( print("NOT_FOUND", file=sys.stderr) sys.exit(1) except Exception as e: - print(f"ERROR: {{e}}", file=sys.stderr) + print(f"ERROR: {e}", file=sys.stderr) sys.exit(1) """ # Execute the script in the isolated venv result = subprocess.run( - [python_executable, "-c", find_package_script], + [python_executable, "-c", find_package_script, plugin_package_name], check=True, capture_output=True, text=True, - timeout=10, + timeout=60, ) if result.returncode == 0 and result.stdout.strip(): @@ -1194,21 +1188,21 @@ def _handle_plugin_installation( self, manifest: PluginManifest, package_path: Path, install_command: list[str] | None = None ) -> Path | None: """Handle plugin installation based on its kind (isolated_venv or regular). - + Args: manifest: The plugin manifest. package_path: Path to the package source. install_command: Optional custom install command for non-isolated plugins. If None, no installation is performed for non-isolated plugins. - + Returns: Path to the installed plugin, or None if not applicable. - + Raises: RuntimeError: If installation fails. """ plugin_path = None - + if manifest.kind == "isolated_venv": logger.info("Detected isolated_venv plugin: %s", manifest.name) plugin_path = self._initialize_isolated_venv(manifest, package_path) @@ -1222,21 +1216,22 @@ def _handle_plugin_installation( check=True, capture_output=True, text=True, + timeout=600, ) logger.info("Successfully installed package: %s", manifest.name) - + return plugin_path def _finalize_plugin_installation( self, manifest: PluginManifest, plugin_path: Path | None, package_name: str ) -> Path | None: """Perform post-installation steps: persist manifest, find versions.json, update registry. - + Args: manifest: The plugin manifest. plugin_path: Path to the installed plugin (plugins/{manifest.name} directory). package_name: Name of the package. - + Returns: The actual plugin path from versions.json (inside .venv for isolated plugins), or plugin_path if versions.json not found. @@ -1256,7 +1251,7 @@ def _finalize_plugin_installation( self.update_plugin_version_registry(manifest=manifest, relpath=plugin_path) logger.info("Successfully installed and cataloged %s", package_name) - + # Return actual_plugin_path for reference (may be inside .venv) return actual_plugin_path if actual_plugin_path is not None else plugin_path @@ -1304,17 +1299,17 @@ def install_from_pypi( plugin_path = self._handle_plugin_installation( manifest, package_path, - install_command=None # Will install separately for non-isolated + install_command=None, # Will install separately for non-isolated ) - + # For non-isolated plugins, install via pip and find package path if manifest.kind != "isolated_venv": self._install_package(plugin_package_name, version_constraint, use_pytest) - plugin_path = self.find_package_path(plugin_package_name) + plugin_path = find_package_path(plugin_package_name) # Step 5-7: Finalize installation (persist, versions.json, registry) plugin_path = self._finalize_plugin_installation(manifest, plugin_path, plugin_package_name) - + return manifest, plugin_path finally: @@ -1415,6 +1410,7 @@ def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: check=True, capture_output=True, text=True, + timeout=600, ) # Find the downloaded archive @@ -1431,10 +1427,10 @@ def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: # Step 3: Find and load the manifest file manifest_path = self._find_manifest_in_extracted_package(temp_extract_dir, package_name) manifest_data = self._load_manifest_file(manifest_path) - + # Step 4: Normalize and validate the manifest manifest = self._normalize_manifest_data(manifest_data, package_name, None) - + # Update the manifest with the git repo information git_repo: GitRepo = GitRepo( git_repository=git_url, @@ -1449,9 +1445,9 @@ def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: plugin_path = self._handle_plugin_installation( manifest, package_path, - install_command=None # Will install separately + install_command=None, # Will install separately ) - + # Install the package from git if manifest.kind == "isolated_venv": # Install into isolated venv @@ -1464,6 +1460,7 @@ def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: check=True, capture_output=True, text=True, + timeout=600, ) logger.info("Successfully installed into isolated venv") else: @@ -1473,12 +1470,13 @@ def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: check=True, capture_output=True, text=True, + timeout=600, ) - plugin_path = self.find_package_path(package_name) + plugin_path = find_package_path(package_name) # Step 6-8: Finalize installation (persist, versions.json, registry) plugin_path = self._finalize_plugin_installation(manifest, plugin_path, package_name) - + return manifest, plugin_path except subprocess.CalledProcessError as e: @@ -1527,13 +1525,18 @@ def uninstall_package(self, package_name: str, manifest: PluginManifest) -> bool check=True, capture_output=True, text=True, + timeout=120, ) + isolated_plugin.remove_venv() + logger.info("Successfully uninstalled package: %s", package_name) + return True else: subprocess.run( [self.python_executable, "-m", "pip", "uninstall", "-y", package_name], check=True, capture_output=True, text=True, + timeout=120, ) logger.info("Successfully uninstalled package: %s", package_name) return True @@ -1654,8 +1657,18 @@ def install_from_local(self, source: Path) -> tuple[PluginManifest, Path]: # Initialize the venv (creates venv directory structure) import asyncio + import concurrent.futures - asyncio.run(isolated_plugin.initialize()) + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + + if loop is None: + asyncio.run(isolated_plugin.initialize()) + else: + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as ex: + ex.submit(asyncio.run, isolated_plugin.initialize()).result() # Get the venv python executable venv_path = isolated_plugin.plugin_path / ".venv" @@ -1668,6 +1681,7 @@ def install_from_local(self, source: Path) -> tuple[PluginManifest, Path]: check=True, capture_output=True, text=True, + timeout=600, ) plugin_path = isolated_plugin.plugin_path @@ -1686,6 +1700,7 @@ def install_from_local(self, source: Path) -> tuple[PluginManifest, Path]: check=True, capture_output=True, text=True, + timeout=600, ) # For non-isolated plugins, the plugin_path is the same folder that hosts the plugin-manifest.yaml diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index d717187..7842a9d 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -244,18 +244,29 @@ def bootstrap( logger.exception("An error was caught while copying template.") -def list(type: str) -> None: +def list(type: str, fmt: str = "text") -> None: """List the installed plugins Args: type (str): The type of plugins to list. Can be "native" or "external". - - Raises: - typer.Exit: If the type is not "native" or "external". + fmt (str): Output format — "text" (default) or "json". """ pr = PluginRegistry() registered_plugins = pr.registry.plugins + if fmt == "json": + print( + json.dumps( + { + "plugins": [ + {"name": p.name, "version": p.version, "installation_type": p.installation_type} + for p in registered_plugins + ] + } + ) + ) + return + if registered_plugins: for plug_in in registered_plugins: logger.info( @@ -366,11 +377,15 @@ def install_from_manifest(manifest: PluginManifest, installation_type: str, cata update_plugins_config_yaml(manifest) -def select_plugin_from_catalog(available_plugins: List[PluginManifest]) -> Optional[PluginManifest]: +def select_plugin_from_catalog( + available_plugins: List[PluginManifest], assume_yes: bool = False +) -> Optional[PluginManifest]: """Select a plugin from a list of available plugins using an interactive prompt. Args: available_plugins: List of available plugin manifests to choose from. + assume_yes: When True, skip the interactive prompt and return the first + match (sorted by name/version descending). Returns: The selected PluginManifest, or None if no selection was made. @@ -381,6 +396,25 @@ def select_plugin_from_catalog(available_plugins: List[PluginManifest]) -> Optio # Sort plugins by name and version available_plugins = sorted(available_plugins, key=lambda p: (p.name, p.version), reverse=True) + if assume_yes: + selected_plugin = available_plugins[0] + installation_type = ( + "monorepo" + if selected_plugin.monorepo is not None + else "pypi" + if selected_plugin.package_info is not None + else "local" + ) + console.print( + "name: ", + selected_plugin.name, + "Version: ", + selected_plugin.version, + "type: ", + installation_type, + ) + return selected_plugin + # Build choices list with plugin information choices = [] for index, plug_in in enumerate(available_plugins): @@ -477,7 +511,6 @@ def _install_from_local(source: str, catalog: PluginCatalog, use_test: bool = Fa install_source = Path(source) with console.status(f"Installing plugin from source {source}...", spinner="dots"): manifest, installation_path = catalog.install_from_local(install_source) - update_plugins_config_yaml(manifest=manifest) _finalize_installation(manifest, "local", catalog, installation_path) console.print(f":white_heavy_check_mark: {manifest.name} installation complete.") @@ -494,17 +527,17 @@ def _install_from_git(source: str, catalog: PluginCatalog, use_test: bool = Fals """ with console.status(f"Installing plugin from source {source}...", spinner="dots"): manifest, installation_path = catalog.install_from_git(source) - update_plugins_config_yaml(manifest=manifest) _finalize_installation(manifest, "git", catalog, installation_path) console.print(f":white_heavy_check_mark: {manifest.name} installation complete.") -def _install_from_monorepo(source: str, catalog: PluginCatalog, use_test: bool = False): +def _install_from_monorepo(source: str, catalog: PluginCatalog, use_test: bool = False, assume_yes: bool = False): """Handle monorepo-based installation. Args: source: Plugin name or search term in the monorepo. catalog: The plugin catalog. + assume_yes: Skip the interactive selection prompt. """ logger.info("Trying to install from git monorepo: %s", source) available_plugins = catalog.search(source) @@ -513,7 +546,7 @@ def _install_from_monorepo(source: str, catalog: PluginCatalog, use_test: bool = console.print("No matching plugins found.") return - selected_plugin = select_plugin_from_catalog(available_plugins) + selected_plugin = select_plugin_from_catalog(available_plugins, assume_yes=assume_yes) if not selected_plugin: return @@ -548,13 +581,14 @@ def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = Fal console.print(f":white_heavy_check_mark: {package_name} installation complete.") -def install(source: str, install_type: str | None, catalog: PluginCatalog): +def install(source: str, install_type: str | None, catalog: PluginCatalog, assume_yes: bool = False): """Install a plugin from its associated source. Args: source: The source of the plugin (package name, repo URL, or search term). install_type: The type of installation ("git", "monorepo", or "pypi"). catalog: The catalog of plugins. + assume_yes: Skip interactive selection prompt for monorepo installs. Raises: ValueError: If install_type is not supported. @@ -563,9 +597,12 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog): if install_type is None: install_type = "monorepo" + if install_type == "monorepo": + _install_from_monorepo(source, catalog, assume_yes=assume_yes) + return + handlers = { "git": _install_from_git, - "monorepo": _install_from_monorepo, "pypi": _install_from_pypi, "test-pypi": _install_from_pypi, "local": _install_from_local, @@ -578,26 +615,51 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog): handler(source, catalog, use_test=True if install_type == "test-pypi" else False) -def versions(plugin_name: str | None, catalog: PluginCatalog): +def versions(plugin_name: str | None, catalog: PluginCatalog, fmt: str = "text"): """List available versions of the plugin Args: plugin_name (str | None): The name of the plugin to search for. catalog (PluginCatalog): The catalog to search in. + fmt (str): Output format — "text" (default) or "json". """ - return search(plugin_name, catalog) + return search(plugin_name, catalog, fmt=fmt) -def search(plugin_name: str | None, catalog: PluginCatalog): +def search(plugin_name: str | None, catalog: PluginCatalog, fmt: str = "text"): """Search for a plugin in the catalog Args: plugin_name (str | None): The name of the plugin to search for. catalog (PluginCatalog): The catalog to search in. + fmt (str): Output format — "text" (default) or "json". Returns: list[Plugin]: A list of plugins that match the search criteria. """ - # lookup the plugin from the catalog's plugin-manifest.yaml with console.status("Searching for available plugins ...", spinner="dots"): available_plugins = catalog.search(plugin_name) + + if fmt == "json": + print( + json.dumps( + { + "results": [ + { + "name": p.name, + "version": p.version, + "installation_type": ( + "monorepo" + if p.monorepo is not None + else "pypi" + if p.package_info is not None + else "local" + ), + } + for p in (available_plugins or []) + ] + } + ) + ) + return + if available_plugins: console.log("Available plugins:") for plug_in in available_plugins: @@ -607,42 +669,42 @@ def search(plugin_name: str | None, catalog: PluginCatalog): console.log("No plugins found.") -def info(plugin_name: str | None): +def info(plugin_name: str | None, fmt: str = "text"): """Search for or list all installed plugins Args: plugin_name (str | None): The name of the plugin to search for. - If None, list all installed plugins. - - Returns: - list[Plugin]: A list of plugins that match the search criteria. + If None, list all installed plugins. + fmt (str): Output format — "text" (default) or "json". """ registry = PluginRegistry().registry - found = 0 - for plug_in in registry.plugins: - if plugin_name is None: + matches = [ + p + for p in registry.plugins + if plugin_name is None + or p.name.lower().count(plugin_name.lower()) > 0 + or p.kind.lower().count(plugin_name.lower()) > 0 + ] + + if fmt == "json": + print(json.dumps({"plugins": [p.model_dump() for p in matches]})) + return + + if matches: + for plug_in in matches: console.print_json(json.dumps(plug_in.model_dump())) - # console.print(yaml.dump(plug_in.model_dump(), default_flow_style=False)) - found += 1 - else: - if ( - plug_in.name.lower().count(plugin_name.lower()) > 0 - or plug_in.kind.lower().count(plugin_name.lower()) > 0 - ): - console.print_json(json.dumps(plug_in.model_dump())) - # console.print(yaml.dump(plug_in.model_dump())) - found += 1 - if found == 0: + else: console.print("No plugins found") -def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: +def uninstall(plugin_name: str, catalog: PluginCatalog, assume_yes: bool = False) -> None: """Uninstall a plugin. Args: plugin_name: The name of the plugin to uninstall. catalog: The plugin catalog. + assume_yes: Skip the confirmation prompt. """ # Get plugin registry to find the installed plugin plugin_registry = PluginRegistry() @@ -663,18 +725,19 @@ def uninstall(plugin_name: str, catalog: PluginCatalog) -> None: console.print(f"Installation type: {installed_plugin.installation_type}") console.print(f"Installation path: {installed_plugin.installation_path}") - questions = [ - inquirer.Confirm( - "confirm", - message=f"Are you sure you want to uninstall '{plugin_name}'?", - default=False, - ), - ] - answers = inquirer.prompt(questions) + if not assume_yes: + questions = [ + inquirer.Confirm( + "confirm", + message=f"Are you sure you want to uninstall '{plugin_name}'?", + default=False, + ), + ] + answers = inquirer.prompt(questions) - if not answers or not answers["confirm"]: - console.print("Uninstall cancelled.") - return + if not answers or not answers["confirm"]: + console.print("Uninstall cancelled.") + return try: with console.status(f"Uninstalling plugin {plugin_name}...", spinner="dots"): @@ -723,10 +786,26 @@ def plugin( help="The types of plugins to list. One of: monorepo|pypi|test-pypi|git|local Defaults to monorepo if unspecified.", ), ] = None, + assume_yes: Annotated[ + bool, + typer.Option( + "--yes", + "-y", + help="Bypass interactive prompts: pick the first match on install, skip confirm on uninstall.", + ), + ] = False, + fmt: Annotated[ + str, + typer.Option( + "--format", + "-f", + help="Output format for read commands: 'text' (default) or 'json'.", + ), + ] = "text", ) -> None: """Lists installed plugins""" if cmd_action == "info": - return info(source) + return info(source, fmt=fmt) # For uninstall, we don't need to update the catalog if cmd_action == "uninstall": @@ -734,7 +813,7 @@ def plugin( console.print(":x: Please specify a plugin name to uninstall.") return pc = PluginCatalog() - return uninstall(source, catalog=pc) + return uninstall(source, catalog=pc, assume_yes=assume_yes) if cmd_action == "install" and source is not None: registry = PluginRegistry() if registry.has(source): @@ -754,14 +833,14 @@ def plugin( console.log(":x: Catalog update failed.") if cmd_action == "versions": - return versions(source, catalog=pc) + return versions(source, catalog=pc, fmt=fmt) if cmd_action == "list": - return list(install_type) + return list(install_type, fmt=fmt) if cmd_action == "install" and source is not None: - return install(source, install_type, catalog=pc) + return install(source, install_type, catalog=pc, assume_yes=assume_yes) if cmd_action == "search": - return search(source, catalog=pc) + return search(source, catalog=pc, fmt=fmt) @app.callback() diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index a1b0bfc..6f660d2 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -2267,7 +2267,7 @@ def test_install_from_pypi_calls_find_and_load_versions_json(self, tmp_path, moc with ( patch.object(catalog, "_download_package_to_temp", return_value=temp_extract), patch.object(catalog, "_install_package"), - patch.object(catalog, "find_package_path", return_value=package_dir), + patch("cpex.tools.catalog.find_package_path", return_value=package_dir), patch.object(catalog, "_find_and_load_versions_json") as mock_find_versions, patch.object(catalog, "update_plugin_version_registry"), ): @@ -2312,7 +2312,7 @@ def test_install_from_local_manifest_in_root(self, tmp_path, mock_github_env): with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, - patch.object(catalog, "find_package_path", return_value=source_dir), + patch("cpex.tools.catalog.find_package_path", return_value=source_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=source_dir), patch.object(catalog, "update_plugin_version_registry"), @@ -2359,7 +2359,7 @@ def test_install_from_local_manifest_in_subdirectory(self, tmp_path, mock_github with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, - patch.object(catalog, "find_package_path", return_value=source_dir), + patch("cpex.tools.catalog.find_package_path", return_value=source_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=source_dir), patch.object(catalog, "update_plugin_version_registry"), @@ -2514,7 +2514,7 @@ def test_install_from_local_calls_persist_and_registry(self, tmp_path, mock_gith with ( patch("cpex.tools.catalog.subprocess.run"), - patch.object(catalog, "find_package_path", return_value=source_dir), + patch("cpex.tools.catalog.find_package_path", return_value=source_dir), patch.object(catalog, "_persist_manifest") as mock_persist, patch.object(catalog, "_find_and_load_versions_json", return_value=source_dir) as mock_versions, patch.object(catalog, "update_plugin_version_registry") as mock_registry, @@ -2626,7 +2626,7 @@ def test_install_from_local_with_versions_json(self, tmp_path, mock_github_env): with ( patch("cpex.tools.catalog.subprocess.run"), - patch.object(catalog, "find_package_path", return_value=source_dir), + patch("cpex.tools.catalog.find_package_path", return_value=source_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=actual_path), patch.object(catalog, "update_plugin_version_registry"), @@ -2673,7 +2673,7 @@ def test_install_from_git_success_https(self, tmp_path, mock_github_env): with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), - patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch("cpex.tools.catalog.find_package_path", return_value=plugin_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), patch.object(catalog, "update_plugin_version_registry"), @@ -2727,7 +2727,7 @@ def test_install_from_git_success_ssh(self, tmp_path, mock_github_env): with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), - patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch("cpex.tools.catalog.find_package_path", return_value=plugin_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), patch.object(catalog, "update_plugin_version_registry"), @@ -2772,7 +2772,7 @@ def test_install_from_git_with_branch(self, tmp_path, mock_github_env): with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), - patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch("cpex.tools.catalog.find_package_path", return_value=plugin_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), patch.object(catalog, "update_plugin_version_registry"), @@ -3034,7 +3034,7 @@ def test_install_from_git_with_zip_archive(self, tmp_path, mock_github_env): with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), - patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch("cpex.tools.catalog.find_package_path", return_value=plugin_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), patch.object(catalog, "update_plugin_version_registry"), @@ -3078,7 +3078,7 @@ def test_install_from_git_with_wheel(self, tmp_path, mock_github_env): with ( patch("cpex.tools.catalog.subprocess.run") as mock_subprocess, patch("cpex.tools.catalog.tempfile.mkdtemp", return_value=str(tmp_path)), - patch.object(catalog, "find_package_path", return_value=plugin_dir), + patch("cpex.tools.catalog.find_package_path", return_value=plugin_dir), patch.object(catalog, "_persist_manifest"), patch.object(catalog, "_find_and_load_versions_json", return_value=plugin_dir), patch.object(catalog, "update_plugin_version_registry"), diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 402f36b..12c38c8 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -809,10 +809,12 @@ def test_install_git_implementation(self): mock_console.status = Mock(return_value=mock_status) install("test_plugin @ git+https://github.com/example/test_plugin.git", "git", mock_catalog) - + # Verify install_from_git was called mock_catalog.install_from_git.assert_called_once() - mock_update_config.assert_called_once_with(manifest=test_manifest) + # update_plugins_config_yaml is called inside _finalize_installation (mocked), + # not directly from _install_from_git — verify no duplicate direct call. + mock_update_config.assert_not_called() mock_finalize.assert_called_once() def test_install_monorepo_no_plugins_found(self): @@ -1589,7 +1591,9 @@ def test_install_from_local_calls_catalog_method(self, temp_registry_dir, tmp_pa _install_from_local(str(source_dir), mock_catalog) mock_catalog.install_from_local.assert_called_once() - mock_update_config.assert_called_once_with(manifest=manifest) + # update_plugins_config_yaml is called inside _finalize_installation (mocked), + # not directly from _install_from_local — verify no duplicate direct call. + mock_update_config.assert_not_called() mock_finalize.assert_called_once_with(manifest, "local", mock_catalog, source_dir) From 02789d6a217091991b9041224cdf2c78f707f55f Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 12:49:09 -0400 Subject: [PATCH 61/88] fix: p2 item 17 search() case-insensitive match broken Signed-off-by: habeck --- cpex/tools/catalog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index e659c4c..ff954fa 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -594,7 +594,7 @@ def search(self, plugin_name: str | None) -> Optional[list[PluginManifest]]: self.load() for manifest in self.manifests: if plugin_name is not None: - if manifest.name.lower().count(plugin_name) > 0: + if manifest.name.lower().count(plugin_name.lower()) > 0: matching.append(manifest) elif plugin_name.lower() in manifest.tags: matching.append(manifest) From 07ebd45f5172ea11fce25305f45d96ce6e716c83 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 12:55:34 -0400 Subject: [PATCH 62/88] chore: add tests for _ver method. Signed-off-by: habeck --- tests/unit/cpex/tools/test_catalog.py | 167 ++++++++++++++++++++++++++ 1 file changed, 167 insertions(+) diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 6f660d2..f29881b 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -3195,3 +3195,170 @@ def test_zip_benign_succeeds(self, catalog, tmp_path): catalog._extract_package_archive(archive, extract_dir) assert (extract_dir / "pkg" / "hello.txt").read_text() == "world" + + + +class TestVerFunction: + """Tests for the _ver utility function.""" + + def test_ver_valid_simple_version(self): + """Test _ver with a simple valid version string.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("1.0.0") + assert isinstance(result, Version) + assert str(result) == "1.0.0" + + def test_ver_valid_complex_version(self): + """Test _ver with a complex valid PEP 440 version string.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("2.1.3rc1.post0.dev5") + assert isinstance(result, Version) + assert str(result) == "2.1.3rc1.post0.dev5" + + def test_ver_valid_version_with_epoch(self): + """Test _ver with a version string containing an epoch.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("1!2.0.0") + assert isinstance(result, Version) + assert str(result) == "1!2.0.0" + + def test_ver_valid_prerelease_version(self): + """Test _ver with pre-release version strings.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + # Test alpha + result = _ver("1.0.0a1") + assert isinstance(result, Version) + assert str(result) == "1.0.0a1" + + # Test beta + result = _ver("1.0.0b2") + assert isinstance(result, Version) + assert str(result) == "1.0.0b2" + + # Test release candidate + result = _ver("1.0.0rc3") + assert isinstance(result, Version) + assert str(result) == "1.0.0rc3" + + def test_ver_valid_post_release_version(self): + """Test _ver with post-release version strings.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("1.0.0.post1") + assert isinstance(result, Version) + assert str(result) == "1.0.0.post1" + + def test_ver_valid_dev_version(self): + """Test _ver with development version strings.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("1.0.0.dev0") + assert isinstance(result, Version) + assert str(result) == "1.0.0.dev0" + + def test_ver_invalid_version_returns_zero(self): + """Test _ver with an invalid version string returns Version('0').""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("not-a-valid-version") + assert isinstance(result, Version) + assert str(result) == "0" + + def test_ver_invalid_version_with_special_chars(self): + """Test _ver with invalid version containing special characters.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("v1.0.0@latest") + assert isinstance(result, Version) + assert str(result) == "0" + + def test_ver_empty_string_returns_zero(self): + """Test _ver with an empty string returns Version('0').""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("") + assert isinstance(result, Version) + assert str(result) == "0" + + def test_ver_invalid_semantic_version(self): + """Test _ver with invalid semantic version format.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + # Missing patch version + result = _ver("1.0") + assert isinstance(result, Version) + # Note: "1.0" is actually valid in PEP 440, normalized to "1.0" + assert str(result) == "1.0" + + # Completely invalid + result = _ver("abc.def.ghi") + assert isinstance(result, Version) + assert str(result) == "0" + + def test_ver_version_with_local_identifier(self): + """Test _ver with version containing local version identifier.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("1.0.0+local.version") + assert isinstance(result, Version) + assert str(result) == "1.0.0+local.version" + + def test_ver_logs_debug_on_invalid_version(self): + """Test _ver logs a debug message when version is invalid.""" + from cpex.tools.catalog import _ver + + with patch("cpex.tools.catalog.logger") as mock_logger: + result = _ver("invalid-version-string") + + # Verify debug was called with appropriate message + mock_logger.debug.assert_called_once() + call_args = mock_logger.debug.call_args[0] + assert "Could not parse version" in call_args[0] + assert "invalid-version-string" in call_args + + def test_ver_whitespace_version(self): + """Test _ver with whitespace-only version string.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver(" ") + assert isinstance(result, Version) + assert str(result) == "0" + + def test_ver_version_with_v_prefix(self): + """Test _ver with version string that has 'v' prefix.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + # 'v1.0.0' is accepted by packaging.version and normalized to '1.0.0' + result = _ver("v1.0.0") + assert isinstance(result, Version) + assert str(result) == "1.0.0" + + def test_ver_numeric_only_version(self): + """Test _ver with numeric-only version strings.""" + from cpex.tools.catalog import _ver + from packaging.version import Version + + result = _ver("1") + assert isinstance(result, Version) + assert str(result) == "1" + + result = _ver("42") + assert isinstance(result, Version) + assert str(result) == "42" From b2f20c861cd24075b2471151ac35c7465a907d43 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 14:12:18 -0400 Subject: [PATCH 63/88] fix: version registry update cleanup Signed-off-by: habeck --- cpex/tools/catalog.py | 55 +++-- tests/unit/cpex/tools/test_catalog.py | 291 ++++++++++++++++++++++++++ 2 files changed, 316 insertions(+), 30 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index ff954fa..8783d4b 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -110,54 +110,49 @@ def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path """ Update the plugin version registry with the given manifest. args: - manifest: The plugin manifest to be stored in the catalog - relpath: the relative path of the plugin package that was installed + manifest: The plugin manifest to be stored in the catalog + relpath: the relative path of the plugin package that was installed """ - plugin_version: PluginVersionInfo = PluginVersionInfo( + plugin_version = PluginVersionInfo( version=manifest.version, manifest_file=str(relpath), - released=datetime.datetime.now(datetime.timezone.utc).isoformat() + "Z", + released=datetime.datetime.now(datetime.timezone.utc).isoformat().replace("+00:00", "Z"), ) + file_path = Path(self.catalog_folder) / manifest.name / "versions.json" - # Ensure the directory exists file_path.parent.mkdir(parents=True, exist_ok=True) + + # Load or create registry if file_path.exists(): with file_path.open("r") as f: plugin_version_registry = PluginVersionRegistry(**json.load(f)) else: plugin_version_registry = PluginVersionRegistry(versions=[]) - found = False - latest = None - for pv in plugin_version_registry.versions: - if pv.version == plugin_version.version: - found = True - if latest is None: - latest = pv - else: - if _ver(latest.version) < _ver(plugin_version.version): - latest = plugin_version - else: - if latest is None: - latest = pv - else: - if _ver(latest.version) < _ver(pv.version): - latest = pv - - if not found: + + # Check if version already exists (avoid duplicates) + version_exists = any( + pv.version == plugin_version.version + for pv in plugin_version_registry.versions + ) + + # Add new version if not duplicate + if not version_exists: plugin_version_registry.versions.append(plugin_version) - if plugin_version_registry.latest is None: - plugin_version_registry.latest = plugin_version - else: - plugin_version_registry.latest = latest - else: - if plugin_version_registry.latest != latest: - plugin_version_registry.latest = plugin_version + + # Recalculate latest version from all versions + if plugin_version_registry.versions: + plugin_version_registry.latest = max( + plugin_version_registry.versions, + key=lambda pv: _ver(pv.version) + ) + # Write the updated version registry to the file file_path.write_text( json.dumps(plugin_version_registry.model_dump(mode="json"), indent=2), encoding="utf-8", ) + def save_manifest_content(self, content: str, path, repo_url: httpx.URL): """ write the manifest content to the supplied path relative to the ouptut folder, diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index f29881b..9dd16ce 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -3362,3 +3362,294 @@ def test_ver_numeric_only_version(self): result = _ver("42") assert isinstance(result, Version) assert str(result) == "42" + + + +class TestPluginCatalogUpdatePluginVersionRegistry: + """Tests for PluginCatalog.update_plugin_version_registry method.""" + + def test_update_plugin_version_registry_creates_new_file(self, tmp_path, mock_github_env): + """Test creating a new versions.json file when none exists.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + manifest = create_test_manifest(name="test_plugin", version="1.0.0") + relpath = Path("plugins/test_plugin") + + catalog.update_plugin_version_registry(manifest, relpath) + + # Verify file was created + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + assert versions_file.exists() + + # Verify content + with versions_file.open("r") as f: + data = json.load(f) + + assert len(data["versions"]) == 1 + assert data["versions"][0]["version"] == "1.0.0" + assert data["versions"][0]["manifest_file"] == str(relpath) + assert data["latest"]["version"] == "1.0.0" + + def test_update_plugin_version_registry_adds_new_version(self, tmp_path, mock_github_env): + """Test adding a new version to existing registry.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create initial version + manifest1 = create_test_manifest(name="test_plugin", version="1.0.0") + relpath1 = Path("plugins/test_plugin") + catalog.update_plugin_version_registry(manifest1, relpath1) + + # Add new version + manifest2 = create_test_manifest(name="test_plugin", version="2.0.0") + relpath2 = Path("plugins/test_plugin") + catalog.update_plugin_version_registry(manifest2, relpath2) + + # Verify both versions exist + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert len(data["versions"]) == 2 + versions = [v["version"] for v in data["versions"]] + assert "1.0.0" in versions + assert "2.0.0" in versions + assert data["latest"]["version"] == "2.0.0" + + def test_update_plugin_version_registry_handles_duplicate_version(self, tmp_path, mock_github_env): + """Test that duplicate versions are not added.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + manifest = create_test_manifest(name="test_plugin", version="1.0.0") + relpath = Path("plugins/test_plugin") + + # Add same version twice + catalog.update_plugin_version_registry(manifest, relpath) + catalog.update_plugin_version_registry(manifest, relpath) + + # Verify only one version exists + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert len(data["versions"]) == 1 + assert data["versions"][0]["version"] == "1.0.0" + + def test_update_plugin_version_registry_updates_latest_correctly(self, tmp_path, mock_github_env): + """Test that latest version is updated correctly when adding versions out of order.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Add version 2.0.0 first + manifest2 = create_test_manifest(name="test_plugin", version="2.0.0") + catalog.update_plugin_version_registry(manifest2, Path("plugins/test_plugin")) + + # Add version 1.0.0 + manifest1 = create_test_manifest(name="test_plugin", version="1.0.0") + catalog.update_plugin_version_registry(manifest1, Path("plugins/test_plugin")) + + # Add version 3.0.0 + manifest3 = create_test_manifest(name="test_plugin", version="3.0.0") + catalog.update_plugin_version_registry(manifest3, Path("plugins/test_plugin")) + + # Verify latest is 3.0.0 + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert data["latest"]["version"] == "3.0.0" + assert len(data["versions"]) == 3 + + def test_update_plugin_version_registry_with_prerelease_versions(self, tmp_path, mock_github_env): + """Test handling of pre-release versions.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Add stable version + manifest1 = create_test_manifest(name="test_plugin", version="1.0.0") + catalog.update_plugin_version_registry(manifest1, Path("plugins/test_plugin")) + + # Add pre-release version + manifest2 = create_test_manifest(name="test_plugin", version="2.0.0rc1") + catalog.update_plugin_version_registry(manifest2, Path("plugins/test_plugin")) + + # Verify latest is the rc version (higher version number) + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert len(data["versions"]) == 2 + assert data["latest"]["version"] == "2.0.0rc1" + + def test_update_plugin_version_registry_with_dev_versions(self, tmp_path, mock_github_env): + """Test handling of development versions.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Add dev version + manifest1 = create_test_manifest(name="test_plugin", version="1.0.0.dev1") + catalog.update_plugin_version_registry(manifest1, Path("plugins/test_plugin")) + + # Add stable version + manifest2 = create_test_manifest(name="test_plugin", version="1.0.0") + catalog.update_plugin_version_registry(manifest2, Path("plugins/test_plugin")) + + # Verify latest is stable version + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert len(data["versions"]) == 2 + assert data["latest"]["version"] == "1.0.0" + + def test_update_plugin_version_registry_preserves_existing_data(self, tmp_path, mock_github_env): + """Test that existing version data is preserved when adding new versions.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Manually create a versions.json with additional metadata + versions_dir = tmp_path / "catalog" / "test_plugin" + versions_dir.mkdir(parents=True) + versions_file = versions_dir / "versions.json" + + initial_data = { + "latest": { + "version": "1.0.0", + "released": "2024-01-01T00:00:00Z", + "manifest_file": "plugins/test_plugin", + "deprecated": False, + "breaking_changes": False, + "changelog": "Initial release" + }, + "versions": [ + { + "version": "1.0.0", + "released": "2024-01-01T00:00:00Z", + "manifest_file": "plugins/test_plugin", + "deprecated": False, + "breaking_changes": False, + "changelog": "Initial release" + } + ] + } + versions_file.write_text(json.dumps(initial_data, indent=2)) + + # Add new version + manifest2 = create_test_manifest(name="test_plugin", version="2.0.0") + catalog.update_plugin_version_registry(manifest2, Path("plugins/test_plugin")) + + # Verify old version data is preserved + with versions_file.open("r") as f: + data = json.load(f) + + assert len(data["versions"]) == 2 + old_version = next(v for v in data["versions"] if v["version"] == "1.0.0") + assert old_version["changelog"] == "Initial release" + assert old_version["released"] == "2024-01-01T00:00:00Z" + + def test_update_plugin_version_registry_with_complex_version_ordering(self, tmp_path, mock_github_env): + """Test version ordering with complex version strings.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Add versions in random order + versions = ["1.0.0", "2.0.0rc1", "1.5.0", "2.0.0", "1.0.1", "2.1.0a1"] + for version in versions: + manifest = create_test_manifest(name="test_plugin", version=version) + catalog.update_plugin_version_registry(manifest, Path("plugins/test_plugin")) + + # Verify latest is 2.1.0a1 (highest version) + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert len(data["versions"]) == 6 + assert data["latest"]["version"] == "2.1.0a1" + + def test_update_plugin_version_registry_creates_parent_directories(self, tmp_path, mock_github_env): + """Test that parent directories are created if they don't exist.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Don't create the directory beforehand + manifest = create_test_manifest(name="test_plugin", version="1.0.0") + catalog.update_plugin_version_registry(manifest, Path("plugins/test_plugin")) + + # Verify directory and file were created + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + assert versions_file.exists() + assert versions_file.parent.exists() + + def test_update_plugin_version_registry_with_invalid_existing_json(self, tmp_path, mock_github_env): + """Test handling of corrupted existing versions.json file.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Create corrupted versions.json + versions_dir = tmp_path / "catalog" / "test_plugin" + versions_dir.mkdir(parents=True) + versions_file = versions_dir / "versions.json" + versions_file.write_text("invalid json content") + + # Attempt to update should raise an error + manifest = create_test_manifest(name="test_plugin", version="1.0.0") + with pytest.raises(json.JSONDecodeError): + catalog.update_plugin_version_registry(manifest, Path("plugins/test_plugin")) + + def test_update_plugin_version_registry_timestamp_format(self, tmp_path, mock_github_env): + """Test that timestamp is in correct ISO format with Z suffix.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + manifest = create_test_manifest(name="test_plugin", version="1.0.0") + catalog.update_plugin_version_registry(manifest, Path("plugins/test_plugin")) + + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + released = data["versions"][0]["released"] + # Verify format: ends with Z and contains T + assert released.endswith("Z") + assert "T" in released + # Verify it's a valid ISO format + from datetime import datetime + datetime.fromisoformat(released.replace("Z", "+00:00")) + + def test_update_plugin_version_registry_with_epoch_versions(self, tmp_path, mock_github_env): + """Test handling of versions with epochs.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + # Add version with epoch + manifest1 = create_test_manifest(name="test_plugin", version="1!1.0.0") + catalog.update_plugin_version_registry(manifest1, Path("plugins/test_plugin")) + + # Add version without epoch (should be lower) + manifest2 = create_test_manifest(name="test_plugin", version="2.0.0") + catalog.update_plugin_version_registry(manifest2, Path("plugins/test_plugin")) + + # Verify epoch version is latest + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert data["latest"]["version"] == "1!1.0.0" + + def test_update_plugin_version_registry_relpath_stored_correctly(self, tmp_path, mock_github_env): + """Test that relative path is stored correctly in manifest_file.""" + catalog = PluginCatalog() + catalog.catalog_folder = str(tmp_path / "catalog") + + manifest = create_test_manifest(name="test_plugin", version="1.0.0") + relpath = Path("custom/path/to/plugin") + + catalog.update_plugin_version_registry(manifest, relpath) + + versions_file = tmp_path / "catalog" / "test_plugin" / "versions.json" + with versions_file.open("r") as f: + data = json.load(f) + + assert data["versions"][0]["manifest_file"] == "custom/path/to/plugin" From 33c3e9578e022fd0d627ad31e5f7ed8328f108f9 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 14:13:31 -0400 Subject: [PATCH 64/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/catalog.py | 23 ++++++++--------------- 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 8783d4b..0816d71 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -118,41 +118,34 @@ def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path manifest_file=str(relpath), released=datetime.datetime.now(datetime.timezone.utc).isoformat().replace("+00:00", "Z"), ) - + file_path = Path(self.catalog_folder) / manifest.name / "versions.json" file_path.parent.mkdir(parents=True, exist_ok=True) - + # Load or create registry if file_path.exists(): with file_path.open("r") as f: plugin_version_registry = PluginVersionRegistry(**json.load(f)) else: plugin_version_registry = PluginVersionRegistry(versions=[]) - + # Check if version already exists (avoid duplicates) - version_exists = any( - pv.version == plugin_version.version - for pv in plugin_version_registry.versions - ) - + version_exists = any(pv.version == plugin_version.version for pv in plugin_version_registry.versions) + # Add new version if not duplicate if not version_exists: plugin_version_registry.versions.append(plugin_version) - + # Recalculate latest version from all versions if plugin_version_registry.versions: - plugin_version_registry.latest = max( - plugin_version_registry.versions, - key=lambda pv: _ver(pv.version) - ) - + plugin_version_registry.latest = max(plugin_version_registry.versions, key=lambda pv: _ver(pv.version)) + # Write the updated version registry to the file file_path.write_text( json.dumps(plugin_version_registry.model_dump(mode="json"), indent=2), encoding="utf-8", ) - def save_manifest_content(self, content: str, path, repo_url: httpx.URL): """ write the manifest content to the supplied path relative to the ouptut folder, From 8ebf07a52b489ec9977d318559529b1025caab7f Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 14:40:04 -0400 Subject: [PATCH 65/88] chore: add missing doc string, and tests for _ver method. Signed-off-by: habeck --- cpex/tools/catalog.py | 26 ++- tests/unit/cpex/tools/test_catalog.py | 300 +++++++++++--------------- 2 files changed, 148 insertions(+), 178 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 0816d71..0683551 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -43,13 +43,6 @@ logger = logging.getLogger(__name__) -def _ver(version_str: str) -> Version: - try: - return Version(version_str) - except InvalidVersion: - logger.debug("Could not parse version %r as PEP 440; treating as lowest", version_str) - return Version("0") - class PluginCatalog: """ @@ -106,6 +99,23 @@ def save_manifest(self, manifest: PluginManifest, path): updated_content = yaml.safe_dump(manifest.model_dump(), default_flow_style=False) relpath.write_text(updated_content, encoding="utf-8") + def _ver(self, version_str: str) -> Version: + """ + Parse a version string into a Version object. + + Args: + version_str: Version string to parse (e.g., "1.0.0", "2.0.0rc1") + + Returns: + Version object. Returns Version("0") if parsing fails. + """ + try: + return Version(version_str) + except InvalidVersion: + logger.debug("Could not parse version %r as PEP 440; treating as lowest", version_str) + return Version("0") + + def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path): """ Update the plugin version registry with the given manifest. @@ -138,7 +148,7 @@ def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path # Recalculate latest version from all versions if plugin_version_registry.versions: - plugin_version_registry.latest = max(plugin_version_registry.versions, key=lambda pv: _ver(pv.version)) + plugin_version_registry.latest = max(plugin_version_registry.versions, key=lambda pv: self._ver(pv.version)) # Write the updated version registry to the file file_path.write_text( diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index 9dd16ce..d32ea8a 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -3196,175 +3196,6 @@ def test_zip_benign_succeeds(self, catalog, tmp_path): assert (extract_dir / "pkg" / "hello.txt").read_text() == "world" - - -class TestVerFunction: - """Tests for the _ver utility function.""" - - def test_ver_valid_simple_version(self): - """Test _ver with a simple valid version string.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("1.0.0") - assert isinstance(result, Version) - assert str(result) == "1.0.0" - - def test_ver_valid_complex_version(self): - """Test _ver with a complex valid PEP 440 version string.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("2.1.3rc1.post0.dev5") - assert isinstance(result, Version) - assert str(result) == "2.1.3rc1.post0.dev5" - - def test_ver_valid_version_with_epoch(self): - """Test _ver with a version string containing an epoch.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("1!2.0.0") - assert isinstance(result, Version) - assert str(result) == "1!2.0.0" - - def test_ver_valid_prerelease_version(self): - """Test _ver with pre-release version strings.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - # Test alpha - result = _ver("1.0.0a1") - assert isinstance(result, Version) - assert str(result) == "1.0.0a1" - - # Test beta - result = _ver("1.0.0b2") - assert isinstance(result, Version) - assert str(result) == "1.0.0b2" - - # Test release candidate - result = _ver("1.0.0rc3") - assert isinstance(result, Version) - assert str(result) == "1.0.0rc3" - - def test_ver_valid_post_release_version(self): - """Test _ver with post-release version strings.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("1.0.0.post1") - assert isinstance(result, Version) - assert str(result) == "1.0.0.post1" - - def test_ver_valid_dev_version(self): - """Test _ver with development version strings.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("1.0.0.dev0") - assert isinstance(result, Version) - assert str(result) == "1.0.0.dev0" - - def test_ver_invalid_version_returns_zero(self): - """Test _ver with an invalid version string returns Version('0').""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("not-a-valid-version") - assert isinstance(result, Version) - assert str(result) == "0" - - def test_ver_invalid_version_with_special_chars(self): - """Test _ver with invalid version containing special characters.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("v1.0.0@latest") - assert isinstance(result, Version) - assert str(result) == "0" - - def test_ver_empty_string_returns_zero(self): - """Test _ver with an empty string returns Version('0').""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("") - assert isinstance(result, Version) - assert str(result) == "0" - - def test_ver_invalid_semantic_version(self): - """Test _ver with invalid semantic version format.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - # Missing patch version - result = _ver("1.0") - assert isinstance(result, Version) - # Note: "1.0" is actually valid in PEP 440, normalized to "1.0" - assert str(result) == "1.0" - - # Completely invalid - result = _ver("abc.def.ghi") - assert isinstance(result, Version) - assert str(result) == "0" - - def test_ver_version_with_local_identifier(self): - """Test _ver with version containing local version identifier.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("1.0.0+local.version") - assert isinstance(result, Version) - assert str(result) == "1.0.0+local.version" - - def test_ver_logs_debug_on_invalid_version(self): - """Test _ver logs a debug message when version is invalid.""" - from cpex.tools.catalog import _ver - - with patch("cpex.tools.catalog.logger") as mock_logger: - result = _ver("invalid-version-string") - - # Verify debug was called with appropriate message - mock_logger.debug.assert_called_once() - call_args = mock_logger.debug.call_args[0] - assert "Could not parse version" in call_args[0] - assert "invalid-version-string" in call_args - - def test_ver_whitespace_version(self): - """Test _ver with whitespace-only version string.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver(" ") - assert isinstance(result, Version) - assert str(result) == "0" - - def test_ver_version_with_v_prefix(self): - """Test _ver with version string that has 'v' prefix.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - # 'v1.0.0' is accepted by packaging.version and normalized to '1.0.0' - result = _ver("v1.0.0") - assert isinstance(result, Version) - assert str(result) == "1.0.0" - - def test_ver_numeric_only_version(self): - """Test _ver with numeric-only version strings.""" - from cpex.tools.catalog import _ver - from packaging.version import Version - - result = _ver("1") - assert isinstance(result, Version) - assert str(result) == "1" - - result = _ver("42") - assert isinstance(result, Version) - assert str(result) == "42" - - - class TestPluginCatalogUpdatePluginVersionRegistry: """Tests for PluginCatalog.update_plugin_version_registry method.""" @@ -3652,4 +3483,133 @@ def test_update_plugin_version_registry_relpath_stored_correctly(self, tmp_path, with versions_file.open("r") as f: data = json.load(f) - assert data["versions"][0]["manifest_file"] == "custom/path/to/plugin" + + +class TestPluginCatalogVerMethod: + """Tests for PluginCatalog._ver method.""" + + def test_ver_valid_simple_version(self, mock_github_env): + """Test parsing a simple valid version string.""" + catalog = PluginCatalog() + version = catalog._ver("1.0.0") + assert str(version) == "1.0.0" + + def test_ver_valid_complex_version(self, mock_github_env): + """Test parsing a complex valid version string.""" + catalog = PluginCatalog() + version = catalog._ver("1.2.3") + assert str(version) == "1.2.3" + + def test_ver_valid_version_with_epoch(self, mock_github_env): + """Test parsing a version with epoch.""" + catalog = PluginCatalog() + version = catalog._ver("1!2.0.0") + assert str(version) == "1!2.0.0" + + def test_ver_valid_prerelease_version(self, mock_github_env): + """Test parsing a pre-release version.""" + catalog = PluginCatalog() + version = catalog._ver("1.0.0rc1") + assert str(version) == "1.0.0rc1" + + def test_ver_valid_post_release_version(self, mock_github_env): + """Test parsing a post-release version.""" + catalog = PluginCatalog() + version = catalog._ver("1.0.0.post1") + assert str(version) == "1.0.0.post1" + + def test_ver_valid_dev_version(self, mock_github_env): + """Test parsing a development version.""" + catalog = PluginCatalog() + version = catalog._ver("1.0.0.dev1") + assert str(version) == "1.0.0.dev1" + + def test_ver_invalid_version_returns_zero(self, mock_github_env): + """Test that invalid version strings return Version('0').""" + catalog = PluginCatalog() + version = catalog._ver("invalid.version") + assert str(version) == "0" + + def test_ver_invalid_version_with_special_chars(self, mock_github_env): + """Test that version with special characters returns Version('0').""" + catalog = PluginCatalog() + version = catalog._ver("1.0.0@beta") + assert str(version) == "0" + + def test_ver_empty_string_returns_zero(self, mock_github_env): + """Test that empty string returns Version('0').""" + catalog = PluginCatalog() + version = catalog._ver("") + assert str(version) == "0" + + def test_ver_invalid_semantic_version(self, mock_github_env): + """Test that version with 'v' prefix is actually valid (packaging strips it).""" + catalog = PluginCatalog() + version = catalog._ver("v1.0") + # packaging.version.Version actually accepts and strips 'v' prefix + assert str(version) == "1.0" + + def test_ver_version_with_local_identifier(self, mock_github_env): + """Test parsing a version with local identifier.""" + catalog = PluginCatalog() + version = catalog._ver("1.0.0+local.build") + assert str(version) == "1.0.0+local.build" + + def test_ver_logs_debug_on_invalid_version(self, mock_github_env, caplog): + """Test that debug log is created for invalid versions.""" + import logging + catalog = PluginCatalog() + + with caplog.at_level(logging.DEBUG): + catalog._ver("not-a-version") + + assert "Could not parse version" in caplog.text + assert "treating as lowest" in caplog.text + + def test_ver_whitespace_version(self, mock_github_env): + """Test that whitespace-only version returns Version('0').""" + catalog = PluginCatalog() + version = catalog._ver(" ") + assert str(version) == "0" + + def test_ver_version_with_v_prefix(self, mock_github_env): + """Test that version with 'v' prefix is valid (packaging strips it).""" + catalog = PluginCatalog() + version = catalog._ver("v1.0.0") + # packaging.version.Version actually accepts and strips 'v' prefix + assert str(version) == "1.0.0" + + def test_ver_numeric_only_version(self, mock_github_env): + """Test parsing a single numeric version.""" + catalog = PluginCatalog() + version = catalog._ver("1") + assert str(version) == "1" + + def test_ver_comparison_works_correctly(self, mock_github_env): + """Test that version comparison works as expected.""" + catalog = PluginCatalog() + v1 = catalog._ver("1.0.0") + v2 = catalog._ver("2.0.0") + v_invalid = catalog._ver("invalid") + + assert v1 < v2 + assert v_invalid < v1 + assert v_invalid == catalog._ver("0") + + def test_ver_prerelease_comparison(self, mock_github_env): + """Test that pre-release versions compare correctly.""" + catalog = PluginCatalog() + v_stable = catalog._ver("1.0.0") + v_rc = catalog._ver("1.0.0rc1") + v_dev = catalog._ver("1.0.0.dev1") + + assert v_dev < v_rc < v_stable + + def test_ver_epoch_comparison(self, mock_github_env): + """Test that epoch versions compare correctly.""" + catalog = PluginCatalog() + v_no_epoch = catalog._ver("2.0.0") + v_with_epoch = catalog._ver("1!1.0.0") + + # Epoch takes precedence + assert v_with_epoch > v_no_epoch From 25aa1f2ca8219883565900804cac4c68b19712b5 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 14:43:01 -0400 Subject: [PATCH 66/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/catalog.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 0683551..b2cda79 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -43,7 +43,6 @@ logger = logging.getLogger(__name__) - class PluginCatalog: """ Utility class to initialize the plugin catalog from configured monorepos @@ -102,10 +101,10 @@ def save_manifest(self, manifest: PluginManifest, path): def _ver(self, version_str: str) -> Version: """ Parse a version string into a Version object. - + Args: version_str: Version string to parse (e.g., "1.0.0", "2.0.0rc1") - + Returns: Version object. Returns Version("0") if parsing fails. """ @@ -115,7 +114,6 @@ def _ver(self, version_str: str) -> Version: logger.debug("Could not parse version %r as PEP 440; treating as lowest", version_str) return Version("0") - def update_plugin_version_registry(self, manifest: PluginManifest, relpath: Path): """ Update the plugin version registry with the given manifest. From 387d0e7d796bae64bd58531c00cd8e482c14da44 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 15:47:29 -0400 Subject: [PATCH 67/88] fix: review p2 moderate 21 - list function now uses console.print Signed-off-by: habeck --- cpex/tools/cli.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 7842a9d..1d54982 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -255,7 +255,7 @@ def list(type: str, fmt: str = "text") -> None: registered_plugins = pr.registry.plugins if fmt == "json": - print( + console.print( json.dumps( { "plugins": [ @@ -269,12 +269,7 @@ def list(type: str, fmt: str = "text") -> None: if registered_plugins: for plug_in in registered_plugins: - logger.info( - "name: %s version: %s installation type: %s", - plug_in.name, - plug_in.version, - plug_in.installation_type, - ) + console.print(f"name: {plug_in.name} version: {plug_in.version} installation type: {plug_in.installation_type}\n") else: logger.info("No plugins registered.") From 5df7451e594ea07f01920b22a9c7b0f1cf748ff6 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 16:11:15 -0400 Subject: [PATCH 68/88] chore: fix failing unit test, address non-atomic registry write. Signed-off-by: habeck --- cpex/framework/models.py | 32 ++++++++--- tests/unit/cpex/tools/test_cli.py | 93 ++++++++++++++++++++++++++++++- 2 files changed, 115 insertions(+), 10 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 2513562..6440183 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -10,10 +10,12 @@ """ # Standard -import json +import asyncio +import contextlib import logging import os import re +import tempfile from datetime import datetime from enum import Enum, StrEnum from pathlib import Path @@ -2411,10 +2413,24 @@ def unregister_plugin(self, plugin_name: str) -> bool: return False def save(self) -> None: - """Serialize the registry to disk.""" - DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) - DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" - - ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE - with open(ipr_file, "w", encoding="utf-8") as ipr: - json.dump(self.model_dump(), ipr, indent=2) + """Serialize the registry to disk atomically.""" + folder = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) + target = folder / "installed-plugins.json" + data = orjson.dumps(self.model_dump(), option=orjson.OPT_INDENT_2) + + tmp = tempfile.NamedTemporaryFile( + mode="wb", delete=False, dir=str(folder), + prefix="installed-plugins.", suffix=".tmp", + ) + try: + try: + tmp.write(data) + tmp.flush() + os.fsync(tmp.fileno()) + finally: + tmp.close() + os.replace(tmp.name, target) + except Exception: + with contextlib.suppress(FileNotFoundError): + os.unlink(tmp.name) + raise diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 12c38c8..fc721b0 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -501,9 +501,9 @@ def test_list_with_existing_plugins(self, temp_registry_dir): } registry_file.write_text(json.dumps(registry_data)) - with patch("cpex.tools.cli.logger") as mock_logger: + with patch("cpex.tools.cli.console") as mock_console: list("all") - assert mock_logger.info.call_count == 2 + assert mock_console.print.call_count == 2 class TestUpdatePluginRegistry: @@ -1495,6 +1495,95 @@ def test_unregister_nonexistent_plugin(self, temp_registry_dir): assert len(registry.plugins) == 1 +class TestInstalledPluginRegistrySaveAtomic: + """Tests for atomic write behaviour of InstalledPluginRegistry.save().""" + + def _make_plugin(self): + from cpex.framework.models import InstalledPluginInfo, PluginInstallationType + return InstalledPluginInfo( + name="test_plugin", + kind="native", + version="1.0.0", + installation_type=PluginInstallationType.MONOREPO, + installation_path="/path/to/plugin", + installed_at="2024-01-01T00:00:00.000000Z", + installed_by="test_user", + package_source="https://example.com/repo/plugin", + editable=False, + ) + + def test_happy_path_no_tmp_litter(self, temp_registry_dir): + """save() writes the file and leaves no .tmp siblings.""" + from cpex.framework.models import InstalledPluginRegistry + registry = InstalledPluginRegistry() + registry.register_plugin(self._make_plugin()) + + registry_file = temp_registry_dir / "installed-plugins.json" + assert registry_file.exists() + data = json.loads(registry_file.read_text()) + assert len(data["plugins"]) == 1 + assert data["plugins"][0]["name"] == "test_plugin" + assert [*temp_registry_dir.glob("installed-plugins.*.tmp")] == [] + + def test_crash_mid_rename_preserves_original(self, temp_registry_dir, monkeypatch): + """If os.replace raises, the original file is untouched and no .tmp remains.""" + import os + from cpex.framework.models import InstalledPluginRegistry + + original_content = b'{"plugins":[]}' + registry_file = temp_registry_dir / "installed-plugins.json" + registry_file.write_bytes(original_content) + + def exploding_replace(*args, **kwargs): + raise OSError("simulated mid-rename crash") + + monkeypatch.setattr(os, "replace", exploding_replace) + + registry = InstalledPluginRegistry() + with pytest.raises(OSError, match="simulated mid-rename crash"): + registry.save() + + assert registry_file.read_bytes() == original_content + assert [*temp_registry_dir.glob("installed-plugins.*.tmp")] == [] + + def test_crash_mid_write_cleans_up_tmp(self, temp_registry_dir, monkeypatch): + """If writing to the temp file raises, the temp file is cleaned up.""" + import tempfile as _tempfile + from cpex.framework.models import InstalledPluginRegistry + + original_NamedTemporaryFile = _tempfile.NamedTemporaryFile + + class _ExplodingFile: + def __init__(self, *args, **kwargs): + self._real = original_NamedTemporaryFile(*args, **kwargs) + self.name = self._real.name + + def write(self, data): + raise OSError("simulated write failure") + + def flush(self): + pass + + def fileno(self): + return self._real.fileno() + + def close(self): + self._real.close() + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + monkeypatch.setattr(_tempfile, "NamedTemporaryFile", _ExplodingFile) + + registry = InstalledPluginRegistry() + with pytest.raises(OSError, match="simulated write failure"): + registry.save() + + assert [*temp_registry_dir.glob("installed-plugins.*.tmp")] == [] + class TestSelectPluginFromCatalog: """Tests for select_plugin_from_catalog() function.""" From e86ef1289df2a897d16a707c881beb0acadc577c Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 16:11:59 -0400 Subject: [PATCH 69/88] chore: lint fix Signed-off-by: habeck --- cpex/framework/models.py | 7 +++++-- cpex/tools/cli.py | 4 +++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 6440183..7e4b28a 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2419,8 +2419,11 @@ def save(self) -> None: data = orjson.dumps(self.model_dump(), option=orjson.OPT_INDENT_2) tmp = tempfile.NamedTemporaryFile( - mode="wb", delete=False, dir=str(folder), - prefix="installed-plugins.", suffix=".tmp", + mode="wb", + delete=False, + dir=str(folder), + prefix="installed-plugins.", + suffix=".tmp", ) try: try: diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 1d54982..2b9c74a 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -269,7 +269,9 @@ def list(type: str, fmt: str = "text") -> None: if registered_plugins: for plug_in in registered_plugins: - console.print(f"name: {plug_in.name} version: {plug_in.version} installation type: {plug_in.installation_type}\n") + console.print( + f"name: {plug_in.name} version: {plug_in.version} installation type: {plug_in.installation_type}\n" + ) else: logger.info("No plugins registered.") From 84def91456d6026ecf1cf7e0c261077ab8b5bab5 Mon Sep 17 00:00:00 2001 From: habeck Date: Mon, 4 May 2026 16:54:26 -0400 Subject: [PATCH 70/88] chore: claude can't tell the difference between if "rc is False" and "if rc". Signed-off-by: habeck --- cpex/tools/cli.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 2b9c74a..3f80395 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -824,10 +824,10 @@ def plugin( console.log("Update catalog") with console.status("Updating catalog...", spinner="dots"): rc = pc.update_catalog_with_pyproject() - if rc is False: - console.log("Catalog update completed.") - else: + if rc: console.log(":x: Catalog update failed.") + else: + console.log("Catalog update completed.") if cmd_action == "versions": return versions(source, catalog=pc, fmt=fmt) From e42d99ca4775ae269867ef542c4fa0223a65d71d Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 10:27:44 -0400 Subject: [PATCH 71/88] =?UTF-8?q?fix:=20cpex/framework/models.py=20?= =?UTF-8?q?=E2=80=94=20register=5Fplugin=20(line=202392):=20replaced=20the?= =?UTF-8?q?=20unconditional=20append=20with=20a=20filter-then-append.=20An?= =?UTF-8?q?y=20existing=20entry=20with=20the=20same=20=20=20=20name=20is?= =?UTF-8?q?=20removed=20before=20the=20new=20one=20is=20added,=20so=20a=20?= =?UTF-8?q?reinstall=20upgrades=20the=20entry=20rather=20than=20creating?= =?UTF-8?q?=20a=20duplicate.=20One=20save()=20call,=20same=20atomicity=20a?= =?UTF-8?q?s=20=20=20before.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: habeck --- cpex/framework/models.py | 8 +++- tests/unit/cpex/tools/test_cli.py | 68 +++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+), 2 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 7e4b28a..8f4e16f 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2390,8 +2390,12 @@ class InstalledPluginRegistry(BaseModel): plugins: List[InstalledPluginInfo] = [] def register_plugin(self, plugin: InstalledPluginInfo) -> None: - """Register a new plugin in the registry""" - # load the registry + """Register a plugin in the registry. + + If a plugin with the same name is already registered, its entry is + replaced so the registry reflects the most-recent install. + """ + self.plugins = [p for p in self.plugins if p.name != plugin.name] self.plugins.append(plugin) self.save() diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index fc721b0..0ee4b75 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -1495,6 +1495,74 @@ def test_unregister_nonexistent_plugin(self, temp_registry_dir): assert len(registry.plugins) == 1 +class TestInstalledPluginRegistryRegisterDedup: + """Tests for dedup behaviour of InstalledPluginRegistry.register_plugin().""" + + def _make_plugin(self, name="foo", version="1.0.0", path="/path/to/plugin", installed_at="2024-01-01T00:00:00.000000Z"): + from cpex.framework.models import InstalledPluginInfo, PluginInstallationType + return InstalledPluginInfo( + name=name, + kind="native", + version=version, + installation_type=PluginInstallationType.LOCAL, + installation_path=path, + installed_at=installed_at, + installed_by="test_user", + package_source="https://example.com/repo", + editable=False, + ) + + def test_first_register_appends(self, temp_registry_dir): + """Registering into an empty registry results in exactly one entry.""" + import json + from cpex.framework.models import InstalledPluginRegistry + + registry = InstalledPluginRegistry() + registry.register_plugin(self._make_plugin()) + + assert len(registry.plugins) == 1 + registry_file = temp_registry_dir / "installed-plugins.json" + data = json.loads(registry_file.read_text()) + assert len(data["plugins"]) == 1 + assert data["plugins"][0]["name"] == "foo" + + def test_reregister_replaces_existing(self, temp_registry_dir): + """Registering a plugin that is already present replaces the old entry.""" + import json + from cpex.framework.models import InstalledPluginRegistry + + registry = InstalledPluginRegistry() + registry.register_plugin(self._make_plugin(version="1.0.0", installed_at="2024-01-01T00:00:00.000000Z")) + registry.register_plugin(self._make_plugin(version="2.0.0", path="/new/path", installed_at="2025-06-01T00:00:00.000000Z")) + + assert len(registry.plugins) == 1 + assert registry.plugins[0].version == "2.0.0" + assert registry.plugins[0].installation_path == "/new/path" + assert registry.plugins[0].installed_at == "2025-06-01T00:00:00.000000Z" + + registry_file = temp_registry_dir / "installed-plugins.json" + data = json.loads(registry_file.read_text()) + assert len(data["plugins"]) == 1 + assert data["plugins"][0]["version"] == "2.0.0" + + def test_reregister_does_not_affect_other_plugins(self, temp_registry_dir): + """Re-registering one plugin leaves other plugins untouched.""" + from cpex.framework.models import InstalledPluginRegistry + + registry = InstalledPluginRegistry() + registry.register_plugin(self._make_plugin(name="foo", version="1.0.0")) + registry.register_plugin(self._make_plugin(name="bar", version="1.0.0")) + registry.register_plugin(self._make_plugin(name="foo", version="2.0.0")) + + assert len(registry.plugins) == 2 + names = {p.name for p in registry.plugins} + assert names == {"foo", "bar"} + foo = next(p for p in registry.plugins if p.name == "foo") + bar = next(p for p in registry.plugins if p.name == "bar") + assert foo.version == "2.0.0" + assert bar.version == "1.0.0" + + class TestInstalledPluginRegistrySaveAtomic: """Tests for atomic write behaviour of InstalledPluginRegistry.save().""" From 46cfdfbc09c7af4c93ab17dcaa41257de90e3231 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 11:31:58 -0400 Subject: [PATCH 72/88] fix: P2 Issue 20 Implementation Complete: Exit Code Handling Signed-off-by: habeck --- cpex/tools/cli.py | 57 +++++++++++++++++++------ tests/unit/cpex/tools/test_cli.py | 71 ++++++++++++++++++++----------- 2 files changed, 88 insertions(+), 40 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 3f80395..c414362 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -31,8 +31,7 @@ import os import shutil import subprocess # nosec B404 # Safe: Used only for git commands with hardcoded args - -# import sys +import sys from pathlib import Path from typing import List, Optional @@ -41,6 +40,13 @@ from rich.console import Console from typing_extensions import Annotated +# Exit codes for CLI commands +EXIT_SUCCESS = 0 +EXIT_GENERAL_ERROR = 1 +EXIT_INVALID_ARGS = 2 +EXIT_NOT_FOUND = 3 +EXIT_OPERATION_FAILED = 4 + # First-Party from cpex.framework.loader.config import ConfigLoader, ConfigSaver from cpex.framework.models import ( @@ -237,11 +243,14 @@ def bootstrap( extra_context=extra_context, ) else: - logger.warning("No local templates found and git is not available to fetch remote template.") + logger.error("No local templates found and git is not available to fetch remote template.") + raise typer.Exit(EXIT_OPERATION_FAILED) except (SystemExit, typer.Exit): raise - except Exception: + except Exception as e: logger.exception("An error was caught while copying template.") + console.print(f":x: Failed to create plugin project: {str(e)}") + raise typer.Exit(EXIT_OPERATION_FAILED) def list(type: str, fmt: str = "text") -> None: @@ -588,15 +597,20 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog, assum assume_yes: Skip interactive selection prompt for monorepo installs. Raises: - ValueError: If install_type is not supported. - NotImplementedError: If the installation type is not yet implemented. + typer.Exit: With EXIT_INVALID_ARGS if install_type is not supported. + typer.Exit: With EXIT_OPERATION_FAILED if installation fails. """ if install_type is None: install_type = "monorepo" if install_type == "monorepo": - _install_from_monorepo(source, catalog, assume_yes=assume_yes) - return + try: + _install_from_monorepo(source, catalog, assume_yes=assume_yes) + return + except Exception as e: + console.print(f":x: Installation failed: {str(e)}") + logger.error("Install error: %s", str(e), exc_info=True) + raise typer.Exit(EXIT_OPERATION_FAILED) handlers = { "git": _install_from_git, @@ -607,9 +621,15 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog, assum handler = handlers.get(install_type) if handler is None: - raise ValueError(f"Unsupported installation type: {install_type}. Must be one of: {', '.join(handlers.keys())}") + console.print(f":x: Unsupported installation type: {install_type}. Must be one of: {', '.join(handlers.keys())}") + raise typer.Exit(EXIT_INVALID_ARGS) - handler(source, catalog, use_test=True if install_type == "test-pypi" else False) + try: + handler(source, catalog, use_test=True if install_type == "test-pypi" else False) + except Exception as e: + console.print(f":x: Installation failed: {str(e)}") + logger.error("Install error: %s", str(e), exc_info=True) + raise typer.Exit(EXIT_OPERATION_FAILED) def versions(plugin_name: str | None, catalog: PluginCatalog, fmt: str = "text"): @@ -715,7 +735,7 @@ def uninstall(plugin_name: str, catalog: PluginCatalog, assume_yes: bool = False if installed_plugin is None: console.print(f":x: Plugin '{plugin_name}' is not installed.") - return + raise typer.Exit(EXIT_NOT_FOUND) # Confirm uninstallation console.print(f"Found plugin: {installed_plugin.name} (version {installed_plugin.version})") @@ -749,18 +769,27 @@ def uninstall(plugin_name: str, catalog: PluginCatalog, assume_yes: bool = False plugin_registry.remove(plugin_name) else: console.print(f":x: Plugin {plugin_name} not found in catalog.") - return + raise typer.Exit(EXIT_NOT_FOUND) console.print(f":white_heavy_check_mark: {plugin_name} uninstalled successfully.") + except typer.Exit: + raise except Exception as e: console.print(f":x: Failed to uninstall {plugin_name}: {str(e)}") logger.error("Uninstall error: %s", str(e), exc_info=True) + raise typer.Exit(EXIT_OPERATION_FAILED) @app.command( help="List, search, install or uninstall plugins.\n\n" - "\ndefault install type is monorepo\n" + "Exit Codes:\n" + " 0 - Success\n" + " 1 - General error\n" + " 2 - Invalid arguments\n" + " 3 - Plugin not found\n" + " 4 - Operation failed\n\n" + "Default install type is monorepo\n\n" "Examples:\n" "python cpex/tools/cli.py plugin info pii\n" "python cpex/tools/cli.py plugin search pii\n" @@ -808,7 +837,7 @@ def plugin( if cmd_action == "uninstall": if source is None: console.print(":x: Please specify a plugin name to uninstall.") - return + raise typer.Exit(EXIT_INVALID_ARGS) pc = PluginCatalog() return uninstall(source, catalog=pc, assume_yes=assume_yes) if cmd_action == "install" and source is not None: diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 0ee4b75..dbc6a34 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -395,10 +395,12 @@ def test_warns_when_no_local_and_no_git(self): patch("cpex.tools.cli.command_exists", return_value=False), patch(_CC_PATCH_TARGET) as mock_cc, patch("cpex.tools.cli.logger") as mock_logger, + patch("cpex.tools.cli.console") as mock_console, ): - runner.invoke(app, ["bootstrap", "-d", "/tmp/test_nogit", "--no_input"]) + result = runner.invoke(app, ["bootstrap", "-d", "/tmp/test_nogit", "--no_input"]) + assert result.exit_code == 4 # EXIT_OPERATION_FAILED mock_cc.assert_not_called() - mock_logger.warning.assert_called_once() + mock_logger.error.assert_called_once() class TestBootstrapErrorHandling: @@ -408,9 +410,10 @@ def test_logs_exception_on_cookiecutter_error(self): with ( patch(_CC_PATCH_TARGET, side_effect=RuntimeError("template error")), patch("cpex.tools.cli.logger") as mock_logger, + patch("cpex.tools.cli.console") as mock_console, ): result = runner.invoke(app, ["bootstrap", "-d", "/tmp/test_err", "--no_input"]) - assert result.exit_code == 0 # error is caught and logged + assert result.exit_code == 4 # EXIT_OPERATION_FAILED mock_logger.exception.assert_called_once() @@ -845,10 +848,14 @@ def test_install_monorepo_with_available_plugins(self, temp_registry_dir): mock_catalog.install_folder_via_pip.assert_called_once() def test_install_requires_type_parameter(self): - """Test that install raises ValueError for unsupported type.""" + """Test that install raises typer.Exit for unsupported type.""" mock_catalog = Mock() - with pytest.raises(ValueError, match="Unsupported installation type"): + with ( + patch("cpex.tools.cli.console") as mock_console, + pytest.raises(click.exceptions.Exit) as exc_info, + ): install("source", "", mock_catalog) + assert exc_info.value.exit_code == 2 # EXIT_INVALID_ARGS class TestSearchFunction: @@ -1110,9 +1117,13 @@ def test_uninstall_plugin_not_found(self, temp_registry_dir): """Test uninstalling a plugin that is not installed.""" mock_catalog = Mock() - with patch("cpex.tools.cli.console") as mock_console: + with ( + patch("cpex.tools.cli.console") as mock_console, + pytest.raises(click.exceptions.Exit) as exc_info, + ): uninstall("nonexistent_plugin", mock_catalog) - mock_console.print.assert_called_with(":x: Plugin 'nonexistent_plugin' is not installed.") + assert exc_info.value.exit_code == 3 # EXIT_NOT_FOUND + mock_console.print.assert_called_with(":x: Plugin 'nonexistent_plugin' is not installed.") def test_uninstall_cancelled_by_user(self, temp_registry_dir): """Test uninstall cancelled by user.""" @@ -1211,33 +1222,35 @@ def test_uninstall_handles_exception(self, temp_registry_dir): ] } registry_file.write_text(json.dumps(registry_data)) - + mock_catalog = Mock() - + # Create a manifest to return from find test_manifest = create_test_manifest(name="test_plugin", kind="native") - + with ( patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), patch("cpex.tools.cli.console") as mock_console, patch("cpex.tools.cli.logger") as mock_logger, patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + pytest.raises(click.exceptions.Exit) as exc_info, ): # Mock the catalog instance created inside uninstall() mock_catalog_instance = Mock() mock_catalog_instance.find = Mock(return_value=test_manifest) mock_catalog_instance.uninstall_package = Mock(side_effect=RuntimeError("Uninstall failed")) mock_catalog_class.return_value = mock_catalog_instance - + mock_status = Mock() mock_status.__enter__ = Mock(return_value=mock_status) mock_status.__exit__ = Mock(return_value=False) mock_console.status = Mock(return_value=mock_status) - + uninstall("test_plugin", mock_catalog) - - mock_console.print.assert_any_call(":x: Failed to uninstall test_plugin: Uninstall failed") - mock_logger.error.assert_called_once() + + assert exc_info.value.exit_code == 4 # EXIT_OPERATION_FAILED + mock_console.print.assert_any_call(":x: Failed to uninstall test_plugin: Uninstall failed") + mock_logger.error.assert_called_once() class TestPluginUninstallCommand: @@ -1251,9 +1264,9 @@ def test_plugin_uninstall_command_without_plugin_name(self, temp_registry_dir): ): mock_catalog = Mock() mock_catalog_class.return_value = mock_catalog - + result = runner.invoke(app, ["plugin", "uninstall"]) - assert result.exit_code == 0 + assert result.exit_code == 2 # EXIT_INVALID_ARGS mock_console.print.assert_called_with(":x: Please specify a plugin name to uninstall.") def test_plugin_uninstall_command_success(self, temp_registry_dir): @@ -1308,9 +1321,9 @@ def test_plugin_uninstall_command_not_found(self, temp_registry_dir): ): mock_catalog = Mock() mock_catalog_class.return_value = mock_catalog - + result = runner.invoke(app, ["plugin", "uninstall", "nonexistent_plugin"]) - assert result.exit_code == 0 + assert result.exit_code == 3 # EXIT_NOT_FOUND mock_console.print.assert_called_with(":x: Plugin 'nonexistent_plugin' is not installed.") @@ -1800,14 +1813,18 @@ class TestInstallFunctionAdditional: """Additional tests for install() function.""" def test_install_with_unsupported_type_raises_error(self): - """Test that install raises ValueError for unsupported installation type.""" + """Test that install raises typer.Exit for unsupported installation type.""" from cpex.tools.cli import install from cpex.tools.catalog import PluginCatalog mock_catalog = Mock(spec=PluginCatalog) - with pytest.raises(ValueError, match="Unsupported installation type"): + with ( + patch("cpex.tools.cli.console") as mock_console, + pytest.raises(click.exceptions.Exit) as exc_info, + ): install("test_plugin", "unsupported_type", mock_catalog) + assert exc_info.value.exit_code == 2 # EXIT_INVALID_ARGS class TestVersionsFunction: @@ -1934,6 +1951,7 @@ def test_uninstall_when_manifest_not_found(self, temp_registry_dir): patch("cpex.tools.cli.inquirer.prompt", return_value={"confirm": True}), patch("cpex.tools.cli.console") as mock_console, patch("cpex.tools.cli.PluginCatalog") as mock_catalog_class, + pytest.raises(click.exceptions.Exit) as exc_info, ): # Mock the catalog.find method to return None (manifest not found) mock_catalog_instance = Mock() @@ -1946,11 +1964,12 @@ def test_uninstall_when_manifest_not_found(self, temp_registry_dir): mock_console.status = Mock(return_value=mock_status) uninstall("test_plugin", mock_catalog) - - # When manifest is not found, uninstall should print error and return early - # So uninstall_package should NOT be called - mock_catalog_instance.uninstall_package.assert_not_called() - mock_console.print.assert_any_call(":x: Plugin test_plugin not found in catalog.") + + assert exc_info.value.exit_code == 3 # EXIT_NOT_FOUND + # When manifest is not found, uninstall should print error and exit + # So uninstall_package should NOT be called + mock_catalog_instance.uninstall_package.assert_not_called() + mock_console.print.assert_any_call(":x: Plugin test_plugin not found in catalog.") From 01dd5635c442c5b4b498361fbb212ba674575c18 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 11:33:57 -0400 Subject: [PATCH 73/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/cli.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index c414362..b1f0ea3 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -31,7 +31,6 @@ import os import shutil import subprocess # nosec B404 # Safe: Used only for git commands with hardcoded args -import sys from pathlib import Path from typing import List, Optional @@ -40,13 +39,6 @@ from rich.console import Console from typing_extensions import Annotated -# Exit codes for CLI commands -EXIT_SUCCESS = 0 -EXIT_GENERAL_ERROR = 1 -EXIT_INVALID_ARGS = 2 -EXIT_NOT_FOUND = 3 -EXIT_OPERATION_FAILED = 4 - # First-Party from cpex.framework.loader.config import ConfigLoader, ConfigSaver from cpex.framework.models import ( @@ -60,6 +52,13 @@ # Third-Party from cpex.tools.plugin_registry import PluginRegistry +# Exit codes for CLI commands +EXIT_SUCCESS = 0 +EXIT_GENERAL_ERROR = 1 +EXIT_INVALID_ARGS = 2 +EXIT_NOT_FOUND = 3 +EXIT_OPERATION_FAILED = 4 + logger = logging.getLogger(__name__) console = Console() @@ -621,7 +620,9 @@ def install(source: str, install_type: str | None, catalog: PluginCatalog, assum handler = handlers.get(install_type) if handler is None: - console.print(f":x: Unsupported installation type: {install_type}. Must be one of: {', '.join(handlers.keys())}") + console.print( + f":x: Unsupported installation type: {install_type}. Must be one of: {', '.join(handlers.keys())}" + ) raise typer.Exit(EXIT_INVALID_ARGS) try: From 3dc581a9c114d7923b094ee3ad1d9cbfae308be7 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 14:12:01 -0400 Subject: [PATCH 74/88] fix: list function shadows built-in Signed-off-by: habeck --- cpex/tools/cli.py | 4 ++-- tests/unit/cpex/tools/test_cli.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index b1f0ea3..a56339a 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -252,7 +252,7 @@ def bootstrap( raise typer.Exit(EXIT_OPERATION_FAILED) -def list(type: str, fmt: str = "text") -> None: +def list_registered_plugins(type: str, fmt: str = "text") -> None: """List the installed plugins Args: type (str): The type of plugins to list. Can be "native" or "external". @@ -863,7 +863,7 @@ def plugin( return versions(source, catalog=pc, fmt=fmt) if cmd_action == "list": - return list(install_type, fmt=fmt) + return list_registered_plugins(install_type, fmt=fmt) if cmd_action == "install" and source is not None: return install(source, install_type, catalog=pc, assume_yes=assume_yes) if cmd_action == "search": diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index dbc6a34..01aa874 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -29,7 +29,7 @@ command_exists, git_user_email, git_user_name, - list, + list_registered_plugins, install_from_manifest, install, search, @@ -474,7 +474,7 @@ class TestListFunction: def test_list_with_no_registry_file(self, temp_registry_dir): """Test list when registry file doesn't exist.""" with patch("cpex.tools.cli.logger") as mock_logger: - list("all") + list_registered_plugins("all") mock_logger.info.assert_called_with("No plugins registered.") def test_list_with_existing_plugins(self, temp_registry_dir): @@ -505,7 +505,7 @@ def test_list_with_existing_plugins(self, temp_registry_dir): registry_file.write_text(json.dumps(registry_data)) with patch("cpex.tools.cli.console") as mock_console: - list("all") + list_registered_plugins("all") assert mock_console.print.call_count == 2 From 8f464a83ae2768c534f5589cddec935580b2be65 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 15:22:40 -0400 Subject: [PATCH 75/88] chore: logic tweak Signed-off-by: habeck --- cpex/tools/cli.py | 8 ++++-- tests/unit/cpex/tools/test_cli.py | 48 +++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 2 deletions(-) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index a56339a..1ef1870 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -338,10 +338,14 @@ def remove_from_plugins_config_yaml(manifest: PluginManifest) -> bool: return False initial_count = len(plugin_configs.plugins) - # Need to match by manifest.kind or if plugin.name starts with manifest.name + # Need to match by manifest.kind and plugin.name starts with manifest.name # e.g. if it is an external plugin or a venv plugin then kind will match many plugin configurations in config.yaml + # remove plugins that match the manifest.kind and plugin.name is part of the manifest.name + # plugin_configs.plugins = [ + # p for p in plugin_configs.plugins if p.kind != manifest.kind and p.name.count(manifest.name) == 0 + # ] plugin_configs.plugins = [ - p for p in plugin_configs.plugins if p.kind != manifest.kind and p.name.count(manifest.name) == 0 + p for p in plugin_configs.plugins if not (p.kind == manifest.kind and p.name.count(manifest.name) > 0) ] if len(plugin_configs.plugins) < initial_count: ConfigSaver.save_config(plugin_configs, settings.config_file) diff --git a/tests/unit/cpex/tools/test_cli.py b/tests/unit/cpex/tools/test_cli.py index 01aa874..9cc418b 100644 --- a/tests/unit/cpex/tools/test_cli.py +++ b/tests/unit/cpex/tools/test_cli.py @@ -1067,6 +1067,54 @@ def test_removes_plugin_from_config(self, tmp_path): assert len(mock_config.plugins) == 1 assert mock_config.plugins[0].kind == "test.plugin.keep" + def test_removes_isolated_venv_plugin_from_config(self, tmp_path): + """Test removing a plugin from config.""" + config_file = tmp_path / "config.yaml" + + config_1 = { + "class_name": "test.plugin.remove", + "requirements_file": "requirements.txt" + } + + config_2 = { + "class_name": "test.plugin.keep", + "requirements_file": "requirements.txt" + } + + plugin1 = PluginConfig( + name="plugin_to_remove", + kind="isolated_venv", + mode=PluginMode.SEQUENTIAL, + priority=100, + config=config_1 + ) + plugin2 = PluginConfig( + name="plugin_to_keep", + kind="test.plugin.keep", + mode=PluginMode.SEQUENTIAL, + priority=100, + config=config_2 + ) + mock_config = Config(plugins=[plugin1, plugin2]) + + # Create a manifest with matching kind + manifest = create_test_manifest( + name="plugin_to_remove", + kind="isolated_venv", + default_config=config_1 + ) + + with ( + patch("cpex.tools.cli.ConfigLoader.load_config", return_value=mock_config), + patch("cpex.tools.cli.ConfigSaver.save_config") as mock_save, + ): + result = remove_from_plugins_config_yaml(manifest) + assert result is True + mock_save.assert_called_once() + assert len(mock_config.plugins) == 1 + assert mock_config.plugins[0].kind == "test.plugin.keep" + + def test_returns_false_when_plugin_not_found(self, tmp_path): """Test that function returns False when plugin not found.""" plugin1 = PluginConfig( From 9b10a837b43a752d3e6c6ed8b9b63ed9cbcba8ce Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 15:27:32 -0400 Subject: [PATCH 76/88] fix: p2 issue 23 Signed-off-by: habeck --- cpex/tools/catalog.py | 3 ++- cpex/tools/cli.py | 6 ------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index b2cda79..590a911 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -57,7 +57,8 @@ def __init__(self) -> None: self.plugin_folder = os.environ.get("PLUGINS_FOLDER", settings.PLUGINS_FOLDER) self.catalog_folder = os.environ.get("PLUGINS_CATALOG_FOLDER", settings.PLUGINS_CATALOG_FOLDER) self.manifests: list[PluginManifest] = [] - self.auth = Auth.Token(self.github_token) + # Only create Auth.Token if a token is provided to avoid errors with None + self.auth = Auth.Token(self.github_token) if self.github_token else None self.gh = Github(auth=self.auth, base_url=f"https://{self.github_api}", per_page=100) self.python_executable = self._get_python_executable() diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 1ef1870..aebab5a 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -338,12 +338,6 @@ def remove_from_plugins_config_yaml(manifest: PluginManifest) -> bool: return False initial_count = len(plugin_configs.plugins) - # Need to match by manifest.kind and plugin.name starts with manifest.name - # e.g. if it is an external plugin or a venv plugin then kind will match many plugin configurations in config.yaml - # remove plugins that match the manifest.kind and plugin.name is part of the manifest.name - # plugin_configs.plugins = [ - # p for p in plugin_configs.plugins if p.kind != manifest.kind and p.name.count(manifest.name) == 0 - # ] plugin_configs.plugins = [ p for p in plugin_configs.plugins if not (p.kind == manifest.kind and p.name.count(manifest.name) > 0) ] From 43d1099d08ebe3b48385933d679b386ed3c3f095 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 15:31:23 -0400 Subject: [PATCH 77/88] fix: P2 issue 19 error handling for corrupted JSON registry Signed-off-by: habeck --- cpex/tools/plugin_registry.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 49bfc91..1b96fd8 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -32,8 +32,26 @@ def __init__(self, *args, **kwargs): DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE if ipr_file.exists(): - with open(ipr_file, "r", encoding="utf-8") as ipr: - self.registry = InstalledPluginRegistry(**json.load(ipr)) + try: + with open(ipr_file, "r", encoding="utf-8") as ipr: + self.registry = InstalledPluginRegistry(**json.load(ipr)) + except (json.JSONDecodeError, ValueError, KeyError) as e: + # If registry is corrupted, log error and start fresh + import logging + logger = logging.getLogger(__name__) + logger.error( + "Corrupted plugin registry file at %s: %s. Starting with empty registry.", + ipr_file, + str(e) + ) + # Backup the corrupted file + backup_file = ipr_file.with_suffix(".json.corrupted") + try: + ipr_file.rename(backup_file) + logger.info("Backed up corrupted registry to %s", backup_file) + except Exception as backup_error: + logger.warning("Could not backup corrupted registry: %s", str(backup_error)) + self.registry = InstalledPluginRegistry() else: self.registry = InstalledPluginRegistry() From 2d07478852c2351a162a6bc03a9bbe68ebced5a8 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 15:31:45 -0400 Subject: [PATCH 78/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/plugin_registry.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 1b96fd8..f619595 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -38,11 +38,10 @@ def __init__(self, *args, **kwargs): except (json.JSONDecodeError, ValueError, KeyError) as e: # If registry is corrupted, log error and start fresh import logging + logger = logging.getLogger(__name__) logger.error( - "Corrupted plugin registry file at %s: %s. Starting with empty registry.", - ipr_file, - str(e) + "Corrupted plugin registry file at %s: %s. Starting with empty registry.", ipr_file, str(e) ) # Backup the corrupted file backup_file = ipr_file.with_suffix(".json.corrupted") From 610f53ccec1fb9e70d82ea88822cb4b0bfdb3f73 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 15:36:53 -0400 Subject: [PATCH 79/88] fix: P2 issue 24 - Registry file path triplicated Signed-off-by: habeck --- cpex/framework/models.py | 6 ++++-- cpex/tools/cli.py | 2 -- cpex/tools/plugin_registry.py | 7 +++---- cpex/tools/settings.py | 15 +++++++++++++++ 4 files changed, 22 insertions(+), 8 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 8f4e16f..21121dc 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2418,8 +2418,10 @@ def unregister_plugin(self, plugin_name: str) -> bool: def save(self) -> None: """Serialize the registry to disk atomically.""" - folder = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) - target = folder / "installed-plugins.json" + from cpex.tools.settings import get_plugin_registry_path + + target = get_plugin_registry_path() + folder = target.parent data = orjson.dumps(self.model_dump(), option=orjson.OPT_INDENT_2) tmp = tempfile.NamedTemporaryFile( diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index aebab5a..08f838b 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -72,8 +72,6 @@ DEFAULT_AUTHOR_EMAIL = "" DEFAULT_PROJECT_DIR = Path("./.") DEFAULT_INSTALL_MANIFEST = Path("plugins/install.yaml") -DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) -DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" DEFAULT_IMAGE_TAG = "contextforge-plugin:latest" # TBD: add plugin name and version DEFAULT_IMAGE_BUILDER = "docker" DEFAULT_BUILD_CONTEXT = "." diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index f619595..13c97bf 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -15,6 +15,7 @@ from cpex.framework.models import InstalledPluginInfo, InstalledPluginRegistry, PluginInstallationType, PluginManifest from cpex.framework.utils import find_package_path from cpex.tools.catalog import PluginCatalog +from cpex.tools.settings import get_plugin_registry_path class PluginRegistry: @@ -27,10 +28,8 @@ class PluginRegistry: def __init__(self, *args, **kwargs): """Initialize the plugin registry.""" super().__init__(*args, **kwargs) - DEFAULT_PLUGIN_REGISTRY_FOLDER = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) - os.makedirs(DEFAULT_PLUGIN_REGISTRY_FOLDER, exist_ok=True) - DEFAULT_PLUGIN_REGISTRY_FILE = "installed-plugins.json" - ipr_file = DEFAULT_PLUGIN_REGISTRY_FOLDER / DEFAULT_PLUGIN_REGISTRY_FILE + ipr_file = get_plugin_registry_path() + os.makedirs(ipr_file.parent, exist_ok=True) if ipr_file.exists(): try: with open(ipr_file, "r", encoding="utf-8") as ipr: diff --git a/cpex/tools/settings.py b/cpex/tools/settings.py index 16abad6..9010da3 100644 --- a/cpex/tools/settings.py +++ b/cpex/tools/settings.py @@ -7,6 +7,8 @@ """ import logging +import os +from pathlib import Path from dotenv import find_dotenv, load_dotenv from pydantic import Field @@ -44,3 +46,16 @@ def get_catalog_settings() -> CatalogSettings: CatalogSettings: Catalog settings. """ return CatalogSettings() + + +def get_plugin_registry_path() -> Path: + """Get the plugin registry file path. + + This centralizes the logic for determining where the plugin registry is stored. + Uses PLUGIN_REGISTRY_FILE env var if set, otherwise falls back to 'data' folder. + + Returns: + Path: Path to the installed-plugins.json file. + """ + folder = Path(os.environ.get("PLUGIN_REGISTRY_FILE", "data")) + return folder / "installed-plugins.json" From e6c5fa45197fdaa65fac7fcfe29ab56d66720663 Mon Sep 17 00:00:00 2001 From: habeck Date: Tue, 5 May 2026 15:37:29 -0400 Subject: [PATCH 80/88] chore: lint-fix Signed-off-by: habeck --- cpex/framework/models.py | 2 +- cpex/tools/cli.py | 1 - cpex/tools/settings.py | 4 ++-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 21121dc..91bba80 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2419,7 +2419,7 @@ def unregister_plugin(self, plugin_name: str) -> bool: def save(self) -> None: """Serialize the registry to disk atomically.""" from cpex.tools.settings import get_plugin_registry_path - + target = get_plugin_registry_path() folder = target.parent data = orjson.dumps(self.model_dump(), option=orjson.OPT_INDENT_2) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 08f838b..90a85f6 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -28,7 +28,6 @@ # Standard import json import logging -import os import shutil import subprocess # nosec B404 # Safe: Used only for git commands with hardcoded args from pathlib import Path diff --git a/cpex/tools/settings.py b/cpex/tools/settings.py index 9010da3..5634860 100644 --- a/cpex/tools/settings.py +++ b/cpex/tools/settings.py @@ -50,10 +50,10 @@ def get_catalog_settings() -> CatalogSettings: def get_plugin_registry_path() -> Path: """Get the plugin registry file path. - + This centralizes the logic for determining where the plugin registry is stored. Uses PLUGIN_REGISTRY_FILE env var if set, otherwise falls back to 'data' folder. - + Returns: Path: Path to the installed-plugins.json file. """ From d1ed7202bfa3d3e12e6aaf52bf2886964694d463 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 6 May 2026 11:24:43 -0400 Subject: [PATCH 81/88] fix: P2 issue 25 Signed-off-by: habeck --- cpex/tools/catalog.py | 10 +++++----- cpex/tools/settings.py | 15 ++++++++------- tests/unit/cpex/tools/test_catalog.py | 12 +++++++++++- 3 files changed, 24 insertions(+), 13 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 590a911..bc9fab9 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -51,11 +51,11 @@ class PluginCatalog: def __init__(self) -> None: """Utility for creating the catalog from one or more monorepos.""" settings = get_catalog_settings() - self.github_api = os.environ.get("PLUGINS_GITHUB_API", settings.PLUGINS_GITHUB_API) - self.github_token = os.environ.get("PLUGINS_GITHUB_TOKEN", None) - self.monorepos = os.environ.get("PLUGINS_REPO_URLS", settings.PLUGINS_REPO_URLS or "").split(",") - self.plugin_folder = os.environ.get("PLUGINS_FOLDER", settings.PLUGINS_FOLDER) - self.catalog_folder = os.environ.get("PLUGINS_CATALOG_FOLDER", settings.PLUGINS_CATALOG_FOLDER) + self.github_api = settings.GITHUB_API + self.github_token = settings.GITHUB_TOKEN + self.monorepos = settings.REPO_URLS.split(",") + self.plugin_folder = settings.FOLDER + self.catalog_folder = settings.CATALOG_FOLDER self.manifests: list[PluginManifest] = [] # Only create Auth.Token if a token is provided to avoid errors with None self.auth = Auth.Token(self.github_token) if self.github_token else None diff --git a/cpex/tools/settings.py b/cpex/tools/settings.py index 5634860..5adddb6 100644 --- a/cpex/tools/settings.py +++ b/cpex/tools/settings.py @@ -23,21 +23,22 @@ class CatalogSettings(BaseSettings): """Catalog settings.""" - PLUGINS_GITHUB_TOKEN: str | None = Field( + model_config = SettingsConfigDict(env_prefix="PLUGINS_", env_file=".env", env_file_encoding="utf-8", extra="ignore") + + GITHUB_TOKEN: str | None = Field( default=None, description="The github token for accessing the plugins repositories" ) - PLUGINS_GITHUB_API: str | None = Field(default="api.github.com", description="api.github.com") - PLUGINS_REPO_URLS: str = Field( + GITHUB_API: str | None = Field(default="api.github.com", description="api.github.com") + REPO_URLS: str = Field( default="https://github.com/ibm/cpex-plugins", description="The url of the plugins repositories comma separated" ) - PLUGINS_REGISTRY_FOLDER: str | None = Field( + REGISTRY_FOLDER: str | None = Field( default="data", description="The folder where the plugin registry is located (r/w)" ) - PLUGINS_CATALOG_FOLDER: str = Field( + CATALOG_FOLDER: str = Field( default="plugin-catalog", description="The folder where the plugin catalog is located (r/w)" ) - PLUGINS_FOLDER: str = Field(default="plugins", description="The folder where the plugins are located (r/w)") - model_config = SettingsConfigDict(env_prefix="PLUGINS_", env_file=".env", env_file_encoding="utf-8", extra="ignore") + FOLDER: str = Field(default="plugins", description="The folder where the plugins are located (r/w)") def get_catalog_settings() -> CatalogSettings: diff --git a/tests/unit/cpex/tools/test_catalog.py b/tests/unit/cpex/tools/test_catalog.py index d32ea8a..557fc7a 100644 --- a/tests/unit/cpex/tools/test_catalog.py +++ b/tests/unit/cpex/tools/test_catalog.py @@ -62,7 +62,17 @@ class TestPluginCatalogInit: def test_init_with_defaults(self): """Test initialization with default environment variables.""" with ( - patch.dict("os.environ", {"PLUGINS_GITHUB_TOKEN": "test_token"}, clear=True), + patch.dict( + "os.environ", + { + "PLUGINS_GITHUB_TOKEN": "test_token", + "PLUGINS_GITHUB_API": "api.github.com", + "PLUGINS_REPO_URLS": "https://github.com/ibm/cpex-plugins", + "PLUGINS_FOLDER": "plugins", + "PLUGINS_CATALOG_FOLDER": "plugin-catalog", + }, + clear=True, + ), patch("cpex.tools.catalog.Github"), ): catalog = PluginCatalog() From ca2fd8729d9cb671535d0a604002bb6e56038adf Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 6 May 2026 11:45:04 -0400 Subject: [PATCH 82/88] fix: update worker to call cpex.framework.utils.import_module rather than importlib.import_module directly. Signed-off-by: habeck --- cpex/framework/isolated/worker.py | 4 +-- .../cpex/framework/isolated/test_worker.py | 32 +++++-------------- 2 files changed, 10 insertions(+), 26 deletions(-) diff --git a/cpex/framework/isolated/worker.py b/cpex/framework/isolated/worker.py index 6a6dbe2..74deee0 100644 --- a/cpex/framework/isolated/worker.py +++ b/cpex/framework/isolated/worker.py @@ -24,7 +24,7 @@ from cpex.framework.loader.plugin import ALLOWED_PLUGIN_DIRS from cpex.framework.manager import PluginExecutor from cpex.framework.models import PluginConfig, PluginContext -from cpex.framework.utils import parse_class_name +from cpex.framework.utils import import_module, parse_class_name logger = logging.getLogger(__name__) @@ -115,7 +115,7 @@ async def process_task(task_data, tp: TaskProcessor): hook_type = task_data.get(HOOK_TYPE) cls_name: str = task_data.get("class_name") mod_name, n_cls_name = parse_class_name(cls_name) - module: ModuleType = importlib.import_module(mod_name) + module: ModuleType = import_module(mod_name) # cool, we found the module, and verified it implemented the hook type. class_ = getattr(module, n_cls_name) plugin_type = cast(Type[Plugin], class_) diff --git a/tests/unit/cpex/framework/isolated/test_worker.py b/tests/unit/cpex/framework/isolated/test_worker.py index 7626535..3dcfb68 100644 --- a/tests/unit/cpex/framework/isolated/test_worker.py +++ b/tests/unit/cpex/framework/isolated/test_worker.py @@ -63,17 +63,12 @@ async def test_process_task_info(self): assert result["message"] == "Environment info retrieved successfully" @pytest.mark.asyncio - @patch("cpex.framework.isolated.worker.get_proper_config") - @patch("cpex.framework.isolated.worker.importlib.import_module") + @patch("cpex.framework.isolated.worker.import_module") @patch("cpex.framework.isolated.worker.PluginExecutor") async def test_process_task_load_and_run_hook_success( - self, mock_executor_class, mock_import, mock_get_config, mock_plugin_dirs + self, mock_executor_class, mock_import, mock_plugin_dirs ): """Test processing load_and_run_hook task successfully.""" - # Setup mock config - mock_config = MagicMock() - mock_config.name = "test_plugin" - mock_get_config.return_value = mock_config # Setup mock plugin class mock_plugin_instance = AsyncMock() @@ -115,13 +110,9 @@ async def test_process_task_load_and_run_hook_success( self.cleanup_mock_plugin_dirs() @pytest.mark.asyncio - @patch("cpex.framework.isolated.worker.get_proper_config") - @patch("cpex.framework.isolated.worker.importlib.import_module") - async def test_process_task_load_and_run_hook_import_error(self, mock_import, mock_get_config, mock_plugin_dirs): + @patch("cpex.framework.isolated.worker.import_module") + async def test_process_task_load_and_run_hook_import_error(self, mock_import, mock_plugin_dirs): """Test processing load_and_run_hook task with import error.""" - mock_config = MagicMock() - mock_get_config.return_value = mock_config - mock_import.side_effect = ImportError("Module not found") config_dict = {"name": "test_plugin", "kind": "isolated_venv"} @@ -139,16 +130,12 @@ async def test_process_task_load_and_run_hook_import_error(self, mock_import, mo await process_task(task_data, tp) @pytest.mark.asyncio - @patch("cpex.framework.isolated.worker.get_proper_config") - @patch("cpex.framework.isolated.worker.importlib.import_module") + @patch("cpex.framework.isolated.worker.import_module") @patch("cpex.framework.isolated.worker.PluginExecutor") async def test_process_task_with_different_hook_types( - self, mock_executor_class, mock_import, mock_get_config, mock_plugin_dirs + self, mock_executor_class, mock_import, mock_plugin_dirs ): """Test processing tasks with different hook types.""" - # Setup mocks - mock_config = MagicMock() - mock_get_config.return_value = mock_config mock_plugin_instance = MagicMock() mock_plugin_instance.initialize = AsyncMock() @@ -197,15 +184,12 @@ async def test_process_task_unknown_task_type(self): assert result == {"message": "task type not supported.", "request_id": "unknown", "status": "error"} @pytest.mark.asyncio - @patch("cpex.framework.isolated.worker.get_proper_config") - @patch("cpex.framework.isolated.worker.importlib.import_module") + @patch("cpex.framework.isolated.worker.import_module") @patch("cpex.framework.isolated.worker.PluginExecutor") async def test_process_task_with_metadata( - self, mock_executor_class, mock_import, mock_get_config, mock_plugin_dirs + self, mock_executor_class, mock_import, mock_plugin_dirs ): """Test processing task with metadata in context.""" - mock_config = MagicMock() - mock_get_config.return_value = mock_config mock_plugin_instance = AsyncMock() mock_plugin_instance.initialize = AsyncMock() From 61c6ea292776a12bb5c1c365a4204661d10785c5 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 6 May 2026 15:37:33 -0400 Subject: [PATCH 83/88] enh: add package integrity verification Signed-off-by: habeck --- cpex/tools/catalog.py | 122 ++++++- cpex/tools/cli.py | 33 +- cpex/tools/integrity.py | 300 +++++++++++++++++ cpex/tools/settings.py | 8 + docs/content/docs/package-integrity.md | 323 ++++++++++++++++++ tests/unit/cpex/tools/test_integrity.py | 421 ++++++++++++++++++++++++ 6 files changed, 1193 insertions(+), 14 deletions(-) create mode 100644 cpex/tools/integrity.py create mode 100644 docs/content/docs/package-integrity.md create mode 100644 tests/unit/cpex/tools/test_integrity.py diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index bc9fab9..9faaa6a 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -38,6 +38,12 @@ PyPiRepo, ) from cpex.framework.utils import find_package_path +from cpex.tools.integrity import ( + IntegrityVerificationError, + fetch_pypi_package_hashes, + find_matching_hash, + verify_package_integrity, +) from cpex.tools.settings import get_catalog_settings logger = logging.getLogger(__name__) @@ -614,7 +620,7 @@ def find(self, plugin_name: str) -> Optional[PluginManifest]: return manifest return None - def install_folder_via_pip(self, manifest: PluginManifest) -> Path | None: + def install_folder_via_pip(self, manifest: PluginManifest, verify_integrity: bool = True) -> Path | None: """ Runs a pip install using subfolder syntax for monorepo plugins. For isolated_venv plugins, checks manifest kind BEFORE installing to avoid dependency conflicts. @@ -622,6 +628,7 @@ def install_folder_via_pip(self, manifest: PluginManifest) -> Path | None: Args: manifest: The PluginManifest of the plugin to be installed + verify_integrity: Whether to compute and log package hash for verification Raises: RuntimeError: If package installation fails. @@ -636,7 +643,7 @@ def install_folder_via_pip(self, manifest: PluginManifest) -> Path | None: if manifest.kind == "isolated_venv": logger.info("Detected isolated_venv plugin from monorepo: %s", manifest.name) # Install the package to make it available for venv initialization - package_path = self._download_monorepo_folder_to_temp(repo_url, manifest.name) + package_path = self._download_monorepo_folder_to_temp(repo_url, manifest.name, verify_integrity=verify_integrity) plugin_path = self._initialize_isolated_venv(manifest, package_path) logger.info("Isolated venv initialized. Plugin will be auto-installed via requirements.txt") else: @@ -829,12 +836,13 @@ def _extract_package_archive(self, package_file: Path, extract_dir: Path) -> Non else: raise RuntimeError(f"Unsupported package format: {package_file}") - def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> Path: + def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str, verify_integrity: bool = True) -> Path: """Download monorepo folder to temporary directory. Args: repo_url: The URL of the monorepo. package_name: Name used in error messages. + verify_integrity: Whether to compute and log package hash for verification. Returns: Path to the extracted package directory. Caller is responsible for cleanup. @@ -861,6 +869,24 @@ def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> if not downloaded_files: raise RuntimeError(f"No files downloaded for {package_name}") package_file = downloaded_files[0] + + # Compute and log hash for integrity verification + if verify_integrity: + try: + from cpex.tools.integrity import compute_file_hash + package_hash = compute_file_hash(package_file) + logger.info( + "Package integrity hash for %s (%s): SHA256=%s", + package_name, + package_file.name, + package_hash + ) + logger.info( + "Store this hash for future verification or to detect tampering" + ) + except Exception as e: + logger.warning("Failed to compute package hash: %s", str(e)) + extract_dir = temp_dir / "extracted" extract_dir.mkdir() @@ -878,7 +904,7 @@ def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str) -> def _download_package_to_temp( - self, package_name: str, version_constraint: str | None, use_test: bool = False + self, package_name: str, version_constraint: str | None, use_test: bool = False, verify_integrity: bool = True ) -> Path: """Download package to a temporary directory without installing it. @@ -886,12 +912,14 @@ def _download_package_to_temp( package_name: The PyPI package name to download. version_constraint: Optional version constraint. use_test: Whether to use test.pypi.org. + verify_integrity: Whether to verify package integrity using SHA256 hashes. Returns: Path to the downloaded package directory. Raises: RuntimeError: If download fails. + IntegrityVerificationError: If hash verification fails. """ try: @@ -904,6 +932,33 @@ def _download_package_to_temp( if ppi.version_constraint is not None: tgt = f"{tgt}{ppi.version_constraint}" + # Fetch expected hashes from PyPI before downloading (if verification enabled) + expected_hashes = {} + if verify_integrity: + try: + logger.info("Fetching package hashes from PyPI for %s", package_name) + # Extract version from constraint if available, otherwise fetch latest + version_to_fetch = None + if version_constraint: + # Try to extract exact version from constraint (e.g., "==1.0.0" -> "1.0.0") + import re + version_match = re.search(r'==\s*([0-9.]+)', version_constraint) + if version_match: + version_to_fetch = version_match.group(1) + + expected_hashes = fetch_pypi_package_hashes( + package_name=package_name, + version=version_to_fetch, + use_test=use_test + ) + if expected_hashes: + logger.info("Retrieved hashes for %d distribution files", len(expected_hashes)) + else: + logger.warning("No hashes available from PyPI for %s", package_name) + except Exception as e: + logger.warning("Failed to fetch hashes from PyPI: %s. Proceeding without verification.", str(e)) + expected_hashes = {} + # Download package without installing download_args = [ self.python_executable, @@ -928,6 +983,24 @@ def _download_package_to_temp( raise RuntimeError(f"No files downloaded for {package_name}") package_file = downloaded_files[0] + + # Verify package integrity if hashes are available + if verify_integrity and expected_hashes: + expected_hash = find_matching_hash(package_file, expected_hashes, package_name) + if expected_hash: + logger.info("Verifying integrity of %s", package_file.name) + verify_package_integrity( + file_path=package_file, + expected_hash=expected_hash, + package_name=package_name, + strict=True + ) + else: + logger.warning( + "No matching hash found for %s. Proceeding without verification.", + package_file.name + ) + extract_dir = temp_dir / "extracted" extract_dir.mkdir() @@ -937,9 +1010,15 @@ def _download_package_to_temp( logger.info("Downloaded and extracted %s to %s", package_name, extract_dir) return extract_dir + except IntegrityVerificationError: + # Re-raise integrity errors without wrapping + shutil.rmtree(temp_dir, ignore_errors=True) + raise except subprocess.CalledProcessError as e: + shutil.rmtree(temp_dir, ignore_errors=True) raise RuntimeError(f"Failed to download {package_name}: {e.stderr}") from e except Exception as e: + shutil.rmtree(temp_dir, ignore_errors=True) raise RuntimeError(f"Unexpected error downloading {package_name}: {str(e)}") from e def _find_manifest_in_extracted_package(self, extract_dir: Path, package_name: str) -> Path: @@ -1253,7 +1332,11 @@ def _finalize_plugin_installation( return actual_plugin_path if actual_plugin_path is not None else plugin_path def install_from_pypi( - self, plugin_package_name: str, version_constraint: str | None = None, use_pytest: bool = False + self, + plugin_package_name: str, + version_constraint: str | None = None, + use_pytest: bool = False, + verify_integrity: bool = True, ) -> tuple[PluginManifest, Path | None]: """Install Python package from PyPI and load its plugin-manifest.yaml. @@ -1270,6 +1353,8 @@ def install_from_pypi( Args: plugin_package_name: The name of the package hosted on PyPI. version_constraint: Optional version constraint (e.g., ">=1.0.0,<2.0.0"). + use_pytest: Whether to use test.pypi.org instead of pypi.org. + verify_integrity: Whether to verify package integrity using SHA256 hashes from PyPI. Returns: The loaded and validated plugin manifest. @@ -1277,10 +1362,13 @@ def install_from_pypi( Raises: RuntimeError: If any step of the installation process fails. FileNotFoundError: If plugin-manifest.yaml is not found in the package. + IntegrityVerificationError: If package hash verification fails. """ - # Step 1: Download package to temporary location to read manifest - temp_extract_dir = self._download_package_to_temp(plugin_package_name, version_constraint, use_pytest) + # Step 1: Download package to temporary location to read manifest (with integrity verification) + temp_extract_dir = self._download_package_to_temp( + plugin_package_name, version_constraint, use_pytest, verify_integrity=verify_integrity + ) try: # Step 2: Find and load the manifest file @@ -1314,7 +1402,7 @@ def install_from_pypi( if temp_extract_dir.exists(): shutil.rmtree(temp_extract_dir.parent) - def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: + def install_from_git(self, url: str, verify_integrity: bool = True) -> tuple[PluginManifest, Path | None]: """Install Python package from Git repository and load its plugin-manifest.yaml. This method performs the following steps: @@ -1333,6 +1421,7 @@ def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: - MyProject @ git+ssh://git@git.example.com/MyProject - MyProject @ git+https://git.example.com/MyProject - MyProject @ git+https://git.example.com/MyProject@master + verify_integrity: Whether to compute and log package hash for verification Returns: Tuple of (PluginManifest, Path to plugin or None) @@ -1418,6 +1507,23 @@ def install_from_git(self, url: str) -> tuple[PluginManifest, Path | None]: archive_path = archives[0] logger.info("Downloaded archive: %s", archive_path.name) + # Compute and log hash for integrity verification + if verify_integrity: + try: + from cpex.tools.integrity import compute_file_hash + package_hash = compute_file_hash(archive_path) + logger.info( + "Package integrity hash for %s (%s): SHA256=%s", + package_name, + archive_path.name, + package_hash + ) + logger.info( + "Store this hash for future verification or to detect tampering" + ) + except Exception as e: + logger.warning("Failed to compute package hash: %s", str(e)) + # Extract the archive using common helper self._extract_package_archive(archive_path, temp_extract_dir) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 90a85f6..50a6695 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -47,6 +47,7 @@ ) from cpex.framework.settings import settings from cpex.tools.catalog import PluginCatalog +from cpex.tools.settings import get_catalog_settings # Third-Party from cpex.tools.plugin_registry import PluginRegistry @@ -516,17 +517,24 @@ def _install_from_local(source: str, catalog: PluginCatalog, use_test: bool = Fa def _install_from_git(source: str, catalog: PluginCatalog, use_test: bool = False): - """Handle git-based installation (not yet implemented). + """Handle git-based installation. Args: source: Git repository URL or path. catalog: The plugin catalog. - - Raises: - NotImplementedError: Git installation is not yet supported. + use_test: Unused for git installations (kept for consistency). """ + # Get integrity verification setting from catalog settings + catalog_settings = get_catalog_settings() + verify_integrity = catalog_settings.VERIFY_PACKAGE_INTEGRITY + + if verify_integrity: + console.log("Package integrity verification: enabled (hash will be computed and logged)") + else: + console.log("Package integrity verification: disabled") + with console.status(f"Installing plugin from source {source}...", spinner="dots"): - manifest, installation_path = catalog.install_from_git(source) + manifest, installation_path = catalog.install_from_git(source, verify_integrity=verify_integrity) _finalize_installation(manifest, "git", catalog, installation_path) console.print(f":white_heavy_check_mark: {manifest.name} installation complete.") @@ -562,15 +570,28 @@ def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = Fal Args: source: PyPI package name, optionally with version constraint (e.g., "package@>=1.0.0"). catalog: The plugin catalog. + use_test: Whether to use test.pypi.org instead of pypi.org. """ logger.info("Trying to install from pypi package %s", source) # Parse version constraint package_name, version_constraint = _parse_pypi_source(source) + # Get integrity verification setting from catalog settings + catalog_settings = get_catalog_settings() + verify_integrity = catalog_settings.VERIFY_PACKAGE_INTEGRITY + + if verify_integrity: + console.log(f"Package integrity verification: enabled") + else: + console.log(f"Package integrity verification: disabled") + with console.status(f"Installing plugin {package_name} via pypi", spinner="dots"): manifest, plugin_path = catalog.install_from_pypi( - plugin_package_name=package_name, version_constraint=version_constraint, use_pytest=use_test + plugin_package_name=package_name, + version_constraint=version_constraint, + use_pytest=use_test, + verify_integrity=verify_integrity, ) if manifest is None: diff --git a/cpex/tools/integrity.py b/cpex/tools/integrity.py new file mode 100644 index 0000000..8be0766 --- /dev/null +++ b/cpex/tools/integrity.py @@ -0,0 +1,300 @@ +# -*- coding: utf-8 -*- +"""Location: ./cpex/tools/integrity.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Ted Habeck + +Package integrity verification utilities. + +This module provides SHA256 hash verification for downloaded packages +to ensure integrity beyond pip's built-in checks. It fetches expected +hashes from PyPI's JSON API and verifies downloaded files against them. + +Features +──────── +* SHA256 hash computation for package files +* PyPI JSON API integration for hash retrieval +* Configurable verification modes (strict/permissive) +* Detailed logging and error reporting + +Typical usage +───────────── +```python +from cpex.tools.integrity import verify_package_integrity, fetch_pypi_package_hashes + +# Fetch expected hashes from PyPI +hashes = fetch_pypi_package_hashes("requests", "2.31.0") + +# Verify downloaded package +verify_package_integrity(Path("/tmp/requests-2.31.0.tar.gz"), hashes["sha256"]) +``` +""" + +# Standard +import hashlib +import logging +from pathlib import Path +from typing import Optional + +import httpx + +logger = logging.getLogger(__name__) + +# Constants +PYPI_JSON_API_URL = "https://pypi.org/pypi/{package}/json" +TEST_PYPI_JSON_API_URL = "https://test.pypi.org/pypi/{package}/json" +HASH_CHUNK_SIZE = 8192 # 8KB chunks for efficient file reading + + +class IntegrityVerificationError(Exception): + """Raised when package integrity verification fails. + + This exception indicates that a downloaded package's hash does not + match the expected hash from PyPI, suggesting potential tampering + or corruption. + + Attributes: + package_name: Name of the package that failed verification. + expected_hash: The expected SHA256 hash from PyPI. + actual_hash: The computed SHA256 hash of the downloaded file. + """ + + def __init__(self, package_name: str, expected_hash: str, actual_hash: str): + """Initialize the exception with verification details. + + Args: + package_name: Name of the package that failed verification. + expected_hash: The expected SHA256 hash from PyPI. + actual_hash: The computed SHA256 hash of the downloaded file. + """ + self.package_name = package_name + self.expected_hash = expected_hash + self.actual_hash = actual_hash + super().__init__( + f"Integrity verification failed for {package_name}: " + f"expected {expected_hash[:16]}..., got {actual_hash[:16]}..." + ) + + +def compute_file_hash(file_path: Path, algorithm: str = "sha256") -> str: + """Compute cryptographic hash of a file. + + Reads the file in chunks to handle large files efficiently without + loading the entire file into memory. + + Args: + file_path: Path to the file to hash. + algorithm: Hash algorithm to use (default: sha256). + + Returns: + Hexadecimal hash string. + + Raises: + FileNotFoundError: If the file does not exist. + ValueError: If the hash algorithm is not supported. + + Examples: + >>> from pathlib import Path + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(mode='w', delete=False) as f: + ... _ = f.write("test content") + ... temp_path = Path(f.name) + >>> hash_value = compute_file_hash(temp_path) + >>> len(hash_value) + 64 + >>> temp_path.unlink() + """ + if not file_path.exists(): + raise FileNotFoundError(f"File not found: {file_path}") + + try: + hasher = hashlib.new(algorithm) + except ValueError as e: + raise ValueError(f"Unsupported hash algorithm: {algorithm}") from e + + with open(file_path, "rb") as f: + while chunk := f.read(HASH_CHUNK_SIZE): + hasher.update(chunk) + + hash_value = hasher.hexdigest() + logger.debug("Computed %s hash for %s: %s", algorithm, file_path.name, hash_value[:16] + "...") + return hash_value + + +def fetch_pypi_package_hashes( + package_name: str, version: Optional[str] = None, use_test: bool = False, timeout: float = 30.0 +) -> dict[str, dict[str, str]]: + """Fetch package hashes from PyPI JSON API. + + Retrieves SHA256 hashes for all distribution files of a package version + from PyPI's JSON API. If no version is specified, fetches hashes for + the latest version. + + Args: + package_name: Name of the package on PyPI. + version: Specific version to fetch hashes for (optional). + use_test: Whether to use test.pypi.org instead of pypi.org. + timeout: HTTP request timeout in seconds. + + Returns: + Dictionary mapping filename to hash information: + { + "package-1.0.0.tar.gz": { + "sha256": "abc123...", + "url": "https://files.pythonhosted.org/..." + } + } + + Raises: + RuntimeError: If the API request fails or package is not found. + + Examples: + >>> hashes = fetch_pypi_package_hashes("requests", "2.31.0") # doctest: +SKIP + >>> "requests-2.31.0.tar.gz" in hashes # doctest: +SKIP + True + """ + api_url = TEST_PYPI_JSON_API_URL if use_test else PYPI_JSON_API_URL + url = api_url.format(package=package_name) + + if version: + url = f"{url.rstrip('/json')}/{version}/json" + + logger.debug("Fetching package hashes from: %s", url) + + try: + with httpx.Client(timeout=timeout) as client: + response = client.get(url) + response.raise_for_status() + data = response.json() + + except httpx.HTTPStatusError as e: + if e.response.status_code == 404: + raise RuntimeError(f"Package '{package_name}' not found on {'test.' if use_test else ''}PyPI") from e + raise RuntimeError(f"Failed to fetch package metadata: {e}") from e + except httpx.RequestError as e: + raise RuntimeError(f"Network error fetching package metadata: {e}") from e + except Exception as e: + raise RuntimeError(f"Unexpected error fetching package metadata: {e}") from e + + # Extract hashes from the response + hashes = {} + urls = data.get("urls", []) + + if not urls: + logger.warning("No distribution files found for %s", package_name) + return hashes + + for file_info in urls: + filename = file_info.get("filename") + digests = file_info.get("digests", {}) + sha256_hash = digests.get("sha256") + file_url = file_info.get("url") + + if filename and sha256_hash: + hashes[filename] = {"sha256": sha256_hash, "url": file_url} + logger.debug("Found hash for %s: %s...", filename, sha256_hash[:16]) + + logger.info("Fetched hashes for %d distribution files of %s", len(hashes), package_name) + return hashes + + +def verify_package_integrity( + file_path: Path, expected_hash: str, package_name: Optional[str] = None, strict: bool = True +) -> bool: + """Verify package file integrity against expected SHA256 hash. + + Computes the SHA256 hash of the file and compares it to the expected + hash. In strict mode, raises an exception on mismatch. In non-strict + mode, logs a warning and returns False. + + Args: + file_path: Path to the package file to verify. + expected_hash: Expected SHA256 hash (hexadecimal string). + package_name: Name of the package (for error messages). + strict: If True, raise exception on mismatch. If False, return False. + + Returns: + True if hash matches, False if mismatch in non-strict mode. + + Raises: + IntegrityVerificationError: If hash doesn't match in strict mode. + FileNotFoundError: If the file does not exist. + + Examples: + >>> from pathlib import Path + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(mode='w', delete=False) as f: + ... _ = f.write("test") + ... temp_path = Path(f.name) + >>> expected = compute_file_hash(temp_path) + >>> verify_package_integrity(temp_path, expected, "test-pkg") + True + >>> temp_path.unlink() + """ + if not file_path.exists(): + raise FileNotFoundError(f"Package file not found: {file_path}") + + pkg_name = package_name or file_path.name + logger.info("Verifying integrity of %s", pkg_name) + + actual_hash = compute_file_hash(file_path) + + if actual_hash.lower() == expected_hash.lower(): + logger.info("✓ Integrity verification passed for %s", pkg_name) + return True + + error_msg = ( + f"Integrity verification failed for {pkg_name}\n" + f" Expected: {expected_hash}\n" + f" Actual: {actual_hash}\n" + f" File: {file_path}" + ) + + if strict: + logger.error(error_msg) + raise IntegrityVerificationError(pkg_name, expected_hash, actual_hash) + + logger.warning(error_msg) + return False + + +def find_matching_hash( + file_path: Path, hashes_dict: dict[str, dict[str, str]], package_name: Optional[str] = None +) -> Optional[str]: + """Find the expected hash for a downloaded file from PyPI hashes dictionary. + + Matches the downloaded file against the hashes dictionary by filename. + Handles various filename patterns including wheels and source distributions. + + Args: + file_path: Path to the downloaded package file. + hashes_dict: Dictionary of hashes from fetch_pypi_package_hashes(). + package_name: Name of the package (for logging). + + Returns: + The expected SHA256 hash if found, None otherwise. + + Examples: + >>> hashes = {"pkg-1.0.0.tar.gz": {"sha256": "abc123", "url": "..."}} + >>> find_matching_hash(Path("/tmp/pkg-1.0.0.tar.gz"), hashes) + 'abc123' + """ + filename = file_path.name + pkg_name = package_name or filename + + if filename in hashes_dict: + hash_value = hashes_dict[filename]["sha256"] + logger.debug("Found matching hash for %s", filename) + return hash_value + + # Try case-insensitive match + for key, value in hashes_dict.items(): + if key.lower() == filename.lower(): + hash_value = value["sha256"] + logger.debug("Found case-insensitive match for %s", filename) + return hash_value + + logger.warning("No matching hash found for %s in PyPI metadata", pkg_name) + return None + +# Made with Bob diff --git a/cpex/tools/settings.py b/cpex/tools/settings.py index 5adddb6..2ae192b 100644 --- a/cpex/tools/settings.py +++ b/cpex/tools/settings.py @@ -39,6 +39,14 @@ class CatalogSettings(BaseSettings): default="plugin-catalog", description="The folder where the plugin catalog is located (r/w)" ) FOLDER: str = Field(default="plugins", description="The folder where the plugins are located (r/w)") + VERIFY_PACKAGE_INTEGRITY: bool = Field( + default=True, + description="Enable SHA256 hash verification for downloaded packages from PyPI" + ) + STRICT_INTEGRITY_MODE: bool = Field( + default=False, + description="Fail installation if package hashes are unavailable (strict mode)" + ) def get_catalog_settings() -> CatalogSettings: diff --git a/docs/content/docs/package-integrity.md b/docs/content/docs/package-integrity.md new file mode 100644 index 0000000..66dda63 --- /dev/null +++ b/docs/content/docs/package-integrity.md @@ -0,0 +1,323 @@ +--- +title: "Package Integrity Verification" +weight: 150 +--- + +# Package Integrity Verification + +The CPEX framework includes built-in SHA256 hash verification for packages, providing an additional security layer beyond pip's built-in checks. + +## Overview + +The framework provides integrity verification for different installation sources: + +### PyPI Packages + +When installing plugins from PyPI, the framework automatically: + +1. Fetches expected SHA256 hashes from PyPI's JSON API +2. Downloads the package file +3. Computes the SHA256 hash of the downloaded file +4. Compares the computed hash against the expected hash +5. Aborts installation if hashes don't match + +### Git and Monorepo Packages + +When installing from Git repositories or monorepos, the framework: + +1. Downloads the package archive +2. Computes the SHA256 hash of the downloaded file +3. Logs the hash for future reference and verification +4. Allows manual verification against known-good hashes + +This protects against: +- **Tampered packages**: Detects if a package has been modified in transit +- **Corrupted downloads**: Identifies incomplete or corrupted downloads +- **Supply chain attacks**: Verifies package authenticity + +## Configuration + +### Environment Variables + +Control integrity verification behavior using environment variables: + +```bash +# Enable/disable integrity verification (default: true) +export PLUGINS_VERIFY_PACKAGE_INTEGRITY=true + +# Strict mode: fail if hashes unavailable (default: false) +export PLUGINS_STRICT_INTEGRITY_MODE=false +``` + +### Configuration File + +Add to your `.env` file: + +```ini +# Package Integrity Verification +PLUGINS_VERIFY_PACKAGE_INTEGRITY=true +PLUGINS_STRICT_INTEGRITY_MODE=false +``` + +## Verification Modes + +### Standard Mode (Default) + +```bash +PLUGINS_VERIFY_PACKAGE_INTEGRITY=true +PLUGINS_STRICT_INTEGRITY_MODE=false +``` + +**Behavior:** +- Verifies packages when hashes are available +- Warns but continues if hashes are unavailable +- Fails immediately on hash mismatch + +**Use case:** Recommended for most deployments. Provides security without breaking installations for packages that don't publish hashes. + +### Strict Mode + +```bash +PLUGINS_VERIFY_PACKAGE_INTEGRITY=true +PLUGINS_STRICT_INTEGRITY_MODE=true +``` + +**Behavior:** +- Requires hashes for all packages +- Fails installation if hashes are unavailable +- Fails immediately on hash mismatch + +**Use case:** High-security environments where all packages must be verifiable. + +### Disabled Mode + +```bash +PLUGINS_VERIFY_PACKAGE_INTEGRITY=false +``` + +**Behavior:** +- Skips hash verification entirely +- Relies only on pip's built-in checks + +**Use case:** Development environments or when troubleshooting installation issues. + +## Usage Examples + +### Installing with Verification (Default) + +**PyPI Installation:** +```bash +# Verification is enabled by default +cpex plugin install --type pypi my-plugin + +# Output shows verification status: +# Package integrity verification: enabled +# Fetching package hashes from PyPI for my-plugin +# Retrieved hashes for 2 distribution files +# Verifying integrity of my-plugin-1.0.0.tar.gz +# ✓ Integrity verification passed for my-plugin +``` + +**Git Installation:** +```bash +# Hash is computed and logged for future verification +cpex plugin install --type git "MyPlugin @ git+https://github.com/user/repo.git" + +# Output shows: +# Package integrity verification: enabled (hash will be computed and logged) +# Package integrity hash for MyPlugin (MyPlugin-1.0.0.tar.gz): SHA256=abc123def456... +# Store this hash for future verification or to detect tampering +``` + +**Monorepo Installation:** +```bash +# Hash is computed and logged +cpex plugin install --type monorepo my-plugin + +# Output shows: +# Package integrity hash for my-plugin (my-plugin-1.0.0.tar.gz): SHA256=abc123def456... +# Store this hash for future verification or to detect tampering +``` + +### Installing with Verification Disabled + +```bash +# Temporarily disable verification +export PLUGINS_VERIFY_PACKAGE_INTEGRITY=false +cpex plugin install --type pypi my-plugin + +# Output shows: +# Package integrity verification: disabled +``` + +### Installing from Test PyPI + +```bash +# Verification works with test.pypi.org too +cpex plugin install --type test-pypi my-test-plugin +``` + +## Error Handling + +### Hash Mismatch + +If a downloaded package's hash doesn't match the expected hash: + +``` +ERROR: Integrity verification failed for my-plugin + Expected: abc123def456... + Actual: 789ghi012jkl... + File: /tmp/cpex_plugin_my-plugin_xyz/my-plugin-1.0.0.tar.gz + +IntegrityVerificationError: Integrity verification failed for my-plugin +``` + +**Resolution:** +1. Retry the installation (may be a transient network issue) +2. Check if PyPI is experiencing issues +3. Report to package maintainer if problem persists + +### Hash Unavailable + +If PyPI doesn't provide hashes for a package: + +**Standard Mode:** +``` +WARNING: No hashes available from PyPI for my-plugin +WARNING: No matching hash found for my-plugin-1.0.0.tar.gz. Proceeding without verification. +``` +Installation continues. + +**Strict Mode:** +``` +ERROR: No hashes available from PyPI for my-plugin +RuntimeError: Package hashes required in strict mode but not available +``` +Installation fails. + +### Network Error Fetching Hashes + +If the PyPI API is unreachable: + +``` +WARNING: Failed to fetch hashes from PyPI: Connection timeout. Proceeding without verification. +``` + +Installation continues to avoid breaking deployments due to temporary network issues. + +## Security Best Practices + +### Production Deployments + +1. **Enable verification** (default setting) +2. **Monitor logs** for verification warnings +3. **Consider strict mode** for critical environments +4. **Use private PyPI mirrors** with known-good packages + +### Development Environments + +1. **Keep verification enabled** to catch issues early +2. **Use standard mode** for flexibility +3. **Disable only when troubleshooting** specific issues + +### CI/CD Pipelines + +1. **Enable verification** in all pipelines +2. **Use strict mode** for production deployments +3. **Cache verified packages** to reduce API calls +4. **Fail builds** on verification errors + +## Technical Details + +### Hash Algorithm + +- **Algorithm**: SHA256 (256-bit) +- **Source**: PyPI JSON API (`/pypi/{package}/json`) +- **Format**: Hexadecimal string (64 characters) + +### Verification Process + +```python +# 1. Fetch expected hashes from PyPI +hashes = fetch_pypi_package_hashes("my-plugin", "1.0.0") +# Returns: {"my-plugin-1.0.0.tar.gz": {"sha256": "abc123...", "url": "..."}} + +# 2. Download package +package_file = download_package("my-plugin==1.0.0") + +# 3. Compute actual hash +actual_hash = compute_file_hash(package_file) + +# 4. Verify +if actual_hash != expected_hash: + raise IntegrityVerificationError(...) +``` + +### Performance Impact + +- **API Call**: ~100-500ms to fetch hashes from PyPI +- **Hash Computation**: ~10-50ms per MB of package size +- **Total Overhead**: Typically <1 second per package + +The overhead is minimal compared to download time and provides significant security benefits. + +## Troubleshooting + +### Verification Always Fails + +**Symptoms:** Every package fails verification with hash mismatch. + +**Possible Causes:** +1. Corporate proxy modifying downloads +2. Antivirus scanning altering files +3. Disk corruption + +**Solutions:** +1. Check proxy configuration +2. Temporarily disable antivirus +3. Run disk check utility + +### Verification Warnings for All Packages + +**Symptoms:** "No hashes available" warning for every package. + +**Possible Causes:** +1. Network blocking PyPI API +2. Firewall rules +3. PyPI API outage + +**Solutions:** +1. Check network connectivity to `pypi.org` +2. Review firewall rules +3. Check PyPI status page + +### Slow Installations + +**Symptoms:** Package installations take much longer than expected. + +**Possible Causes:** +1. Slow network to PyPI API +2. Large packages taking time to hash + +**Solutions:** +1. Use a PyPI mirror closer to your location +2. Consider caching verified packages +3. This is normal for very large packages (>100MB) + +## API Reference + +See [`cpex.tools.integrity`](../api-reference/#cpextoolsintegrity) for detailed API documentation. + +## Related Documentation + +- [CLI Reference](../cli/) - Command-line usage +- [Configuration](../configuration/) - General configuration options +- [Security Best Practices](../security/) - Comprehensive security guide + +## Changelog + +### Version 0.1.0rc1 +- Initial implementation of SHA256 hash verification +- Support for PyPI and Test PyPI +- Configurable verification modes (standard/strict/disabled) +- Comprehensive error handling and logging \ No newline at end of file diff --git a/tests/unit/cpex/tools/test_integrity.py b/tests/unit/cpex/tools/test_integrity.py new file mode 100644 index 0000000..4efd8dc --- /dev/null +++ b/tests/unit/cpex/tools/test_integrity.py @@ -0,0 +1,421 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/cpex/tools/test_integrity.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Ted Habeck + +Unit tests for package integrity verification. +""" + +import tempfile +from pathlib import Path +from unittest.mock import MagicMock, patch + +import httpx +import pytest + +from cpex.tools.integrity import ( + IntegrityVerificationError, + compute_file_hash, + fetch_pypi_package_hashes, + find_matching_hash, + verify_package_integrity, +) + + +class TestComputeFileHash: + """Tests for compute_file_hash function.""" + + def test_compute_hash_basic(self, tmp_path): + """Test basic hash computation.""" + test_file = tmp_path / "test.txt" + test_file.write_text("test content") + + hash_value = compute_file_hash(test_file) + + assert isinstance(hash_value, str) + assert len(hash_value) == 64 # SHA256 produces 64 hex characters + # Verify it's a valid hex string + int(hash_value, 16) + + def test_compute_hash_consistency(self, tmp_path): + """Test that same content produces same hash.""" + test_file = tmp_path / "test.txt" + test_file.write_text("consistent content") + + hash1 = compute_file_hash(test_file) + hash2 = compute_file_hash(test_file) + + assert hash1 == hash2 + + def test_compute_hash_different_content(self, tmp_path): + """Test that different content produces different hashes.""" + file1 = tmp_path / "file1.txt" + file2 = tmp_path / "file2.txt" + file1.write_text("content 1") + file2.write_text("content 2") + + hash1 = compute_file_hash(file1) + hash2 = compute_file_hash(file2) + + assert hash1 != hash2 + + def test_compute_hash_large_file(self, tmp_path): + """Test hash computation for large files (chunked reading).""" + test_file = tmp_path / "large.txt" + # Create a file larger than chunk size (8KB) + test_file.write_text("x" * 10000) + + hash_value = compute_file_hash(test_file) + + assert isinstance(hash_value, str) + assert len(hash_value) == 64 + + def test_compute_hash_binary_file(self, tmp_path): + """Test hash computation for binary files.""" + test_file = tmp_path / "binary.bin" + test_file.write_bytes(b"\x00\x01\x02\x03\xff\xfe\xfd") + + hash_value = compute_file_hash(test_file) + + assert isinstance(hash_value, str) + assert len(hash_value) == 64 + + def test_compute_hash_nonexistent_file(self, tmp_path): + """Test that nonexistent file raises FileNotFoundError.""" + nonexistent = tmp_path / "nonexistent.txt" + + with pytest.raises(FileNotFoundError, match="File not found"): + compute_file_hash(nonexistent) + + def test_compute_hash_unsupported_algorithm(self, tmp_path): + """Test that unsupported algorithm raises ValueError.""" + test_file = tmp_path / "test.txt" + test_file.write_text("test") + + with pytest.raises(ValueError, match="Unsupported hash algorithm"): + compute_file_hash(test_file, algorithm="invalid_algo") + + +class TestFetchPyPiPackageHashes: + """Tests for fetch_pypi_package_hashes function.""" + + @patch("cpex.tools.integrity.httpx.Client") + def test_fetch_hashes_success(self, mock_client_class): + """Test successful hash fetching from PyPI.""" + mock_response = MagicMock() + mock_response.json.return_value = { + "urls": [ + { + "filename": "package-1.0.0.tar.gz", + "digests": {"sha256": "abc123def456"}, + "url": "https://files.pythonhosted.org/package-1.0.0.tar.gz", + }, + { + "filename": "package-1.0.0-py3-none-any.whl", + "digests": {"sha256": "789ghi012jkl"}, + "url": "https://files.pythonhosted.org/package-1.0.0-py3-none-any.whl", + }, + ] + } + mock_client = MagicMock() + mock_client.get.return_value = mock_response + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + hashes = fetch_pypi_package_hashes("test-package", "1.0.0") + + assert len(hashes) == 2 + assert "package-1.0.0.tar.gz" in hashes + assert hashes["package-1.0.0.tar.gz"]["sha256"] == "abc123def456" + assert "package-1.0.0-py3-none-any.whl" in hashes + assert hashes["package-1.0.0-py3-none-any.whl"]["sha256"] == "789ghi012jkl" + + @patch("cpex.tools.integrity.httpx.Client") + def test_fetch_hashes_test_pypi(self, mock_client_class): + """Test fetching from test.pypi.org.""" + mock_response = MagicMock() + mock_response.json.return_value = {"urls": []} + mock_client = MagicMock() + mock_client.get.return_value = mock_response + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + fetch_pypi_package_hashes("test-package", use_test=True) + + # Verify test PyPI URL was used + call_args = mock_client.get.call_args[0][0] + assert "test.pypi.org" in call_args + + @patch("cpex.tools.integrity.httpx.Client") + def test_fetch_hashes_package_not_found(self, mock_client_class): + """Test handling of 404 response.""" + mock_client = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 404 + mock_client.get.return_value = mock_response + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "404", request=MagicMock(), response=mock_response + ) + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + with pytest.raises(RuntimeError, match="Package .* not found"): + fetch_pypi_package_hashes("nonexistent-package") + + @patch("cpex.tools.integrity.httpx.Client") + def test_fetch_hashes_network_error(self, mock_client_class): + """Test handling of network errors.""" + mock_client = MagicMock() + mock_client.get.side_effect = httpx.RequestError("Connection failed") + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + with pytest.raises(RuntimeError, match="Network error"): + fetch_pypi_package_hashes("test-package") + + @patch("cpex.tools.integrity.httpx.Client") + def test_fetch_hashes_no_urls(self, mock_client_class): + """Test handling of package with no distribution files.""" + mock_response = MagicMock() + mock_response.json.return_value = {"urls": []} + mock_client = MagicMock() + mock_client.get.return_value = mock_response + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + hashes = fetch_pypi_package_hashes("empty-package") + + assert hashes == {} + + @patch("cpex.tools.integrity.httpx.Client") + def test_fetch_hashes_missing_sha256(self, mock_client_class): + """Test handling of files without SHA256 digests.""" + mock_response = MagicMock() + mock_response.json.return_value = { + "urls": [ + { + "filename": "package-1.0.0.tar.gz", + "digests": {}, # No SHA256 + "url": "https://files.pythonhosted.org/package-1.0.0.tar.gz", + } + ] + } + mock_client = MagicMock() + mock_client.get.return_value = mock_response + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + hashes = fetch_pypi_package_hashes("test-package") + + assert hashes == {} + + +class TestVerifyPackageIntegrity: + """Tests for verify_package_integrity function.""" + + def test_verify_success(self, tmp_path): + """Test successful verification.""" + test_file = tmp_path / "package.tar.gz" + test_file.write_text("package content") + + expected_hash = compute_file_hash(test_file) + result = verify_package_integrity(test_file, expected_hash, "test-package") + + assert result is True + + def test_verify_failure_strict(self, tmp_path): + """Test verification failure in strict mode.""" + test_file = tmp_path / "package.tar.gz" + test_file.write_text("package content") + + wrong_hash = "0" * 64 + + with pytest.raises(IntegrityVerificationError) as exc_info: + verify_package_integrity(test_file, wrong_hash, "test-package", strict=True) + + assert "test-package" in str(exc_info.value) + assert exc_info.value.package_name == "test-package" + assert exc_info.value.expected_hash == wrong_hash + + def test_verify_failure_non_strict(self, tmp_path): + """Test verification failure in non-strict mode.""" + test_file = tmp_path / "package.tar.gz" + test_file.write_text("package content") + + wrong_hash = "0" * 64 + + result = verify_package_integrity(test_file, wrong_hash, "test-package", strict=False) + + assert result is False + + def test_verify_case_insensitive(self, tmp_path): + """Test that hash comparison is case-insensitive.""" + test_file = tmp_path / "package.tar.gz" + test_file.write_text("package content") + + expected_hash = compute_file_hash(test_file) + uppercase_hash = expected_hash.upper() + + result = verify_package_integrity(test_file, uppercase_hash, "test-package") + + assert result is True + + def test_verify_nonexistent_file(self, tmp_path): + """Test verification of nonexistent file.""" + nonexistent = tmp_path / "nonexistent.tar.gz" + + with pytest.raises(FileNotFoundError, match="Package file not found"): + verify_package_integrity(nonexistent, "abc123", "test-package") + + def test_verify_without_package_name(self, tmp_path): + """Test verification without explicit package name.""" + test_file = tmp_path / "package.tar.gz" + test_file.write_text("package content") + + expected_hash = compute_file_hash(test_file) + result = verify_package_integrity(test_file, expected_hash) + + assert result is True + + +class TestFindMatchingHash: + """Tests for find_matching_hash function.""" + + def test_find_exact_match(self, tmp_path): + """Test finding exact filename match.""" + test_file = tmp_path / "package-1.0.0.tar.gz" + hashes = { + "package-1.0.0.tar.gz": {"sha256": "abc123", "url": "https://example.com"}, + "other-file.whl": {"sha256": "def456", "url": "https://example.com"}, + } + + result = find_matching_hash(test_file, hashes) + + assert result == "abc123" + + def test_find_case_insensitive_match(self, tmp_path): + """Test finding case-insensitive match.""" + test_file = tmp_path / "Package-1.0.0.TAR.GZ" + hashes = {"package-1.0.0.tar.gz": {"sha256": "abc123", "url": "https://example.com"}} + + result = find_matching_hash(test_file, hashes) + + assert result == "abc123" + + def test_find_no_match(self, tmp_path): + """Test when no matching hash is found.""" + test_file = tmp_path / "unknown-package.tar.gz" + hashes = {"other-package.tar.gz": {"sha256": "abc123", "url": "https://example.com"}} + + result = find_matching_hash(test_file, hashes) + + assert result is None + + def test_find_empty_hashes(self, tmp_path): + """Test with empty hashes dictionary.""" + test_file = tmp_path / "package.tar.gz" + hashes = {} + + result = find_matching_hash(test_file, hashes) + + assert result is None + + +class TestIntegrityVerificationError: + """Tests for IntegrityVerificationError exception.""" + + def test_error_attributes(self): + """Test that error has correct attributes.""" + error = IntegrityVerificationError("test-pkg", "expected123", "actual456") + + assert error.package_name == "test-pkg" + assert error.expected_hash == "expected123" + assert error.actual_hash == "actual456" + assert "test-pkg" in str(error) + assert "expected123" in str(error) + assert "actual456" in str(error) + + def test_error_message_format(self): + """Test error message formatting.""" + error = IntegrityVerificationError("my-package", "a" * 64, "b" * 64) + + message = str(error) + assert "my-package" in message + assert "Integrity verification failed" in message + # Should show truncated hashes + assert "aaaaaaaaaaaaaaa" in message + assert "bbbbbbbbbbbbbbb" in message + + +class TestIntegrationScenarios: + """Integration tests for complete verification workflows.""" + + @patch("cpex.tools.integrity.httpx.Client") + def test_full_verification_workflow(self, mock_client_class, tmp_path): + """Test complete workflow: fetch hashes, download, verify.""" + # Setup mock PyPI response + test_file = tmp_path / "package-1.0.0.tar.gz" + test_file.write_text("package content") + actual_hash = compute_file_hash(test_file) + + mock_response = MagicMock() + mock_response.json.return_value = { + "urls": [ + { + "filename": "package-1.0.0.tar.gz", + "digests": {"sha256": actual_hash}, + "url": "https://files.pythonhosted.org/package-1.0.0.tar.gz", + } + ] + } + mock_client = MagicMock() + mock_client.get.return_value = mock_response + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + # Fetch hashes + hashes = fetch_pypi_package_hashes("package", "1.0.0") + + # Find matching hash + expected_hash = find_matching_hash(test_file, hashes) + + # Verify + result = verify_package_integrity(test_file, expected_hash, "package") + + assert result is True + + @patch("cpex.tools.integrity.httpx.Client") + def test_verification_with_tampered_package(self, mock_client_class, tmp_path): + """Test detection of tampered package.""" + # Setup mock with original hash + original_hash = "abc123def456" + "0" * 52 + + mock_response = MagicMock() + mock_response.json.return_value = { + "urls": [ + { + "filename": "package-1.0.0.tar.gz", + "digests": {"sha256": original_hash}, + "url": "https://files.pythonhosted.org/package-1.0.0.tar.gz", + } + ] + } + mock_client = MagicMock() + mock_client.get.return_value = mock_response + mock_client.__enter__.return_value = mock_client + mock_client_class.return_value = mock_client + + # Create "tampered" file with different content + test_file = tmp_path / "package-1.0.0.tar.gz" + test_file.write_text("tampered content") + + # Fetch hashes + hashes = fetch_pypi_package_hashes("package", "1.0.0") + expected_hash = find_matching_hash(test_file, hashes) + + # Verification should fail + with pytest.raises(IntegrityVerificationError): + verify_package_integrity(test_file, expected_hash, "package", strict=True) + +# Made with Bob From 5cd6a7026eeffa443d35199df9b69b3a116c8c59 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 6 May 2026 15:38:55 -0400 Subject: [PATCH 84/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/catalog.py | 47 ++++++++++++++++------------------------- cpex/tools/cli.py | 6 +++--- cpex/tools/integrity.py | 1 + cpex/tools/settings.py | 6 ++---- 4 files changed, 24 insertions(+), 36 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index 9faaa6a..d4089ce 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -643,7 +643,9 @@ def install_folder_via_pip(self, manifest: PluginManifest, verify_integrity: boo if manifest.kind == "isolated_venv": logger.info("Detected isolated_venv plugin from monorepo: %s", manifest.name) # Install the package to make it available for venv initialization - package_path = self._download_monorepo_folder_to_temp(repo_url, manifest.name, verify_integrity=verify_integrity) + package_path = self._download_monorepo_folder_to_temp( + repo_url, manifest.name, verify_integrity=verify_integrity + ) plugin_path = self._initialize_isolated_venv(manifest, package_path) logger.info("Isolated venv initialized. Plugin will be auto-installed via requirements.txt") else: @@ -836,7 +838,9 @@ def _extract_package_archive(self, package_file: Path, extract_dir: Path) -> Non else: raise RuntimeError(f"Unsupported package format: {package_file}") - def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str, verify_integrity: bool = True) -> Path: + def _download_monorepo_folder_to_temp( + self, repo_url: str, package_name: str, verify_integrity: bool = True + ) -> Path: """Download monorepo folder to temporary directory. Args: @@ -874,16 +878,12 @@ def _download_monorepo_folder_to_temp(self, repo_url: str, package_name: str, ve if verify_integrity: try: from cpex.tools.integrity import compute_file_hash + package_hash = compute_file_hash(package_file) logger.info( - "Package integrity hash for %s (%s): SHA256=%s", - package_name, - package_file.name, - package_hash - ) - logger.info( - "Store this hash for future verification or to detect tampering" + "Package integrity hash for %s (%s): SHA256=%s", package_name, package_file.name, package_hash ) + logger.info("Store this hash for future verification or to detect tampering") except Exception as e: logger.warning("Failed to compute package hash: %s", str(e)) @@ -942,14 +942,13 @@ def _download_package_to_temp( if version_constraint: # Try to extract exact version from constraint (e.g., "==1.0.0" -> "1.0.0") import re - version_match = re.search(r'==\s*([0-9.]+)', version_constraint) + + version_match = re.search(r"==\s*([0-9.]+)", version_constraint) if version_match: version_to_fetch = version_match.group(1) - + expected_hashes = fetch_pypi_package_hashes( - package_name=package_name, - version=version_to_fetch, - use_test=use_test + package_name=package_name, version=version_to_fetch, use_test=use_test ) if expected_hashes: logger.info("Retrieved hashes for %d distribution files", len(expected_hashes)) @@ -990,16 +989,10 @@ def _download_package_to_temp( if expected_hash: logger.info("Verifying integrity of %s", package_file.name) verify_package_integrity( - file_path=package_file, - expected_hash=expected_hash, - package_name=package_name, - strict=True + file_path=package_file, expected_hash=expected_hash, package_name=package_name, strict=True ) else: - logger.warning( - "No matching hash found for %s. Proceeding without verification.", - package_file.name - ) + logger.warning("No matching hash found for %s. Proceeding without verification.", package_file.name) extract_dir = temp_dir / "extracted" extract_dir.mkdir() @@ -1511,16 +1504,12 @@ def install_from_git(self, url: str, verify_integrity: bool = True) -> tuple[Plu if verify_integrity: try: from cpex.tools.integrity import compute_file_hash + package_hash = compute_file_hash(archive_path) logger.info( - "Package integrity hash for %s (%s): SHA256=%s", - package_name, - archive_path.name, - package_hash - ) - logger.info( - "Store this hash for future verification or to detect tampering" + "Package integrity hash for %s (%s): SHA256=%s", package_name, archive_path.name, package_hash ) + logger.info("Store this hash for future verification or to detect tampering") except Exception as e: logger.warning("Failed to compute package hash: %s", str(e)) diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 50a6695..387ddf1 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -47,10 +47,10 @@ ) from cpex.framework.settings import settings from cpex.tools.catalog import PluginCatalog -from cpex.tools.settings import get_catalog_settings # Third-Party from cpex.tools.plugin_registry import PluginRegistry +from cpex.tools.settings import get_catalog_settings # Exit codes for CLI commands EXIT_SUCCESS = 0 @@ -582,9 +582,9 @@ def _install_from_pypi(source: str, catalog: PluginCatalog, use_test: bool = Fal verify_integrity = catalog_settings.VERIFY_PACKAGE_INTEGRITY if verify_integrity: - console.log(f"Package integrity verification: enabled") + console.log("Package integrity verification: enabled") else: - console.log(f"Package integrity verification: disabled") + console.log("Package integrity verification: disabled") with console.status(f"Installing plugin {package_name} via pypi", spinner="dots"): manifest, plugin_path = catalog.install_from_pypi( diff --git a/cpex/tools/integrity.py b/cpex/tools/integrity.py index 8be0766..9f23f88 100644 --- a/cpex/tools/integrity.py +++ b/cpex/tools/integrity.py @@ -297,4 +297,5 @@ def find_matching_hash( logger.warning("No matching hash found for %s in PyPI metadata", pkg_name) return None + # Made with Bob diff --git a/cpex/tools/settings.py b/cpex/tools/settings.py index 2ae192b..f64111a 100644 --- a/cpex/tools/settings.py +++ b/cpex/tools/settings.py @@ -40,12 +40,10 @@ class CatalogSettings(BaseSettings): ) FOLDER: str = Field(default="plugins", description="The folder where the plugins are located (r/w)") VERIFY_PACKAGE_INTEGRITY: bool = Field( - default=True, - description="Enable SHA256 hash verification for downloaded packages from PyPI" + default=True, description="Enable SHA256 hash verification for downloaded packages from PyPI" ) STRICT_INTEGRITY_MODE: bool = Field( - default=False, - description="Fail installation if package hashes are unavailable (strict mode)" + default=False, description="Fail installation if package hashes are unavailable (strict mode)" ) From 07a9ecd2ed8349ba83868330b6308748f5c3e009 Mon Sep 17 00:00:00 2001 From: habeck Date: Wed, 6 May 2026 17:13:03 -0400 Subject: [PATCH 85/88] chore: missed commit Signed-off-by: habeck --- .env.example | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.env.example b/.env.example index 205d3fe..9647b83 100644 --- a/.env.example +++ b/.env.example @@ -162,3 +162,15 @@ # PLUGINS_GRPC_SERVER_SSL_ENABLED= + +### Package Integrity Verification +# Enable SHA256 hash verification for PyPI packages (default: True) +# When enabled, downloaded packages are verified against hashes from PyPI's JSON API +# Recommended: Keep enabled for security +# PLUGINS_VERIFY_PACKAGE_INTEGRITY=True + +# Strict integrity mode (default: False) +# When True: Fail installation if package hashes are unavailable +# When False: Warn but continue if hashes are unavailable +# Recommended: False for development, True for production +# PLUGINS_STRICT_INTEGRITY_MODE=False From c0f2835d1fe1602f6a353260f5156d5994a217f4 Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 8 May 2026 12:39:02 -0400 Subject: [PATCH 86/88] fix: if the plugins/config.yaml plugins array is empty, initialize it with the appropriate default from catalog settings. Signed-off-by: habeck --- .env.example | 2 ++ cpex/tools/cli.py | 3 +++ 2 files changed, 5 insertions(+) diff --git a/.env.example b/.env.example index 9647b83..c755e13 100644 --- a/.env.example +++ b/.env.example @@ -6,6 +6,8 @@ # `allow`, `deny` # PLUGINS_DEFAULT_HOOK_POLICY=allow +# Path to plugins folder +# PLUGINS_FOLDER=plugins # Path to main plugins configuration file # PLUGINS_CONFIG_FILE=plugins/config.yaml diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 387ddf1..820e65b 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -314,6 +314,9 @@ def update_plugins_config_yaml(manifest: PluginManifest): ) if plugin_configs.plugins is None: plugin_configs.plugins = [] + if plugin_configs.plugin_dirs is None or len(plugin_configs.plugin_dirs)==0: + catalog_settings = get_catalog_settings() + plugin_configs.plugin_dirs = [f"{catalog_settings.FOLDER}"] plugin_configs.plugins.append(plugin_config) # now serialize the config ConfigSaver.save_config(plugin_configs, settings.config_file) From f5bb2a7553ff9f294bae264d936b3439f420f5d7 Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 8 May 2026 14:53:48 -0400 Subject: [PATCH 87/88] chore: lint fix Signed-off-by: habeck --- cpex/tools/catalog.py | 14 ++++++-------- cpex/tools/cli.py | 6 ++++-- cpex/tools/plugin_registry.py | 2 +- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/cpex/tools/catalog.py b/cpex/tools/catalog.py index d4089ce..5d68974 100644 --- a/cpex/tools/catalog.py +++ b/cpex/tools/catalog.py @@ -12,19 +12,18 @@ import json import logging import os +import shutil import subprocess import sys +import tarfile +import tempfile import tomllib +import uuid +import zipfile from pathlib import Path from typing import Any, Optional -import uuid import httpx -import shutil -import tarfile -import tempfile -import zipfile - import yaml from github import Auth, Github from packaging.version import InvalidVersion, Version @@ -637,7 +636,7 @@ def install_folder_via_pip(self, manifest: PluginManifest, verify_integrity: boo raise RuntimeError("PluginManifest.monorepo can not be None.") try: repo_url = f"git+{manifest.monorepo.package_source}" - + plugin_path = None # Check manifest kind BEFORE installing if manifest.kind == "isolated_venv": @@ -902,7 +901,6 @@ def _download_monorepo_folder_to_temp( shutil.rmtree(temp_dir, ignore_errors=True) raise RuntimeError(f"Unexpected error downloading {package_name}: {str(e)}") from e - def _download_package_to_temp( self, package_name: str, version_constraint: str | None, use_test: bool = False, verify_integrity: bool = True ) -> Path: diff --git a/cpex/tools/cli.py b/cpex/tools/cli.py index 820e65b..bf4bb43 100644 --- a/cpex/tools/cli.py +++ b/cpex/tools/cli.py @@ -314,7 +314,7 @@ def update_plugins_config_yaml(manifest: PluginManifest): ) if plugin_configs.plugins is None: plugin_configs.plugins = [] - if plugin_configs.plugin_dirs is None or len(plugin_configs.plugin_dirs)==0: + if plugin_configs.plugin_dirs is None or len(plugin_configs.plugin_dirs) == 0: catalog_settings = get_catalog_settings() plugin_configs.plugin_dirs = [f"{catalog_settings.FOLDER}"] plugin_configs.plugins.append(plugin_config) @@ -480,7 +480,9 @@ def _parse_pypi_source(source: str) -> tuple[str, Optional[str]]: return package_name, version_constraint -def _finalize_installation(manifest: PluginManifest, install_type: str, catalog: PluginCatalog, plugin_path: Path | None = None): +def _finalize_installation( + manifest: PluginManifest, install_type: str, catalog: PluginCatalog, plugin_path: Path | None = None +): """Common finalization steps for plugin installation. Args: diff --git a/cpex/tools/plugin_registry.py b/cpex/tools/plugin_registry.py index 13c97bf..29f4dda 100644 --- a/cpex/tools/plugin_registry.py +++ b/cpex/tools/plugin_registry.py @@ -96,7 +96,7 @@ def update( else: raise ValueError(f"Invalid installation type: {installation_type}") - installation_path = plugin_path if plugin_path is not None else find_package_path(manifest.name) + installation_path = plugin_path if plugin_path is not None else find_package_path(manifest.name) ipi: InstalledPluginInfo = InstalledPluginInfo( name=manifest.name, From a13f7028abf70eca08d65c42d628087ca1fe45a5 Mon Sep 17 00:00:00 2001 From: habeck Date: Fri, 8 May 2026 15:29:25 -0400 Subject: [PATCH 88/88] chore: version to '0.1.0 minimum' Signed-off-by: habeck --- cpex/framework/models.py | 2 +- .../isolated/{{cookiecutter.plugin_slug}}/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cpex/framework/models.py b/cpex/framework/models.py index 91bba80..e85ffcf 100644 --- a/cpex/framework/models.py +++ b/cpex/framework/models.py @@ -2290,7 +2290,7 @@ class PluginVersionInfo(BaseModel): deprecated: bool = False manifest_file: str changelog: Optional[str] = None - min_max_framework_version: Optional[str] = "0.1.0.dev12,0.1.0.dev12" + min_max_framework_version: Optional[str] = "0.1.0,0.1.0" class PluginVersionRegistry(BaseModel): diff --git a/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt b/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt index 0454c5e..3a03707 100644 --- a/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt +++ b/cpex/templates/isolated/{{cookiecutter.plugin_slug}}/requirements.txt @@ -1,4 +1,4 @@ -cpex>=0.1.0.dev10 +cpex>=0.1.0 # The requirements file is used to install the plugin for the isolated_venv scenario # The cpex cli tool first creates a venv for the plugin, and then uses pip to install the requirements.txt file into the venv. # The default package name is provided below, however if monorepo installation is desired use