diff --git a/.gitignore b/.gitignore
index 3584ea2..500ed41 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,6 +47,8 @@ backend/alembic/versions/
 *.log
 backend/logs/
 
+backend/services_runtime/
+
 # Environment variables
 .env
 .env.local
diff --git a/backend/app/dto/__init__.py b/backend/app/dto/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/backend/app/dto/plugin.py b/backend/app/dto/plugin.py
new file mode 100644
index 0000000..3c5d7b4
--- /dev/null
+++ b/backend/app/dto/plugin.py
@@ -0,0 +1,25 @@
+from pydantic import BaseModel
+from typing import List, Optional
+from datetime import datetime
+
+# This schema is used for returning data from the repository.
+# It ensures that JSON fields are correctly converted to Python types.
+class PluginServiceRuntimeDTO(BaseModel):
+    """
+    A Pydantic model to represent a PluginServiceRuntime object,
+    with required_env_vars as a list of strings.
+    """
+    id: str
+    plugin_id: str
+    plugin_slug: str
+    name: str
+    source_url: Optional[str] = None
+    type: Optional[str] = None
+    install_command: Optional[str] = None
+    start_command: Optional[str] = None
+    healthcheck_url: Optional[str] = None
+    required_env_vars: List[str] = []
+    status: Optional[str] = None
+    user_id: str
+    created_at: Optional[datetime] = None
+    updated_at: Optional[datetime] = None
diff --git a/backend/app/main.py b/backend/app/main.py
index 7e26f33..1937d8c 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -5,6 +5,7 @@ from app.api.v1.api import api_router
 from app.core.config import settings
 from app.routers.plugins import plugin_manager
+from app.plugins.service_installler.start_stop_plugin_services import start_plugin_services_on_startup, stop_all_plugin_services_on_shutdown
 import logging
 import time
 import structlog
@@ -29,8 +30,21 @@ async def startup_event():
     logger.info("Initializing application settings...")
     from app.init_settings import init_ollama_settings
     await init_ollama_settings()
+    # Start plugin services
+    await start_plugin_services_on_startup()
     logger.info("Settings initialization completed")
 
+
+# Add shutdown event to gracefully stop services
+@app.on_event("shutdown")
+async def shutdown_event():
+    """Gracefully stop all plugin services on application shutdown."""
+    logger.info("Shutting down application and stopping plugin services...")
+    # Stop all plugin services gracefully
+    await stop_all_plugin_services_on_shutdown()
+    logger.info("Application shutdown completed.")
+
+
 # Add middleware to log all requests
 logger = structlog.get_logger()
@@ -94,5 +108,3 @@ async def validation_exception_handler(request: Request, exc: RequestValidationE
 # Include API routers
 app.include_router(api_router)
-
-
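Reviewer note: the DTO mirrors the dict shape produced by PluginServiceRuntime.to_dict() below, giving the repository a typed return. A minimal sketch (field names are real, values hypothetical):

    from app.dto.plugin import PluginServiceRuntimeDTO

    dto = PluginServiceRuntimeDTO(
        id="svc-1", plugin_id="plg-1", plugin_slug="demo_plugin",
        name="cwyd_service", user_id="user-1",
        required_env_vars=["LLM_PROVIDER"],
    )
    assert dto.required_env_vars == ["LLM_PROVIDER"]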
diff --git a/backend/app/models/plugin.py b/backend/app/models/plugin.py
index 4d02e51..f56e9e0 100644
--- a/backend/app/models/plugin.py
+++ b/backend/app/models/plugin.py
@@ -1,10 +1,13 @@
-from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Text, JSON, UniqueConstraint, TIMESTAMP
+from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Text, JSON, UniqueConstraint, TIMESTAMP, DateTime
 import sqlalchemy
 from sqlalchemy.orm import relationship
 from sqlalchemy.sql import func
+from datetime import datetime, UTC
+import json
 from app.models.base import Base
 
+
 class Plugin(Base):
     """SQLAlchemy model for plugins."""
@@ -44,6 +47,7 @@ class Plugin(Base):
     config_fields = Column(Text)  # Stored as JSON string
     messages = Column(Text)  # Stored as JSON string
     dependencies = Column(Text)  # Stored as JSON string
+    required_services_runtime = Column(Text, nullable=True)
 
     # Timestamps
     created_at = Column(String, default=func.now())
@@ -60,6 +64,7 @@ class Plugin(Base):
 
     # Relationships
     modules = relationship("Module", back_populates="plugin", cascade="all, delete-orphan")
+    service_runtimes = relationship("PluginServiceRuntime", back_populates="plugin", cascade="all, delete-orphan")
 
     def to_dict(self):
         """Convert model to dictionary."""
@@ -118,6 +123,11 @@ def to_dict(self):
         else:
             result["permissions"] = []
 
+        if self.required_services_runtime:
+            result["requiredServicesRuntime"] = json.loads(self.required_services_runtime)
+        else:
+            result["requiredServicesRuntime"] = []
+
         return result
 
     @classmethod
@@ -162,10 +172,81 @@ def from_dict(cls, data):
         # Remove modules from data as they are handled separately
         if "modules" in db_data:
             db_data.pop("modules")
+
+        # Handle service runtimes (only store names in plugin table)
+        if "requiredServicesRuntime" in db_data and db_data["requiredServicesRuntime"] is not None:
+            db_data["required_services_runtime"] = json.dumps([
+                r["name"] for r in db_data["requiredServicesRuntime"]
+            ])
+            db_data.pop("requiredServicesRuntime")
 
         return cls(**db_data)
 
 
+class PluginServiceRuntime(Base):
+    """SQLAlchemy model for plugin service runtimes."""
+
+    __tablename__ = "plugin_service_runtime"
+
+    id = Column(String, primary_key=True, index=True)
+    plugin_id = Column(String, ForeignKey("plugin.id"), nullable=False, index=True)
+    plugin_slug = Column(String, nullable=False, index=True)
+
+    name = Column(String, nullable=False)
+    source_url = Column(String)
+    type = Column(String)
+    install_command = Column(Text)
+    start_command = Column(Text)
+    healthcheck_url = Column(String)
+    required_env_vars = Column(Text)  # store as JSON string
+    status = Column(String, default="pending")
+
+    # Callables, so the timestamp is evaluated per row rather than once at import time
+    created_at = Column(DateTime, default=lambda: datetime.now(UTC))
+    updated_at = Column(DateTime, default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC))
+
+    user_id = Column(String(32), ForeignKey("users.id", name="fk_plugin_service_runtime_user_id"), nullable=False)
+    user = relationship("User")
+
+    # Relationship back to plugin
+    plugin = relationship("Plugin", back_populates="service_runtimes")
+
+    def to_dict(self):
+        """
+        Convert the model instance to a dictionary, handling JSON fields.
+        """
+        return {
+            "id": self.id,
+            "plugin_id": self.plugin_id,
+            "plugin_slug": self.plugin_slug,
+            "name": self.name,
+            "source_url": self.source_url,
+            "type": self.type,
+            "install_command": self.install_command,
+            "start_command": self.start_command,
+            "healthcheck_url": self.healthcheck_url,
+            "required_env_vars": json.loads(self.required_env_vars) if self.required_env_vars else [],
+            "status": self.status,
+            "user_id": self.user_id,
+            "created_at": self.created_at.isoformat() if self.created_at else None,
+            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict):
+        """
+        Create a new instance from a dictionary, serializing JSON fields.
+        """
+        db_data = data.copy()
+
+        if "required_env_vars" in db_data and db_data["required_env_vars"] is not None:
+            db_data["required_env_vars"] = json.dumps(db_data["required_env_vars"])
+
+        # Handle conversion from camelCase to snake_case if necessary
+        # For simplicity, we are assuming keys in the incoming dict match model attributes
+
+        return cls(**db_data)
+
+
 class Module(Base):
     """SQLAlchemy model for plugin modules."""
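As a sanity check, required_env_vars round-trips through the JSON text column; a sketch with placeholder ids:

    from app.models.plugin import PluginServiceRuntime

    runtime = PluginServiceRuntime.from_dict({
        "id": "svc-1", "plugin_id": "plg-1", "plugin_slug": "demo_plugin",
        "name": "cwyd_service", "type": "docker-compose",
        "required_env_vars": ["LLM_PROVIDER", "OLLAMA_LLM_MODEL"],
        "user_id": "user-1",
    })
    # Stored as a JSON string, decoded again on the way out.
    assert runtime.to_dict()["required_env_vars"] == ["LLM_PROVIDER", "OLLAMA_LLM_MODEL"]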
+ """ + db_data = data.copy() + + if "required_env_vars" in db_data and db_data["required_env_vars"] is not None: + db_data["required_env_vars"] = json.dumps(db_data["required_env_vars"]) + + # Handle conversion from camelCase to snake_case if necessary + # For simplicity, we are assuming keys in the incoming dict match model attributes + + return cls(**db_data) + + class Module(Base): """SQLAlchemy model for plugin modules.""" diff --git a/backend/app/plugins/remote_installer.py b/backend/app/plugins/remote_installer.py index 6334fdf..bc1c033 100644 --- a/backend/app/plugins/remote_installer.py +++ b/backend/app/plugins/remote_installer.py @@ -20,6 +20,8 @@ from typing import Dict, Any, Optional, List, Tuple from urllib.parse import urlparse import structlog +from .service_installler.plugin_service_manager import install_and_start_docker_service +from .service_installler.service_runtime_extractor import extract_required_services_runtime logger = structlog.get_logger() @@ -237,6 +239,15 @@ async def install_from_url(self, repo_url: str, user_id: str, version: str = "la if install_result['success']: logger.info(f"Plugin installation successful: {install_result}") + service_runtime: list = validation_result.get("service_runtime", []) + logger.info(f"\n\n>>>>>>>>SERVICE RUNTIME\n\n: {service_runtime}\n\n>>>>>>>>>>") + if service_runtime: + plugin_slug = validation_result["plugin_info"].get("plugin_slug") + await install_and_start_docker_service( + service_runtime, + plugin_slug + ) + # Store installation metadata try: await self._store_installation_metadata( @@ -718,6 +729,7 @@ async def _validate_plugin_structure(self, plugin_dir: Path) -> Dict[str, Any]: # Try to load plugin metadata plugin_info = {} + service_runtime = [] # Check package.json package_json_path = plugin_dir / 'package.json' @@ -828,6 +840,12 @@ async def _validate_plugin_structure(self, plugin_dir: Path) -> Dict[str, Any]: extracted_slug = slug_match.group(1) plugin_info['plugin_slug'] = extracted_slug logger.info(f"Extracted plugin_slug from source: {extracted_slug}") + + # Extract services using the dedicated function + services = extract_required_services_runtime(content, plugin_info.get('plugin_slug')) + if services: + plugin_info['required_services_runtime'] = services + service_runtime.extend(services) except Exception as extract_error: logger.warning(f"Could not extract plugin_slug from source: {extract_error}") @@ -839,7 +857,8 @@ async def _validate_plugin_structure(self, plugin_dir: Path) -> Dict[str, Any]: return { 'valid': True, - 'plugin_info': plugin_info + 'plugin_info': plugin_info, + 'service_runtime': service_runtime } except Exception as e: diff --git a/backend/app/plugins/repository.py b/backend/app/plugins/repository.py index 45ffc1a..8084113 100644 --- a/backend/app/plugins/repository.py +++ b/backend/app/plugins/repository.py @@ -6,7 +6,8 @@ from sqlalchemy.ext.asyncio import AsyncSession import structlog -from app.models.plugin import Plugin, Module +from app.models.plugin import Plugin, Module, PluginServiceRuntime +from app.dto.plugin import PluginServiceRuntimeDTO logger = structlog.get_logger() @@ -85,7 +86,7 @@ async def get_all_plugins_with_modules(self, user_id: str = None) -> List[Dict[s plugin_dicts = [] for plugin in plugins: plugin_dict = plugin.to_dict() - + # Get modules for this plugin modules_query = select(Module).where(Module.plugin_id == plugin.id) @@ -104,6 +105,25 @@ async def get_all_plugins_with_modules(self, user_id: str = None) -> List[Dict[s except Exception as e: 
logger.error("Error getting plugins with modules", error=str(e)) raise + + async def get_all_service_runtimes(self) -> List[PluginServiceRuntimeDTO]: + """ + Get all plugin service runtimes for startup and return them as DTOs. + """ + try: + query = select(PluginServiceRuntime).where( + PluginServiceRuntime.status.in_(["pending", "stopped", "running"]) + ) + + result = await self.db.execute(query) + services = result.scalars().all() + + # Convert SQLAlchemy models to Pydantic DTOs for a typed return + return [PluginServiceRuntimeDTO(**service.to_dict()) for service in services] + + except Exception as e: + logger.error("Error getting service runtimes", error=str(e)) + raise async def get_plugin(self, plugin_id: str) -> Optional[Dict[str, Any]]: """Get a specific plugin by ID.""" diff --git a/backend/app/plugins/service_installler/docker_manager.py b/backend/app/plugins/service_installler/docker_manager.py new file mode 100644 index 0000000..51c9507 --- /dev/null +++ b/backend/app/plugins/service_installler/docker_manager.py @@ -0,0 +1,142 @@ +import subprocess +import asyncio +from pathlib import Path +from typing import Dict, List +import structlog +from app.plugins.service_installler.prerequisites import write_env_file +from app.plugins.service_installler.service_health_checker import wait_for_service_health +from app.dto.plugin import PluginServiceRuntimeDTO + +logger = structlog.get_logger() + +async def _run_docker_compose_command(command: str, cwd: Path): + """ + Run a Docker Compose command and log its output. + Uses asyncio.to_thread for non-blocking I/O. + """ + logger.info("Executing Docker Compose command", command=command, cwd=str(cwd)) + + def _execute(): + # Execute the command in the specified directory + return subprocess.run( + command, + shell=True, + cwd=str(cwd), + capture_output=True, + text=True, + check=False + ) + + try: + result = await asyncio.to_thread(_execute) + if result.returncode != 0: + logger.error( + "Docker Compose command failed", + command=command, + returncode=result.returncode, + stdout=result.stdout, + stderr=result.stderr, + ) + raise RuntimeError(f"Docker Compose failed with error:\n{result.stderr}") + + logger.info( + "Docker Compose command completed successfully", + command=command, + stdout=result.stdout + ) + except Exception as e: + logger.error("Failed to run Docker Compose command", command=command, error=str(e)) + raise RuntimeError(f"Failed to run Docker Compose command: {e}") + + +async def check_docker_availability(): + """ + Performs a comprehensive check to ensure Docker and Docker Compose are + installed and the daemon is running. + """ + logger.info("Checking Docker availability...") + + def _check(): + try: + # Check docker command + subprocess.run(["docker", "--version"], check=True, capture_output=True, text=True) + # Check docker compose command + subprocess.run(["docker", "compose", "version"], check=True, capture_output=True, text=True) + # Check if docker daemon is running + subprocess.run(["docker", "info"], check=True, capture_output=True, text=True, timeout=15) + return True, "Docker and Docker Compose are available and the daemon is running." + except subprocess.CalledProcessError as e: + return False, f"Docker check failed: Command '{e.cmd}' returned non-zero exit code {e.returncode}. Stderr: {e.stderr}" + except FileNotFoundError: + return False, "Docker or Docker Compose command not found. Please ensure they are installed and in your system's PATH." 
+ except subprocess.TimeoutExpired: + return False, "Docker daemon did not respond in time. It might not be running or is unresponsive." + except Exception as e: + return False, f"An unexpected error occurred during Docker check: {str(e)}" + + is_available, message = await asyncio.to_thread(_check) + if not is_available: + logger.error("Docker availability check failed", reason=message) + raise RuntimeError( + f"Docker is not available: {message}\n\n" + "Please ensure:\n" + "1. Docker Desktop is installed and running.\n" + "2. Docker commands work from your terminal." + ) + logger.info(message) + + +async def install_and_start_docker_service( + service_data: PluginServiceRuntimeDTO, + target_dir: Path, + env_vars: Dict[str, str], + required_vars: List[str] +): + """ + Handles the installation and startup of a Docker Compose-based service. + This includes checking Docker availability, writing env files, and running the service. + """ + logger.info("Starting Docker service installation process", name=service_data.name) + + await check_docker_availability() + + start_command = service_data.start_command + healthcheck_url = service_data.healthcheck_url + + if not start_command: + raise ValueError("Missing 'start_command' for Docker service.") + + # Write environment file + write_env_file(target_dir, env_vars, required_vars) + + # Run the Docker Compose start command + await _run_docker_compose_command(start_command, target_dir) + + # Wait for the service to become healthy + if healthcheck_url: + logger.info("Waiting for Docker service to become healthy", url=healthcheck_url) + if await wait_for_service_health(healthcheck_url): + logger.info("Docker service is healthy.") + else: + logger.error("Docker service failed to become healthy within timeout.") + await _run_docker_compose_command("down", target_dir) + raise RuntimeError("Docker service failed to become healthy.") + else: + logger.warning("No healthcheck URL provided, assuming service started successfully.") + + +async def stop_docker_service(service_data: PluginServiceRuntimeDTO, target_dir: Path): + """ + Stops and removes a running docker-compose service. + """ + logger.info("Attempting to stop docker-compose service", service=service_data.name) + + command = "docker compose stop" + + try: + await _run_docker_compose_command(command, target_dir) + return True + except RuntimeError: + # We don't want to fail the entire shutdown process if one service fails to stop. 
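For reviewers, the whole docker-compose path in one place, as a sketch (ids, directory and env values are illustrative):

    import asyncio
    from pathlib import Path
    from app.dto.plugin import PluginServiceRuntimeDTO
    from app.plugins.service_installler.docker_manager import install_and_start_docker_service

    dto = PluginServiceRuntimeDTO(
        id="svc-1", plugin_id="plg-1", plugin_slug="demo_plugin", user_id="user-1",
        name="cwyd_service", type="docker-compose",
        start_command="docker compose up --build -d",
        healthcheck_url="http://localhost:8000/health",
    )
    # Writes the per-service .env, runs the compose command in the service
    # directory, then polls the healthcheck until healthy or timed out.
    asyncio.run(install_and_start_docker_service(
        dto, Path("services_runtime/demo_plugin_cwyd_service"),
        env_vars={"LLM_PROVIDER": "ollama"}, required_vars=["LLM_PROVIDER"],
    ))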
+ logger.error("Failed to stop docker-compose service gracefully", service=service_data.name) + return False diff --git a/backend/app/plugins/service_installler/plugin_service_manager.py b/backend/app/plugins/service_installler/plugin_service_manager.py new file mode 100644 index 0000000..b541849 --- /dev/null +++ b/backend/app/plugins/service_installler/plugin_service_manager.py @@ -0,0 +1,182 @@ +import os +import zipfile +import tarfile +import tempfile +import aiohttp +import asyncio +from pathlib import Path +from typing import List, Dict +import structlog +import shutil +from dotenv import dotenv_values + +from app.dto.plugin import PluginServiceRuntimeDTO +from app.plugins.service_installler.docker_manager import install_and_start_docker_service, stop_docker_service +from app.plugins.service_installler.python_manager import install_python_service +from .prerequisites import check_required_env_vars, convert_to_download_url + +logger = structlog.get_logger() + +async def download_and_extract_repo(session: aiohttp.ClientSession, source_url: str, target_dir: Path, max_retries: int = 3): + """ + Download and extract a repository from a git URL. + """ + download_url = convert_to_download_url(source_url) + + for attempt in range(max_retries): + try: + logger.info("Attempting to download repository", url=download_url, attempt=attempt + 1) + async with session.get(download_url) as response: + response.raise_for_status() # Raises for 4xx/5xx responses + + content_type = response.headers.get('content-type', '').lower() + is_zip = 'zip' in content_type or download_url.endswith('.zip') + + with tempfile.NamedTemporaryFile(suffix='.zip' if is_zip else '.tar.gz', delete=False) as temp_file: + temp_path = Path(temp_file.name) + async for chunk in response.content.iter_chunked(16384): + temp_file.write(chunk) + + await _extract_archive(temp_path, target_dir, is_zip) + return + except (aiohttp.ClientError, asyncio.TimeoutError) as e: + logger.warning("Download failed, retrying...", error=str(e), attempt=attempt + 1) + await asyncio.sleep(2 ** attempt) + finally: + # Clean up temp file + if 'temp_path' in locals() and temp_path.exists(): + temp_path.unlink(missing_ok=True) + + raise RuntimeError(f"Failed to download repository from {source_url} after {max_retries} attempts.") + + +async def _extract_archive(temp_path: Path, target_dir: Path, is_zip: bool): + """ + Extracts a zip or tar.gz archive. 
+ """ + logger.info("Extracting archive", path=str(temp_path), target=str(target_dir)) + + # Ensure target directory is clean and ready + if target_dir.exists(): + shutil.rmtree(target_dir) + target_dir.mkdir(parents=True, exist_ok=True) + + try: + if is_zip: + with zipfile.ZipFile(temp_path, 'r') as zip_file: + zip_file.extractall(target_dir.parent) + # Find and rename the extracted directory (e.g., repo-main) + # This is a common pattern for GitHub/GitLab zip archives + first_dir = [name for name in zip_file.namelist() if '/' in name][0].split('/')[0] + shutil.move(target_dir.parent / first_dir, target_dir) + else: + with tarfile.open(temp_path, 'r:gz') as tar_file: + tar_file.extractall(target_dir.parent) + first_dir = tar_file.getmembers()[0].name.split('/')[0] + shutil.move(target_dir.parent / first_dir, target_dir) + + except (zipfile.BadZipFile, tarfile.TarError) as e: + logger.error("Archive extraction failed", error=str(e)) + raise RuntimeError(f"Failed to extract archive: {e}") + + +async def install_plugin_service(service_data: PluginServiceRuntimeDTO, plugin_slug: str): + """ + Installs a single plugin service, including downloading the source + and starting the service. This function is for first-time installation. + """ + base_services_dir = Path("services_runtime") + base_services_dir.mkdir(parents=True, exist_ok=True) + target_dir = base_services_dir / f"{plugin_slug}_{service_data.name}" + + if target_dir.exists(): + logger.info("Service directory already exists, skipping installation", path=str(target_dir)) + else: + # Download and extract the service source code + env_vars = dotenv_values(Path(os.getcwd()) / ".env") + timeout = aiohttp.ClientTimeout(total=300) + connector = aiohttp.TCPConnector(limit=10) + + try: + async with aiohttp.ClientSession(timeout=timeout, connector=connector) as session: + await download_and_extract_repo(session, service_data.source_url, target_dir) + except Exception as e: + logger.error("Failed to download or extract repository", error=str(e)) + raise RuntimeError(f"Failed to download repository for service {service_data.name}: {e}") + + # Dispatch to the appropriate installer/runner + service_type = service_data.type or "docker-compose" + required_vars = service_data.required_env_vars or [] + + # Prerequisite Check + check_required_env_vars( + service_name=service_data.name, + required_vars=required_vars, + root_env_path=Path(os.getcwd()) / ".env" + ) + + if service_type == 'python': + await install_python_service(service_data, target_dir) + elif service_type == 'docker-compose': + await install_and_start_docker_service(service_data, target_dir, dotenv_values(Path(os.getcwd()) / ".env"), required_vars) + else: + raise ValueError(f"Unknown service type: {service_type}") + + +async def start_plugin_services(services_runtime: List[PluginServiceRuntimeDTO], plugin_slug: str): + """ + Starts a list of plugin services. This is used on application startup + and assumes the code is already downloaded. 
+ """ + logger.info("Starting required plugin services") + + for service_data in services_runtime: + target_dir = Path("services_runtime") / f"{plugin_slug}_{service_data.name}" + service_type = service_data.type or "docker-compose" + + try: + logger.info("Attempting to start service", name=service_data.name) + if service_type == 'docker-compose': + # The start_command is the same as the install command for docker + await install_and_start_docker_service( + service_data, + target_dir, + dotenv_values(Path(os.getcwd()) / ".env"), + service_data.required_env_vars or [] + ) + elif service_type == 'python': + # Assuming install_python_service can handle a pre-existing venv + await install_python_service(service_data, target_dir) + else: + logger.warning("Skipping unknown service type", type=service_type, name=service_data.name) + + except Exception as e: + logger.error("Failed to start service", name=service_data.name, error=str(e)) + # Continue to the next service even if one fails + continue + + +async def stop_plugin_services(services_runtime: List[PluginServiceRuntimeDTO], plugin_slug: str): + """ + Stops a list of plugin services. This is used on application shotdown. + """ + logger.info("Stopping required plugin services") + for service_data in services_runtime: + target_dir = Path("services_runtime") / f"{plugin_slug}_{service_data.name}" + service_type = service_data.type or "docker-compose" + + try: + logger.info("Attempting to stop service", name=service_data.name) + if service_type == 'docker-compose': + # The start_command is the same as the install command for docker + await stop_docker_service( + service_data, + target_dir, + ) + else: + logger.warning("Skipping unknown service type", type=service_type, name=service_data.name) + + except Exception as e: + logger.error("Failed to stop service", name=service_data.name, error=str(e)) + # Continue to the next service even if one fails + continue diff --git a/backend/app/plugins/service_installler/prerequisites.py b/backend/app/plugins/service_installler/prerequisites.py new file mode 100644 index 0000000..05ddef8 --- /dev/null +++ b/backend/app/plugins/service_installler/prerequisites.py @@ -0,0 +1,104 @@ +import os +import structlog +from pathlib import Path +from dotenv import dotenv_values + +logger = structlog.get_logger() + +def load_env_vars(env_path: Path): + """ + Loads environment variables from a .env file and returns them as a dictionary. + """ + if not env_path.exists(): + logger.warning(f"Root .env file not found at {env_path}") + return {} + + try: + # dotenv_values() is a good way to read the file without modifying os.environ + # for a quick check. + env_vars = dotenv_values(env_path) + return env_vars + except Exception as e: + logger.error(f"Failed to load .env file from {env_path}: {e}") + return {} + + +def check_required_env_vars(service_name: str, required_vars: list, root_env_path: Path): + """ + Checks if all required environment variables are set in the root .env file. + + Args: + service_name: The name of the service being installed. + required_vars: A list of environment variable names. + root_env_path: The path to the main .env file. + + Raises: + RuntimeError: If any required variable is missing. 
+ """ + # Load the variables from the root .env file + env_vars = load_env_vars(root_env_path) + + missing_vars = [var for var in required_vars if var not in env_vars or not env_vars[var]] + + if missing_vars: + logger.error( + "Missing required environment variables in .env file", + service=service_name, + missing_vars=missing_vars + ) + raise RuntimeError( + f"Missing required environment variables for service '{service_name}': " + f"{', '.join(missing_vars)}. Please add them to your main BrainDrive backend .env file at {root_env_path}." + ) + + logger.info("All required environment variables are present.") + + +def write_env_file(target_dir: Path, env_vars: dict, required_vars: list): + """ + Creates a .env file in the target directory by reading values from a given dictionary. + + Args: + target_dir: The directory where the .env file will be created. + env_vars: A dictionary of all available environment variables. + required_vars: A list of the specific variables to write to the new .env file. + """ + env_path = target_dir / ".env" + + try: + logger.info(f"Creating .env file for service at {env_path}") + with open(env_path, "w") as f: + for var_name in required_vars: + var_value = env_vars.get(var_name, "") + f.write(f"{var_name}={var_value}\n") + except Exception as e: + logger.error(f"Failed to create .env file for service: {e}") + raise RuntimeError(f"Failed to create .env file for service: {e}") + +def convert_to_download_url(source_url: str, branch: str = 'main') -> str: + """ + Convert git repository URLs to download URLs for archives + """ + source_url = source_url.rstrip('.git') + + # GitHub + if 'github.com' in source_url: + if source_url.startswith('git@'): + # Convert SSH to HTTPS + source_url = source_url.replace('git@github.com:', 'https://github.com/') + return f"{source_url}/archive/refs/heads/{branch}.zip" + + # GitLab + elif 'gitlab.com' in source_url: + if source_url.startswith('git@'): + source_url = source_url.replace('git@gitlab.com:', 'https://gitlab.com/') + return f"{source_url}/-/archive/{branch}/repository.zip" + + # Bitbucket + elif 'bitbucket.org' in source_url: + if source_url.startswith('git@'): + source_url = source_url.replace('git@bitbucket.org:', 'https://bitbucket.org/') + return f"{source_url}/get/{branch}.zip" + + # For other cases, assume it's already a direct download URL + return source_url diff --git a/backend/app/plugins/service_installler/python_manager.py b/backend/app/plugins/service_installler/python_manager.py new file mode 100644 index 0000000..6df9135 --- /dev/null +++ b/backend/app/plugins/service_installler/python_manager.py @@ -0,0 +1,63 @@ +import os +import subprocess +from pathlib import Path +import structlog + +from .service_health_checker import wait_for_service_health + +logger = structlog.get_logger() + +async def install_python_service(service: dict, target_dir: Path): + """ + Handle the installation and startup of a service using a virtual environment. 
+ """ + name = service["name"] + install_command = service["install_command"] + start_command = service["start_command"] + healthcheck_url = service["healthcheck_url"] + + # Create virtual environment + venv_dir = target_dir / "venv" + if not venv_dir.exists(): + logger.info(f"Creating virtualenv for {name}") + result = subprocess.run(["python", "-m", "venv", str(venv_dir)], + capture_output=True, text=True) + if result.returncode != 0: + raise RuntimeError(f"Failed to create venv for {name}: {result.stderr}") + else: + logger.info(f"Virtualenv already exists for {name}, skipping.") + + # Determine Python executable inside venv + venv_python = venv_dir / ("Scripts" if os.name == "nt" else "bin") / "python" + + # Install dependencies + logger.info(f"Installing dependencies for {name}") + install_cmd = [str(venv_python)] + install_command.split() + result = subprocess.run(install_cmd, cwd=target_dir, + capture_output=True, text=True) + if result.returncode != 0: + logger.error(f"Failed to install dependencies for {name}: {result.stderr}") + raise RuntimeError(f"Failed to install dependencies for service {name}") + + # Start service + logger.info(f"Starting service {name}") + start_cmd = [str(venv_python)] + start_command.split() + proc = subprocess.Popen(start_cmd, cwd=target_dir, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + # Save PID + pid_file = target_dir / "service.pid" + pid_file.write_text(str(proc.pid)) + + # Healthcheck with async HTTP + logger.info(f"Waiting for service {name} to become healthy...") + if await wait_for_service_health(healthcheck_url, timeout=30): + logger.info(f"Service {name} is healthy and running.") + else: + # Try to get error output from the process + try: + stdout, stderr = proc.communicate(timeout=1) + logger.error(f"Service {name} failed to start. 
Stdout: {stdout.decode()}, Stderr: {stderr.decode()}") + except subprocess.TimeoutExpired: + pass + raise RuntimeError(f"Service {name} failed to start within 30 seconds") diff --git a/backend/app/plugins/service_installler/service_debugger.py b/backend/app/plugins/service_installler/service_debugger.py new file mode 100644 index 0000000..3fa660a --- /dev/null +++ b/backend/app/plugins/service_installler/service_debugger.py @@ -0,0 +1,170 @@ +import ast +import re + +def debug_list_parsing(): + """ + Test parsing of your specific service structure with improved extraction + """ + + # Your actual structure + test_content = ''' +self.required_services_runtime = [ + { + "name": "cwyd_service", + "source_url": "https://github.com/BrainDriveAI/chat-with-your-documents", + "type": "docker-compose", + "install_command": "", + "start_command": "docker compose up --build -d", + "healthcheck_url": "http://localhost:8000/health", + "required_env_vars": [ + "LLM_PROVIDER", + "EMBEDDING_PROVIDER", + "ENABLE_CONTEXTUAL_RETRIEVAL", + "OLLAMA_CONTEXTUAL_LLM_BASE_URL", + "OLLAMA_CONTEXTUAL_LLM_MODEL", + "OLLAMA_LLM_BASE_URL", + "OLLAMA_LLM_MODEL", + "OLLAMA_EMBEDDING_BASE_URL", + "OLLAMA_EMBEDDING_MODEL", + "DOCUMENT_PROCESSOR_API_URL", + "DOCUMENT_PROCESSOR_TIMEOUT", + "DOCUMENT_PROCESSOR_MAX_RETRIES", + ] + } +] + ''' + + print("=== DEBUGGING LIST PARSING WITH IMPROVED EXTRACTION ===\n") + + # Test different extraction patterns + patterns = [ + ("Pattern 1: Nested bracket matching", r'self\.required_services_runtime\s*=\s*(\[(?:[^\[\]]*|\[[^\[\]]*\])*\])'), + ("Pattern 2: Multiline with end anchor", r'(?:self\.)?required_services_runtime\s*=\s*(\[[\s\S]*?^\s*\])'), + ("Pattern 3: Bracket counting", "BRACKET_COUNTING"), + ] + + for pattern_name, pattern in patterns: + print(f"--- Testing {pattern_name} ---") + + if pattern == "BRACKET_COUNTING": + services_match = extract_with_bracket_counting(test_content) + else: + services_match = re.search(pattern, test_content, re.MULTILINE | re.DOTALL) + + if services_match: + services_str = services_match.group(1) + print(f"✅ EXTRACTED: {len(services_str)} chars") + print(f"First 100: {repr(services_str[:100])}") + print(f"Last 100: {repr(services_str[-100:])}") + + # Test cleaning and parsing + try: + cleaned = clean_for_ast_parsing(services_str) + print(f"Cleaned length: {len(cleaned)}") + + result = ast.literal_eval(cleaned) + print(f"✅ PARSING SUCCESS: {len(result)} services") + + if result and isinstance(result[0], dict): + service = result[0] + print(f" Service: {service.get('name')}") + print(f" Type: {service.get('type')}") + print(f" Env vars: {len(service.get('required_env_vars', []))}") + break + except Exception as e: + print(f"❌ PARSING FAILED: {e}") + else: + print("❌ NO MATCH") + print() + + +def extract_with_bracket_counting(content: str): + """ + Extract required_services_runtime using bracket counting to ensure complete capture + """ + import re + + # Find the start of required_services_runtime + start_pattern = r'(?:self\.)?required_services_runtime\s*=\s*\[' + start_match = re.search(start_pattern, content) + + if not start_match: + return None + + # Get position right after the opening bracket + start_pos = start_match.end() - 1 # Include the opening bracket + + # Count brackets to find the matching closing bracket + bracket_count = 0 + pos = start_pos + in_string = False + escape_next = False + quote_char = None + + while pos < len(content): + char = content[pos] + + if escape_next: + escape_next = False + elif char == '\\': + escape_next = 
True + elif not in_string and char in ['"', "'"]: + in_string = True + quote_char = char + elif in_string and char == quote_char and not escape_next: + in_string = False + quote_char = None + elif not in_string: + if char == '[': + bracket_count += 1 + elif char == ']': + bracket_count -= 1 + if bracket_count == 0: + # Found the matching closing bracket + extracted = content[start_pos:pos + 1] + + # Create a mock match object + class MockMatch: + def __init__(self, text): + self._text = text + def group(self, n): + return self._text + + return MockMatch(extracted) + pos += 1 + + return None + + +def clean_for_ast_parsing(services_str: str) -> str: + """ + Clean up common issues that prevent ast.literal_eval from working + Enhanced to handle more edge cases + """ + import re + + print(f" Cleaning string of length {len(services_str)}") + + # Remove comments (but be careful not to remove quotes inside strings) + cleaned = re.sub(r'#[^\r\n]*', '', services_str) + + # Remove trailing commas before closing brackets/braces + # This handles the main issue from your debug output + cleaned = re.sub(r',(\s*[\]\}])', r'\1', cleaned) + + # Normalize quotes (ensure all are double quotes, but preserve content) + # Only replace single quotes that are around keys/values, not inside strings + cleaned = re.sub(r"'([^'\"]*)'(\s*:)", r'"\1"\2', cleaned) # Keys + cleaned = re.sub(r":\s*'([^'\"]*)'", r': "\1"', cleaned) # String values + + # Validate that we still have a complete structure + if cleaned.count('[') != cleaned.count(']') or cleaned.count('{') != cleaned.count('}'): + print(f" ⚠️ Structure validation failed, using original") + return services_str + + print(f" Cleaned to length {len(cleaned)}") + return cleaned + + +# if __name__ == "__main__": +# debug_list_parsing() diff --git a/backend/app/plugins/service_installler/service_health_checker.py b/backend/app/plugins/service_installler/service_health_checker.py new file mode 100644 index 0000000..bb9ba96 --- /dev/null +++ b/backend/app/plugins/service_installler/service_health_checker.py @@ -0,0 +1,22 @@ +import time +import aiohttp +import asyncio + +async def wait_for_service_health(healthcheck_url: str, timeout: int = 30) -> bool: + """ + Wait for a service to become healthy using async HTTP requests + """ + start_time = time.time() + + async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=2)) as session: + while time.time() - start_time < timeout: + try: + async with session.get(healthcheck_url) as response: + if response.status == 200: + return True + except (aiohttp.ClientError, asyncio.TimeoutError): + pass + + await asyncio.sleep(1) + + return False diff --git a/backend/app/plugins/service_installler/service_runtime_extractor.py b/backend/app/plugins/service_installler/service_runtime_extractor.py new file mode 100644 index 0000000..194f65e --- /dev/null +++ b/backend/app/plugins/service_installler/service_runtime_extractor.py @@ -0,0 +1,401 @@ +""" +Service Runtime Extractor + +Extracts required_services_runtime configuration from plugin lifecycle_manager.py files. +Handles complex nested structures with robust parsing and fallback methods. +""" + +import re +import ast +import structlog +from typing import List, Dict, Any, Optional + +logger = structlog.get_logger() + + +def extract_required_services_runtime(content: str, plugin_name: str = None) -> List[Dict[str, Any]]: + """ + Extract required_services_runtime configuration from plugin source code. 
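Standalone usage of the health checker, with the URL taken from the sample declaration:

    import asyncio
    from app.plugins.service_installler.service_health_checker import wait_for_service_health

    healthy = asyncio.run(wait_for_service_health("http://localhost:8000/health", timeout=30))
    print("healthy" if healthy else "timed out")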
diff --git a/backend/app/plugins/service_installler/service_runtime_extractor.py b/backend/app/plugins/service_installler/service_runtime_extractor.py
new file mode 100644
index 0000000..194f65e
--- /dev/null
+++ b/backend/app/plugins/service_installler/service_runtime_extractor.py
@@ -0,0 +1,401 @@
+"""
+Service Runtime Extractor
+
+Extracts required_services_runtime configuration from plugin lifecycle_manager.py files.
+Handles complex nested structures with robust parsing and fallback methods.
+"""
+
+import re
+import ast
+import structlog
+from typing import List, Dict, Any, Optional
+
+logger = structlog.get_logger()
+
+
+def extract_required_services_runtime(content: str, plugin_name: str = None) -> List[Dict[str, Any]]:
+    """
+    Extract required_services_runtime configuration from plugin source code.
+
+    Args:
+        content: Source code content to parse
+        plugin_name: Optional plugin name for logging
+
+    Returns:
+        List of service runtime configurations
+    """
+    plugin_ref = f" for {plugin_name}" if plugin_name else ""
+    logger.info(f"Extracting required_services_runtime{plugin_ref}")
+
+    # Try multiple extraction patterns
+    services_match = None
+
+    patterns = [
+        ("self.required_services_runtime with nested brackets",
+         r'self\.required_services_runtime\s*=\s*(\[(?:[^\[\]]*|\[[^\[\]]*\])*\])'),
+        ("required_services_runtime with nested brackets",
+         r'(?<!self\.)required_services_runtime\s*=\s*(\[(?:[^\[\]]*|\[[^\[\]]*\])*\])'),
+        ("bracket counting fallback", "BRACKET_COUNTING"),
+    ]
+
+    for pattern_name, pattern in patterns:
+        if pattern == "BRACKET_COUNTING":
+            services_match = extract_with_bracket_counting(content)
+        else:
+            services_match = re.search(pattern, content, re.MULTILINE | re.DOTALL)
+        if services_match:
+            logger.debug(f"Matched services using: {pattern_name}")
+            break
+
+    if not services_match:
+        logger.info(f"No required_services_runtime found{plugin_ref}")
+        return []
+
+    services_str = services_match.group(1)
+
+    # First attempt: clean the string and parse it as a Python literal
+    try:
+        cleaned = clean_for_ast_parsing(services_str)
+        services_list = ast.literal_eval(cleaned)
+        if isinstance(services_list, list):
+            return validate_and_normalize_services(services_list, plugin_name)
+    except (ValueError, SyntaxError) as e:
+        logger.warning(f"ast.literal_eval failed{plugin_ref}: {e}; falling back to manual extraction")
+
+    # Fallback: field-by-field regex extraction
+    return extract_services_manually(content)
+
+
+def extract_with_bracket_counting(content: str):
+    """
+    Extract required_services_runtime using bracket counting to ensure complete capture.
+    """
+    start_pattern = r'(?:self\.)?required_services_runtime\s*=\s*\['
+    start_match = re.search(start_pattern, content)
+
+    if not start_match:
+        return None
+
+    start_pos = start_match.end() - 1  # include the opening bracket
+    bracket_count = 0
+    pos = start_pos
+    in_string = False
+    escape_next = False
+    quote_char = None
+
+    while pos < len(content):
+        char = content[pos]
+
+        if escape_next:
+            escape_next = False
+        elif char == '\\':
+            escape_next = True
+        elif not in_string and char in ['"', "'"]:
+            in_string = True
+            quote_char = char
+        elif in_string and char == quote_char:
+            in_string = False
+            quote_char = None
+        elif not in_string:
+            if char == '[':
+                bracket_count += 1
+            elif char == ']':
+                bracket_count -= 1
+                if bracket_count == 0:
+                    extracted = content[start_pos:pos + 1]
+
+                    # Minimal stand-in for a regex match object
+                    class MockMatch:
+                        def __init__(self, text):
+                            self._text = text
+                        def group(self, n):
+                            return self._text
+
+                    return MockMatch(extracted)
+        pos += 1
+
+    return None
+
+
+def clean_for_ast_parsing(services_str: str) -> str:
+    """
+    Clean up common issues that prevent ast.literal_eval from working.
+
+    Args:
+        services_str: Raw extracted services string
+
+    Returns:
+        Cleaned string ready for parsing
+    """
+    logger.debug(f"Cleaning string of length {len(services_str)}")
+
+    # Remove comments
+    cleaned = re.sub(r'#[^\r\n]*', '', services_str)
+
+    # Remove trailing commas before closing brackets/braces
+    cleaned = re.sub(r',(\s*[\]\}])', r'\1', cleaned)
+
+    # Normalize quotes
+    cleaned = re.sub(r"'([^'\"]*)'(\s*:)", r'"\1"\2', cleaned)  # Keys
+    cleaned = re.sub(r":\s*'([^'\"]*)'", r': "\1"', cleaned)  # String values
+
+    # Validate structure integrity
+    if cleaned.count('[') != cleaned.count(']') or cleaned.count('{') != cleaned.count('}'):
+        logger.warning("Structure validation failed during cleaning, using original")
+        return services_str
+
+    logger.debug(f"Cleaned to length {len(cleaned)}")
+    return cleaned
+
+
+def validate_and_normalize_services(services_list: List[Dict], plugin_name: str = None) -> List[Dict[str, Any]]:
+    """
+    Validate and normalize service configurations.
+
+    Args:
+        services_list: List of service dictionaries
+        plugin_name: Optional plugin name for logging
+
+    Returns:
+        List of validated and normalized service configurations
+    """
+    plugin_ref = f" for {plugin_name}" if plugin_name else ""
+    valid_services = []
+
+    for i, service in enumerate(services_list):
+        if not isinstance(service, dict):
+            logger.warning(f"Service {i} is not a dictionary{plugin_ref}")
+            continue
+
+        if validate_service_structure(service):
+            # Normalize with defaults
+            normalized_service = normalize_service(service)
+            valid_services.append(normalized_service)
+
+            logger.info(f"Valid service: {normalized_service['name']} "
+                        f"(type: {normalized_service['type']}){plugin_ref}")
+
+            if normalized_service.get('required_env_vars'):
+                logger.info(f"Service {normalized_service['name']} requires "
+                            f"{len(normalized_service['required_env_vars'])} environment variables")
+        else:
+            logger.warning(f"Service {i} failed validation{plugin_ref}: {service}")
+
+    return valid_services
+
+
+def validate_service_structure(service: Dict[str, Any]) -> bool:
+    """
+    Validate that a service has all required fields.
+
+    Args:
+        service: Service configuration dictionary
+
+    Returns:
+        True if valid, False otherwise
+    """
+    required_fields = ["name", "source_url", "type", "healthcheck_url"]
+    return all(
+        field in service and
+        isinstance(service[field], str) and
+        service[field].strip()
+        for field in required_fields
+    )
+
+
+def normalize_service(service: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Normalize service configuration with defaults.
+
+    Args:
+        service: Raw service configuration
+
+    Returns:
+        Normalized service configuration
+    """
+    normalized = service.copy()
+
+    # Set defaults for optional fields
+    normalized.setdefault("install_command", "")
+    normalized.setdefault("start_command", "")
+    normalized.setdefault("required_env_vars", [])
+
+    # Ensure required_env_vars is a list
+    if not isinstance(normalized["required_env_vars"], list):
+        normalized["required_env_vars"] = []
+
+    # Clean up string fields
+    for field in ["name", "source_url", "type", "install_command", "start_command", "healthcheck_url"]:
+        if field in normalized:
+            normalized[field] = str(normalized[field]).strip()
+
+    return normalized
+
+
+def extract_services_manually(content: str) -> List[Dict[str, Any]]:
+    """
+    Manual extraction using regex when ast.literal_eval fails.
+
+    Args:
+        content: Full source code content
+
+    Returns:
+        List of manually extracted service configurations
+    """
+    services = []
+
+    # Find the services runtime section
+    start_pattern = r'(?:self\.)?required_services_runtime\s*=\s*\['
+    start_match = re.search(start_pattern, content)
+
+    if not start_match:
+        return services
+
+    # Extract content using bracket counting (reuse existing logic)
+    bracket_match = extract_with_bracket_counting(content)
+    if not bracket_match:
+        return services
+
+    services_content = bracket_match.group(1)
+
+    # Find individual service dictionaries with improved regex
+    service_pattern = r'\{(?:[^{}]|(?:\{[^{}]*\})|(?:\[[^\]]*\]))*\}'
+    service_matches = re.findall(service_pattern, services_content, re.DOTALL)
+
+    for service_str in service_matches:
+        try:
+            service = extract_single_service_manually(service_str)
+            if service and validate_service_structure(service):
+                normalized_service = normalize_service(service)
+                services.append(normalized_service)
+                logger.info(f"Manually extracted service: {normalized_service['name']} "
+                            f"with {len(normalized_service['required_env_vars'])} env vars")
+
+        except Exception as e:
+            logger.warning(f"Failed to parse service manually: {e}")
+            continue
+
+    return services
+
+
+def extract_single_service_manually(service_str: str) -> Optional[Dict[str, Any]]:
+    """
+    Extract a single service configuration using regex.
+
+    Args:
+        service_str: String representation of service dictionary
+
+    Returns:
+        Service configuration dictionary or None
+    """
+    service = {}
+
+    # Extract string fields
+    string_fields = [
+        ('name', r'"name":\s*"([^"]+)"'),
+        ('source_url', r'"source_url":\s*"([^"]+)"'),
+        ('type', r'"type":\s*"([^"]+)"'),
+        ('install_command', r'"install_command":\s*"([^"]*)"'),
+        ('start_command', r'"start_command":\s*"([^"]*)"'),
+        ('healthcheck_url', r'"healthcheck_url":\s*"([^"]+)"')
+    ]
+
+    for field_name, pattern in string_fields:
+        match = re.search(pattern, service_str)
+        service[field_name] = match.group(1) if match else ""
+
+    # Extract required_env_vars array
+    env_vars = []
+    env_vars_match = re.search(
+        r'"required_env_vars":\s*\[(.*?)\]',
+        service_str,
+        re.DOTALL
+    )
+
+    if env_vars_match:
+        env_vars_content = env_vars_match.group(1)
+        env_vars = re.findall(r'"([^"]+)"', env_vars_content)
+
+    service['required_env_vars'] = env_vars
+
+    return service if service.get('name') else None
+
+
+# Convenience function for direct usage
+def extract_from_file(file_path: str, plugin_name: str = None) -> List[Dict[str, Any]]:
+    """
+    Extract services runtime from a file.
+
+    Args:
+        file_path: Path to lifecycle_manager.py file
+        plugin_name: Optional plugin name for logging
+
+    Returns:
+        List of service configurations
+    """
+    try:
+        with open(file_path, 'r', encoding='utf-8') as f:
+            content = f.read()
+        return extract_required_services_runtime(content, plugin_name)
+    except Exception as e:
+        logger.error(f"Failed to read file {file_path}: {e}")
+        return []
+
+
+# if __name__ == "__main__":
+#     # Example usage
+#     test_content = '''
+# self.required_services_runtime = [
+#     {
+#         "name": "cwyd_service",
+#         "source_url": "https://github.com/BrainDriveAI/chat-with-your-documents",
+#         "type": "docker-compose",
+#         "install_command": "",
+#         "start_command": "docker compose up --build -d",
+#         "healthcheck_url": "http://localhost:8000/health",
+#         "required_env_vars": [
+#             "LLM_PROVIDER",
+#             "EMBEDDING_PROVIDER",
+#             "ENABLE_CONTEXTUAL_RETRIEVAL",
+#             "OLLAMA_CONTEXTUAL_LLM_BASE_URL",
+#             "OLLAMA_CONTEXTUAL_LLM_MODEL",
+#             "OLLAMA_LLM_BASE_URL",
+#             "OLLAMA_LLM_MODEL",
+#             "OLLAMA_EMBEDDING_BASE_URL",
+#             "OLLAMA_EMBEDDING_MODEL",
+#             "DOCUMENT_PROCESSOR_API_URL",
+#             "DOCUMENT_PROCESSOR_TIMEOUT",
+#             "DOCUMENT_PROCESSOR_MAX_RETRIES",
+#             "ONE_MORE",
+#         ]
+#     }
+# ]
+#     '''
+
+#     services = extract_required_services_runtime(test_content, "TestPlugin")
+#     print(f"Extracted {len(services)} services:")
+#     for service in services:
+#         print(f"  - {service['name']} ({service['type']})")
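And the file-level convenience wrapper, for completeness (the path is hypothetical):

    from app.plugins.service_installler.service_runtime_extractor import extract_from_file

    services = extract_from_file("plugins/demo_plugin/lifecycle_manager.py", "demo_plugin")
    for svc in services:
        print(svc["name"], svc["type"], len(svc["required_env_vars"]))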
+ """ + try: + logger.info("Stopping all plugin services...") + + async for db in get_db(): + repo = PluginRepository(db) + service_runtimes = await repo.get_all_service_runtimes() + + if not service_runtimes: + logger.info("No plugin services found in the database to stop.") + return + + logger.info(f"Found {len(service_runtimes)} service runtimes to stop.") + + for service_runtime in service_runtimes: + plugin_slug = service_runtime.plugin_slug + if not plugin_slug: + logger.warning("Skipping service with no plugin_slug", name=service_runtime.get("name")) + continue + + await stop_plugin_services([service_runtime], plugin_slug) + + except Exception as e: + logger.error("Error during shutdown of plugin services", error=str(e)) + # Don't reraise, allow the application to continue shutting down gracefully diff --git a/backend/main.py b/backend/main.py index e6f0152..efaed28 100644 --- a/backend/main.py +++ b/backend/main.py @@ -29,6 +29,7 @@ from app.core.init_db import init_db from app.models import UserRole from app.core.database import db_factory, get_db +from app.plugins.service_installler.start_stop_plugin_services import start_plugin_services_on_startup, stop_all_plugin_services_on_shutdown # Configure standard logging logging.basicConfig( @@ -93,11 +94,15 @@ async def lifespan(app: FastAPI): await session.commit() logger.info("✅ Default roles created successfully") + # Start plugin services + await start_plugin_services_on_startup() + yield except Exception as e: logger.error(f"❌ Error during startup: {e}") raise finally: + await stop_all_plugin_services_on_shutdown() # Cleanup (if needed) if not settings.USE_JSON_STORAGE and db_factory.engine: await db_factory.engine.dispose() diff --git a/backend/migrations/versions/4f726504a718_add_user_id_and_plugin_slug_to_plugin_.py b/backend/migrations/versions/4f726504a718_add_user_id_and_plugin_slug_to_plugin_.py new file mode 100644 index 0000000..91e7787 --- /dev/null +++ b/backend/migrations/versions/4f726504a718_add_user_id_and_plugin_slug_to_plugin_.py @@ -0,0 +1,53 @@ +"""add user_id and plugin_slug to plugin_service_runtime + +Revision ID: 4f726504a718 +Revises: 64046e143e97 +Create Date: 2025-08-20 13:43:35.020370 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
diff --git a/backend/migrations/versions/4f726504a718_add_user_id_and_plugin_slug_to_plugin_.py b/backend/migrations/versions/4f726504a718_add_user_id_and_plugin_slug_to_plugin_.py
new file mode 100644
index 0000000..91e7787
--- /dev/null
+++ b/backend/migrations/versions/4f726504a718_add_user_id_and_plugin_slug_to_plugin_.py
@@ -0,0 +1,53 @@
+"""add user_id and plugin_slug to plugin_service_runtime
+
+Revision ID: 4f726504a718
+Revises: 64046e143e97
+Create Date: 2025-08-20 13:43:35.020370
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = '4f726504a718'
+down_revision: Union[str, None] = '64046e143e97'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    inspector = sa.inspect(conn)
+
+    if "plugin_service_runtime" in inspector.get_table_names():
+        columns = [col["name"] for col in inspector.get_columns("plugin_service_runtime")]
+
+        # Add user_id column if missing
+        if "user_id" not in columns:
+            op.add_column(
+                "plugin_service_runtime",
+                sa.Column("user_id", sa.String(32), nullable=False, server_default="")
+            )
+            # Add foreign key constraint
+            op.create_foreign_key(
+                "fk_plugin_service_runtime_user_id",
+                "plugin_service_runtime",
+                "users",
+                ["user_id"],
+                ["id"]
+            )
+
+        # Add plugin_slug column if missing
+        if "plugin_slug" not in columns:
+            op.add_column(
+                "plugin_service_runtime",
+                sa.Column("plugin_slug", sa.String(), nullable=False, server_default="")
+            )
+
+def downgrade() -> None:
+    op.drop_constraint("fk_plugin_service_runtime_user_id", "plugin_service_runtime", type_="foreignkey")
+    op.drop_column("plugin_service_runtime", "user_id")
+    op.drop_column("plugin_service_runtime", "plugin_slug")
diff --git a/backend/migrations/versions/64046e143e97_add_required_services_runtime_to_plugin_.py b/backend/migrations/versions/64046e143e97_add_required_services_runtime_to_plugin_.py
new file mode 100644
index 0000000..eba31cd
--- /dev/null
+++ b/backend/migrations/versions/64046e143e97_add_required_services_runtime_to_plugin_.py
@@ -0,0 +1,63 @@
+"""add required_services_runtime to plugin table
+
+Revision ID: 64046e143e97
+Revises: cb95bbe8b720
+Create Date: 2025-08-19 21:31:08.236450
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = '64046e143e97'
+down_revision: Union[str, None] = 'cb95bbe8b720'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    inspector = sa.inspect(conn)
+    columns = [col["name"] for col in inspector.get_columns("plugin")]
+
+    # Only add column if it doesn't already exist
+    if "required_services_runtime" not in columns:
+        op.add_column(
+            "plugin",
+            sa.Column("required_services_runtime", sa.Text(), nullable=True)
+        )
+
+    # Create plugin_service_runtime table if it doesn't exist
+    if "plugin_service_runtime" not in inspector.get_table_names():
+        op.create_table(
+            "plugin_service_runtime",
+            sa.Column("id", sa.String(), primary_key=True),
+            sa.Column("plugin_id", sa.String(), nullable=False),
+            sa.Column("name", sa.String(), nullable=False),
+            sa.Column("source_url", sa.String(), nullable=True),
+            sa.Column("type", sa.String(), nullable=True),
+            sa.Column("install_command", sa.Text(), nullable=True),
+            sa.Column("start_command", sa.Text(), nullable=True),
+            sa.Column("healthcheck_url", sa.String(), nullable=True),
+            sa.Column("required_env_vars", sa.Text(), nullable=True),
+            sa.Column("status", sa.String(), server_default="pending", nullable=True),
+            sa.Column("created_at", sa.DateTime(), nullable=True),
+            sa.Column("updated_at", sa.DateTime(), nullable=True),
+            sa.Column("user_id", sa.String(), nullable=False),
+            sa.ForeignKeyConstraint(["plugin_id"], ["plugin.id"], ondelete="CASCADE"),
+        )
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    inspector = sa.inspect(conn)
+
+    if "plugin_service_runtime" in inspector.get_table_names():
+        op.drop_table("plugin_service_runtime")
+
+    columns = [col["name"] for col in inspector.get_columns("plugin")]
+    if "required_services_runtime" in columns:
+        op.drop_column("plugin", "required_services_runtime")
diff --git a/backend/migrations/versions/c3edaf85d73f_merge_heads_for_hierarchical_nav_plugin_.py b/backend/migrations/versions/c3edaf85d73f_merge_heads_for_hierarchical_nav_plugin_.py
new file mode 100644
index 0000000..97c7889
--- /dev/null
+++ b/backend/migrations/versions/c3edaf85d73f_merge_heads_for_hierarchical_nav_plugin_.py
@@ -0,0 +1,26 @@
+"""merge heads for hierarchical nav + plugin service runtime
+
+Revision ID: c3edaf85d73f
+Revises: 219da9748f46, 4f726504a718
+Create Date: 2025-08-21 17:38:06.586550
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'c3edaf85d73f'
+down_revision: Union[str, Sequence[str], None] = ('219da9748f46', '4f726504a718')
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    pass
+
+
+def downgrade() -> None:
+    pass
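Taken together, a minimal smoke test of the new startup/shutdown path (purely illustrative; assumes a database that already holds service rows):

    import asyncio
    from app.plugins.service_installler.start_stop_plugin_services import (
        start_plugin_services_on_startup,
        stop_all_plugin_services_on_shutdown,
    )

    async def main():
        await start_plugin_services_on_startup()      # pending/stopped/running rows get started
        await stop_all_plugin_services_on_shutdown()  # mirrors the FastAPI shutdown hook

    asyncio.run(main())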