From 797f84d55e943dd685614664247728ed73216ea4 Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Fri, 22 Aug 2025 13:51:20 -0500 Subject: [PATCH 01/11] feat: added client endpoint to retrieve creds --- .vscode/settings.json | 4 ++-- dreadnode/api/client.py | 13 +++++++++++++ dreadnode/api/models.py | 7 +++++++ 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 1179470a..2ef0b41b 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -8,8 +8,8 @@ "editor.defaultFormatter": "charliermarsh.ruff" }, "python.testing.pytestArgs": [ - "dreadnode_cli" + "tests" ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true -} +} \ No newline at end of file diff --git a/dreadnode/api/client.py b/dreadnode/api/client.py index 12243907..cf20d6b5 100644 --- a/dreadnode/api/client.py +++ b/dreadnode/api/client.py @@ -12,6 +12,7 @@ from dreadnode.api.models import ( AccessRefreshTokenResponse, + ContainerRegistryCredentials, DeviceCodeResponse, GithubTokenResponse, MetricAggregationType, @@ -538,3 +539,15 @@ def get_user_data_credentials(self) -> UserDataCredentials: """ response = self._request("GET", "/user-data/credentials") return UserDataCredentials(**response.json()) + + # Container registry access + + def get_container_registry_credentials(self) -> ContainerRegistryCredentials: + """ + Retrieves container registry credentials for Docker image access. + + Returns: + The container registry credentials object. 
+ """ + response = self._request("GET", "/platform/container-registry/credentials") + return ContainerRegistryCredentials(**response.json()) diff --git a/dreadnode/api/models.py b/dreadnode/api/models.py index 61c52dda..e310058b 100644 --- a/dreadnode/api/models.py +++ b/dreadnode/api/models.py @@ -43,6 +43,13 @@ class UserDataCredentials(BaseModel): endpoint: str | None +class ContainerRegistryCredentials(BaseModel): + registry: str + username: str + password: str + expires_at: datetime + + # Auth From 2b826169e5e4432bb61cf227362a1d7877c03d46 Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Sat, 23 Aug 2025 07:38:10 -0500 Subject: [PATCH 02/11] wip --- dreadnode/api/client.py | 1 + dreadnode/cli/main.py | 4 +++- pyproject.toml | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/dreadnode/api/client.py b/dreadnode/api/client.py index cf20d6b5..11bbff1d 100644 --- a/dreadnode/api/client.py +++ b/dreadnode/api/client.py @@ -100,6 +100,7 @@ def __init__( headers=headers, base_url=self._base_url, timeout=30, + cookies=_cookies, ) if debug: diff --git a/dreadnode/cli/main.py b/dreadnode/cli/main.py index ccb732e3..5b78cd2f 100644 --- a/dreadnode/cli/main.py +++ b/dreadnode/cli/main.py @@ -18,6 +18,7 @@ download_and_unzip_archive, validate_server_for_clone, ) +from dreadnode.cli.platform import cli as platform_cli from dreadnode.cli.profile import cli as profile_cli from dreadnode.config import ServerConfig, UserConfig from dreadnode.constants import DEBUG, PLATFORM_BASE_URL @@ -26,8 +27,9 @@ cli["--help"].group = "Meta" -cli.command(profile_cli) cli.command(agent_cli) +cli.command(platform_cli) +cli.command(profile_cli) @cli.meta.default diff --git a/pyproject.toml b/pyproject.toml index caaba069..afbbf033 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ presidio-analyzer = "^2.2.359" [tool.poetry.extras] training = ["transformers"] multimodal = ["pillow", "soundfile", "moviepy"] +platform = ["docker"] all = ["multimodal", "training"] 
[tool.poetry.group.dev.dependencies] From f8eaefc077d0be2e5c7c54537a97641981f320b7 Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Tue, 26 Aug 2025 23:50:40 -0500 Subject: [PATCH 03/11] feat: added platform command --- dreadnode/api/client.py | 21 ++- dreadnode/api/models.py | 16 ++ dreadnode/cli/platform/__init__.py | 3 + dreadnode/cli/platform/check_for_updates.py | 45 +++++ dreadnode/cli/platform/cli.py | 75 +++++++++ dreadnode/cli/platform/configure.py | 9 + dreadnode/cli/platform/constants.py | 10 ++ dreadnode/cli/platform/docker/__init__.py | 77 +++++++++ dreadnode/cli/platform/docker/download.py | 121 ++++++++++++++ dreadnode/cli/platform/docker/login.py | 22 +++ dreadnode/cli/platform/docker/start.py | 56 +++++++ dreadnode/cli/platform/init.py | 125 ++++++++++++++ dreadnode/cli/platform/templates/.api.env.j2 | 75 +++++++++ dreadnode/cli/platform/templates/.ui.env.j2 | 18 ++ .../platform/templates/docker-compose.yaml.j2 | 154 ++++++++++++++++++ dreadnode/cli/platform/utils.py | 60 +++++++ dreadnode/constants.py | 2 + poetry.lock | 40 ++++- pyproject.toml | 6 +- 19 files changed, 925 insertions(+), 10 deletions(-) create mode 100644 dreadnode/cli/platform/__init__.py create mode 100644 dreadnode/cli/platform/check_for_updates.py create mode 100644 dreadnode/cli/platform/cli.py create mode 100644 dreadnode/cli/platform/configure.py create mode 100644 dreadnode/cli/platform/constants.py create mode 100644 dreadnode/cli/platform/docker/__init__.py create mode 100644 dreadnode/cli/platform/docker/download.py create mode 100644 dreadnode/cli/platform/docker/login.py create mode 100644 dreadnode/cli/platform/docker/start.py create mode 100644 dreadnode/cli/platform/init.py create mode 100644 dreadnode/cli/platform/templates/.api.env.j2 create mode 100644 dreadnode/cli/platform/templates/.ui.env.j2 create mode 100644 dreadnode/cli/platform/templates/docker-compose.yaml.j2 create mode 100644 dreadnode/cli/platform/utils.py diff --git a/dreadnode/api/client.py 
b/dreadnode/api/client.py index 11bbff1d..1a2f2c3f 100644 --- a/dreadnode/api/client.py +++ b/dreadnode/api/client.py @@ -19,6 +19,7 @@ Project, RawRun, RawTask, + RegistryImageDetails, Run, RunSummary, StatusFilter, @@ -550,5 +551,23 @@ def get_container_registry_credentials(self) -> ContainerRegistryCredentials: Returns: The container registry credentials object. """ - response = self._request("GET", "/platform/container-registry/credentials") + response = self.request("POST", "/platform/registry-token") return ContainerRegistryCredentials(**response.json()) + + def get_platform_releases( + self, arch: str, tag: str, services: list[str], cli_version: str + ) -> RegistryImageDetails: + """ + Resolves the platform releases for the current project. + + Returns: + The resolved platform releases as a ResolveReleasesResponse object. + """ + payload = { + "arch": arch, + "tag": tag, + "services": services, + "cli_version": cli_version, + } + response = self.request("POST", "/platform/get-releases", json_data=payload) + return RegistryImageDetails(**response.json()) diff --git a/dreadnode/api/models.py b/dreadnode/api/models.py index e310058b..250b5fa7 100644 --- a/dreadnode/api/models.py +++ b/dreadnode/api/models.py @@ -50,6 +50,22 @@ class ContainerRegistryCredentials(BaseModel): expires_at: datetime +class PlatformImage(BaseModel): + service: str + uri: str + digest: str + version: str + + @property + def full_uri(self) -> str: + return f"{self.uri}@{self.digest}" + + +class RegistryImageDetails(BaseModel): + version: str + images: list[PlatformImage] + + # Auth diff --git a/dreadnode/cli/platform/__init__.py b/dreadnode/cli/platform/__init__.py new file mode 100644 index 00000000..7a874c7c --- /dev/null +++ b/dreadnode/cli/platform/__init__.py @@ -0,0 +1,3 @@ +from dreadnode.cli.platform.cli import cli + +__all__ = ["cli"] diff --git a/dreadnode/cli/platform/check_for_updates.py b/dreadnode/cli/platform/check_for_updates.py new file mode 100644 index 
00000000..608ecbe6 --- /dev/null +++ b/dreadnode/cli/platform/check_for_updates.py @@ -0,0 +1,45 @@ +import rich + +from dreadnode.cli.api import create_api_client +from dreadnode.cli.platform.constants import SERVICES +from dreadnode.cli.platform.utils import get_local_arch, get_local_cache_dir, get_local_version + + +def check_for_updates() -> None: + import importlib.metadata # noqa: PLC0415 + + local_cache_dir = get_local_cache_dir() + rich.print(f"Checking local cache directory: {local_cache_dir}") + + if not local_cache_dir.exists(): + rich.print( + "Local cache directory does not exist. Please run \n[dim]$[/dim] [bold green]dreadnode platform init[/bold green]" + ) + return + + arch = get_local_arch() + api_client = create_api_client() + registry_image_details = api_client.get_platform_releases( + arch=arch, + tag="latest", + services=SERVICES, + cli_version=importlib.metadata.version("dreadnode"), + ) + + local_image_details = get_local_version() + + for image_detail in local_image_details.images: + for remote_image_detail in registry_image_details.images: + if image_detail.service == remote_image_detail.service: + if image_detail.version != remote_image_detail.version: + rich.print( + f"[yellow]Update available for {image_detail.service}: " + f"{image_detail.version} -> {remote_image_detail.version}[/yellow]" + ) + else: + rich.print( + f"[green]{image_detail.service} is up to date: {image_detail.version}[/green]" + ) + rich.print( + "[blue]You can update with:[/blue]\n[dim]$[/dim] [bold green]dreadnode platform update[/bold green]" + ) diff --git a/dreadnode/cli/platform/cli.py b/dreadnode/cli/platform/cli.py new file mode 100644 index 00000000..bb43dd64 --- /dev/null +++ b/dreadnode/cli/platform/cli.py @@ -0,0 +1,75 @@ +import cyclopts + +from dreadnode.cli.platform.check_for_updates import check_for_updates as check_for_updates_ +from dreadnode.cli.platform.configure import configure_platform +from dreadnode.cli.platform.docker.download import download 
as download_platform +from dreadnode.cli.platform.docker.login import docker_login +from dreadnode.cli.platform.docker.start import start as start_platform +from dreadnode.cli.platform.docker.start import stop as stop_platform +from dreadnode.cli.platform.init import init as init_platform +from dreadnode.cli.platform.init import initialized as platform_initilized + +cli = cyclopts.App("platform", help="Run and manage the platform.", help_flags=[]) + + +@cli.command() +def init(tag: str = "latest", arch: str | None = None) -> None: + """ + Initialize the platform. + """ + init_platform(tag=tag, arch=arch) + + +@cli.command() +def download(tag: str = "latest", arch: str | None = None) -> None: + """ + Download the platform files. + """ + docker_login() + + if not platform_initilized() or tag != "latest" or arch: + init_platform(tag=tag, arch=arch) + + download_platform() + + +@cli.command() +def configure() -> None: + """ + Configure the platform. + """ + configure_platform() + + +@cli.command() +def start() -> None: + """ + Start the platform services. + """ + start_platform() + + +@cli.command() +def stop() -> None: + """ + Stop the platform services. + """ + stop_platform() + + +@cli.command() +def check_for_updates() -> None: + """ + Check for platform updates. + """ + check_for_updates_() + + +@cli.command() +def update() -> None: + """ + Update the platform. 
+ """ + stop_platform() + download_platform() + start_platform() diff --git a/dreadnode/cli/platform/configure.py b/dreadnode/cli/platform/configure.py new file mode 100644 index 00000000..2f2e32c1 --- /dev/null +++ b/dreadnode/cli/platform/configure.py @@ -0,0 +1,9 @@ +import rich + +from dreadnode.cli.platform.utils import get_local_cache_dir + + +def configure_platform() -> None: + rich.print(f"Configure the API by modifying {get_local_cache_dir()}/.api.env") + rich.print(f"Configure the UI by modifying {get_local_cache_dir()}/.ui.env") + rich.print("See https://docs.dreadnode.io/platform/manage for more details.") diff --git a/dreadnode/cli/platform/constants.py b/dreadnode/cli/platform/constants.py new file mode 100644 index 00000000..231e427e --- /dev/null +++ b/dreadnode/cli/platform/constants.py @@ -0,0 +1,10 @@ +from pathlib import Path + +API_SERVICE = "api" +UI_SERVICE = "ui" +SERVICES = [API_SERVICE, UI_SERVICE] + +TEMPLATE_DIR = Path(__file__).parent / "templates" +DOCKER_COMPOSE_TEMPLATE = TEMPLATE_DIR / "docker-compose.yaml.j2" +API_ENV_TEMPLATE = TEMPLATE_DIR / ".api.env.j2" +UI_ENV_TEMPLATE = TEMPLATE_DIR / ".ui.env.j2" diff --git a/dreadnode/cli/platform/docker/__init__.py b/dreadnode/cli/platform/docker/__init__.py new file mode 100644 index 00000000..c74d5b8f --- /dev/null +++ b/dreadnode/cli/platform/docker/__init__.py @@ -0,0 +1,77 @@ +import subprocess +import sys + +import rich + +from dreadnode.cli.platform.utils import get_compose_file_path + + +def run_docker_compose_command( + args: list[str], + compose_file: str | None = None, + project_name: str | None = None, + timeout: int = 300, + command_name: str = "docker compose", + stdin_input: str | None = None, +) -> subprocess.CompletedProcess[str]: + """ + Execute a docker compose command with common error handling and configuration. 
+ + Args: + args: Additional arguments for the docker compose command + compose_file: Path to docker-compose file (optional) + project_name: Docker compose project name (optional) + timeout: Command timeout in seconds + command_name: Name of the command for error messages + stdin_input: Input to pass to stdin (for commands like docker login) + + Returns: + CompletedProcess object with command results + + Raises: + subprocess.CalledProcessError: If command fails + subprocess.TimeoutExpired: If command times out + FileNotFoundError: If docker/docker-compose not found + """ + cmd = ["docker", "compose"] + + # Add compose file + compose_file = compose_file or get_compose_file_path() + cmd.extend(["-f", compose_file]) + + # Add project name if specified + if project_name: + cmd.extend(["-p", project_name]) + + # Add the specific command arguments + cmd.extend(args) + + try: + # Remove capture_output=True to allow real-time streaming + # stdout and stderr will go directly to the terminal + result = subprocess.run( # noqa: S603 + cmd, + check=True, + text=True, + timeout=timeout, + encoding="utf-8", + errors="replace", + input=stdin_input, + ) + + except subprocess.CalledProcessError as e: + rich.print(f"{command_name} failed with exit code {e.returncode}", file=sys.stderr) + raise + + except subprocess.TimeoutExpired: + rich.print(f"{command_name} timed out after {timeout} seconds", file=sys.stderr) + raise + + except FileNotFoundError: + rich.print( + "Docker or docker compose not found. 
Please ensure Docker is installed.", + file=sys.stderr, + ) + raise + + return result diff --git a/dreadnode/cli/platform/docker/download.py b/dreadnode/cli/platform/docker/download.py new file mode 100644 index 00000000..dc977fcc --- /dev/null +++ b/dreadnode/cli/platform/docker/download.py @@ -0,0 +1,121 @@ +import subprocess + +from dreadnode.cli.platform.docker import run_docker_compose_command + +# def download_platform( +# registry: str, username: str, password: str, image_name: str, tag: str +# ) -> Image: +# try: +# import docker # type: ignore[import-untyped,unused-ignore] +# except ImportError as e: +# raise ImportError( +# "Running a local platform requires `docker`. Install with: pip install dreadnode\\[platform]" +# ) from e + +# # Initialize Docker client +# client = docker.from_env() + +# # # Method 1: Login first, then pull +# # client.login( +# # username=username, +# # password=password, +# # registry=registry, +# # ) + +# # # Pull the private image +# # image = client.images.pull(f"{registry}/{image}:{tag}") + +# # Method 2: Pull with auth parameter +# return client.images.pull( +# f"{registry}/{image_name}:{tag}", +# auth_config={"username": username, "password": password}, +# ) + + +# def parse_compose_file(compose_file_path: str) -> dict[str, Any]: +# """Parse Docker Compose file with proper error handling.""" +# try: +# with Path(compose_file_path).open("r", encoding="utf-8") as f: +# compose_config = yaml.safe_load(f) + +# if not compose_config: +# raise ValueError("Empty or invalid compose file") + +# # Validate basic structure +# if not isinstance(compose_config, dict): +# raise TypeError("Compose file must contain a YAML mapping") + +# except yaml.YAMLError as e: +# raise ValueError(f"Invalid YAML syntax: {e}") from e +# except FileNotFoundError as e: +# raise FileNotFoundError(f"Compose file not found: {compose_file_path}") from e + +# return compose_config + + +# def pull_images_from_compose(compose_file_path: str) -> None: +# """Pull 
all images defined in a Docker Compose file.""" + +# # Initialize Docker client +# try: +# import docker # type: ignore[import-untyped,unused-ignore] +# except ImportError as e: +# raise ImportError( +# "Running a local platform requires `docker`. Install with: pip install dreadnode\\[platform]" +# ) from e + +# # Initialize Docker client +# client = docker.from_env() + +# compose_config = parse_compose_file(compose_file_path) + +# # Handle different compose file versions +# services = compose_config.get("services", {}) + +# if not services: +# logger.error("No services found in compose file") +# return + +# for service_name, service_config in services.items(): +# if not isinstance(service_config, dict): +# logger.warning(f"⚠ Skipping invalid service config for '{service_name}'") +# continue + +# image = service_config.get("image") +# if image: +# try: +# logger.info(f"Pulling {image}...") +# client.images.pull(image) +# logger.success(f"✓ Pulled {image}") +# except DockerApiError as e: +# logger.error(f"✗ Failed to pull {image}: {e}") +# else: +# # Handle services with 'build' context instead of 'image' +# build_config = service_config.get("build") +# if build_config: +# logger.warning(f"⚠ Service '{service_name}' uses build context, skipping pull") +# else: +# logger.warning(f"⚠ Service '{service_name}' has no image or build config") + + +def download( + compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 +) -> subprocess.CompletedProcess[str]: + """ + Pull docker images for the platform. 
+ + Args: + compose_file: Path to docker-compose file (optional) + project_name: Docker compose project name (optional) + timeout: Command timeout in seconds + + Returns: + CompletedProcess object with command results + + Raises: + subprocess.CalledProcessError: If command fails + subprocess.TimeoutExpired: If command times out + """ + return run_docker_compose_command( + ["--profile", "run", "pull"], compose_file, project_name, timeout, "Docker compose pull" + ) diff --git a/dreadnode/cli/platform/docker/login.py b/dreadnode/cli/platform/docker/login.py new file mode 100644 index 00000000..5b3d1473 --- /dev/null +++ b/dreadnode/cli/platform/docker/login.py @@ -0,0 +1,22 @@ +import subprocess +import sys + +import rich + +from dreadnode.cli.api import create_api_client + + +def docker_login(): + client = create_api_client() + container_registry_creds = client.get_container_registry_credentials() + + cmd = ["docker", "login", container_registry_creds.registry] + cmd.extend(["--username", container_registry_creds.username]) + cmd.extend(["--password-stdin"]) + + try: + subprocess.run(cmd, input=container_registry_creds.password, text=True, check=True) # noqa: S603 + rich.print(f"Logged in to Docker registry: {container_registry_creds.registry}") + except subprocess.CalledProcessError as e: + rich.print(f"Failed to log in to Docker registry: {e}", file=sys.stderr) + raise diff --git a/dreadnode/cli/platform/docker/start.py b/dreadnode/cli/platform/docker/start.py new file mode 100644 index 00000000..d7f6c867 --- /dev/null +++ b/dreadnode/cli/platform/docker/start.py @@ -0,0 +1,56 @@ +import rich + +from dreadnode.cli.platform.docker import run_docker_compose_command + + +def _start_infra( + compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 +) -> None: + """Start infrastructure services.""" + run_docker_compose_command( + ["up", "-d"], compose_file, project_name, timeout, "Docker compose up (infra)" + ) + + +def _create_storage( + 
compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 +) -> None: + """Create S3 buckets.""" + run_docker_compose_command( + ["--profile", "create-s3-buckets", "up", "-d"], + compose_file, + project_name, + timeout, + "Docker compose up (storage)", + ) + + +def _start_services( + compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 +) -> None: + """Start application services.""" + run_docker_compose_command( + ["--profile", "run", "up", "-d"], + compose_file, + project_name, + timeout, + "Docker compose up (services)", + ) + + +def start( + compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 +) -> None: + """Start all platform services.""" + rich.print("Starting platform services...") + _start_infra(compose_file, project_name, timeout) + _create_storage(compose_file, project_name, timeout) + _start_services(compose_file, project_name, timeout) + + +def stop( + compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 +) -> None: + """Stop platform services.""" + rich.print("Stopping platform services...") + run_docker_compose_command(["stop"], compose_file, project_name, timeout, "Docker compose stop") diff --git a/dreadnode/cli/platform/init.py b/dreadnode/cli/platform/init.py new file mode 100644 index 00000000..229eed8a --- /dev/null +++ b/dreadnode/cli/platform/init.py @@ -0,0 +1,125 @@ +import json +from pathlib import Path + +import rich +from rich.prompt import Confirm + +from dreadnode.api.models import PlatformImage, RegistryImageDetails +from dreadnode.cli.api import create_api_client +from dreadnode.cli.platform.constants import ( + API_ENV_TEMPLATE, + API_SERVICE, + DOCKER_COMPOSE_TEMPLATE, + SERVICES, + UI_ENV_TEMPLATE, + UI_SERVICE, +) +from dreadnode.cli.platform.utils import ( + get_compose_file_path, + get_local_arch, + get_local_cache_dir, + render_with_string_replace, +) + + +def _write_version_manifest( + local_cache_dir: 
Path, resolution_response: RegistryImageDetails +) -> None: + rich.print(f"Writing version file for {resolution_response.version} ...") + version_file = local_cache_dir / ".version" + version_file.write_text(json.dumps(resolution_response.model_dump())) + rich.print(f"Version file written to {version_file}") + + +def _create_docker_compose_file(images: list[PlatformImage]) -> None: + rich.print("Updating Compose template ...") + for image in images: + if image.service == API_SERVICE: + api_image_digest = image.full_uri + elif image.service == UI_SERVICE: + ui_image_digest = image.full_uri + else: + raise ValueError(f"Unknown image service: {image.service}") + render_with_string_replace( + api_image_digest=api_image_digest, + ui_image_digest=ui_image_digest, + template_path=DOCKER_COMPOSE_TEMPLATE, + output_path=get_compose_file_path(), + ) + rich.print(f"Compose file written to {get_compose_file_path()}") + + +def _create_env_files(local_cache_dir: Path) -> None: + rich.print("Updating environment files ...") + + for env_file in [API_ENV_TEMPLATE, UI_ENV_TEMPLATE]: + dest = local_cache_dir / env_file.name + dest.write_text(env_file.read_text()) + rich.print(f"Environment file written to {dest}") + + # concatenate environment variables + api_env = local_cache_dir / API_ENV_TEMPLATE.name + ui_env = local_cache_dir / UI_ENV_TEMPLATE.name + dest = local_cache_dir / ".env" + dest.write_text(f"{api_env.read_text()}\n{ui_env.read_text()}") + rich.print(f"Combined environment file written to {dest}") + + +def _confirm_with_context(action: str, details: str | None = None) -> bool: + """Confirmation with additional context in a panel.""" + return Confirm.ask( + f"[bold red]Are you sure you want to {action}? {details}[/bold red]", default=False + ) + + +def init(tag: str, arch: str | None = None) -> None: + if initialized() and not _confirm_with_context( + "re-initialize the platform", "This will overwrite existing files." 
+ ): + return + + import importlib.metadata # noqa: PLC0415 + + local_cache_dir = get_local_cache_dir() + rich.print(f"Using local cache directory: {local_cache_dir}") + + if not local_cache_dir.exists(): + local_cache_dir.mkdir(parents=True, exist_ok=True) + rich.print(f"Local cache directory created at {local_cache_dir}") + else: + rich.print("Local cache directory already exists.") + + if not arch: + arch = get_local_arch() + api_client = create_api_client() + registry_image_details = api_client.get_platform_releases( + arch=arch, + tag=tag, + services=SERVICES, + cli_version=importlib.metadata.version("dreadnode"), + ) + + _write_version_manifest(local_cache_dir, registry_image_details) + _create_docker_compose_file(registry_image_details.images) + _create_env_files(local_cache_dir) + + rich.print("Initialization complete.") + + +def initialized() -> bool: + rich.print("Checking initialization ...") + local_cache_dir = get_local_cache_dir() + if not local_cache_dir.exists(): + rich.print("Local cache directory does not exist.") + return False + + if not (local_cache_dir / "docker-compose.yaml").exists(): + rich.print("Docker Compose file is missing.") + return False + + if not (local_cache_dir / ".env").exists(): + rich.print("Environment file is missing.") + return False + + rich.print("All required files are present.") + return True diff --git a/dreadnode/cli/platform/templates/.api.env.j2 b/dreadnode/cli/platform/templates/.api.env.j2 new file mode 100644 index 00000000..95be9063 --- /dev/null +++ b/dreadnode/cli/platform/templates/.api.env.j2 @@ -0,0 +1,75 @@ +# API Environment Variables + +# Features +### The double underscore (__) is used to denote nested properties. +## E.g. Turn off OAuth login +FEATURES__OAUTH__ENABLED=False +## E.g. Turn off Crucible +FEATURES__COMPETITIVE_LEARNING__ENABLED=${CRUCIBLE_ENABLED:-False} +## E.g. 
Turn off Spyglass +FEATURES__SPYGLASS__ENABLED=${SPYGLASS_ENABLED:-False} + +# Database +## Used by API and Docker Compose +DATABASE_USER=admin +DATABASE_PASSWORD=dreadnode +DATABASE_NAME=platform +DATABASE_PORT=5432 +DATABASE_HOST=localhost + +# DynamoDB + +# Used by API tests to allow mocking +DYNAMODB_PORT=8085 +DYNAMODB_AWS_ACCESS_KEY_ID=mock +DYNAMODB_AWS_SECRET_ACCESS_KEY=mock +DYNAMODB_AWS_DEFAULT_REGION=us-west-2 +DYNAMODB_URL=http://${PROXY_HOST:-localhost}:${DYNAMODB_PORT} + +# Used by API +AUTHORIZATION_KEY_TABLE="auth-keys" +FLAG_ATTEMPTS_TABLE="flag-attempts" + +# Clickhouse + +# Used by Docker Compose +CLICKHOUSE_TCP_PORT=9009 + +# Perspective +PERSPECTIVE_API_KEY=mock-perspective-api-key + +# API + +API_PORT=8000 + +SECRET_KEY=mock-secret +JWT_SECRET_KEY=mock-jwt-secret +REFRESH_SECRET_KEY=mock-refresh + +GITHUB_CLIENT_SECRET="mock-github-client-secret" +GOOGLE_CLIENT_SECRET="mock-google-client-secret" +BYPASS_GOOGLE_DRIVE="True" +GOOGLE_SERVICE_ACCOUNT='{"mock": "mock"}' + +STRIKES_CLICKHOUSE_HOST=localhost +STRIKES_CLICKHOUSE_USER=admin +STRIKES_CLICKHOUSE_PASSWORD=dreadnode +STRIKES_CLICKHOUSE_DATABASE=platform + +# Used by API to allow mocking and testing and by Docker Compose +S3_AWS_ENDPOINT_URL=http://localhost:9000 + +# Used by API to allow mocking and testing +S3_AWS_DEFAULT_REGION=us-east-1 +S3_AWS_ACCESS_KEY_ID=mock-user +S3_AWS_SECRET_ACCESS_KEY=mock-password +S3_AWS_EXTERNAL_ENDPOINT_URL=http://localhost:9000 + +SPYGLASS_BUCKET_NAME=spyglass +PYTHON_PACKAGE_BUCKET_NAME=python-packages +USER_DATA_BUCKET_NAME=user-data +REQUIRED_BUCKETS="${SPYGLASS_BUCKET_NAME} ${PYTHON_PACKAGE_BUCKET_NAME} ${USER_DATA_BUCKET_NAME}" + +# Slack (Used by API) +SLACK_SIGNING_SECRET=mock-slack-signing-secret +SLACK_BOT_TOKEN=mock-slack-bot-token diff --git a/dreadnode/cli/platform/templates/.ui.env.j2 b/dreadnode/cli/platform/templates/.ui.env.j2 new file mode 100644 index 00000000..d84f4dab --- /dev/null +++ b/dreadnode/cli/platform/templates/.ui.env.j2 @@ 
-0,0 +1,18 @@ +# UI Environment Variables + +# Proxy + +# PROXY_HOST=dreadnode-laptop +# PROXY_PORT=80 +# PROXY_ORIGIN=http://${PROXY_HOST}:80 +# PROXY_DASHBOARD_PORT=8118 + +# CSP (comma separated lists) +# If PROXY_HOST is set, ALLOWED_HOSTS must include match it. +# ALLOWED_HOSTS="laptop" +CSP_CONNECT_SRC="https://www.google.com,https://www.gstatic.com" +CSP_FONT_SRC="https://cdn.jsdelivr.net,https://fonts.gstatic.com" +CSP_FRAME_SRC="https://www.google.com,https://www.gstatic.com" +CSP_IMG_SRC="https://www.gstatic.com" +CSP_SCRIPT_SRC="https://www.google.com,https://www.gstatic.com" +CSP_STYLE_SRC="https://cdn.jsdelivr.net,https://fonts.googleapis.com" diff --git a/dreadnode/cli/platform/templates/docker-compose.yaml.j2 b/dreadnode/cli/platform/templates/docker-compose.yaml.j2 new file mode 100644 index 00000000..aea4c9c5 --- /dev/null +++ b/dreadnode/cli/platform/templates/docker-compose.yaml.j2 @@ -0,0 +1,154 @@ +--- +services: + postgres: + image: docker.io/library/postgres:16 + environment: + - POSTGRES_USER=${DATABASE_USER?Variable not set} + - POSTGRES_PASSWORD=${DATABASE_PASSWORD?Variable not set} + - POSTGRES_DB=${DATABASE_NAME?Variable not set} + ports: + - ${DATABASE_PORT:-5432}:5432 + volumes: + - ${DATABASE_VOLUME:-postgres-data}:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${DATABASE_USER} -d ${DATABASE_NAME}"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + + clickhouse: + image: clickhouse/clickhouse-server:latest + ports: + - ${CLICKHOUSE_HTTP_PORT:-8123}:8123 + - ${CLICKHOUSE_TCP_PORT:-9000}:9000 + volumes: + - ${CLICKHOUSE_VOLUME:-clickhouse-data}:/var/lib/clickhouse + ulimits: + nofile: + soft: 262144 + hard: 262144 + cap_add: + - SYS_NICE + - NET_ADMIN + - IPC_LOCK + - SYS_PTRACE + environment: + - CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 + - CLICKHOUSE_USER=${STRIKES_CLICKHOUSE_USER?Variable not set} + - CLICKHOUSE_PASSWORD=${STRIKES_CLICKHOUSE_PASSWORD?Variable not set} + - 
CLICKHOUSE_DB=${STRIKES_CLICKHOUSE_DATABASE?Variable not set} + + minio: + image: minio/minio:latest + ports: + - ${MINIO_PORT:-9000}:9000 + - ${MINIO_CONSOLE_PORT:-9001}:9001 + volumes: + - ${MINIO_VOLUME:-minio-data}:/data + environment: + - MINIO_ROOT_USER=${S3_AWS_ACCESS_KEY_ID?Variable not set} + - MINIO_ROOT_PASSWORD=${S3_AWS_SECRET_ACCESS_KEY?Variable not set} + command: server /data --console-address ":9001" + + create-s3-buckets: + image: minio/mc:latest + profiles: [create-s3-buckets] + environment: + MC_HOST_minio: http://${S3_AWS_ACCESS_KEY_ID}:${S3_AWS_SECRET_ACCESS_KEY}@minio:${MINIO_PORT:-9000} + BUCKETS: ${REQUIRED_BUCKETS?Variable not set} + entrypoint: + - sh + - -c + - | + until mc ls minio > /dev/null 2>&1; do + sleep 0.5 + done + + for bucket in $$BUCKETS; do + if mc ls minio/$$bucket > /dev/null 2>&1; then + echo "Bucket $$bucket already exists, skipping..." + else + mc mb minio/$$bucket && echo "Created bucket: $$bucket" + fi + done + + echo "All buckets processed successfully" + exit 0 + + traefik: + image: traefik:v3.4 + profiles: [run, run-api, run-ui] + command: + - "--api.insecure=true" + - "--providers.docker=true" + - "--providers.docker.exposedbydefault=false" + - "--entrypoints.web.address=:80" + ports: + - "${PROXY_PORT:-80}:80" + - "${PROXY_DASHBOARD_PORT:-8118}:8080" + volumes: + - "/var/run/docker.sock:/var/run/docker.sock:ro" + restart: unless-stopped + + platform-api: + profiles: [run, run-api] + container_name: api + image: {{ api_image_digest }} + env_file: .api.env + environment: + - SKIP_DB_UPGRADE=0 + # User the Docker service name as the host for inter-service communication + - DATABASE_HOST=postgres + - STRIKES_CLICKHOUSE_HOST=clickhouse + - DYNAMODB_URL=http://dynamodb:8000 + - S3_AWS_ENDPOINT_URL=http://minio:9000 + - S3_AWS_ACCESS_KEY_ID=${S3_AWS_ACCESS_KEY_ID?Variable not set} + - S3_AWS_SECRET_ACCESS_KEY=${S3_AWS_SECRET_ACCESS_KEY?Variable not set} + - 
S3_AWS_EXTERNAL_ENDPOINT_URL=http://${PROXY_HOST:-localhost}:9000 + + ports: + - ${API_PORT:-8000}:8000 + depends_on: + postgres: + condition: service_healthy + labels: + - "traefik.enable=true" + - "traefik.http.routers.api.rule=PathPrefix(`/api`)" + - "traefik.http.routers.api.priority=10" + - "traefik.http.routers.api.entrypoints=web" + - "traefik.http.services.api.loadbalancer.server.port=8000" + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8000/api/health"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + + platform-ui: + depends_on: + platform-api: + condition: service_healthy + profiles: [run, run-ui] + container_name: ui + image: {{ ui_image_digest }} + pull_policy: never + ports: + - ${UI_PORT:-5173}:3000 + env_file: .ui.env + environment: + - NODE_ENV=production + - API_BASE_URL=http://api:8000 + - ORIGIN=${PROXY_PROTOCOL:-http}://${PROXY_HOST:-localhost:80} + - ALLOWED_HOSTS=${PROXY_HOST:-localhost} + labels: + - "traefik.enable=true" + - "traefik.http.routers.ui.rule=PathPrefix(`/`) && !PathPrefix(`/api`)" + - "traefik.http.routers.ui.priority=1" + - "traefik.http.routers.ui.entrypoints=web" + - "traefik.http.services.ui.loadbalancer.server.port=3000" + +volumes: + postgres-data: + clickhouse-data: + minio-data: diff --git a/dreadnode/cli/platform/utils.py b/dreadnode/cli/platform/utils.py new file mode 100644 index 00000000..b6ff4c52 --- /dev/null +++ b/dreadnode/cli/platform/utils.py @@ -0,0 +1,60 @@ +import json +import platform +import typing as t +from pathlib import Path + +from dreadnode.api.models import RegistryImageDetails + +archs = t.Literal["amd64", "arm64"] + + +def get_local_arch() -> archs: + arch = platform.machine() + + # Check for specific architectures + if arch in ["x86_64", "AMD64"]: + return "amd64" + if arch in ["arm64", "aarch64", "ARM64"]: + return "arm64" + raise ValueError(f"Unsupported architecture: {arch}") + + +def get_local_cache_dir() -> Path: + return Path.home() / ".dreadnode" / 
"platform" + + +def get_local_version() -> RegistryImageDetails | None: + local_cache_dir = get_local_cache_dir() + version_file = local_cache_dir / ".version" + if version_file.exists(): + return RegistryImageDetails(**json.loads(version_file.read_text())) + return None + + +def get_compose_file_path() -> Path: + return get_local_cache_dir() / "docker-compose.yaml" + + +def render_with_string_replace( + api_image_digest: str, + ui_image_digest: str, + template_path: str, + output_path: str, +) -> str: + """ + Simple string replacement - lightest option. + Works for basic {{ variable }} patterns. + """ + + with Path(template_path).open() as file: + content = file.read() + + rendered = content.replace("{{ api_image_digest }}", api_image_digest).replace( + "{{ ui_image_digest }}", ui_image_digest + ) + + if output_path: + with Path(output_path).open("w") as file: + file.write(rendered) + + return rendered diff --git a/dreadnode/constants.py b/dreadnode/constants.py index f2888347..cd633979 100644 --- a/dreadnode/constants.py +++ b/dreadnode/constants.py @@ -5,6 +5,8 @@ # Defaults # +# name of the default local storage path +DEFAULT_LOCAL_STORAGE_DIR = ".dreadnode" # name of the default server profile DEFAULT_PROFILE_NAME = "main" # default poll interval for the authentication flow diff --git a/poetry.lock b/poetry.lock index 015a0bd3..39724e40 100644 --- a/poetry.lock +++ b/poetry.lock @@ -385,7 +385,7 @@ version = "2025.8.3" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main", "dev", "platform"] files = [ {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, @@ -490,7 +490,7 @@ version = "3.4.3" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["main", "dev"] +groups = ["main", "dev", "platform"] files = [ {file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}, {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}, @@ -866,6 +866,29 @@ files = [ {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, ] +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.8" +groups = ["platform"] +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + [[package]] name = "docstring-parser" version = "0.17.0" @@ -1303,7 +1326,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "dev"] +groups = ["main", "dev", "platform"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = 
"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -3950,7 +3973,7 @@ version = "311" description = "Python for Window Extensions" optional = false python-versions = "*" -groups = ["main", "dev"] +groups = ["main", "dev", "platform"] files = [ {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, @@ -3973,7 +3996,7 @@ files = [ {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, ] -markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"", platform = "sys_platform == \"win32\""} [[package]] name = "pyyaml" @@ -4278,7 +4301,7 @@ version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] +groups = ["main", "dev", "platform"] files = [ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, @@ -5536,7 +5559,7 @@ version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main", "dev", "platform"] files = [ {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, @@ -6078,9 +6101,10 @@ type = ["pytest-mypy"] [extras] all = [] multimodal = ["moviepy", "pillow", "soundfile"] +platform = [] training = ["transformers"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "608bdd485f2f8fb2d4390f37791f6fdd484c4ca4aa5ef661346c68dd3038f726" +content-hash = "3bba2420a863db24d08eac93beea7dc5d73e04f474073d265f3406336efef0b8" diff --git a/pyproject.toml b/pyproject.toml index afbbf033..f6bf998c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,7 +37,7 @@ presidio-analyzer = "^2.2.359" [tool.poetry.extras] training = ["transformers"] multimodal = ["pillow", "soundfile", "moviepy"] -platform = ["docker"] +platform = ["docker", "pyyaml"] all = ["multimodal", "training"] [tool.poetry.group.dev.dependencies] @@ -57,6 +57,10 @@ markdownify = "^1.1.0" mkdocstrings-python = "^1.17.0" ipykernel = "^6.29.5" + +[tool.poetry.group.platform.dependencies] +docker = "^7.1.0" + [build-system] requires = ["poetry-core>=1.0.0", "setuptools>=42", "wheel"] build-backend = "poetry.core.masonry.api" From f9b390414a2a761ee87977ad49c012cb2e0d5d8c Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Tue, 26 Aug 2025 23:59:38 -0500 Subject: [PATCH 04/11] fix: removed old template files --- dreadnode/cli/platform/templates/.api.env.j2 | 75 --------- dreadnode/cli/platform/templates/.ui.env.j2 | 18 -- .../platform/templates/docker-compose.yaml.j2 | 154 ------------------ 3 files changed, 247 deletions(-) diff --git a/dreadnode/cli/platform/templates/.api.env.j2 b/dreadnode/cli/platform/templates/.api.env.j2 index 95be9063..e69de29b 100644 --- 
a/dreadnode/cli/platform/templates/.api.env.j2 +++ b/dreadnode/cli/platform/templates/.api.env.j2 @@ -1,75 +0,0 @@ -# API Environment Variables - -# Features -### The double underscore (__) is used to denote nested properties. -## E.g. Turn off OAuth login -FEATURES__OAUTH__ENABLED=False -## E.g. Turn off Crucible -FEATURES__COMPETITIVE_LEARNING__ENABLED=${CRUCIBLE_ENABLED:-False} -## E.g. Turn off Spyglass -FEATURES__SPYGLASS__ENABLED=${SPYGLASS_ENABLED:-False} - -# Database -## Used by API and Docker Compose -DATABASE_USER=admin -DATABASE_PASSWORD=dreadnode -DATABASE_NAME=platform -DATABASE_PORT=5432 -DATABASE_HOST=localhost - -# DynamoDB - -# Used by API tests to allow mocking -DYNAMODB_PORT=8085 -DYNAMODB_AWS_ACCESS_KEY_ID=mock -DYNAMODB_AWS_SECRET_ACCESS_KEY=mock -DYNAMODB_AWS_DEFAULT_REGION=us-west-2 -DYNAMODB_URL=http://${PROXY_HOST:-localhost}:${DYNAMODB_PORT} - -# Used by API -AUTHORIZATION_KEY_TABLE="auth-keys" -FLAG_ATTEMPTS_TABLE="flag-attempts" - -# Clickhouse - -# Used by Docker Compose -CLICKHOUSE_TCP_PORT=9009 - -# Perspective -PERSPECTIVE_API_KEY=mock-perspective-api-key - -# API - -API_PORT=8000 - -SECRET_KEY=mock-secret -JWT_SECRET_KEY=mock-jwt-secret -REFRESH_SECRET_KEY=mock-refresh - -GITHUB_CLIENT_SECRET="mock-github-client-secret" -GOOGLE_CLIENT_SECRET="mock-google-client-secret" -BYPASS_GOOGLE_DRIVE="True" -GOOGLE_SERVICE_ACCOUNT='{"mock": "mock"}' - -STRIKES_CLICKHOUSE_HOST=localhost -STRIKES_CLICKHOUSE_USER=admin -STRIKES_CLICKHOUSE_PASSWORD=dreadnode -STRIKES_CLICKHOUSE_DATABASE=platform - -# Used by API to allow mocking and testing and by Docker Compose -S3_AWS_ENDPOINT_URL=http://localhost:9000 - -# Used by API to allow mocking and testing -S3_AWS_DEFAULT_REGION=us-east-1 -S3_AWS_ACCESS_KEY_ID=mock-user -S3_AWS_SECRET_ACCESS_KEY=mock-password -S3_AWS_EXTERNAL_ENDPOINT_URL=http://localhost:9000 - -SPYGLASS_BUCKET_NAME=spyglass -PYTHON_PACKAGE_BUCKET_NAME=python-packages -USER_DATA_BUCKET_NAME=user-data 
-REQUIRED_BUCKETS="${SPYGLASS_BUCKET_NAME} ${PYTHON_PACKAGE_BUCKET_NAME} ${USER_DATA_BUCKET_NAME}" - -# Slack (Used by API) -SLACK_SIGNING_SECRET=mock-slack-signing-secret -SLACK_BOT_TOKEN=mock-slack-bot-token diff --git a/dreadnode/cli/platform/templates/.ui.env.j2 b/dreadnode/cli/platform/templates/.ui.env.j2 index d84f4dab..e69de29b 100644 --- a/dreadnode/cli/platform/templates/.ui.env.j2 +++ b/dreadnode/cli/platform/templates/.ui.env.j2 @@ -1,18 +0,0 @@ -# UI Environment Variables - -# Proxy - -# PROXY_HOST=dreadnode-laptop -# PROXY_PORT=80 -# PROXY_ORIGIN=http://${PROXY_HOST}:80 -# PROXY_DASHBOARD_PORT=8118 - -# CSP (comma separated lists) -# If PROXY_HOST is set, ALLOWED_HOSTS must include match it. -# ALLOWED_HOSTS="laptop" -CSP_CONNECT_SRC="https://www.google.com,https://www.gstatic.com" -CSP_FONT_SRC="https://cdn.jsdelivr.net,https://fonts.gstatic.com" -CSP_FRAME_SRC="https://www.google.com,https://www.gstatic.com" -CSP_IMG_SRC="https://www.gstatic.com" -CSP_SCRIPT_SRC="https://www.google.com,https://www.gstatic.com" -CSP_STYLE_SRC="https://cdn.jsdelivr.net,https://fonts.googleapis.com" diff --git a/dreadnode/cli/platform/templates/docker-compose.yaml.j2 b/dreadnode/cli/platform/templates/docker-compose.yaml.j2 index aea4c9c5..e69de29b 100644 --- a/dreadnode/cli/platform/templates/docker-compose.yaml.j2 +++ b/dreadnode/cli/platform/templates/docker-compose.yaml.j2 @@ -1,154 +0,0 @@ ---- -services: - postgres: - image: docker.io/library/postgres:16 - environment: - - POSTGRES_USER=${DATABASE_USER?Variable not set} - - POSTGRES_PASSWORD=${DATABASE_PASSWORD?Variable not set} - - POSTGRES_DB=${DATABASE_NAME?Variable not set} - ports: - - ${DATABASE_PORT:-5432}:5432 - volumes: - - ${DATABASE_VOLUME:-postgres-data}:/var/lib/postgresql/data - healthcheck: - test: ["CMD-SHELL", "pg_isready -U ${DATABASE_USER} -d ${DATABASE_NAME}"] - interval: 10s - timeout: 5s - retries: 5 - start_period: 30s - - clickhouse: - image: clickhouse/clickhouse-server:latest - ports: - 
- ${CLICKHOUSE_HTTP_PORT:-8123}:8123 - - ${CLICKHOUSE_TCP_PORT:-9000}:9000 - volumes: - - ${CLICKHOUSE_VOLUME:-clickhouse-data}:/var/lib/clickhouse - ulimits: - nofile: - soft: 262144 - hard: 262144 - cap_add: - - SYS_NICE - - NET_ADMIN - - IPC_LOCK - - SYS_PTRACE - environment: - - CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 - - CLICKHOUSE_USER=${STRIKES_CLICKHOUSE_USER?Variable not set} - - CLICKHOUSE_PASSWORD=${STRIKES_CLICKHOUSE_PASSWORD?Variable not set} - - CLICKHOUSE_DB=${STRIKES_CLICKHOUSE_DATABASE?Variable not set} - - minio: - image: minio/minio:latest - ports: - - ${MINIO_PORT:-9000}:9000 - - ${MINIO_CONSOLE_PORT:-9001}:9001 - volumes: - - ${MINIO_VOLUME:-minio-data}:/data - environment: - - MINIO_ROOT_USER=${S3_AWS_ACCESS_KEY_ID?Variable not set} - - MINIO_ROOT_PASSWORD=${S3_AWS_SECRET_ACCESS_KEY?Variable not set} - command: server /data --console-address ":9001" - - create-s3-buckets: - image: minio/mc:latest - profiles: [create-s3-buckets] - environment: - MC_HOST_minio: http://${S3_AWS_ACCESS_KEY_ID}:${S3_AWS_SECRET_ACCESS_KEY}@minio:${MINIO_PORT:-9000} - BUCKETS: ${REQUIRED_BUCKETS?Variable not set} - entrypoint: - - sh - - -c - - | - until mc ls minio > /dev/null 2>&1; do - sleep 0.5 - done - - for bucket in $$BUCKETS; do - if mc ls minio/$$bucket > /dev/null 2>&1; then - echo "Bucket $$bucket already exists, skipping..." 
- else - mc mb minio/$$bucket && echo "Created bucket: $$bucket" - fi - done - - echo "All buckets processed successfully" - exit 0 - - traefik: - image: traefik:v3.4 - profiles: [run, run-api, run-ui] - command: - - "--api.insecure=true" - - "--providers.docker=true" - - "--providers.docker.exposedbydefault=false" - - "--entrypoints.web.address=:80" - ports: - - "${PROXY_PORT:-80}:80" - - "${PROXY_DASHBOARD_PORT:-8118}:8080" - volumes: - - "/var/run/docker.sock:/var/run/docker.sock:ro" - restart: unless-stopped - - platform-api: - profiles: [run, run-api] - container_name: api - image: {{ api_image_digest }} - env_file: .api.env - environment: - - SKIP_DB_UPGRADE=0 - # User the Docker service name as the host for inter-service communication - - DATABASE_HOST=postgres - - STRIKES_CLICKHOUSE_HOST=clickhouse - - DYNAMODB_URL=http://dynamodb:8000 - - S3_AWS_ENDPOINT_URL=http://minio:9000 - - S3_AWS_ACCESS_KEY_ID=${S3_AWS_ACCESS_KEY_ID?Variable not set} - - S3_AWS_SECRET_ACCESS_KEY=${S3_AWS_SECRET_ACCESS_KEY?Variable not set} - - S3_AWS_EXTERNAL_ENDPOINT_URL=http://${PROXY_HOST:-localhost}:9000 - - ports: - - ${API_PORT:-8000}:8000 - depends_on: - postgres: - condition: service_healthy - labels: - - "traefik.enable=true" - - "traefik.http.routers.api.rule=PathPrefix(`/api`)" - - "traefik.http.routers.api.priority=10" - - "traefik.http.routers.api.entrypoints=web" - - "traefik.http.services.api.loadbalancer.server.port=8000" - healthcheck: - test: ["CMD-SHELL", "curl -f http://localhost:8000/api/health"] - interval: 10s - timeout: 5s - retries: 5 - start_period: 30s - - platform-ui: - depends_on: - platform-api: - condition: service_healthy - profiles: [run, run-ui] - container_name: ui - image: {{ ui_image_digest }} - pull_policy: never - ports: - - ${UI_PORT:-5173}:3000 - env_file: .ui.env - environment: - - NODE_ENV=production - - API_BASE_URL=http://api:8000 - - ORIGIN=${PROXY_PROTOCOL:-http}://${PROXY_HOST:-localhost:80} - - ALLOWED_HOSTS=${PROXY_HOST:-localhost} - 
labels: - - "traefik.enable=true" - - "traefik.http.routers.ui.rule=PathPrefix(`/`) && !PathPrefix(`/api`)" - - "traefik.http.routers.ui.priority=1" - - "traefik.http.routers.ui.entrypoints=web" - - "traefik.http.services.ui.loadbalancer.server.port=3000" - -volumes: - postgres-data: - clickhouse-data: - minio-data: From 621e0dd5f870e4fe42f5ce3d1a1e6dd528c602cc Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Tue, 2 Sep 2025 10:30:27 -0500 Subject: [PATCH 05/11] feat: install platform from SDK --- dreadnode/api/client.py | 25 +- dreadnode/api/models.py | 8 +- dreadnode/cli/platform/check_for_updates.py | 45 -- dreadnode/cli/platform/cli.py | 77 ++-- dreadnode/cli/platform/configure.py | 9 - dreadnode/cli/platform/constants.py | 9 +- dreadnode/cli/platform/docker/__init__.py | 77 ---- dreadnode/cli/platform/docker/download.py | 121 ------ dreadnode/cli/platform/docker/login.py | 22 - dreadnode/cli/platform/docker/start.py | 56 --- dreadnode/cli/platform/docker_.py | 191 +++++++++ dreadnode/cli/platform/download.py | 139 +++++++ dreadnode/cli/platform/init.py | 125 ------ dreadnode/cli/platform/login.py | 20 + dreadnode/cli/platform/schemas.py | 83 ++++ dreadnode/cli/platform/start.py | 35 ++ dreadnode/cli/platform/stop.py | 18 + dreadnode/cli/platform/templates/.ui.env.j2 | 0 .../platform/templates/docker-compose.yaml.j2 | 0 dreadnode/cli/platform/upgrade.py | 81 ++++ dreadnode/cli/platform/utils.py | 60 --- .../.api.env.j2 => utils/__init__.py} | 0 dreadnode/cli/platform/utils/env_merge.py | 384 ++++++++++++++++++ dreadnode/cli/platform/utils/printing.py | 43 ++ dreadnode/cli/platform/utils/versions.py | 164 ++++++++ 25 files changed, 1216 insertions(+), 576 deletions(-) delete mode 100644 dreadnode/cli/platform/check_for_updates.py delete mode 100644 dreadnode/cli/platform/configure.py delete mode 100644 dreadnode/cli/platform/docker/__init__.py delete mode 100644 dreadnode/cli/platform/docker/download.py delete mode 100644 dreadnode/cli/platform/docker/login.py 
delete mode 100644 dreadnode/cli/platform/docker/start.py create mode 100644 dreadnode/cli/platform/docker_.py create mode 100644 dreadnode/cli/platform/download.py delete mode 100644 dreadnode/cli/platform/init.py create mode 100644 dreadnode/cli/platform/login.py create mode 100644 dreadnode/cli/platform/schemas.py create mode 100644 dreadnode/cli/platform/start.py create mode 100644 dreadnode/cli/platform/stop.py delete mode 100644 dreadnode/cli/platform/templates/.ui.env.j2 delete mode 100644 dreadnode/cli/platform/templates/docker-compose.yaml.j2 create mode 100644 dreadnode/cli/platform/upgrade.py delete mode 100644 dreadnode/cli/platform/utils.py rename dreadnode/cli/platform/{templates/.api.env.j2 => utils/__init__.py} (100%) create mode 100644 dreadnode/cli/platform/utils/env_merge.py create mode 100644 dreadnode/cli/platform/utils/printing.py create mode 100644 dreadnode/cli/platform/utils/versions.py diff --git a/dreadnode/api/client.py b/dreadnode/api/client.py index 1a2f2c3f..534da289 100644 --- a/dreadnode/api/client.py +++ b/dreadnode/api/client.py @@ -555,7 +555,7 @@ def get_container_registry_credentials(self) -> ContainerRegistryCredentials: return ContainerRegistryCredentials(**response.json()) def get_platform_releases( - self, arch: str, tag: str, services: list[str], cli_version: str + self, tag: str, services: list[str], cli_version: str | None ) -> RegistryImageDetails: """ Resolves the platform releases for the current project. @@ -564,10 +564,29 @@ def get_platform_releases( The resolved platform releases as a ResolveReleasesResponse object. 
""" payload = { - "arch": arch, "tag": tag, "services": services, "cli_version": cli_version, } - response = self.request("POST", "/platform/get-releases", json_data=payload) + try: + response = self.request("POST", "/platform/get-releases", json_data=payload) + + except RuntimeError as e: + if "404" in str(e): + if "Image not found" in str(e): + raise RuntimeError("Image not found") from e + + raise RuntimeError( + f"Failed to get platform releases: {e}. The feature is likely disabled on this server" + ) from e + raise return RegistryImageDetails(**response.json()) + + def get_platform_templates(self, tag: str) -> bytes: + """ + Retrieves the available platform templates. + """ + params = {"tag": tag} + response = self.request("GET", "/platform/templates/all", params=params) + zip_content: bytes = response.content + return zip_content diff --git a/dreadnode/api/models.py b/dreadnode/api/models.py index 250b5fa7..4fef6956 100644 --- a/dreadnode/api/models.py +++ b/dreadnode/api/models.py @@ -54,15 +54,19 @@ class PlatformImage(BaseModel): service: str uri: str digest: str - version: str + tag: str @property def full_uri(self) -> str: return f"{self.uri}@{self.digest}" + @property + def registry(self) -> str: + return self.uri.split("/")[0] + class RegistryImageDetails(BaseModel): - version: str + tag: str images: list[PlatformImage] diff --git a/dreadnode/cli/platform/check_for_updates.py b/dreadnode/cli/platform/check_for_updates.py deleted file mode 100644 index 608ecbe6..00000000 --- a/dreadnode/cli/platform/check_for_updates.py +++ /dev/null @@ -1,45 +0,0 @@ -import rich - -from dreadnode.cli.api import create_api_client -from dreadnode.cli.platform.constants import SERVICES -from dreadnode.cli.platform.utils import get_local_arch, get_local_cache_dir, get_local_version - - -def check_for_updates() -> None: - import importlib.metadata # noqa: PLC0415 - - local_cache_dir = get_local_cache_dir() - rich.print(f"Checking local cache directory: {local_cache_dir}") - 
- if not local_cache_dir.exists(): - rich.print( - "Local cache directory does not exist. Please run \n[dim]$[/dim] [bold green]dreadnode platform init[/bold green]" - ) - return - - arch = get_local_arch() - api_client = create_api_client() - registry_image_details = api_client.get_platform_releases( - arch=arch, - tag="latest", - services=SERVICES, - cli_version=importlib.metadata.version("dreadnode"), - ) - - local_image_details = get_local_version() - - for image_detail in local_image_details.images: - for remote_image_detail in registry_image_details.images: - if image_detail.service == remote_image_detail.service: - if image_detail.version != remote_image_detail.version: - rich.print( - f"[yellow]Update available for {image_detail.service}: " - f"{image_detail.version} -> {remote_image_detail.version}[/yellow]" - ) - else: - rich.print( - f"[green]{image_detail.service} is up to date: {image_detail.version}[/green]" - ) - rich.print( - "[blue]You can update with:[/blue]\n[dim]$[/dim] [bold green]dreadnode platform update[/bold green]" - ) diff --git a/dreadnode/cli/platform/cli.py b/dreadnode/cli/platform/cli.py index bb43dd64..9d223a86 100644 --- a/dreadnode/cli/platform/cli.py +++ b/dreadnode/cli/platform/cli.py @@ -1,75 +1,50 @@ import cyclopts -from dreadnode.cli.platform.check_for_updates import check_for_updates as check_for_updates_ -from dreadnode.cli.platform.configure import configure_platform -from dreadnode.cli.platform.docker.download import download as download_platform -from dreadnode.cli.platform.docker.login import docker_login -from dreadnode.cli.platform.docker.start import start as start_platform -from dreadnode.cli.platform.docker.start import stop as stop_platform -from dreadnode.cli.platform.init import init as init_platform -from dreadnode.cli.platform.init import initialized as platform_initilized +from dreadnode.cli.platform.download import download_platform +from dreadnode.cli.platform.login import log_into_registries +from 
dreadnode.cli.platform.start import start_platform +from dreadnode.cli.platform.stop import stop_platform +from dreadnode.cli.platform.upgrade import upgrade_platform cli = cyclopts.App("platform", help="Run and manage the platform.", help_flags=[]) @cli.command() -def init(tag: str = "latest", arch: str | None = None) -> None: - """ - Initialize the platform. - """ - init_platform(tag=tag, arch=arch) - +def start(tag: str | None = None) -> None: + """Start the platform. Optionally, provide a tagged version to start. -@cli.command() -def download(tag: str = "latest", arch: str | None = None) -> None: - """ - Download the platform files. + Args: + tag: Optional image tag to use when starting the platform. """ - docker_login() + start_platform(tag=tag) - if not platform_initilized() or tag != "latest" or arch: - init_platform(tag=tag, arch=arch) - download_platform() +@cli.command(name=["stop", "down"]) +def stop() -> None: + """Stop the running platform.""" + stop_platform() @cli.command() -def configure() -> None: - """ - Configure the platform. - """ - configure_platform() +def download(tag: str) -> None: + """Download platform files for a specific tag. - -@cli.command() -def start() -> None: + Args: + tag: Image tag to download. """ - Start the platform services. - """ - start_platform() + download_platform(tag) @cli.command() -def stop() -> None: - """ - Stop the platform services. - """ - stop_platform() +def upgrade() -> None: + """Upgrade the platform to the latest version.""" + upgrade_platform() @cli.command() -def check_for_updates() -> None: - """ - Check for platform updates. - """ - check_for_updates_() - +def refresh_registry_auth() -> None: + """Refresh container registry credentials for platform access. -@cli.command() -def update() -> None: + Used for out of band Docker management. """ - Update the platform. 
- """ - stop_platform() - download_platform() - start_platform() + log_into_registries() diff --git a/dreadnode/cli/platform/configure.py b/dreadnode/cli/platform/configure.py deleted file mode 100644 index 2f2e32c1..00000000 --- a/dreadnode/cli/platform/configure.py +++ /dev/null @@ -1,9 +0,0 @@ -import rich - -from dreadnode.cli.platform.utils import get_local_cache_dir - - -def configure_platform() -> None: - rich.print(f"Configure the API by modifying {get_local_cache_dir()}/.api.env") - rich.print(f"Configure the UI by modifying {get_local_cache_dir()}/.ui.env") - rich.print("See https://docs.dreadnode.io/platform/manage for more details.") diff --git a/dreadnode/cli/platform/constants.py b/dreadnode/cli/platform/constants.py index 231e427e..8042b5c5 100644 --- a/dreadnode/cli/platform/constants.py +++ b/dreadnode/cli/platform/constants.py @@ -1,10 +1,9 @@ -from pathlib import Path +import typing as t API_SERVICE = "api" UI_SERVICE = "ui" SERVICES = [API_SERVICE, UI_SERVICE] +VERSIONS_MANIFEST = "versions.json" -TEMPLATE_DIR = Path(__file__).parent / "templates" -DOCKER_COMPOSE_TEMPLATE = TEMPLATE_DIR / "docker-compose.yaml.j2" -API_ENV_TEMPLATE = TEMPLATE_DIR / ".api.env.j2" -UI_ENV_TEMPLATE = TEMPLATE_DIR / ".ui.env.j2" +SupportedArchitecture = t.Literal["amd64", "arm64"] +SUPPORTED_ARCHITECTURES: list[SupportedArchitecture] = ["amd64", "arm64"] diff --git a/dreadnode/cli/platform/docker/__init__.py b/dreadnode/cli/platform/docker/__init__.py deleted file mode 100644 index c74d5b8f..00000000 --- a/dreadnode/cli/platform/docker/__init__.py +++ /dev/null @@ -1,77 +0,0 @@ -import subprocess -import sys - -import rich - -from dreadnode.cli.platform.utils import get_compose_file_path - - -def run_docker_compose_command( - args: list[str], - compose_file: str | None = None, - project_name: str | None = None, - timeout: int = 300, - command_name: str = "docker compose", - stdin_input: str | None = None, -) -> subprocess.CompletedProcess[str]: - """ - Execute a 
docker compose command with common error handling and configuration. - - Args: - args: Additional arguments for the docker compose command - compose_file: Path to docker-compose file (optional) - project_name: Docker compose project name (optional) - timeout: Command timeout in seconds - command_name: Name of the command for error messages - stdin_input: Input to pass to stdin (for commands like docker login) - - Returns: - CompletedProcess object with command results - - Raises: - subprocess.CalledProcessError: If command fails - subprocess.TimeoutExpired: If command times out - FileNotFoundError: If docker/docker-compose not found - """ - cmd = ["docker", "compose"] - - # Add compose file - compose_file = compose_file or get_compose_file_path() - cmd.extend(["-f", compose_file]) - - # Add project name if specified - if project_name: - cmd.extend(["-p", project_name]) - - # Add the specific command arguments - cmd.extend(args) - - try: - # Remove capture_output=True to allow real-time streaming - # stdout and stderr will go directly to the terminal - result = subprocess.run( # noqa: S603 - cmd, - check=True, - text=True, - timeout=timeout, - encoding="utf-8", - errors="replace", - input=stdin_input, - ) - - except subprocess.CalledProcessError as e: - rich.print(f"{command_name} failed with exit code {e.returncode}", file=sys.stderr) - raise - - except subprocess.TimeoutExpired: - rich.print(f"{command_name} timed out after {timeout} seconds", file=sys.stderr) - raise - - except FileNotFoundError: - rich.print( - "Docker or docker compose not found. 
Please ensure Docker is installed.", - file=sys.stderr, - ) - raise - - return result diff --git a/dreadnode/cli/platform/docker/download.py b/dreadnode/cli/platform/docker/download.py deleted file mode 100644 index dc977fcc..00000000 --- a/dreadnode/cli/platform/docker/download.py +++ /dev/null @@ -1,121 +0,0 @@ -import subprocess - -from dreadnode.cli.platform.docker import run_docker_compose_command - -# def download_platform( -# registry: str, username: str, password: str, image_name: str, tag: str -# ) -> Image: -# try: -# import docker # type: ignore[import-untyped,unused-ignore] -# except ImportError as e: -# raise ImportError( -# "Running a local platform requires `docker`. Install with: pip install dreadnode\\[platform]" -# ) from e - -# # Initialize Docker client -# client = docker.from_env() - -# # # Method 1: Login first, then pull -# # client.login( -# # username=username, -# # password=password, -# # registry=registry, -# # ) - -# # # Pull the private image -# # image = client.images.pull(f"{registry}/{image}:{tag}") - -# # Method 2: Pull with auth parameter -# return client.images.pull( -# f"{registry}/{image_name}:{tag}", -# auth_config={"username": username, "password": password}, -# ) - - -# def parse_compose_file(compose_file_path: str) -> dict[str, Any]: -# """Parse Docker Compose file with proper error handling.""" -# try: -# with Path(compose_file_path).open("r", encoding="utf-8") as f: -# compose_config = yaml.safe_load(f) - -# if not compose_config: -# raise ValueError("Empty or invalid compose file") - -# # Validate basic structure -# if not isinstance(compose_config, dict): -# raise TypeError("Compose file must contain a YAML mapping") - -# except yaml.YAMLError as e: -# raise ValueError(f"Invalid YAML syntax: {e}") from e -# except FileNotFoundError as e: -# raise FileNotFoundError(f"Compose file not found: {compose_file_path}") from e - -# return compose_config - - -# def pull_images_from_compose(compose_file_path: str) -> None: -# 
"""Pull all images defined in a Docker Compose file.""" - -# # Initialize Docker client -# try: -# import docker # type: ignore[import-untyped,unused-ignore] -# except ImportError as e: -# raise ImportError( -# "Running a local platform requires `docker`. Install with: pip install dreadnode\\[platform]" -# ) from e - -# # Initialize Docker client -# client = docker.from_env() - -# compose_config = parse_compose_file(compose_file_path) - -# # Handle different compose file versions -# services = compose_config.get("services", {}) - -# if not services: -# logger.error("No services found in compose file") -# return - -# for service_name, service_config in services.items(): -# if not isinstance(service_config, dict): -# logger.warning(f"⚠ Skipping invalid service config for '{service_name}'") -# continue - -# image = service_config.get("image") -# if image: -# try: -# logger.info(f"Pulling {image}...") -# client.images.pull(image) -# logger.success(f"✓ Pulled {image}") -# except DockerApiError as e: -# logger.error(f"✗ Failed to pull {image}: {e}") -# else: -# # Handle services with 'build' context instead of 'image' -# build_config = service_config.get("build") -# if build_config: -# logger.warning(f"⚠ Service '{service_name}' uses build context, skipping pull") -# else: -# logger.warning(f"⚠ Service '{service_name}' has no image or build config") - - -def download( - compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 -) -> subprocess.CompletedProcess[str]: - """ - Pull docker images for the platform. 
- - Args: - compose_file: Path to docker-compose file (optional) - project_name: Docker compose project name (optional) - timeout: Command timeout in seconds - - Returns: - CompletedProcess object with command results - - Raises: - subprocess.CalledProcessError: If command fails - subprocess.TimeoutExpired: If command times out - """ - return run_docker_compose_command( - ["--profile", "run", "pull"], compose_file, project_name, timeout, "Docker compose pull" - ) diff --git a/dreadnode/cli/platform/docker/login.py b/dreadnode/cli/platform/docker/login.py deleted file mode 100644 index 5b3d1473..00000000 --- a/dreadnode/cli/platform/docker/login.py +++ /dev/null @@ -1,22 +0,0 @@ -import subprocess -import sys - -import rich - -from dreadnode.cli.api import create_api_client - - -def docker_login(): - client = create_api_client() - container_registry_creds = client.get_container_registry_credentials() - - cmd = ["docker", "login", container_registry_creds.registry] - cmd.extend(["--username", container_registry_creds.username]) - cmd.extend(["--password-stdin"]) - - try: - subprocess.run(cmd, input=container_registry_creds.password, text=True, check=True) # noqa: S603 - rich.print(f"Logged in to Docker registry: {container_registry_creds.registry}") - except subprocess.CalledProcessError as e: - rich.print(f"Failed to log in to Docker registry: {e}", file=sys.stderr) - raise diff --git a/dreadnode/cli/platform/docker/start.py b/dreadnode/cli/platform/docker/start.py deleted file mode 100644 index d7f6c867..00000000 --- a/dreadnode/cli/platform/docker/start.py +++ /dev/null @@ -1,56 +0,0 @@ -import rich - -from dreadnode.cli.platform.docker import run_docker_compose_command - - -def _start_infra( - compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 -) -> None: - """Start infrastructure services.""" - run_docker_compose_command( - ["up", "-d"], compose_file, project_name, timeout, "Docker compose up (infra)" - ) - - -def 
_create_storage( - compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 -) -> None: - """Create S3 buckets.""" - run_docker_compose_command( - ["--profile", "create-s3-buckets", "up", "-d"], - compose_file, - project_name, - timeout, - "Docker compose up (storage)", - ) - - -def _start_services( - compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 -) -> None: - """Start application services.""" - run_docker_compose_command( - ["--profile", "run", "up", "-d"], - compose_file, - project_name, - timeout, - "Docker compose up (services)", - ) - - -def start( - compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 -) -> None: - """Start all platform services.""" - rich.print("Starting platform services...") - _start_infra(compose_file, project_name, timeout) - _create_storage(compose_file, project_name, timeout) - _start_services(compose_file, project_name, timeout) - - -def stop( - compose_file: str | None = None, project_name: str | None = None, timeout: int = 300 -) -> None: - """Stop platform services.""" - rich.print("Stopping platform services...") - run_docker_compose_command(["stop"], compose_file, project_name, timeout, "Docker compose stop") diff --git a/dreadnode/cli/platform/docker_.py b/dreadnode/cli/platform/docker_.py new file mode 100644 index 00000000..e0aa1e01 --- /dev/null +++ b/dreadnode/cli/platform/docker_.py @@ -0,0 +1,191 @@ +import json +import subprocess +import time +from pathlib import Path + +from dreadnode.cli.api import create_api_client +from dreadnode.cli.platform.utils.printing import print_error, print_info, print_success + + +def _run_docker_compose_command( + args: list[str], + compose_file: Path, + timeout: int = 300, + stdin_input: str | None = None, +) -> subprocess.CompletedProcess[str]: + """Execute a docker compose command with common error handling and configuration. 
+
+    Args:
+        args: Additional arguments for the docker compose command.
+        compose_file: Path to docker-compose file.
+        timeout: Command timeout in seconds.
+        stdin_input: Input to pass to stdin (for commands like docker login).
+            NOTE(review): some callers pass a command-name string here positionally.
+
+    Returns:
+        CompletedProcess object with command results.
+
+    Raises:
+        subprocess.CalledProcessError: If command fails.
+        subprocess.TimeoutExpired: If command times out.
+        FileNotFoundError: If docker/docker-compose not found.
+    """
+    cmd = ["docker", "compose"]
+
+    # Add compose file
+    cmd.extend(["-f", compose_file.as_posix()])
+
+    # Add the specific command arguments
+    cmd.extend(args)
+
+    cmd_str = " ".join(cmd)
+
+    try:
+        # Remove capture_output=True to allow real-time streaming
+        # stdout and stderr will go directly to the terminal
+        result = subprocess.run(  # noqa: S603
+            cmd,
+            check=True,
+            text=True,
+            timeout=timeout,
+            encoding="utf-8",
+            errors="replace",
+            input=stdin_input,
+        )
+
+    except subprocess.CalledProcessError as e:
+        print_error(f"{cmd_str} failed with exit code {e.returncode}")
+        raise
+
+    except subprocess.TimeoutExpired:
+        print_error(f"{cmd_str} timed out after {timeout} seconds")
+        raise
+
+    except FileNotFoundError:
+        print_error("Docker or docker compose not found. Please ensure Docker is installed.")
+        raise
+
+    return result
+
+
+def _check_docker_creds_exist(registry: str) -> bool:
+    """Check if Docker credentials exist for the specified registry.
+
+    Args:
+        registry: Registry hostname to check credentials for.
+
+    Returns:
+        bool: True if credentials exist, False otherwise.
+ """ + config_path = Path.home() / ".docker" / "config.json" + + if not config_path.exists(): + return False + + try: + with config_path.open() as f: + config = json.load(f) + + auths = config.get("auths", {}) + except (json.JSONDecodeError, KeyError): + return False + return registry in auths + + +def _are_docker_creds_fresh(registry: str, max_age_hours: int = 1) -> bool: + """Check if Docker credentials are fresh (recently updated). + + Args: + registry: Registry hostname to check credentials for. + max_age_hours: Maximum age in hours for credentials to be considered fresh. + + Returns: + bool: True if credentials are fresh, False otherwise. + """ + config_path = Path.home() / ".docker" / "config.json" + + if not config_path.exists(): + return False + + # Check file modification time + mtime = config_path.stat().st_mtime + age_hours = (time.time() - mtime) / 3600 + + return age_hours < max_age_hours and _check_docker_creds_exist(registry) + + +def docker_login(registry: str) -> None: + """Log into a Docker registry using API credentials. + + Args: + registry: Registry hostname to log into. + + Raises: + subprocess.CalledProcessError: If docker login command fails. + """ + # if _are_docker_creds_fresh(registry): + # rich.print(f"Docker credentials for {registry} are fresh. 
Skipping login.") + # return + + print_info(f"Logging in to Docker registry: {registry} ...") + client = create_api_client() + container_registry_creds = client.get_container_registry_credentials() + + cmd = ["docker", "login", container_registry_creds.registry] + cmd.extend(["--username", container_registry_creds.username]) + cmd.extend(["--password-stdin"]) + + try: + subprocess.run( # noqa: S603 + cmd, + input=container_registry_creds.password, + text=True, + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + print_success("Logged in to container registry ...") + except subprocess.CalledProcessError as e: + print_error(f"Failed to log in to container registry: {e}") + raise + + +def docker_run( + compose_file: Path, + timeout: int = 300, +) -> subprocess.CompletedProcess[str]: + """Run docker containers for the platform. + + Args: + compose_file: Path to docker-compose file. + timeout: Command timeout in seconds. + + Returns: + CompletedProcess object with command results. + + Raises: + subprocess.CalledProcessError: If command fails. + subprocess.TimeoutExpired: If command times out. + """ + + return _run_docker_compose_command(["up", "-d"], compose_file, timeout, "Docker compose up") + + +def docker_stop( + compose_file: Path, + timeout: int = 300, +) -> subprocess.CompletedProcess[str]: + """Stop docker containers for the platform. + + Args: + compose_file: Path to docker-compose file. + timeout: Command timeout in seconds. + + Returns: + CompletedProcess object with command results. + + Raises: + subprocess.CalledProcessError: If command fails. + subprocess.TimeoutExpired: If command times out. 
+ """ + return _run_docker_compose_command(["down"], compose_file, timeout, "Docker compose down") diff --git a/dreadnode/cli/platform/download.py b/dreadnode/cli/platform/download.py new file mode 100644 index 00000000..35b20e87 --- /dev/null +++ b/dreadnode/cli/platform/download.py @@ -0,0 +1,139 @@ +import io +import json +import zipfile + +from dreadnode.api.models import RegistryImageDetails +from dreadnode.cli.api import create_api_client +from dreadnode.cli.platform.constants import SERVICES, VERSIONS_MANIFEST +from dreadnode.cli.platform.schemas import LocalVersionSchema +from dreadnode.cli.platform.utils.env_merge import ( + create_default_env_files, +) +from dreadnode.cli.platform.utils.printing import ( + print_error, + print_info, + print_success, + print_warning, +) +from dreadnode.cli.platform.utils.versions import ( + confirm_with_context, + get_available_local_versions, + get_cli_version, + get_local_cache_dir, +) + + +def _resolve_latest(tag: str) -> str: + """Resolve 'latest' tag to actual version tag from API. + + Args: + tag: Version tag that contains 'latest'. + + Returns: + str: Resolved actual version tag. + """ + api_client = create_api_client() + release_info = api_client.get_platform_releases( + tag, services=SERVICES, cli_version=get_cli_version() + ) + return release_info.tag + + +def _create_local_version_file_structure( + tag: str, release_info: RegistryImageDetails +) -> LocalVersionSchema: + """Create local file structure and update manifest for a new version. + + Args: + tag: Version tag to create structure for. + release_info: Registry image details from API. + + Returns: + LocalVersionSchema: Created local version schema. 
+ """ + available_local_versions = get_available_local_versions() + + # Create a new local version schema + local_cache_dir = get_local_cache_dir() + new_version = LocalVersionSchema( + **release_info.model_dump(), + local_path=local_cache_dir / tag, + current=False, + ) + + # Add the new version to the available local versions + available_local_versions.versions.append(new_version) + + # sort the manifest by semver, newest first + available_local_versions.versions.sort(key=lambda v: v.tag, reverse=True) + + # update the manifest file + manifest_path = local_cache_dir / VERSIONS_MANIFEST + with manifest_path.open(encoding="utf-8", mode="w") as f: + json.dump(available_local_versions.model_dump(), f, indent=2) + + print_success(f"Updated versions manifest at {manifest_path} with {new_version.tag}") + + if new_version.local_path.exists(): + print_warning(f"Version {tag} already exists locally.") + if not confirm_with_context("overwrite it?"): + print_error("Aborting download.") + return new_version + + # create the directory + new_version.local_path.mkdir(parents=True, exist_ok=True) + + return new_version + + +def _download_version_files(tag: str) -> LocalVersionSchema: + """Download platform version files from API and extract locally. + + Args: + tag: Version tag to download. + + Returns: + LocalVersionSchema: Downloaded local version schema. 
+ """ + api_client = create_api_client() + release_info = api_client.get_platform_releases( + tag, services=SERVICES, cli_version=get_cli_version() + ) + zip_content = api_client.get_platform_templates(tag) + + new_local_version = _create_local_version_file_structure(release_info.tag, release_info) + + with zipfile.ZipFile(io.BytesIO(zip_content)) as zip_file: + zip_file.extractall(new_local_version.local_path) + print_success(f"Downloaded version {tag} to {new_local_version.local_path}") + + create_default_env_files(new_local_version) + return new_local_version + + +def download_platform(tag: str) -> LocalVersionSchema: + """Download platform version if not already available locally. + + Args: + tag: Version tag to download (supports 'latest'). + + Returns: + LocalVersionSchema: Local version schema for the downloaded/existing version. + """ + if "latest" in tag: + tag = _resolve_latest(tag) + + # get what's available + available_local_versions = get_available_local_versions() + + # if there are versions available + if available_local_versions.versions: + for available_local_version in available_local_versions.versions: + if tag == available_local_version.tag: + print_success( + f"Version {tag} is already downloaded at {available_local_version.local_path}" + ) + return available_local_version + + print_info(f"Version {tag} is not available locally. 
Will download it.") + return _download_version_files(tag) diff --git a/dreadnode/cli/platform/init.py b/dreadnode/cli/platform/init.py deleted file mode 100644 index 229eed8a..00000000 --- a/dreadnode/cli/platform/init.py +++ /dev/null @@ -1,125 +0,0 @@ -import json -from pathlib import Path - -import rich -from rich.prompt import Confirm - -from dreadnode.api.models import PlatformImage, RegistryImageDetails -from dreadnode.cli.api import create_api_client -from dreadnode.cli.platform.constants import ( - API_ENV_TEMPLATE, - API_SERVICE, - DOCKER_COMPOSE_TEMPLATE, - SERVICES, - UI_ENV_TEMPLATE, - UI_SERVICE, -) -from dreadnode.cli.platform.utils import ( - get_compose_file_path, - get_local_arch, - get_local_cache_dir, - render_with_string_replace, -) - - -def _write_version_manifest( - local_cache_dir: Path, resolution_response: RegistryImageDetails -) -> None: - rich.print(f"Writing version file for {resolution_response.version} ...") - version_file = local_cache_dir / ".version" - version_file.write_text(json.dumps(resolution_response.model_dump())) - rich.print(f"Version file written to {version_file}") - - -def _create_docker_compose_file(images: list[PlatformImage]) -> None: - rich.print("Updating Compose template ...") - for image in images: - if image.service == API_SERVICE: - api_image_digest = image.full_uri - elif image.service == UI_SERVICE: - ui_image_digest = image.full_uri - else: - raise ValueError(f"Unknown image service: {image.service}") - render_with_string_replace( - api_image_digest=api_image_digest, - ui_image_digest=ui_image_digest, - template_path=DOCKER_COMPOSE_TEMPLATE, - output_path=get_compose_file_path(), - ) - rich.print(f"Compose file written to {get_compose_file_path()}") - - -def _create_env_files(local_cache_dir: Path) -> None: - rich.print("Updating environment files ...") - - for env_file in [API_ENV_TEMPLATE, UI_ENV_TEMPLATE]: - dest = local_cache_dir / env_file.name - dest.write_text(env_file.read_text()) - 
rich.print(f"Environment file written to {dest}") - - # concatenate environment variables - api_env = local_cache_dir / API_ENV_TEMPLATE.name - ui_env = local_cache_dir / UI_ENV_TEMPLATE.name - dest = local_cache_dir / ".env" - dest.write_text(f"{api_env.read_text()}\n{ui_env.read_text()}") - rich.print(f"Combined environment file written to {dest}") - - -def _confirm_with_context(action: str, details: str | None = None) -> bool: - """Confirmation with additional context in a panel.""" - return Confirm.ask( - f"[bold red]Are you sure you want to {action}? {details}[/bold red]", default=False - ) - - -def init(tag: str, arch: str | None = None) -> None: - if initialized() and not _confirm_with_context( - "re-initialize the platform", "This will overwrite existing files." - ): - return - - import importlib.metadata # noqa: PLC0415 - - local_cache_dir = get_local_cache_dir() - rich.print(f"Using local cache directory: {local_cache_dir}") - - if not local_cache_dir.exists(): - local_cache_dir.mkdir(parents=True, exist_ok=True) - rich.print(f"Local cache directory created at {local_cache_dir}") - else: - rich.print("Local cache directory already exists.") - - if not arch: - arch = get_local_arch() - api_client = create_api_client() - registry_image_details = api_client.get_platform_releases( - arch=arch, - tag=tag, - services=SERVICES, - cli_version=importlib.metadata.version("dreadnode"), - ) - - _write_version_manifest(local_cache_dir, registry_image_details) - _create_docker_compose_file(registry_image_details.images) - _create_env_files(local_cache_dir) - - rich.print("Initialization complete.") - - -def initialized() -> bool: - rich.print("Checking initialization ...") - local_cache_dir = get_local_cache_dir() - if not local_cache_dir.exists(): - rich.print("Local cache directory does not exist.") - return False - - if not (local_cache_dir / "docker-compose.yaml").exists(): - rich.print("Docker Compose file is missing.") - return False - - if not (local_cache_dir / 
".env").exists(): - rich.print("Environment file is missing.") - return False - - rich.print("All required files are present.") - return True diff --git a/dreadnode/cli/platform/login.py b/dreadnode/cli/platform/login.py new file mode 100644 index 00000000..ab020199 --- /dev/null +++ b/dreadnode/cli/platform/login.py @@ -0,0 +1,20 @@ +from dreadnode.cli.platform.docker_ import docker_login +from dreadnode.cli.platform.utils.printing import print_info +from dreadnode.cli.platform.utils.versions import get_current_version + + +def log_into_registries() -> None: + """Log into all Docker registries for the current platform version. + + Iterates through all images in the current version and logs into their + respective registries. If no current version is set, displays an error message. + """ + current_version = get_current_version() + if not current_version: + print_info("There are no registries configured. Run `dreadnode platform start` to start.") + return + registries_attempted = set() + for image in current_version.images: + if image.registry not in registries_attempted: + docker_login(image.registry) + registries_attempted.add(image.registry) diff --git a/dreadnode/cli/platform/schemas.py b/dreadnode/cli/platform/schemas.py new file mode 100644 index 00000000..5029452f --- /dev/null +++ b/dreadnode/cli/platform/schemas.py @@ -0,0 +1,83 @@ +from pathlib import Path + +from pydantic import BaseModel, field_serializer + +from dreadnode.api.models import RegistryImageDetails +from dreadnode.cli.platform.constants import API_SERVICE, UI_SERVICE + + +class LocalVersionSchema(RegistryImageDetails): + local_path: Path + current: bool + + @field_serializer("local_path") + def serialize_path(self, path: Path) -> str: + """Serialize Path object to absolute path string. + + Args: + path: Path object to serialize. + + Returns: + str: Absolute path as string. 
+ """ + return str(path.resolve()) # Convert to absolute path string + + @property + def compose_file(self) -> Path: + return self.local_path / "docker-compose.yaml" + + @property + def api_env_file(self) -> Path: + return self.local_path / f".{API_SERVICE}.env" + + @property + def api_example_env_file(self) -> Path: + return self.local_path / f".{API_SERVICE}.example.env" + + @property + def ui_env_file(self) -> Path: + return self.local_path / f".{UI_SERVICE}.env" + + @property + def ui_example_env_file(self) -> Path: + return self.local_path / f".{UI_SERVICE}.example.env" + + def get_env_path_by_service(self, service: str) -> Path: + """Get environment file path for a specific service. + + Args: + service: Service name to get env path for. + + Returns: + Path: Path to the service's environment file. + + Raises: + ValueError: If service is not recognized. + """ + if service == API_SERVICE: + return self.api_env_file + if service == UI_SERVICE: + return self.ui_env_file + raise ValueError(f"Unknown service: {service}") + + def get_example_env_path_by_service(self, service: str) -> Path: + """Get example environment file path for a specific service. + + Args: + service: Service name to get example env path for. + + Returns: + Path: Path to the service's example environment file. + + Raises: + ValueError: If service is not recognized. 
+ """ + if service == API_SERVICE: + return self.api_example_env_file + if service == UI_SERVICE: + return self.ui_example_env_file + raise ValueError(f"Unknown service: {service}") + + +class LocalVersionsSchema(BaseModel): + versions: list[LocalVersionSchema] diff --git a/dreadnode/cli/platform/start.py b/dreadnode/cli/platform/start.py new file mode 100644 index 00000000..a25219fd --- /dev/null +++ b/dreadnode/cli/platform/start.py @@ -0,0 +1,35 @@ +from dreadnode.cli.platform.docker_ import docker_login, docker_run +from dreadnode.cli.platform.download import download_platform +from dreadnode.cli.platform.utils.printing import print_info +from dreadnode.cli.platform.utils.versions import ( + create_local_latest_tag, + get_current_version, + mark_current_version, +) + + +def start_platform(tag: str | None = None) -> None: + """Start the platform with the specified or current version. + + Args: + tag: Optional image tag to use. If not provided, uses the current + version or downloads the latest available version. 
+ """ + if tag: + selected_version = download_platform(tag) + mark_current_version(selected_version) + elif current_version := get_current_version(): + selected_version = current_version + # no need to mark + else: + latest_tag = create_local_latest_tag() + selected_version = download_platform(latest_tag) + mark_current_version(selected_version) + + registries_attempted = set() + for image in selected_version.images: + if image.registry not in registries_attempted: + docker_login(image.registry) + registries_attempted.add(image.registry) + print_info(f"Starting platform: {selected_version.tag}") + docker_run(selected_version.compose_file) diff --git a/dreadnode/cli/platform/stop.py b/dreadnode/cli/platform/stop.py new file mode 100644 index 00000000..2022a9d9 --- /dev/null +++ b/dreadnode/cli/platform/stop.py @@ -0,0 +1,18 @@ +from dreadnode.cli.platform.docker_ import docker_stop +from dreadnode.cli.platform.utils.printing import print_error +from dreadnode.cli.platform.utils.versions import ( + get_current_version, +) + + +def stop_platform() -> None: + """Stop the currently running platform. + + Uses the current version's compose file to stop all platform containers + via docker compose down. + """ + current_version = get_current_version() + if not current_version: + print_error("No current version found. 
Nothing to stop.") + return + docker_stop(current_version.compose_file) diff --git a/dreadnode/cli/platform/templates/.ui.env.j2 b/dreadnode/cli/platform/templates/.ui.env.j2 deleted file mode 100644 index e69de29b..00000000 diff --git a/dreadnode/cli/platform/templates/docker-compose.yaml.j2 b/dreadnode/cli/platform/templates/docker-compose.yaml.j2 deleted file mode 100644 index e69de29b..00000000 diff --git a/dreadnode/cli/platform/upgrade.py b/dreadnode/cli/platform/upgrade.py new file mode 100644 index 00000000..c77b621f --- /dev/null +++ b/dreadnode/cli/platform/upgrade.py @@ -0,0 +1,81 @@ +from dreadnode.cli.platform.constants import SERVICES +from dreadnode.cli.platform.docker_ import docker_stop +from dreadnode.cli.platform.download import download_platform +from dreadnode.cli.platform.start import start_platform +from dreadnode.cli.platform.utils.env_merge import ( + merge_env_files_content, +) +from dreadnode.cli.platform.utils.printing import print_error, print_info +from dreadnode.cli.platform.utils.versions import ( + confirm_with_context, + create_local_latest_tag, + get_current_version, + get_semver_from_tag, + mark_current_version, + newer_remote_version, +) + + +def upgrade_platform() -> None: + """Upgrade the platform to the latest available version. + + Downloads the latest version, compares it with the current version, + and performs the upgrade if a newer version is available. Optionally + merges configuration files from the current version to the new version. + Stops the current platform and starts the upgraded version. + """ + latest_tag = create_local_latest_tag() + + latest_version = download_platform(latest_tag) + current_local_version = get_current_version() + + if not current_local_version: + print_error( + "No current platform version found. Run `dreadnode platform start` to start the latest version." 
+ ) + return + + current_semver = get_semver_from_tag(current_local_version.tag) + remote_semver = get_semver_from_tag(latest_version.tag) + + if not newer_remote_version(current_semver, remote_semver): + print_info(f"You are using the latest ({current_semver}) version of the platform.") + return + + if not confirm_with_context( + f"Are you sure you want to upgrade from {current_local_version.tag} to {latest_version.tag}?" + ): + print_error("Aborting upgrade.") + return + + if confirm_with_context( + f"Would you like to attempt to merge configuration files from {current_local_version.tag} to {latest_version.tag}?" + ): + for service in SERVICES: + original_env_file = current_local_version.get_example_env_path_by_service(service) + with original_env_file.open() as f: + original_env_content = f.read() + current_env_file = current_local_version.get_env_path_by_service(service) + with current_env_file.open() as f: + current_env_content = f.read() + new_env_file = latest_version.get_env_path_by_service(service) + with new_env_file.open() as f: + new_env_content = f.read() + merged_env_content = merge_env_files_content( + original_env_content, current_env_content, new_env_content + ) + with new_env_file.open("w") as f: + f.write(merged_env_content) + print_info(f" - Merged .env file for {service}: {merged_env_content}") + + print_info(".env files merged.") + + else: + print_info("Skipping .env file merge.") + + print_info(f"Stopping current platform version {current_local_version.tag}...") + docker_stop(current_local_version.compose_file) + print_info(f"Current platform version {current_local_version.tag} stopped.") + + mark_current_version(latest_version) + start_platform() diff --git a/dreadnode/cli/platform/utils.py b/dreadnode/cli/platform/utils.py deleted file mode 100644 index b6ff4c52..00000000 --- a/dreadnode/cli/platform/utils.py +++ /dev/null @@ -1,60 +0,0 @@ -import json -import platform -import typing as t -from pathlib import Path - -from 
dreadnode.api.models import RegistryImageDetails - -archs = t.Literal["amd64", "arm64"] - - -def get_local_arch() -> archs: - arch = platform.machine() - - # Check for specific architectures - if arch in ["x86_64", "AMD64"]: - return "amd64" - if arch in ["arm64", "aarch64", "ARM64"]: - return "arm64" - raise ValueError(f"Unsupported architecture: {arch}") - - -def get_local_cache_dir() -> Path: - return Path.home() / ".dreadnode" / "platform" - - -def get_local_version() -> RegistryImageDetails | None: - local_cache_dir = get_local_cache_dir() - version_file = local_cache_dir / ".version" - if version_file.exists(): - return RegistryImageDetails(**json.loads(version_file.read_text())) - return None - - -def get_compose_file_path() -> Path: - return get_local_cache_dir() / "docker-compose.yaml" - - -def render_with_string_replace( - api_image_digest: str, - ui_image_digest: str, - template_path: str, - output_path: str, -) -> str: - """ - Simple string replacement - lightest option. - Works for basic {{ variable }} patterns. 
- """ - - with Path(template_path).open() as file: - content = file.read() - - rendered = content.replace("{{ api_image_digest }}", api_image_digest).replace( - "{{ ui_image_digest }}", ui_image_digest - ) - - if output_path: - with Path(output_path).open("w") as file: - file.write(rendered) - - return rendered diff --git a/dreadnode/cli/platform/templates/.api.env.j2 b/dreadnode/cli/platform/utils/__init__.py similarity index 100% rename from dreadnode/cli/platform/templates/.api.env.j2 rename to dreadnode/cli/platform/utils/__init__.py diff --git a/dreadnode/cli/platform/utils/env_merge.py b/dreadnode/cli/platform/utils/env_merge.py new file mode 100644 index 00000000..cfc3c415 --- /dev/null +++ b/dreadnode/cli/platform/utils/env_merge.py @@ -0,0 +1,384 @@ +import typing as t + +from dreadnode.cli.platform.constants import ( + SERVICES, +) +from dreadnode.cli.platform.schemas import LocalVersionSchema +from dreadnode.cli.platform.utils.printing import print_error, print_info + +LineTypes = t.Literal["variable", "comment", "empty"] + + +class _EnvLine(t.NamedTuple): + """Represents a line in an .env file with its type and content.""" + + line_type: LineTypes + key: str | None = None + value: str = "" + original_line: str = "" + + +def _parse_env_lines(content: str) -> list[_EnvLine]: + """ + Parse .env file content into structured lines preserving all formatting. 
+ + Args: + content (str): The content of the .env file + + Returns: + List[EnvLine]: List of parsed lines with their types + """ + lines = [] + + for line in content.split("\n"): + stripped = line.strip() + + if not stripped: + # Empty line + lines.append(_EnvLine("empty", original_line=line)) + elif stripped.startswith("#"): + # Comment line + lines.append(_EnvLine("comment", original_line=line)) + elif "=" in stripped: + # Variable line + key, value = stripped.split("=", 1) + lines.append(_EnvLine("variable", key.strip(), value.strip(), line)) + else: + # Treat as comment/invalid line to preserve it + lines.append(_EnvLine("comment", original_line=line)) + + return lines + + +def _extract_variables(lines: list[_EnvLine]) -> dict[str, str]: + """Extract just the variables from parsed lines. + + Args: + lines: List of parsed environment file lines. + + Returns: + dict[str, str]: Dictionary mapping variable names to their values. + """ + return { + line.key: line.value + for line in lines + if line.line_type == "variable" and line.key is not None + } + + +def _merge_env_files( + original_remote_content: str, + current_local_content: str, + updated_remote_content: str, +) -> dict[str, str]: + """ + Merge .env files with the following logic: + 1. Local changes (updates/additions) take precedence over remote defaults + 2. Remote removals remove the key from local (unless locally modified) + 3. Remote additions are added to local + 4. 
Local additions are preserved + + Args: + original_remote_content (str): Original remote .env content (baseline) + current_local_content (str): Current local .env content (with local changes) + updated_remote_content (str): Updated remote .env content (new remote state) + + Returns: + Dict[str, str]: Merged variables dictionary + """ + # Extract variables from each file + original_remote = _extract_variables(_parse_env_lines(original_remote_content)) + current_local = _extract_variables(_parse_env_lines(current_local_content)) + updated_remote = _extract_variables(_parse_env_lines(updated_remote_content)) + + # Result dictionary to build the merged content + merged = {} + + # Step 1: Start with current local content (preserves local changes and additions) + merged.update(current_local) + + # Step 2: Add new keys from updated remote (remote additions) + merged.update( + { + key: value + for key, value in updated_remote.items() + if key not in original_remote + and key not in current_local # New remote addition not already locally added + } + ) + + # Step 3: Handle remote removals + for key in original_remote: + # Only remove if the key was removed in remote and the local value matches the original remote value + if ( + key not in updated_remote + and key in current_local + and current_local[key] == original_remote[key] + ): + merged.pop(key, None) + + # Step 4: Update values for keys that exist in both updated remote and weren't locally modified + merged.update( + { + key: remote_value + for key, remote_value in updated_remote.items() + if ( + key in original_remote + and key in current_local + and current_local[key] == original_remote[key] + ) + } + ) + + return merged + + +def _find_insertion_points( + base_lines: list[_EnvLine], remote_lines: list[_EnvLine], new_vars: dict[str, str] +) -> dict[str, int]: + """Find the best insertion points for new variables based on remote file structure. + + Args: + base_lines: Lines from local file. 
+ remote_lines: Lines from remote file. + new_vars: New variables to place. + + Returns: + dict[str, int]: Dict mapping variable names to insertion indices in base_lines. + """ + insertion_points = {} + + # Build a map of variable positions in the remote file + remote_var_positions = {} + remote_var_context = {} + + for i, line in enumerate(remote_lines): + if line.line_type == "variable": + remote_var_positions[line.key] = i + # Capture context (preceding comment/section) + context_lines: list[str] = [] + j = i - 1 + while j >= 0 and remote_lines[j].line_type in ["comment", "empty"]: + if remote_lines[j].line_type == "comment": + context_lines.insert(0, remote_lines[j].original_line) + break # Stop at first comment (section header) + j -= 1 + remote_var_context[line.key] = context_lines + + # Build a map of variable positions in the local file + local_var_positions = {} + for i, line in enumerate(base_lines): + if line.line_type == "variable": + local_var_positions[line.key] = i + + # For each new variable, find the best insertion point + for new_var in new_vars: + if new_var not in remote_var_positions: + # Variable not in remote, place at end + insertion_points[new_var] = len(base_lines) + continue + + remote_pos = remote_var_positions[new_var] + + # Find variables that appear before this one in the remote file + preceding_vars = [ + var + for var, pos in remote_var_positions.items() + if pos < remote_pos and var in local_var_positions + ] + + # Find variables that appear after this one in the remote file + following_vars = [ + var + for var, pos in remote_var_positions.items() + if pos > remote_pos and var in local_var_positions + ] + + if preceding_vars: + # Place after the last preceding variable that exists locally + last_preceding = max(preceding_vars, key=lambda v: local_var_positions[v]) + insertion_points[new_var] = local_var_positions[last_preceding] + 1 + elif following_vars: + # Place before the first following variable that exists locally + 
first_following = min(following_vars, key=lambda v: local_var_positions[v]) + insertion_points[new_var] = local_var_positions[first_following] + else: + # No context, place at end + insertion_points[new_var] = len(base_lines) + + return insertion_points + + +def _reconstruct_env_content( # noqa: PLR0912 + base_lines: list[_EnvLine], merged_vars: dict[str, str], updated_remote_lines: list[_EnvLine] +) -> str: + """Reconstruct .env content preserving structure from base while applying merged variables. + + Args: + base_lines: Parsed lines from the local file (for structure). + merged_vars: Dictionary of merged variables. + updated_remote_lines: Parsed lines from updated remote (for new additions). + + Returns: + str: Reconstructed .env content. + """ + result_lines: list[str] = [] + processed_keys = set() + + # Identify new variables that need to be inserted + existing_keys = {line.key for line in base_lines if line.line_type == "variable"} + new_vars = {k: v for k, v in merged_vars.items() if k not in existing_keys} + + # Find optimal insertion points for new variables + insertion_points = _find_insertion_points(base_lines, updated_remote_lines, new_vars) + + # Group new variables by insertion point + vars_by_insertion: dict[int, list[str]] = {} + for var, insertion_idx in insertion_points.items(): + if insertion_idx not in vars_by_insertion: + vars_by_insertion[insertion_idx] = [] + vars_by_insertion[insertion_idx].append(var) + + # Process base structure, inserting new variables at appropriate points + for i, line in enumerate(base_lines): + # Insert new variables that belong before this line + if i in vars_by_insertion: + # Add context comments if this is a new section + added_section_break = False + for var in vars_by_insertion[i]: + # Check if we need a section break (empty line before new variables) + if not added_section_break and result_lines and result_lines[-1].strip(): + # Look for context from remote file + remote_context = None + for remote_line in 
updated_remote_lines: + if remote_line.line_type == "variable" and remote_line.key == var: + # Find preceding comment in remote file + remote_idx = updated_remote_lines.index(remote_line) + for j in range(remote_idx - 1, -1, -1): + if updated_remote_lines[j].line_type == "comment": + remote_context = updated_remote_lines[j].original_line + break + if updated_remote_lines[j].line_type == "variable": + break + break + + # Add section break with context comment if available + if remote_context: + result_lines.append("") # Empty line + result_lines.append(remote_context) # Section comment + elif i > 0 and base_lines[i - 1].line_type == "variable": + result_lines.append("") # Just empty line for separation + + added_section_break = True + + # Add the new variable + result_lines.append(f"{var}={new_vars[var]}") + processed_keys.add(var) + + # Process the current line + if line.line_type == "variable": + if line.key in merged_vars: + # Keep the variable, potentially with updated value + new_value = merged_vars[line.key] + if line.value == new_value: + # Value unchanged, keep original formatting + result_lines.append(line.original_line) + else: + # Value changed, reconstruct line maintaining original key formatting + original_key_part = line.original_line.split("=")[0] + result_lines.append(f"{original_key_part}={new_value}") + processed_keys.add(line.key) + # If key not in merged_vars, it was removed, so skip it + else: + # Preserve comments and empty lines + result_lines.append(line.original_line) + + # Handle any remaining new variables (those that should go at the very end) + end_insertion_idx = len(base_lines) + if end_insertion_idx in vars_by_insertion: + if result_lines and result_lines[-1].strip(): # Add separator if needed + result_lines.append("") + result_lines.extend( + [ + f"{var}={new_vars[var]}" + for var in vars_by_insertion[end_insertion_idx] + if var not in processed_keys + ] + ) + + # Join lines + return "\n".join(result_lines) + + +def 
merge_env_files_content( + original_remote_content: str, current_local_content: str, updated_remote_content: str +) -> str: + """Main function to merge .env file contents preserving formatting and structure. + + Args: + original_remote_content: Original remote .env content. + current_local_content: Current local .env content. + updated_remote_content: Updated remote .env content. + + Returns: + str: Merged .env file content with preserved formatting. + """ + # Get the merged variables using the original logic + merged_vars = _merge_env_files( + original_remote_content, current_local_content, updated_remote_content + ) + + # Parse the local file structure to preserve its formatting + local_lines = _parse_env_lines(current_local_content) + updated_remote_lines = _parse_env_lines(updated_remote_content) + + # Reconstruct content preserving local structure but with merged variables + return _reconstruct_env_content(local_lines, merged_vars, updated_remote_lines) + + +def create_default_env_files(current_version: LocalVersionSchema) -> None: + """Create default environment files for all services in the current version. + + Copies sample environment files to actual environment files if they don't exist, + and creates a combined .env file from API and UI environment files. + + Args: + current_version: The current local version schema containing service information. + + Raises: + RuntimeError: If sample environment files are not found or .env file creation fails. 
+ """ + for service in SERVICES: + for image in current_version.images: + if image.service == service: + env_file_path = current_version.get_env_path_by_service(service) + if not env_file_path.exists(): + # copy the sample + sample_env_file_path = current_version.get_example_env_path_by_service(service) + if sample_env_file_path.exists(): + print_info(f"Copying {sample_env_file_path} to {env_file_path}...") + env_file_path.write_text(sample_env_file_path.read_text()) + else: + print_error( + f"Sample environment file for {service} not found at {sample_env_file_path}." + ) + raise RuntimeError( + f"Sample environment file for {service} not found. Cannot configure {service}." + ) + # concatenate .api.env and .ui.env into .env if it doesn't already exist + env_file = current_version.local_path / ".env" + if env_file.exists(): + return + + api_env_file = current_version.api_env_file + ui_env_file = current_version.ui_env_file + + if api_env_file.exists() and ui_env_file.exists(): + print_info(f"Concatenating {api_env_file} and {ui_env_file} into {env_file}...") + with env_file.open("w") as outfile: + for fname in (api_env_file, ui_env_file): + with fname.open() as infile: + outfile.write(infile.read()) + else: + print_error(f"One or both environment files not found: {api_env_file}, {ui_env_file}.") + raise RuntimeError("Failed to create .env file.") diff --git a/dreadnode/cli/platform/utils/printing.py b/dreadnode/cli/platform/utils/printing.py new file mode 100644 index 00000000..f5ae5c0a --- /dev/null +++ b/dreadnode/cli/platform/utils/printing.py @@ -0,0 +1,43 @@ +import sys + +import rich + + +def print_success(message: str, prefix: str | None = None): + """Print success message in green""" + prefix = prefix or "✓" + rich.print(f"[bold green]{prefix}[/] [green]{message}[/]") + + +def print_error(message: str, prefix: str | None = None): + """Print error message in red""" + prefix = prefix or "✗" + rich.print(f"[bold red]{prefix}[/] [red]{message}[/]", 
file=sys.stderr) + + +def print_warning(message: str, prefix: str | None = None): + """Print warning message in yellow""" + prefix = prefix or "⚠" + rich.print(f"[bold yellow]{prefix}[/] [yellow]{message}[/]") + + +def print_info(message: str, prefix: str | None = None): + """Print info message in blue""" + prefix = prefix or "i" + rich.print(f"[bold blue]{prefix}[/] [blue]{message}[/]") + + +def print_debug(message: str, prefix: str | None = None): + """Print debug message in dim gray""" + prefix = prefix or "🐛" + rich.print(f"[dim]{prefix}[/] [dim]{message}[/]") + + +def print_heading(message: str): + """Print section heading""" + rich.print(f"\n[bold underline]{message}[/]\n") + + +def print_muted(message: str): + """Print muted text""" + rich.print(f"[dim]{message}[/]") diff --git a/dreadnode/cli/platform/utils/versions.py b/dreadnode/cli/platform/utils/versions.py new file mode 100644 index 00000000..eec95427 --- /dev/null +++ b/dreadnode/cli/platform/utils/versions.py @@ -0,0 +1,164 @@ +import importlib.metadata +import json +import platform +from pathlib import Path + +from packaging.version import Version +from rich.prompt import Confirm + +from dreadnode.cli.platform.constants import ( + SUPPORTED_ARCHITECTURES, + VERSIONS_MANIFEST, + SupportedArchitecture, +) +from dreadnode.cli.platform.schemas import LocalVersionSchema, LocalVersionsSchema + + +def _get_local_arch() -> SupportedArchitecture: + """Get the local machine architecture in supported format. + + Returns: + SupportedArchitecture: The architecture as either "amd64" or "arm64". + + Raises: + ValueError: If the local architecture is not supported. + """ + arch = platform.machine() + + if arch in ["x86_64", "AMD64"]: + return "amd64" + if arch in ["arm64", "aarch64", "ARM64"]: + return "arm64" + raise ValueError(f"Unsupported architecture: {arch}") + + +def get_local_cache_dir() -> Path: + """Get the local cache directory path for dreadnode platform files. 
+
+    Returns:
+        Path: Path to the local cache directory (~/.dreadnode/platform).
+    """
+    return Path.home() / ".dreadnode" / "platform"
+
+
+def get_cli_version() -> str:
+    """Get the version of the dreadnode CLI package.
+
+    Returns:
+        str: The version string of the installed dreadnode package.
+    """
+    return importlib.metadata.version("dreadnode")
+
+
+def confirm_with_context(action: str) -> bool:
+    """Prompt the user for confirmation with a formatted action message.
+
+    Args:
+        action: The action description to display in the confirmation prompt.
+
+    Returns:
+        bool: True if the user confirms, False otherwise. Defaults to False.
+    """
+    return Confirm.ask(f"[bold blue]{action}[/bold blue]", default=False)
+
+
+def get_available_local_versions() -> LocalVersionsSchema:
+    """Get all available local platform versions from the manifest file.
+
+    Creates the manifest file with an empty schema if it doesn't exist.
+
+    Returns:
+        LocalVersionsSchema: Schema containing all available local platform versions.
+    """
+    try:
+        local_cache_dir = get_local_cache_dir()
+        manifest_path = local_cache_dir / VERSIONS_MANIFEST
+        with manifest_path.open(encoding="utf-8") as f:
+            versions_manifest_data = json.load(f)
+        return LocalVersionsSchema(**versions_manifest_data)
+    except FileNotFoundError:
+        # create the file
+        local_cache_dir = get_local_cache_dir()
+        manifest_path = local_cache_dir / VERSIONS_MANIFEST
+        manifest_path.parent.mkdir(parents=True, exist_ok=True)
+        blank_schema = LocalVersionsSchema(versions=[])
+        with manifest_path.open(encoding="utf-8", mode="w") as f:
+            json.dump(blank_schema.model_dump(), f)
+        return blank_schema
+
+
+def get_current_version() -> LocalVersionSchema | None:
+    """Get the currently active local platform version.
+
+    Returns:
+        LocalVersionSchema | None: The current version schema if one is marked as current,
+            None otherwise.
+ """ + available_local_versions = get_available_local_versions() + if not available_local_versions.versions: + return None + for version in available_local_versions.versions: + if version.current: + return version + return None + + +def mark_current_version(current_version: LocalVersionSchema) -> None: + """Mark a specific version as the current active version. + + Updates the versions manifest to mark the specified version as current + and all others as not current. + + Args: + current_version: The version to mark as current. + """ + available_local_versions = get_available_local_versions() + for available_version in available_local_versions.versions: + if available_version.tag == current_version.tag: + available_version.current = True + else: + available_version.current = False + + local_cache_dir = get_local_cache_dir() + manifest_path = local_cache_dir / VERSIONS_MANIFEST + with manifest_path.open(encoding="utf-8", mode="w") as f: + json.dump(available_local_versions.model_dump(), f, indent=2) + + +def create_local_latest_tag() -> str: + """Create a latest tag string for the local architecture. + + Returns: + str: A tag in the format "latest-{arch}" where arch is the local architecture. + """ + arch = _get_local_arch() + return f"latest-{arch}" + + +def get_semver_from_tag(tag: str) -> str: + """Extract semantic version from a tag by removing architecture suffix. + + Args: + tag: The tag string that may contain an architecture suffix. + + Returns: + str: The tag with any supported architecture suffix removed. + """ + for arch in SUPPORTED_ARCHITECTURES: + if arch in tag: + return tag.replace(f"-{arch}", "") + return tag + + +def newer_remote_version(local_version: str, remote_version: str) -> bool: + """Check if the remote version is newer than the local version. + + Args: + local_version: The local version string in semantic version format. + remote_version: The remote version string in semantic version format. 
+ + Returns: + bool: True if the remote version is newer than the local version, False otherwise. + """ + # compare the semvers of two versions to see if the remote is "newer" + return Version(remote_version) > Version(local_version) From 7a0d75f975071a147126ea376ae169663f60fefe Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Tue, 2 Sep 2025 19:13:06 -0500 Subject: [PATCH 06/11] feat: improved logging and user messaging --- dreadnode/api/client.py | 3 +++ dreadnode/cli/platform/docker_.py | 42 ++++++++++++++++++++++++++++--- dreadnode/cli/platform/start.py | 12 +++++++-- 3 files changed, 52 insertions(+), 5 deletions(-) diff --git a/dreadnode/api/client.py b/dreadnode/api/client.py index 534da289..ea497416 100644 --- a/dreadnode/api/client.py +++ b/dreadnode/api/client.py @@ -572,6 +572,9 @@ def get_platform_releases( response = self.request("POST", "/platform/get-releases", json_data=payload) except RuntimeError as e: + if "403" in str(e): + raise RuntimeError("You do not have access to platform releases.") from e + if "404" in str(e): if "Image not found" in str(e): raise RuntimeError("Image not found") from e diff --git a/dreadnode/cli/platform/docker_.py b/dreadnode/cli/platform/docker_.py index e0aa1e01..e2ab1540 100644 --- a/dreadnode/cli/platform/docker_.py +++ b/dreadnode/cli/platform/docker_.py @@ -68,6 +68,42 @@ def _run_docker_compose_command( return result +def get_origin(ui_container: str) -> str | None: + """ + Get the ORIGIN environment variable from the UI container and return + a friendly message for the user. + + Args: + ui_container: Name of the UI container (default: dreadnode-ui). + + Returns: + str | None: Message with the origin URL, or None if not found. 
+ """ + try: + cmd = [ + "docker", + "inspect", + "-f", + "{{range .Config.Env}}{{println .}}{{end}}", + ui_container, + ] + cp = subprocess.run( # noqa: S603 + cmd, + check=True, + text=True, + capture_output=True, + ) + + for line in cp.stdout.splitlines(): + if line.startswith("ORIGIN="): + return line.split("=", 1)[1] + + except subprocess.CalledProcessError: + return None + + return None + + def _check_docker_creds_exist(registry: str) -> bool: """Check if Docker credentials exist for the specified registry. @@ -123,9 +159,9 @@ def docker_login(registry: str) -> None: Raises: subprocess.CalledProcessError: If docker login command fails. """ - # if _are_docker_creds_fresh(registry): - # rich.print(f"Docker credentials for {registry} are fresh. Skipping login.") - # return + if _are_docker_creds_fresh(registry): + print_info(f"Docker credentials for {registry} are fresh. Skipping login.") + return print_info(f"Logging in to Docker registry: {registry} ...") client = create_api_client() diff --git a/dreadnode/cli/platform/start.py b/dreadnode/cli/platform/start.py index a25219fd..4891a288 100644 --- a/dreadnode/cli/platform/start.py +++ b/dreadnode/cli/platform/start.py @@ -1,6 +1,6 @@ -from dreadnode.cli.platform.docker_ import docker_login, docker_run +from dreadnode.cli.platform.docker_ import docker_login, docker_run, get_origin from dreadnode.cli.platform.download import download_platform -from dreadnode.cli.platform.utils.printing import print_info +from dreadnode.cli.platform.utils.printing import print_info, print_success from dreadnode.cli.platform.utils.versions import ( create_local_latest_tag, get_current_version, @@ -33,3 +33,11 @@ def start_platform(tag: str | None = None) -> None: registries_attempted.add(image.registry) print_info(f"Starting platform: {selected_version.tag}") docker_run(selected_version.compose_file) + print_success(f"Platform {selected_version.tag} started successfully.") + origin = get_origin("dreadnode-ui") + if origin: + 
print_info("You can access the app at the following URLs:") + print_info(f" - {origin}") + else: + print_info(" - Unable to determine the app URL.") + print_info("Please check the container logs for more information.") From 55037d6821120b809ad1fdd400012c6d38ae9802 Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Tue, 2 Sep 2025 23:50:07 -0500 Subject: [PATCH 07/11] feat: configuration, better env file handling --- dreadnode/cli/platform/cli.py | 11 +++ dreadnode/cli/platform/configure.py | 21 ++++++ dreadnode/cli/platform/docker_.py | 39 ++++++++++ dreadnode/cli/platform/download.py | 2 +- dreadnode/cli/platform/start.py | 15 +++- dreadnode/cli/platform/stop.py | 5 +- dreadnode/cli/platform/upgrade.py | 2 +- .../utils/{env_merge.py => env_mgmt.py} | 72 ++++++++++++++++++- 8 files changed, 159 insertions(+), 8 deletions(-) create mode 100644 dreadnode/cli/platform/configure.py rename dreadnode/cli/platform/utils/{env_merge.py => env_mgmt.py} (88%) diff --git a/dreadnode/cli/platform/cli.py b/dreadnode/cli/platform/cli.py index 9d223a86..e53188d5 100644 --- a/dreadnode/cli/platform/cli.py +++ b/dreadnode/cli/platform/cli.py @@ -1,5 +1,6 @@ import cyclopts +from dreadnode.cli.platform.configure import configure_platform from dreadnode.cli.platform.download import download_platform from dreadnode.cli.platform.login import log_into_registries from dreadnode.cli.platform.start import start_platform @@ -48,3 +49,13 @@ def refresh_registry_auth() -> None: Used for out of band Docker management. """ log_into_registries() + + +@cli.command() +def configure(service: str = "api") -> None: + """Configure the platform for a specific service. + + Args: + service: The name of the service to configure. 
+ """ + configure_platform(service=service) diff --git a/dreadnode/cli/platform/configure.py b/dreadnode/cli/platform/configure.py new file mode 100644 index 00000000..bf2f5707 --- /dev/null +++ b/dreadnode/cli/platform/configure.py @@ -0,0 +1,21 @@ +from dreadnode.cli.platform.utils.env_mgmt import open_env_file +from dreadnode.cli.platform.utils.printing import print_info +from dreadnode.cli.platform.utils.versions import get_current_version, get_local_cache_dir + + +def configure_platform(service: str = "api", tag: str | None = None) -> None: + """Configure the platform for a specific service. + + Args: + service: The name of the service to configure. + """ + if not tag: + current_version = get_current_version() + tag = current_version.tag if current_version else "latest" + + print_info(f"Configuring {service} service...") + env_file = get_local_cache_dir() / tag / f".{service}.env" + open_env_file(env_file) + print_info( + f"Configuration for {service} service loaded. It will take effect the next time the service is started." + ) diff --git a/dreadnode/cli/platform/docker_.py b/dreadnode/cli/platform/docker_.py index e2ab1540..db13cf58 100644 --- a/dreadnode/cli/platform/docker_.py +++ b/dreadnode/cli/platform/docker_.py @@ -150,6 +150,45 @@ def _are_docker_creds_fresh(registry: str, max_age_hours: int = 1) -> bool: return age_hours < max_age_hours and _check_docker_creds_exist(registry) +def _check_docker_installed() -> bool: + """Check if Docker is installed on the system.""" + try: + cmd = ["docker", "--version"] + subprocess.run( # noqa: S603 + cmd, + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + + except subprocess.CalledProcessError: + print_error("Docker is not installed. 
Please install Docker and try again.") + return False + + return True + + +def _check_docker_compose_installed() -> bool: + """Check if Docker Compose is installed on the system.""" + try: + cmd = ["docker", "compose", "--version"] + subprocess.run( # noqa: S603 + cmd, + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + except subprocess.CalledProcessError: + print_error("Docker Compose is not installed. Please install Docker Compose and try again.") + return False + return True + + +def docker_requirements_met() -> bool: + """Check if Docker and Docker Compose are installed.""" + return _check_docker_installed() and _check_docker_compose_installed() + + def docker_login(registry: str) -> None: """Log into a Docker registry using API credentials. diff --git a/dreadnode/cli/platform/download.py b/dreadnode/cli/platform/download.py index 35b20e87..d16adf2a 100644 --- a/dreadnode/cli/platform/download.py +++ b/dreadnode/cli/platform/download.py @@ -6,7 +6,7 @@ from dreadnode.cli.api import create_api_client from dreadnode.cli.platform.constants import SERVICES, VERSIONS_MANIFEST from dreadnode.cli.platform.schemas import LocalVersionSchema -from dreadnode.cli.platform.utils.env_merge import ( +from dreadnode.cli.platform.utils.env_mgmt import ( create_default_env_files, ) from dreadnode.cli.platform.utils.printing import ( diff --git a/dreadnode/cli/platform/start.py b/dreadnode/cli/platform/start.py index 4891a288..93654a29 100644 --- a/dreadnode/cli/platform/start.py +++ b/dreadnode/cli/platform/start.py @@ -1,6 +1,12 @@ -from dreadnode.cli.platform.docker_ import docker_login, docker_run, get_origin +from dreadnode.cli.platform.docker_ import ( + docker_login, + docker_requirements_met, + docker_run, + get_origin, +) from dreadnode.cli.platform.download import download_platform -from dreadnode.cli.platform.utils.printing import print_info, print_success +from dreadnode.cli.platform.utils.env_mgmt import generate_env_file +from 
dreadnode.cli.platform.utils.printing import print_error, print_info, print_success from dreadnode.cli.platform.utils.versions import ( create_local_latest_tag, get_current_version, @@ -15,6 +21,10 @@ def start_platform(tag: str | None = None) -> None: tag: Optional image tag to use. If not provided, uses the current version or downloads the latest available version. """ + if not docker_requirements_met(): + print_error("Docker and Docker Compose must be installed to start the platform.") + return + if tag: selected_version = download_platform(tag) mark_current_version(selected_version) @@ -31,6 +41,7 @@ def start_platform(tag: str | None = None) -> None: if image.registry not in registries_attempted: docker_login(image.registry) registries_attempted.add(image.registry) + generate_env_file(selected_version) print_info(f"Starting platform: {selected_version.tag}") docker_run(selected_version.compose_file) print_success(f"Platform {selected_version.tag} started successfully.") diff --git a/dreadnode/cli/platform/stop.py b/dreadnode/cli/platform/stop.py index 2022a9d9..62e29397 100644 --- a/dreadnode/cli/platform/stop.py +++ b/dreadnode/cli/platform/stop.py @@ -1,5 +1,6 @@ from dreadnode.cli.platform.docker_ import docker_stop -from dreadnode.cli.platform.utils.printing import print_error +from dreadnode.cli.platform.utils.env_mgmt import remove_generated_env_file +from dreadnode.cli.platform.utils.printing import print_error, print_success from dreadnode.cli.platform.utils.versions import ( get_current_version, ) @@ -16,3 +17,5 @@ def stop_platform() -> None: print_error("No current version found. 
Nothing to stop.") return docker_stop(current_version.compose_file) + print_success("Platform stopped successfully.") + remove_generated_env_file(current_version) diff --git a/dreadnode/cli/platform/upgrade.py b/dreadnode/cli/platform/upgrade.py index c77b621f..4c76d8c2 100644 --- a/dreadnode/cli/platform/upgrade.py +++ b/dreadnode/cli/platform/upgrade.py @@ -2,7 +2,7 @@ from dreadnode.cli.platform.docker_ import docker_stop from dreadnode.cli.platform.download import download_platform from dreadnode.cli.platform.start import start_platform -from dreadnode.cli.platform.utils.env_merge import ( +from dreadnode.cli.platform.utils.env_mgmt import ( merge_env_files_content, ) from dreadnode.cli.platform.utils.printing import print_error, print_info diff --git a/dreadnode/cli/platform/utils/env_merge.py b/dreadnode/cli/platform/utils/env_mgmt.py similarity index 88% rename from dreadnode/cli/platform/utils/env_merge.py rename to dreadnode/cli/platform/utils/env_mgmt.py index cfc3c415..b01ab3bd 100644 --- a/dreadnode/cli/platform/utils/env_merge.py +++ b/dreadnode/cli/platform/utils/env_mgmt.py @@ -1,4 +1,7 @@ +import subprocess +import sys import typing as t +from pathlib import Path from dreadnode.cli.platform.constants import ( SERVICES, @@ -365,10 +368,23 @@ def create_default_env_files(current_version: LocalVersionSchema) -> None: raise RuntimeError( f"Sample environment file for {service} not found. Cannot configure {service}." ) - # concatenate .api.env and .ui.env into .env if it doesn't already exist + + +def generate_env_file(current_version: LocalVersionSchema) -> None: + """Generate a .env file for the current version by concatenating API and UI environment files. + + This file is used by Docker Compose. + + Args: + current_version: The current local version schema containing service information. + + Returns: + None + + Raises: + RuntimeError: If .env file creation fails. 
+ """ env_file = current_version.local_path / ".env" - if env_file.exists(): - return api_env_file = current_version.api_env_file ui_env_file = current_version.ui_env_file @@ -376,9 +392,59 @@ def create_default_env_files(current_version: LocalVersionSchema) -> None: if api_env_file.exists() and ui_env_file.exists(): print_info(f"Concatenating {api_env_file} and {ui_env_file} into {env_file}...") with env_file.open("w") as outfile: + outfile.write("# WARNING: This file is auto-generated. Do not edit directly.\n") for fname in (api_env_file, ui_env_file): with fname.open() as infile: outfile.write(infile.read()) else: print_error(f"One or both environment files not found: {api_env_file}, {ui_env_file}.") raise RuntimeError("Failed to create .env file.") + + +def remove_generated_env_file(current_version: LocalVersionSchema) -> None: + """Remove the generated .env file for the current version. + + Args: + current_version: The current local version schema containing service information. + """ + env_file = current_version.local_path / ".env" + if env_file.exists(): + env_file.unlink() + + +def open_env_file(filename: Path) -> None: + """Open the specified environment file in the default editor. + + Args: + filename: The path to the environment file to open. + """ + if sys.platform == "darwin": + cmd = ["open", "-t", filename] + else: + cmd = ["xdg-open", filename] + try: + subprocess.run(cmd, check=False) # noqa: S603 + print_info("Opened environment file.") + except subprocess.CalledProcessError as e: + print_error(f"Failed to open environment file: {e}") + + +def read_env_file(filename: Path) -> dict[str, str]: + """Read the specified environment file and return its contents as a dictionary. + + Args: + filename: The path to the environment file to read. + + Returns: + A dictionary containing the environment variables defined in the file. 
+ """ + env_vars = {} + if filename.exists(): + content = filename.read_text() + + env_lines = _parse_env_lines(content) + + # for all key-value pairs in env_lines, pretty print them + for key, value in env_lines.items(): + print_info(f"Found environment variable: {key}={value}") + return env_vars From bfb8d3f52ec12c4de5a5ed152aa05c3d7ea9de4b Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Wed, 3 Sep 2025 00:10:02 -0500 Subject: [PATCH 08/11] chore: removed extraneous packages --- poetry.lock | 40 ++++++++-------------------------------- pyproject.toml | 4 ---- 2 files changed, 8 insertions(+), 36 deletions(-) diff --git a/poetry.lock b/poetry.lock index 39724e40..015a0bd3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -385,7 +385,7 @@ version = "2025.8.3" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" -groups = ["main", "dev", "platform"] +groups = ["main", "dev"] files = [ {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, @@ -490,7 +490,7 @@ version = "3.4.3" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -groups = ["main", "dev", "platform"] +groups = ["main", "dev"] files = [ {file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}, {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}, @@ -866,29 +866,6 @@ files = [ {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, ] -[[package]] -name = "docker" -version = "7.1.0" -description = "A Python library for the Docker Engine API." -optional = false -python-versions = ">=3.8" -groups = ["platform"] -files = [ - {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, - {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, -] - -[package.dependencies] -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" - -[package.extras] -dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] -docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] -ssh = ["paramiko (>=2.4.3)"] -websockets = ["websocket-client (>=1.3.0)"] - [[package]] name = "docstring-parser" version = "0.17.0" @@ -1326,7 +1303,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "dev", "platform"] +groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -3973,7 
+3950,7 @@ version = "311" description = "Python for Window Extensions" optional = false python-versions = "*" -groups = ["main", "dev", "platform"] +groups = ["main", "dev"] files = [ {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, @@ -3996,7 +3973,7 @@ files = [ {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, ] -markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"", platform = "sys_platform == \"win32\""} +markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} [[package]] name = "pyyaml" @@ -4301,7 +4278,7 @@ version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["main", "dev", "platform"] +groups = ["main", "dev"] files = [ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, @@ -5559,7 +5536,7 @@ version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main", "dev", "platform"] +groups = ["main", "dev"] files = [ {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, @@ -6101,10 +6078,9 @@ type = ["pytest-mypy"] [extras] all = [] multimodal = ["moviepy", "pillow", "soundfile"] -platform = [] training = ["transformers"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "3bba2420a863db24d08eac93beea7dc5d73e04f474073d265f3406336efef0b8" +content-hash = "608bdd485f2f8fb2d4390f37791f6fdd484c4ca4aa5ef661346c68dd3038f726" diff --git a/pyproject.toml b/pyproject.toml index f6bf998c..607f04af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,7 +37,6 @@ presidio-analyzer = "^2.2.359" [tool.poetry.extras] training = ["transformers"] multimodal = ["pillow", "soundfile", "moviepy"] -platform = ["docker", "pyyaml"] all = ["multimodal", "training"] [tool.poetry.group.dev.dependencies] @@ -58,9 +57,6 @@ mkdocstrings-python = "^1.17.0" ipykernel = "^6.29.5" -[tool.poetry.group.platform.dependencies] -docker = "^7.1.0" - [build-system] requires = ["poetry-core>=1.0.0", "setuptools>=42", "wheel"] build-backend = "poetry.core.masonry.api" From edc2b797e04718dc21775c964bed30b0c9be1ebe Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Wed, 3 Sep 2025 16:43:05 -0500 Subject: [PATCH 09/11] feat: added download latest automatically --- dreadnode/cli/platform/cli.py | 6 +++--- dreadnode/cli/platform/download.py | 7 ++++++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/dreadnode/cli/platform/cli.py b/dreadnode/cli/platform/cli.py index e53188d5..ac55e4d3 100644 --- a/dreadnode/cli/platform/cli.py +++ b/dreadnode/cli/platform/cli.py @@ -27,13 +27,13 @@ def stop() -> None: @cli.command() -def download(tag: str) -> None: +def download(tag: str | None = 
None) -> None: """Download platform files for a specific tag. Args: - tag: Image tag to download. + tag: Optional image tag to download. """ - download_platform(tag) + download_platform(tag=tag) @cli.command() diff --git a/dreadnode/cli/platform/download.py b/dreadnode/cli/platform/download.py index d16adf2a..d394098b 100644 --- a/dreadnode/cli/platform/download.py +++ b/dreadnode/cli/platform/download.py @@ -17,6 +17,7 @@ ) from dreadnode.cli.platform.utils.versions import ( confirm_with_context, + create_local_latest_tag, get_available_local_versions, get_cli_version, get_local_cache_dir, @@ -111,7 +112,7 @@ def _download_version_files(tag: str) -> LocalVersionSchema: return new_local_version -def download_platform(tag: str) -> LocalVersionSchema: +def download_platform(tag: str | None = None) -> LocalVersionSchema: """Download platform version if not already available locally. Args: @@ -120,6 +121,10 @@ def download_platform(tag: str) -> LocalVersionSchema: Returns: LocalVersionSchema: Local version schema for the downloaded/existing version. 
""" + if not tag or tag == "latest": + # all remote images are tagged with architecture + tag = create_local_latest_tag() + if "latest" in tag: tag = _resolve_latest(tag) From bedce18e8ab22b822ff296e26824f222aa21996e Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Thu, 4 Sep 2025 09:08:58 -0500 Subject: [PATCH 10/11] feat: made the local cache directory path constant --- dreadnode/cli/platform/utils/versions.py | 5 +++-- dreadnode/constants.py | 6 +++--- dreadnode/main.py | 3 ++- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/dreadnode/cli/platform/utils/versions.py b/dreadnode/cli/platform/utils/versions.py index eec95427..6c1f3b65 100644 --- a/dreadnode/cli/platform/utils/versions.py +++ b/dreadnode/cli/platform/utils/versions.py @@ -12,6 +12,7 @@ SupportedArchitecture, ) from dreadnode.cli.platform.schemas import LocalVersionSchema, LocalVersionsSchema +from dreadnode.constants import DEFAULT_LOCAL_STORAGE_DIR def _get_local_arch() -> SupportedArchitecture: @@ -36,9 +37,9 @@ def get_local_cache_dir() -> Path: """Get the local cache directory path for dreadnode platform files. Returns: - Path: Path to the local cache directory (~/.dreadnode/platform). + Path: Path to the local cache directory (~//platform). 
""" - return Path.home() / ".dreadnode" / "platform" + return DEFAULT_LOCAL_STORAGE_DIR / "platform" def get_cli_version() -> str: diff --git a/dreadnode/constants.py b/dreadnode/constants.py index cd633979..47c028d9 100644 --- a/dreadnode/constants.py +++ b/dreadnode/constants.py @@ -6,7 +6,7 @@ # # name of the default local storage path -DEFAULT_LOCAL_STORAGE_DIR = ".dreadnode" +DEFAULT_LOCAL_STORAGE_DIR = pathlib.Path.home() / ".dreadnode" # name of the default server profile DEFAULT_PROFILE_NAME = "main" # default poll interval for the authentication flow @@ -22,7 +22,7 @@ # default server URL DEFAULT_SERVER_URL = f"https://platform.{DEFAULT_PLATFORM_BASE_DOMAIN}" # default local directory for dreadnode objects -DEFAULT_LOCAL_OBJECT_DIR = ".dreadnode/objects" +DEFAULT_LOCAL_OBJECT_DIR = f"{DEFAULT_LOCAL_STORAGE_DIR}/objects" # default docker registry subdomain DEFAULT_DOCKER_REGISTRY_SUBDOMAIN = "registry" # default docker registry local port @@ -56,7 +56,7 @@ # path to the user configuration file USER_CONFIG_PATH = pathlib.Path( # allow overriding the user config file via env variable - os.getenv("DREADNODE_USER_CONFIG_FILE") or pathlib.Path.home() / ".dreadnode" / "config" + os.getenv("DREADNODE_USER_CONFIG_FILE") or DEFAULT_LOCAL_STORAGE_DIR / "config" ) # Default values for the file system credential management diff --git a/dreadnode/main.py b/dreadnode/main.py index 759b1b16..49680196 100644 --- a/dreadnode/main.py +++ b/dreadnode/main.py @@ -23,6 +23,7 @@ from dreadnode.api.client import ApiClient from dreadnode.config import UserConfig from dreadnode.constants import ( + DEFAULT_LOCAL_STORAGE_DIR, DEFAULT_SERVER_URL, ENV_API_KEY, ENV_API_TOKEN, @@ -138,7 +139,7 @@ def __init__( self._logfire.config.ignore_no_config = True self._fs: AbstractFileSystem = LocalFileSystem(auto_mkdir=True) - self._fs_prefix: str = ".dreadnode/storage/" + self._fs_prefix: str = f"{DEFAULT_LOCAL_STORAGE_DIR}/storage/" self._initialized = False From 
76422f638a4741b06c29810c514225974af7a0c3 Mon Sep 17 00:00:00 2001 From: Brian Greunke Date: Thu, 4 Sep 2025 22:22:19 -0500 Subject: [PATCH 11/11] chore: typing and dead code removal --- dreadnode/cli/platform/utils/env_mgmt.py | 25 ++---------------------- dreadnode/cli/platform/utils/printing.py | 14 ++++++------- 2 files changed, 9 insertions(+), 30 deletions(-) diff --git a/dreadnode/cli/platform/utils/env_mgmt.py b/dreadnode/cli/platform/utils/env_mgmt.py index b01ab3bd..30a93a75 100644 --- a/dreadnode/cli/platform/utils/env_mgmt.py +++ b/dreadnode/cli/platform/utils/env_mgmt.py @@ -419,32 +419,11 @@ def open_env_file(filename: Path) -> None: filename: The path to the environment file to open. """ if sys.platform == "darwin": - cmd = ["open", "-t", filename] + cmd = ["open", "-t", filename.as_posix()] else: - cmd = ["xdg-open", filename] + cmd = ["xdg-open", filename.as_posix()] try: subprocess.run(cmd, check=False) # noqa: S603 print_info("Opened environment file.") except subprocess.CalledProcessError as e: print_error(f"Failed to open environment file: {e}") - - -def read_env_file(filename: Path) -> dict[str, str]: - """Read the specified environment file and return its contents as a dictionary. - - Args: - filename: The path to the environment file to read. - - Returns: - A dictionary containing the environment variables defined in the file. 
- """ - env_vars = {} - if filename.exists(): - content = filename.read_text() - - env_lines = _parse_env_lines(content) - - # for all key-value pairs in env_lines, pretty print them - for key, value in env_lines.items(): - print_info(f"Found environment variable: {key}={value}") - return env_vars diff --git a/dreadnode/cli/platform/utils/printing.py b/dreadnode/cli/platform/utils/printing.py index f5ae5c0a..2691e44e 100644 --- a/dreadnode/cli/platform/utils/printing.py +++ b/dreadnode/cli/platform/utils/printing.py @@ -3,41 +3,41 @@ import rich -def print_success(message: str, prefix: str | None = None): +def print_success(message: str, prefix: str | None = None) -> None: """Print success message in green""" prefix = prefix or "✓" rich.print(f"[bold green]{prefix}[/] [green]{message}[/]") -def print_error(message: str, prefix: str | None = None): +def print_error(message: str, prefix: str | None = None) -> None: """Print error message in red""" prefix = prefix or "✗" rich.print(f"[bold red]{prefix}[/] [red]{message}[/]", file=sys.stderr) -def print_warning(message: str, prefix: str | None = None): +def print_warning(message: str, prefix: str | None = None) -> None: """Print warning message in yellow""" prefix = prefix or "⚠" rich.print(f"[bold yellow]{prefix}[/] [yellow]{message}[/]") -def print_info(message: str, prefix: str | None = None): +def print_info(message: str, prefix: str | None = None) -> None: """Print info message in blue""" prefix = prefix or "i" rich.print(f"[bold blue]{prefix}[/] [blue]{message}[/]") -def print_debug(message: str, prefix: str | None = None): +def print_debug(message: str, prefix: str | None = None) -> None: """Print debug message in dim gray""" prefix = prefix or "🐛" rich.print(f"[dim]{prefix}[/] [dim]{message}[/]") -def print_heading(message: str): +def print_heading(message: str) -> None: """Print section heading""" rich.print(f"\n[bold underline]{message}[/]\n") -def print_muted(message: str): +def print_muted(message: str) -> 
None: """Print muted text""" rich.print(f"[dim]{message}[/]")